gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/** * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.aurora.scheduler.thrift.aop; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.util.Set; import javax.inject.Inject; import javax.inject.Qualifier; import com.google.inject.Binder; import org.apache.aurora.gen.AuroraAdmin; import org.apache.aurora.gen.ExplicitReconciliationSettings; import org.apache.aurora.gen.Hosts; import org.apache.aurora.gen.InstanceKey; import org.apache.aurora.gen.JobConfiguration; import org.apache.aurora.gen.JobKey; import org.apache.aurora.gen.JobUpdateKey; import org.apache.aurora.gen.JobUpdateQuery; import org.apache.aurora.gen.JobUpdateRequest; import org.apache.aurora.gen.ResourceAggregate; import org.apache.aurora.gen.Response; import org.apache.aurora.gen.ScheduleStatus; import org.apache.aurora.gen.TaskQuery; import org.apache.aurora.scheduler.http.api.security.AuthorizingParam; import org.apache.aurora.scheduler.thrift.auth.DecoratedThrift; import org.apache.thrift.TException; /** * An injected forwarding thrift implementation that delegates to a bound mock interface. * <p> * This is required to allow AOP to take place. 
For more details, see * https://code.google.com/p/google-guice/wiki/AOP#Limitations */ @DecoratedThrift public class MockDecoratedThrift implements AnnotatedAuroraAdmin { @Retention(RetentionPolicy.RUNTIME) @Target({ElementType.PARAMETER, ElementType.METHOD}) @Qualifier private @interface MockThrift { } private final AnnotatedAuroraAdmin annotatedAuroraAdmin; @Inject MockDecoratedThrift(@MockThrift AnnotatedAuroraAdmin delegate) { this.annotatedAuroraAdmin = delegate; } public static void bindForwardedMock(Binder binder, AnnotatedAuroraAdmin mockThrift) { binder.bind(AnnotatedAuroraAdmin.class).annotatedWith(MockThrift.class).toInstance(mockThrift); binder.bind(AnnotatedAuroraAdmin.class).to(MockDecoratedThrift.class); binder.bind(AuroraAdmin.Iface.class).to(MockDecoratedThrift.class); } @Override public Response getRoleSummary() throws TException { return this.annotatedAuroraAdmin.getRoleSummary(); } @Override public Response getJobSummary(String role) throws TException { return this.annotatedAuroraAdmin.getJobSummary(role); } @Override public Response getTasksStatus(TaskQuery query) throws TException { return this.annotatedAuroraAdmin.getTasksStatus(query); } @Override public Response getTasksWithoutConfigs(TaskQuery query) throws TException { return this.annotatedAuroraAdmin.getTasksWithoutConfigs(query); } @Override public Response getPendingReason(TaskQuery query) throws TException { return this.annotatedAuroraAdmin.getPendingReason(query); } @Override public Response getConfigSummary(JobKey job) throws TException { return this.annotatedAuroraAdmin.getConfigSummary(job); } @Override public Response getJobs(String ownerRole) throws TException { return this.annotatedAuroraAdmin.getJobs(ownerRole); } @Override public Response getQuota(String ownerRole) throws TException { return this.annotatedAuroraAdmin.getQuota(ownerRole); } @Override public Response populateJobConfig(JobConfiguration description) throws TException { return 
this.annotatedAuroraAdmin.populateJobConfig(description); } @Override public Response getJobUpdateSummaries(JobUpdateQuery jobUpdateQuery) throws TException { return this.annotatedAuroraAdmin.getJobUpdateSummaries(jobUpdateQuery); } @Override public Response getJobUpdateDetails(JobUpdateKey key, JobUpdateQuery query) throws TException { return this.annotatedAuroraAdmin.getJobUpdateDetails(key, query); } @Override public Response getJobUpdateDiff(JobUpdateRequest request) throws TException { return this.annotatedAuroraAdmin.getJobUpdateDiff(request); } @Override public Response getTierConfigs() throws TException { return this.annotatedAuroraAdmin.getTierConfigs(); } @Override public Response setQuota(String ownerRole, ResourceAggregate quota) throws TException { return this.annotatedAuroraAdmin.setQuota(ownerRole, quota); } @Override public Response forceTaskState(String taskId, ScheduleStatus status) throws TException { return this.annotatedAuroraAdmin.forceTaskState(taskId, status); } @Override public Response performBackup() throws TException { return this.annotatedAuroraAdmin.performBackup(); } @Override public Response listBackups() throws TException { return this.annotatedAuroraAdmin.listBackups(); } @Override public Response stageRecovery(String backupId) throws TException { return this.annotatedAuroraAdmin.stageRecovery(backupId); } @Override public Response queryRecovery(TaskQuery query) throws TException { return this.annotatedAuroraAdmin.queryRecovery(query); } @Override public Response deleteRecoveryTasks(TaskQuery query) throws TException { return this.annotatedAuroraAdmin.deleteRecoveryTasks(query); } @Override public Response commitRecovery() throws TException { return this.annotatedAuroraAdmin.commitRecovery(); } @Override public Response unloadRecovery() throws TException { return this.annotatedAuroraAdmin.unloadRecovery(); } @Override public Response startMaintenance(Hosts hosts) throws TException { return 
this.annotatedAuroraAdmin.startMaintenance(hosts); } @Override public Response drainHosts(Hosts hosts) throws TException { return this.annotatedAuroraAdmin.drainHosts(hosts); } @Override public Response maintenanceStatus(Hosts hosts) throws TException { return this.annotatedAuroraAdmin.maintenanceStatus(hosts); } @Override public Response endMaintenance(Hosts hosts) throws TException { return this.annotatedAuroraAdmin.endMaintenance(hosts); } @Override public Response snapshot() throws TException { return this.annotatedAuroraAdmin.snapshot(); } @Override public Response triggerExplicitTaskReconciliation(ExplicitReconciliationSettings settings) throws TException { return this.annotatedAuroraAdmin.triggerExplicitTaskReconciliation(settings); } @Override public Response triggerImplicitTaskReconciliation() throws TException { return this.annotatedAuroraAdmin.triggerImplicitTaskReconciliation(); } @Override public Response pruneTasks(TaskQuery query) throws TException { return this.annotatedAuroraAdmin.pruneTasks(query); } @Override public Response createJob(@AuthorizingParam JobConfiguration arg0) throws TException { return this.annotatedAuroraAdmin.createJob(arg0); } @Override public Response scheduleCronJob(@AuthorizingParam JobConfiguration arg0) throws TException { return this.annotatedAuroraAdmin.scheduleCronJob(arg0); } @Override public Response descheduleCronJob(@AuthorizingParam JobKey arg0) throws TException { return this.annotatedAuroraAdmin.descheduleCronJob(arg0); } @Override public Response startCronJob(@AuthorizingParam JobKey arg0) throws TException { return this.annotatedAuroraAdmin.startCronJob(arg0); } @Override public Response restartShards(@AuthorizingParam JobKey arg0, Set<Integer> arg1) throws TException { return this.annotatedAuroraAdmin.restartShards(arg0, arg1); } @Override public Response killTasks(@AuthorizingParam JobKey arg0, Set<Integer> arg1, String arg2) throws TException { return this.annotatedAuroraAdmin.killTasks(arg0, arg1, arg2); } 
@Override public Response addInstances(@AuthorizingParam InstanceKey arg0, int arg1) throws TException { return this.annotatedAuroraAdmin.addInstances(arg0, arg1); } @Override public Response replaceCronTemplate(@AuthorizingParam JobConfiguration arg0) throws TException { return this.annotatedAuroraAdmin.replaceCronTemplate(arg0); } @Override public Response startJobUpdate(@AuthorizingParam JobUpdateRequest arg0, String arg1) throws TException { return this.annotatedAuroraAdmin.startJobUpdate(arg0, arg1); } @Override public Response pauseJobUpdate(@AuthorizingParam JobUpdateKey arg0, String arg1) throws TException { return this.annotatedAuroraAdmin.pauseJobUpdate(arg0, arg1); } @Override public Response resumeJobUpdate(@AuthorizingParam JobUpdateKey arg0, String arg1) throws TException { return this.annotatedAuroraAdmin.resumeJobUpdate(arg0, arg1); } @Override public Response abortJobUpdate(@AuthorizingParam JobUpdateKey arg0, String arg1) throws TException { return this.annotatedAuroraAdmin.abortJobUpdate(arg0, arg1); } @Override public Response pulseJobUpdate(@AuthorizingParam JobUpdateKey arg0) throws TException { return this.annotatedAuroraAdmin.pulseJobUpdate(arg0); } @Override public Response rollbackJobUpdate(@AuthorizingParam JobUpdateKey arg0, String arg1) throws TException { return this.annotatedAuroraAdmin.rollbackJobUpdate(arg0, arg1); } }
package fr.dgrandemange.cbcom.session.service.support;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import fr.dgrandemange.cbcom.exception.CBCOMBadIPDUException;
import fr.dgrandemange.cbcom.exception.CBCOMSessionException;
import fr.dgrandemange.cbcom.exception.CBCOMSessionStateException;
import fr.dgrandemange.cbcom.model.IPDU;
import fr.dgrandemange.cbcom.model.IPDUEnum;
import fr.dgrandemange.cbcom.model.PI;
import fr.dgrandemange.cbcom.model.PIEnum;
import fr.dgrandemange.cbcom.session.model.PseudoSessionContext;
import fr.dgrandemange.cbcom.session.model.TimerConfig;
import fr.dgrandemange.cbcom.session.service.IPseudoSessionState;

/**
 * Abstract state class providing default behaviors when receiving a message
 * (that is, on transition method invocation).<BR>
 *
 * Every transition defaults to "not implemented for the current state"; concrete
 * states override only the transitions that are legal for them.
 *
 * @author dgrandemange
 */
public abstract class PseudoSessionStateAbstractImpl implements
		IPseudoSessionState {

	/**
	 * State entry hook; invoked right after this state becomes current.
	 *
	 * @param ctx the pseudo session context
	 */
	public abstract void init(PseudoSessionContext ctx);

	/** Default: transition not allowed in this state. */
	public void onIpduCNReceived(PseudoSessionContext ctx) {
		transNotImplementedForCurrState();
	}

	/** Default: transition not allowed in this state. */
	public void onIpduACEmitted(PseudoSessionContext ctx) {
		transNotImplementedForCurrState();
	}

	/** Default: transition not allowed in this state. */
	public void onIpduACReceived(PseudoSessionContext ctx) {
		transNotImplementedForCurrState();
	}

	/** Default: transition not allowed in this state. */
	public void onIpduCNEmitted(PseudoSessionContext ctx) {
		transNotImplementedForCurrState();
	}

	/** Default: transition not allowed in this state. */
	public void onIpduDEReceived(PseudoSessionContext ctx) {
		transNotImplementedForCurrState();
	}

	/** Default: transition not allowed in this state. */
	public void onIpduDEEmitted(PseudoSessionContext ctx) {
		transNotImplementedForCurrState();
	}

	/** Default: transition not allowed in this state. */
	public void onIpduDEToSend(PseudoSessionContext ctx) {
		transNotImplementedForCurrState();
	}

	/** Default: transition not allowed in this state. */
	public void onIpduABReceived(PseudoSessionContext ctx) {
		transNotImplementedForCurrState();
	}

	/** Default: transition not allowed in this state. */
	public void onIpduABEmitted(PseudoSessionContext ctx, int abortCode) {
		transNotImplementedForCurrState();
	}

	/**
	 * Handles an invalid IPDU with the generic abort code 0x23.
	 *
	 * @param ctx the pseudo session context
	 */
	public void onInvalidIpduReceived(PseudoSessionContext ctx) {
		// Send IPDU AB 0x23
		onInvalidIpduReceived(ctx, 0x23);
	}

	/**
	 * Handles an invalid IPDU using the abort code carried by the exception.
	 *
	 * @param ctx the pseudo session context
	 * @param e exception describing why the IPDU was rejected
	 */
	public void onInvalidIpduReceived(PseudoSessionContext ctx,
			CBCOMBadIPDUException e) {
		onInvalidIpduReceived(ctx, e.getReasonEnum().getPv01());
	}

	/**
	 * Best-effort abort: sends an IPDU AB carrying {@code pv01}, closes the
	 * channel, then moves the session to its final state. Send/close failures
	 * are deliberately ignored since the session is being torn down anyway.
	 *
	 * @param ctx the pseudo session context
	 * @param pv01 abort code (PV of PI01)
	 */
	protected void onInvalidIpduReceived(PseudoSessionContext ctx, int pv01) {
		try {
			sendIpduAB(ctx, pv01);
		} catch (CBCOMSessionException ignored) {
			// Safe to ignore: session is terminating
		}

		try {
			ctx.getChannelCallback().close();
		} catch (CBCOMSessionException ignored) {
			// Safe to ignore: session is terminating
		}

		// Change session state to final state
		ctx.setSessionState(ctx.getStateFactory().getFinalState());
	}

	/**
	 * Logs, sends an IPDU AB with abort code {@code pv01}, then fires the
	 * {@link #onIpduABEmitted(PseudoSessionContext, int)} transition.
	 *
	 * @param ctx the pseudo session context
	 * @param pv01 abort code (PV of PI01)
	 */
	protected void emitIpduAB(PseudoSessionContext ctx, int pv01) {
		try {
			// ste[2] is expected to be this method's direct caller, i.e. the
			// transition that triggered the abort — do not add wrapper frames here
			final StackTraceElement[] ste = Thread.currentThread()
					.getStackTrace();
			String state = this.getClass().getName();
			String transition = ste[2].getMethodName();
			String mesg = String.format("ctxId=%d, code=0x%x [%s<-%s]",
					ctx.getId(), pv01, state, transition);
			ctx.getChannelCallback().log("cbcom-ipdu-ab", mesg);
		} catch (Exception ignored) {
			// Logging is best-effort only; never let it break the abort path
		}

		// Send IPDU AB
		try {
			sendIpduAB(ctx, pv01);
		} catch (CBCOMSessionException ignored) {
			// Safe to ignore: abort is best-effort
		}

		onIpduABEmitted(ctx, pv01);
	}

	/**
	 * Builds an IPDU AB carrying {@code pv01} in PI01 and sends it through the
	 * channel callback.
	 *
	 * @param ctx the pseudo session context
	 * @param pv01 abort code (PV of PI01)
	 * @throws CBCOMSessionException if the channel send fails
	 */
	protected void sendIpduAB(PseudoSessionContext ctx, int pv01)
			throws CBCOMSessionException {
		// Prepare PI01
		PI pi01 = new PI(PIEnum.PI01, new byte[] { (byte) (pv01 & 0xFF) });

		// Prepare IPDU AB
		IPDU ipdu = new IPDU(IPDUEnum.AB, new PI[] { pi01 }, null, 0);

		// Channel callback to send the IPDU
		byte[] bIpdu = ipdu.toBytes();
		ctx.getChannelCallback().send(bIpdu, false);
	}

	/**
	 * Signals that the transition that called this method is not implemented
	 * for the current state.
	 *
	 * @throws CBCOMSessionStateException always, naming the current state and
	 *             the calling transition
	 */
	protected void transNotImplementedForCurrState() {
		// ste[2] is expected to be this method's direct caller (the transition);
		// keep the call depth stable when refactoring
		final StackTraceElement[] ste = Thread.currentThread().getStackTrace();
		String state = this.getClass().getSimpleName();
		String transition = ste[2].getMethodName();
		throw new CBCOMSessionStateException(state, transition);
	}

	/**
	 * Cancels the currently scheduled timer task (if any is still pending) and
	 * schedules {@code runnable} in its place. If no executor is available, an
	 * IPDU AB with code 0x04 is emitted.
	 *
	 * @param ctx the pseudo session context
	 * @param runnable task to schedule
	 * @param timerValue delay before the task fires
	 * @param timeUnit unit of {@code timerValue}
	 */
	protected void cancelAndReplaceScheduledTimerTask(PseudoSessionContext ctx,
			Runnable runnable, int timerValue, TimeUnit timeUnit) {
		Future<?> taskFuture = ctx.getTaskFuture();
		if (null != taskFuture && !taskFuture.isDone()
				&& !taskFuture.isCancelled()) {
			// TODO "May interrupt if running" parameter: set to false or true?
			taskFuture.cancel(false);
		}

		ScheduledThreadPoolExecutor exec = ctx
				.getPseudoSessionTimerTasksExecutorProvider().provide(
						ScheduledThreadPoolExecutor.class);
		if (exec != null) {
			ctx.setTaskFuture(exec.schedule(runnable, timerValue, timeUnit));
		} else {
			// Unable to proceed: no executor available, abort with 0x04
			emitIpduAB(ctx, 0x04);
		}
	}

	/**
	 * Cancels the given scheduled task, interrupting it if it is running.
	 *
	 * @param taskFuture task to cancel; may be null
	 */
	protected void cancelScheduledTask(Future<?> taskFuture) {
		if (null != taskFuture && !taskFuture.isCancelled()) {
			taskFuture.cancel(true);
		}
	}

	/**
	 * Switches the session to a new state built by the state factory, then
	 * invokes the new state's {@code init} hook.
	 *
	 * @param ctx the pseudo session context
	 * @param nextStateClazz class of the state to switch to
	 */
	protected void changeState(PseudoSessionContext ctx,
			Class<? extends IPseudoSessionState> nextStateClazz) {
		IPseudoSessionState nextState = ctx.getStateFactory().create(
				nextStateClazz);
		ctx.setSessionState(nextState);
		nextState.init(ctx);
	}

	/**
	 * Stamps the context with a fresh id (current epoch millis).
	 *
	 * @param ctx the pseudo session context
	 */
	protected void initCtxId(PseudoSessionContext ctx) {
		ctx.setId(System.currentTimeMillis());
	}

	/**
	 * Resets the context working variables: id, timer PIs, and negotiated
	 * timer values (back to their initial values).
	 *
	 * @param ctx the pseudo session context
	 */
	protected void resetCtxWorkingVars(PseudoSessionContext ctx) {
		ctx.setId(-1);
		ctx.setTimerPIs(null);

		// Reset timers
		TimerConfig timerCfg = ctx.getTimerConfig();
		timerCfg.setNegotiatedTsi(timerCfg.getInitialTsi());
		timerCfg.setNegotiatedTnr(timerCfg.getInitialTnr());
		timerCfg.setNegotiatedTma(timerCfg.getInitialTma());
	}

	/**
	 * Decodes a 16-bit big-endian timer value from a PI parameter value.
	 *
	 * @param pi PI whose first two parameter-value bytes hold the timer value
	 * @return the timer value in [0, 65535]
	 */
	private static int decodeTimerValue(PI pi) {
		byte[] bPv = pi.getParamValue();
		return ((bPv[0] & 0xFF) << 8) | (bPv[1] & 0xFF);
	}

	/**
	 * Builds a response PI carrying {@code value} as a 16-bit big-endian
	 * parameter value.
	 *
	 * @param piEnum PI identifier to build
	 * @param value timer value, assumed to fit in 16 bits
	 * @return the response PI
	 */
	private static PI buildTimerPi(PIEnum piEnum, int value) {
		return new PI(piEnum, new byte[] { (byte) ((value >>> 8) & 0xFF),
				(byte) (value & 0xFF) });
	}

	/**
	 * Server calls this method to check the timer related PIs in the last
	 * received IPDU.<BR>
	 *
	 * If a timer is not valid, negotiation is NOK, and an abort code is
	 * returned depending on the timer nature.<BR>
	 *
	 * If negotiation is OK, timer PIs to return to the client are stored in
	 * the context (see property timerPIs), so they can further be used to
	 * populate the IPDU to respond.<BR>
	 *
	 * Also note that the server TSI timer may be updated in the process.<BR>
	 *
	 * @param ctx the pseudo session context
	 * @param ipdu the received IPDU (typically an IPDU CN)
	 * @return 0x00 if negotiation succeeds, an abort code otherwise
	 *         (0x15 for TNR, 0x16 for TSI, 0x17 for TMA)
	 */
	protected int proceedToTimerNegotiation(PseudoSessionContext ctx, IPDU ipdu) {
		int abortCode = 0x00;

		List<PI> lstPis = ctx.getTimerPIs();
		if (null == lstPis) {
			lstPis = new ArrayList<PI>();
			ctx.setTimerPIs(lstPis);
		}
		lstPis.clear();

		// Handle timers negotiation if dedicated PIs are present in the IPDU

		// 1. Handle TNR timer negotiation
		if (0x00 == abortCode) {
			PI pi16 = ipdu.findPiByPIEnum(PIEnum.PI16);
			if (null != pi16) {
				int tnr = decodeTimerValue(pi16);
				if ((tnr >= ctx.getTimerConfig().getMinTnr())
						&& (tnr <= ctx.getTimerConfig().getMaxTnr())) {
					ctx.getTimerConfig().setNegotiatedTnr(tnr);
					lstPis.add(buildTimerPi(PIEnum.PI16, tnr));
				} else {
					// Negotiation failed
					abortCode = 0x15;
				}
			}
		}

		// 2. Handle TSI timer negotiation
		if (0x00 == abortCode) {
			PI pi17 = ipdu.findPiByPIEnum(PIEnum.PI17);
			if (null != pi17) {
				int tsi = decodeTimerValue(pi17);
				if ((tsi >= ctx.getTimerConfig().getMinTsi())
						&& (tsi <= ctx.getTimerConfig().getMaxTsi())) {
					// TSI is a server-side timer, so it must be updated in the
					// server pseudo session context
					ctx.getTimerConfig().setNegotiatedTsi(tsi);
					lstPis.add(buildTimerPi(PIEnum.PI17, tsi));
				} else {
					// Negotiation failed
					abortCode = 0x16;
				}
			}
		}

		// 3. Handle TMA timer negotiation
		if (0x00 == abortCode) {
			PI pi18 = ipdu.findPiByPIEnum(PIEnum.PI18);
			if (null != pi18) {
				int tma = decodeTimerValue(pi18);
				if ((tma >= ctx.getTimerConfig().getMinTma())
						&& (tma <= ctx.getTimerConfig().getMaxTma())) {
					ctx.getTimerConfig().setNegotiatedTma(tma);
					lstPis.add(buildTimerPi(PIEnum.PI18, tma));
				} else {
					// Negotiation failed
					abortCode = 0x17;
				}
			}
		}

		return abortCode;
	}

	/**
	 * Updates the negotiated pseudo-session timer values (TNR, TSI, TMA) from
	 * the timer PIs present in the given IPDU; absent PIs leave the matching
	 * timer unchanged.
	 *
	 * @param ctx the pseudo session context
	 * @param ipdu the IPDU holding the (optional) timer PIs
	 */
	protected void updatePseudoSessionTimers(PseudoSessionContext ctx, IPDU ipdu) {
		PI pi16 = ipdu.findPiByPIEnum(PIEnum.PI16);
		PI pi17 = ipdu.findPiByPIEnum(PIEnum.PI17);
		PI pi18 = ipdu.findPiByPIEnum(PIEnum.PI18);

		if (null != pi16) {
			ctx.getTimerConfig().setNegotiatedTnr(decodeTimerValue(pi16));
		}

		if (null != pi17) {
			ctx.getTimerConfig().setNegotiatedTsi(decodeTimerValue(pi17));
		}

		if (null != pi18) {
			ctx.getTimerConfig().setNegotiatedTma(decodeTimerValue(pi18));
		}
	}
}
/*-------------------------------------------------------------------------
 *
 * Copyright (c) 2006-2011, PostgreSQL Global Development Group
 *
 * IDENTIFICATION
 *   $PostgreSQL: pgjdbc/org/postgresql/core/VisibleBufferedInputStream.java,v 1.4 2010/08/31 18:07:39 jurka Exp $
 *
 *-------------------------------------------------------------------------
 */
package org.postgresql.core;

import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;

/**
 * A faster version of BufferedInputStream. Does no synchronisation and
 * allows direct access to the used byte[] buffer.
 *
 * @author Mikko Tiihonen
 */
public class VisibleBufferedInputStream extends InputStream {

    /**
     * If a direct read to byte array is called that would require
     * a smaller read from the wrapped stream than MINIMUM_READ then
     * first fill the buffer and serve the bytes from there. Larger
     * reads are directly done to the provided byte array.
     */
    static final int MINIMUM_READ = 1024;

    /**
     * In how large spans is the C string zero-byte scanned.
     */
    static final int STRING_SCAN_SPAN = 1024;

    /**
     * The wrapped input stream.
     */
    private final InputStream wrapped;

    /**
     * The buffer.
     */
    private byte[] buffer;

    /**
     * Current read position in the buffer.
     */
    private int index;

    /**
     * How far is the buffer filled with valid data.
     */
    private int endIndex;

    /**
     * Creates a new buffer around the given stream.
     *
     * @param in The stream to buffer.
     * @param bufferSize The initial size of the buffer; clamped up to
     *        MINIMUM_READ so small direct reads can always be buffered.
     */
    public VisibleBufferedInputStream(InputStream in, int bufferSize) {
        wrapped = in;
        buffer = new byte[bufferSize < MINIMUM_READ ? MINIMUM_READ : bufferSize];
    }

    /**
     * {@inheritDoc}
     */
    public int read() throws IOException {
        if (ensureBytes(1)) {
            // mask to return an unsigned value in 0..255 per InputStream contract
            return buffer[index++] & 0xFF;
        }
        return -1;
    }

    /**
     * Reads a byte from the buffer without advancing the index pointer.
     */
    public int peek() throws IOException {
        if (ensureBytes(1)) {
            return buffer[index] & 0xFF;
        }
        return -1;
    }

    /**
     * Reads byte from the buffer without any checks. This method never
     * reads from the underlying stream.
     * Before calling this method the {@link #ensureBytes} method must
     * have been called.
     *
     * @return The next byte from the buffer (sign-extended, unlike read()).
     * @throws ArrayIndexOutOfBoundsException If ensureBytes was not called
     * to make sure the buffer contains the byte.
     */
    public byte readRaw() {
        return buffer[index++];
    }

    /**
     * Ensures that the buffer contains at least n bytes.
     * This method invalidates the buffer and index fields.
     *
     * @param n The amount of bytes to ensure exists in buffer
     * @return true if required bytes are available and false if EOF
     * @throws IOException If reading of the wrapped stream failed.
     */
    public boolean ensureBytes(int n) throws IOException {
        // bytes still missing = requested - currently buffered (endIndex - index)
        int required = n - endIndex + index;
        while (required > 0) {
            if (!readMore(required)) {
                return false;
            }
            // recompute: readMore may have shifted index/endIndex (compaction)
            required = n - endIndex + index;
        }
        return true;
    }

    /**
     * Reads more bytes into the buffer.
     *
     * @param wanted How much should be at least read.
     * @return True if at least some bytes were read.
     * @throws IOException If reading of the wrapped stream failed.
     */
    private boolean readMore(int wanted) throws IOException {
        if (endIndex == index) {
            // buffer fully drained: rewind so the whole array is usable
            index = 0;
            endIndex = 0;
        }
        int canFit = buffer.length - endIndex;
        if (canFit < wanted) {
            // would the wanted bytes fit if we compacted the buffer
            // and still leave some slack
            if (index + canFit > wanted + MINIMUM_READ) {
                compact();
            } else {
                doubleBuffer();
            }
            canFit = buffer.length - endIndex;
        }
        int read = wrapped.read(buffer, endIndex, canFit);
        if (read < 0) {
            return false;
        }
        endIndex += read;
        return true;
    }

    /**
     * Doubles the size of the buffer, preserving the unread bytes.
     */
    private void doubleBuffer() {
        byte[] buf = new byte[buffer.length * 2];
        moveBufferTo(buf);
        buffer = buf;
    }

    /**
     * Compacts the unread bytes of the buffer to the beginning of the buffer.
     */
    private void compact() {
        // self-copy is safe: System.arraycopy handles overlapping regions
        moveBufferTo(buffer);
    }

    /**
     * Moves bytes from the buffer to the beginning of the destination buffer.
     * Also sets the index and endIndex variables.
     *
     * @param dest The destination buffer.
     */
    private void moveBufferTo(byte[] dest) {
        int size = endIndex - index;
        System.arraycopy(buffer, index, dest, 0, size);
        index = 0;
        endIndex = size;
    }

    /**
     * {@inheritDoc}
     */
    public int read(byte to[], int off, int len) throws IOException {
        // single combined bounds check: any negative term makes the OR negative
        if ((off | len | (off + len) | (to.length - (off + len))) < 0) {
            throw new IndexOutOfBoundsException();
        } else if (len == 0) {
            return 0;
        }

        // if the read would go to wrapped stream, but would result
        // in a small read then try read to the buffer instead
        int avail = endIndex - index;
        if (len - avail < MINIMUM_READ) {
            ensureBytes(len);
            avail = endIndex - index;
        }

        // first copy from buffer
        if (avail > 0) {
            if (len <= avail) {
                System.arraycopy(buffer, index, to, off, len);
                index += len;
                return len;
            }
            System.arraycopy(buffer, index, to, off, avail);
            len -= avail;
            off += avail;
        }
        int read = avail;

        // good place to reset index because the buffer is fully drained
        index = 0;
        endIndex = 0;

        // then directly from wrapped stream
        do {
            int r = wrapped.read(to, off, len);
            if (r <= 0) {
                // EOF mid-request: report bytes copied so far, or the EOF itself
                return (read == 0) ? r : read;
            }
            read += r;
            off += r;
            len -= r;
        } while (len > 0);

        return read;
    }

    /**
     * {@inheritDoc}
     */
    public long skip(long n) throws IOException {
        int avail = endIndex - index;
        if (avail >= n) {
            // whole skip satisfied from the buffer
            index += n;
            return n;
        }
        n -= avail;
        index = 0;
        endIndex = 0;
        // delegate the remainder; wrapped.skip may legally skip fewer bytes
        return avail + wrapped.skip(n);
    }

    /**
     * {@inheritDoc}
     */
    public int available() throws IOException {
        int avail = endIndex - index;
        return avail > 0 ? avail : wrapped.available();
    }

    /**
     * {@inheritDoc}
     */
    public void close() throws IOException {
        wrapped.close();
    }

    /**
     * Returns direct handle to the used buffer. Use the {@link #ensureBytes}
     * to prefill required bytes the buffer and {@link #getIndex} to fetch
     * the current position of the buffer.
     *
     * @return The underlying buffer.
     */
    public byte[] getBuffer() {
        return buffer;
    }

    /**
     * Returns the current read position in the buffer.
     *
     * @return the current read position in the buffer.
     */
    public int getIndex() {
        return index;
    }

    /**
     * Scans the length of the next null terminated string (C-style string) from
     * the stream.
     *
     * @return The length of the next null terminated string, including the
     *         terminating zero byte.
     * @throws IOException If reading of stream fails.
     * @throws EOFException If the stream did not contain any null terminators.
     */
    public int scanCStringLength() throws IOException {
        int pos = index;
        for (;;) {
            while (pos < endIndex) {
                if (buffer[pos++] == '\0') {
                    return pos - index;
                }
            }
            if (!readMore(STRING_SCAN_SPAN)) {
                throw new EOFException();
            }
            // restart from index: readMore may have compacted/moved the buffer,
            // so previously scanned bytes are re-scanned (correct, just redundant)
            pos = index;
        }
    }
}
/*
 *******************************************************************************
 * Copyright (C) 2001-2010, International Business Machines Corporation and    *
 * others. All Rights Reserved.                                                *
 *******************************************************************************
 */

/**
 * Port From:   ICU4C v1.8.1 : format : IntlTestDecimalFormatAPI
 * Source File: $ICU4CRoot/source/test/intltest/dcfmapts.cpp
 **/

package com.ibm.icu.dev.test.format;

import java.text.AttributedCharacterIterator;
import java.text.FieldPosition;
import java.text.Format;
import java.text.ParsePosition;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;

import com.ibm.icu.text.DecimalFormat;
import com.ibm.icu.text.DecimalFormatSymbols;
import com.ibm.icu.text.NumberFormat;

// This is an API test, not a unit test.  It doesn't test very many cases, and doesn't
// try to test the full functionality.  It just calls each function in the class and
// verifies that it works on a basic level.
public class IntlTestDecimalFormatAPIC extends com.ibm.icu.dev.test.TestFmwk {

    public static void main(String[] args) throws Exception {
        new IntlTestDecimalFormatAPIC().run(args);
    }

    // This test checks various generic API methods in DecimalFormat to achieve 100% API coverage.
    public void TestAPI() {

        logln("DecimalFormat API test---");
        logln("");
        Locale.setDefault(Locale.ENGLISH);

        // ======= Test constructors
        logln("Testing DecimalFormat constructors");
        DecimalFormat def = new DecimalFormat();

        final String pattern = new String("#,##0.# FF");
        DecimalFormat pat = null;
        try {
            pat = new DecimalFormat(pattern);
        } catch (IllegalArgumentException e) {
            errln("ERROR: Could not create DecimalFormat (pattern)");
        }

        DecimalFormatSymbols symbols = new DecimalFormatSymbols(Locale.FRENCH);

        DecimalFormat cust1 = new DecimalFormat(pattern, symbols);

        // ======= Test clone(), assignment, and equality
        logln("Testing clone() and equality operators");
        Format clone = (Format) def.clone();
        if (!def.equals(clone)) {
            errln("ERROR: Clone() failed");
        }

        // ======= Test various format() methods
        logln("Testing various format() methods");

        //        final double d = -10456.0037;    // this appears as -10456.003700000001 on NT
        //        final double d = -1.04560037e-4; // this appears as -1.0456003700000002E-4 on NT
        final double d = -10456.00370000000000; // this works!
        final long l = 100000000;
        logln("" + Double.toString(d) + " is the double value");

        StringBuffer res1 = new StringBuffer();
        StringBuffer res2 = new StringBuffer();
        StringBuffer res3 = new StringBuffer();
        StringBuffer res4 = new StringBuffer();
        FieldPosition pos1 = new FieldPosition(0);
        FieldPosition pos2 = new FieldPosition(0);
        FieldPosition pos3 = new FieldPosition(0);
        FieldPosition pos4 = new FieldPosition(0);

        res1 = def.format(d, res1, pos1);
        logln("" + Double.toString(d) + " formatted to " + res1);

        res2 = pat.format(l, res2, pos2);
        logln("" + l + " formatted to " + res2);

        res3 = cust1.format(d, res3, pos3);
        logln("" + Double.toString(d) + " formatted to " + res3);

        res4 = cust1.format(l, res4, pos4);
        logln("" + l + " formatted to " + res4);

        // ======= Test parse()
        logln("Testing parse()");

        String text = new String("-10,456.0037");
        ParsePosition pos = new ParsePosition(0);
        String patt = new String("#,##0.#");
        pat.applyPattern(patt);
        double d2 = pat.parse(text, pos).doubleValue();
        if (d2 != d) {
            errln(
                "ERROR: Roundtrip failed (via parse(" + Double.toString(d2) + " != " + Double.toString(d) + ")) for " + text);
        }
        logln(text + " parsed into " + (long) d2);

        // ======= Test getters and setters
        logln("Testing getters and setters");

        final DecimalFormatSymbols syms = pat.getDecimalFormatSymbols();
        def.setDecimalFormatSymbols(syms);
        if (!pat.getDecimalFormatSymbols().equals(def.getDecimalFormatSymbols())) {
            errln("ERROR: set DecimalFormatSymbols() failed");
        }

        // NOTE: the prefix/suffix checks below previously used != / == reference
        // comparison on Strings, which only passed by accident of interning.
        // They now use equals() so the assertions actually compare content.
        String posPrefix;
        pat.setPositivePrefix("+");
        posPrefix = pat.getPositivePrefix();
        logln("Positive prefix (should be +): " + posPrefix);
        if (!posPrefix.equals("+")) {
            errln("ERROR: setPositivePrefix() failed");
        }

        String negPrefix;
        pat.setNegativePrefix("-");
        negPrefix = pat.getNegativePrefix();
        logln("Negative prefix (should be -): " + negPrefix);
        if (!negPrefix.equals("-")) {
            errln("ERROR: setNegativePrefix() failed");
        }

        String posSuffix;
        pat.setPositiveSuffix("_");
        posSuffix = pat.getPositiveSuffix();
        logln("Positive suffix (should be _): " + posSuffix);
        if (!posSuffix.equals("_")) {
            errln("ERROR: setPositiveSuffix() failed");
        }

        String negSuffix;
        pat.setNegativeSuffix("~");
        negSuffix = pat.getNegativeSuffix();
        logln("Negative suffix (should be ~): " + negSuffix);
        if (!negSuffix.equals("~")) {
            errln("ERROR: setNegativeSuffix() failed");
        }

        long multiplier = 0;
        pat.setMultiplier(8);
        multiplier = pat.getMultiplier();
        logln("Multiplier (should be 8): " + multiplier);
        if (multiplier != 8) {
            errln("ERROR: setMultiplier() failed");
        }

        int groupingSize = 0;
        pat.setGroupingSize(2);
        groupingSize = pat.getGroupingSize();
        logln("Grouping size (should be 2): " + (long) groupingSize);
        if (groupingSize != 2) {
            errln("ERROR: setGroupingSize() failed");
        }

        pat.setDecimalSeparatorAlwaysShown(true);
        boolean tf = pat.isDecimalSeparatorAlwaysShown();
        logln(
            "DecimalSeparatorIsAlwaysShown (should be true) is " + (tf ? "true" : "false"));
        if (tf != true) {
            errln("ERROR: setDecimalSeparatorAlwaysShown() failed");
        }

        String funkyPat;
        funkyPat = pat.toPattern();
        logln("Pattern is " + funkyPat);

        String locPat;
        locPat = pat.toLocalizedPattern();
        logln("Localized pattern is " + locPat);

        // ======= Test applyPattern()
        logln("Testing applyPattern()");

        String p1 = new String("#,##0.0#;(#,##0.0#)");
        logln("Applying pattern " + p1);
        pat.applyPattern(p1);
        String s2;
        s2 = pat.toPattern();
        logln("Extracted pattern is " + s2);
        if (!s2.equals(p1)) {
            errln("ERROR: toPattern() result did not match pattern applied");
        }

        String p2 = new String("#,##0.0# FF;(#,##0.0# FF)");
        logln("Applying pattern " + p2);
        pat.applyLocalizedPattern(p2);
        String s3;
        s3 = pat.toLocalizedPattern();
        logln("Extracted pattern is " + s3);
        if (!s3.equals(p2)) {
            errln("ERROR: toLocalizedPattern() result did not match pattern applied");
        }

        // ======= Test getStaticClassID()

        //        logln("Testing instanceof()");
        //        try {
        //           NumberFormat test = new DecimalFormat();
        //            if (!(test instanceof DecimalFormat)) {
        //                errln("ERROR: instanceof failed");
        //            }
        //        }
        //        catch (Exception e) {
        //            errln("ERROR: Couldn't create a DecimalFormat");
        //        }
    }

    // Exercises every legacy rounding mode (0..6) for +/-2.55 with a
    // rounding increment of 1.0 and compares against the expected table.
    public void TestRounding() {
        double Roundingnumber = 2.55;
        double Roundingnumber1 = -2.55;
        // Expected values, interleaved as {+2.55 result, -2.55 result} per mode.
        double result[] = {
            3, -3,
            2, -2,
            3, -2,
            2, -3,
            3, -3,
            3, -3,
            3, -3
        };
        DecimalFormat pat = new DecimalFormat();
        String s = "";
        s = pat.toPattern();
        logln("pattern = " + s);
        int mode;
        int i = 0;
        String message;
        String resultStr;
        for (mode = 0; mode < 7; mode++) {
            pat.setRoundingMode(mode);
            if (pat.getRoundingMode() != mode) {
                errln(
                    "SetRoundingMode or GetRoundingMode failed for mode=" + mode);
            }

            //for +2.55 with RoundingIncrement=1.0
            pat.setRoundingIncrement(1.0);
            resultStr = pat.format(Roundingnumber);
            message = "round(" + (double) Roundingnumber
                    + "," + mode + ",FALSE) with RoundingIncrement=1.0==>";
            verify(message, resultStr, result[i++]);
            message = "";
            resultStr = "";

            //for -2.55 with RoundingIncrement=1.0
            resultStr = pat.format(Roundingnumber1);
            message = "round(" + (double) Roundingnumber1
                    + "," + mode + ",FALSE) with RoundingIncrement=1.0==>";
            verify(message, resultStr, result[i++]);
            message = "";
            resultStr = "";
        }
    }

    // Verifies that formatToCharacterIterator reports the expected attribute
    // runs (field positions) for a range of pattern/locale combinations.
    public void testFormatToCharacterIterator() {

        Number number = new Double(350.76);
        Number negativeNumber = new Double(-350.76);

        Locale us = Locale.US;

        // test number instance
        t_Format(1, number, NumberFormat.getNumberInstance(us),
                getNumberVectorUS());

        // test percent instance
        t_Format(3, number, NumberFormat.getPercentInstance(us),
                getPercentVectorUS());

        // test permille pattern
        DecimalFormat format = new DecimalFormat("###0.##\u2030");
        t_Format(4, number, format, getPermilleVector());

        // test exponential pattern with positive exponent
        format = new DecimalFormat("00.0#E0");
        t_Format(5, number, format, getPositiveExponentVector());

        // test exponential pattern with negative exponent
        format = new DecimalFormat("0000.0#E0");
        t_Format(6, number, format, getNegativeExponentVector());

        // test currency instance with US Locale
        t_Format(7, number, NumberFormat.getCurrencyInstance(us),
                getPositiveCurrencyVectorUS());

        // test negative currency instance with US Locale
        t_Format(8, negativeNumber, NumberFormat.getCurrencyInstance(us),
                getNegativeCurrencyVectorUS());

        // test multiple grouping seperators
        number = new Long(100300400);
        t_Format(11, number, NumberFormat.getNumberInstance(us),
                getNumberVector2US());

        // test 0
        number = new Long(0);
        t_Format(12, number, NumberFormat.getNumberInstance(us),
                getZeroVector());
    }

    private static List<FieldContainer> getNumberVectorUS() {
        List<FieldContainer> v = new ArrayList<FieldContainer>(3);
        v.add(new FieldContainer(0, 3, NumberFormat.Field.INTEGER));
        v.add(new FieldContainer(3, 4, NumberFormat.Field.DECIMAL_SEPARATOR));
        v.add(new FieldContainer(4, 6, NumberFormat.Field.FRACTION));
        return v;
    }

    //    private static Vector getPositiveCurrencyVectorTR() {
    //        Vector v = new Vector();
    //        v.add(new FieldContainer(0, 3, NumberFormat.Field.INTEGER));
    //        v.add(new FieldContainer(4, 6, NumberFormat.Field.CURRENCY));
    //        return v;
    //    }
    //
    //    private static Vector getNegativeCurrencyVectorTR() {
    //        Vector v = new Vector();
    //        v.add(new FieldContainer(0, 1, NumberFormat.Field.SIGN));
    //        v.add(new FieldContainer(1, 4, NumberFormat.Field.INTEGER));
    //        v.add(new FieldContainer(5, 7, NumberFormat.Field.CURRENCY));
    //        return v;
    //    }

    private static List<FieldContainer> getPositiveCurrencyVectorUS() {
        List<FieldContainer> v = new ArrayList<FieldContainer>(4);
        v.add(new FieldContainer(0, 1, NumberFormat.Field.CURRENCY));
        v.add(new FieldContainer(1, 4, NumberFormat.Field.INTEGER));
        v.add(new FieldContainer(4, 5, NumberFormat.Field.DECIMAL_SEPARATOR));
        v.add(new FieldContainer(5, 7, NumberFormat.Field.FRACTION));
        return v;
    }

    private static List<FieldContainer> getNegativeCurrencyVectorUS() {
        List<FieldContainer> v = new ArrayList<FieldContainer>(4);
        v.add(new FieldContainer(1, 2, NumberFormat.Field.CURRENCY));
        v.add(new FieldContainer(2, 5, NumberFormat.Field.INTEGER));
        v.add(new FieldContainer(5, 6, NumberFormat.Field.DECIMAL_SEPARATOR));
        v.add(new FieldContainer(6, 8, NumberFormat.Field.FRACTION));
        return v;
    }

    private static List<FieldContainer> getPercentVectorUS() {
        List<FieldContainer> v = new ArrayList<FieldContainer>(5);
        v.add(new FieldContainer(0, 2, NumberFormat.Field.INTEGER));
        v.add(new FieldContainer(2, 3, NumberFormat.Field.INTEGER));
        v.add(new FieldContainer(2, 3, NumberFormat.Field.GROUPING_SEPARATOR));
        v.add(new FieldContainer(3, 6, NumberFormat.Field.INTEGER));
        v.add(new FieldContainer(6, 7, NumberFormat.Field.PERCENT));
        return v;
    }

    private static List<FieldContainer> getPermilleVector() {
        List<FieldContainer> v = new ArrayList<FieldContainer>(2);
        v.add(new FieldContainer(0, 6, NumberFormat.Field.INTEGER));
        v.add(new FieldContainer(6, 7, NumberFormat.Field.PERMILLE));
        return v;
    }

    private static List<FieldContainer> getNegativeExponentVector() {
        List<FieldContainer> v = new ArrayList<FieldContainer>(6);
        v.add(new FieldContainer(0, 4, NumberFormat.Field.INTEGER));
        v.add(new FieldContainer(4, 5, NumberFormat.Field.DECIMAL_SEPARATOR));
        v.add(new FieldContainer(5, 6, NumberFormat.Field.FRACTION));
        v.add(new FieldContainer(6, 7, NumberFormat.Field.EXPONENT_SYMBOL));
        v.add(new FieldContainer(7, 8, NumberFormat.Field.EXPONENT_SIGN));
        v.add(new FieldContainer(8, 9, NumberFormat.Field.EXPONENT));
        return v;
    }

    private static List<FieldContainer> getPositiveExponentVector() {
        List<FieldContainer> v = new ArrayList<FieldContainer>(5);
        v.add(new FieldContainer(0, 2, NumberFormat.Field.INTEGER));
        v.add(new FieldContainer(2, 3, NumberFormat.Field.DECIMAL_SEPARATOR));
        v.add(new FieldContainer(3, 5, NumberFormat.Field.FRACTION));
        v.add(new FieldContainer(5, 6, NumberFormat.Field.EXPONENT_SYMBOL));
        v.add(new FieldContainer(6, 7, NumberFormat.Field.EXPONENT));
        return v;
    }

    private static List<FieldContainer> getNumberVector2US() {
        List<FieldContainer> v = new ArrayList<FieldContainer>(7);
        v.add(new FieldContainer(0, 3, NumberFormat.Field.INTEGER));
        v.add(new FieldContainer(3, 4, NumberFormat.Field.GROUPING_SEPARATOR));
        v.add(new FieldContainer(3, 4, NumberFormat.Field.INTEGER));
        v.add(new FieldContainer(4, 7, NumberFormat.Field.INTEGER));
        v.add(new FieldContainer(7, 8, NumberFormat.Field.GROUPING_SEPARATOR));
        v.add(new FieldContainer(7, 8, NumberFormat.Field.INTEGER));
        v.add(new FieldContainer(8, 11, NumberFormat.Field.INTEGER));
        return v;
    }

    private static List<FieldContainer> getZeroVector() {
        List<FieldContainer> v = new ArrayList<FieldContainer>(1);
        v.add(new FieldContainer(0, 1, NumberFormat.Field.INTEGER));
        return v;
    }

    private void t_Format(int count, Object object, Format format,
            List<FieldContainer> expectedResults) {
        List<FieldContainer> results = findFields(format.formatToCharacterIterator(object));
        assertTrue("Test " + count
                + ": Format returned incorrect CharacterIterator for "
                + format.format(object), compare(results, expectedResults));
    }

    /**
     * compares two vectors regardless of the order of their elements
     */
    private static boolean compare(List vector1, List vector2) {
        return vector1.size() == vector2.size() && vector1.containsAll(vector2);
    }

    /**
     * finds attributes with regards to char index in this
     * AttributedCharacterIterator, and puts them in a vector
     *
     * @param iterator
     * @return a vector, each entry in this vector are of type FieldContainer ,
     *         which stores start and end indexes and an attribute this range
     *         has
     */
    private static List<FieldContainer> findFields(AttributedCharacterIterator iterator) {
        List<FieldContainer> result = new ArrayList<FieldContainer>();
        while (iterator.getIndex() != iterator.getEndIndex()) {
            int start = iterator.getRunStart();
            int end = iterator.getRunLimit();

            Iterator it = iterator.getAttributes().keySet().iterator();
            while (it.hasNext()) {
                AttributedCharacterIterator.Attribute attribute = (AttributedCharacterIterator.Attribute) it
                        .next();
                Object value = iterator.getAttribute(attribute);
                result.add(new FieldContainer(start, end, attribute, value));
                //                System.out.println(start + " " + end + ": " + attribute + ",
                // " + value );
                //                System.out.println("v.add(new FieldContainer(" + start +"," +
                // end +"," + attribute+ "," + value+ "));");
            }
            iterator.setIndex(end);
        }
        return result;
    }

    // Simple value holder for an attribute run: [start, end) plus the
    // attribute and its value. Used by the formatToCharacterIterator tests.
    protected static class FieldContainer {
        int start, end;

        AttributedCharacterIterator.Attribute attribute;

        Object value;

        //         called from support_decimalformat and support_simpledateformat tests
        public FieldContainer(int start, int end,
                AttributedCharacterIterator.Attribute attribute) {
            this(start, end, attribute, attribute);
        }

        //         called from support_messageformat tests
        public FieldContainer(int start, int end,
                AttributedCharacterIterator.Attribute attribute, int value) {
            this(start, end, attribute, new Integer(value));
        }

        //         called from support_messageformat tests
        public FieldContainer(int start, int end,
                AttributedCharacterIterator.Attribute attribute, Object value) {
            this.start = start;
            this.end = end;
            this.attribute = attribute;
            this.value = value;
        }

        public boolean equals(Object obj) {
            if (!(obj instanceof FieldContainer))
                return false;

            FieldContainer fc = (FieldContainer) obj;
            return (start == fc.start && end == fc.end
                    && attribute == fc.attribute && value.equals(fc.value));
        }

        // Added to honor the equals/hashCode contract. Consistent with
        // equals(): attribute is compared (and therefore hashed) by identity.
        public int hashCode() {
            return ((start * 31 + end) * 31 + System.identityHashCode(attribute)) * 31
                    + value.hashCode();
        }
    }

    /*Helper functions */
    public void verify(String message, String got, double expected) {
        logln(message + got + " Expected : " + (long) expected);
        String expectedStr = "";
        expectedStr = expectedStr + (long) expected;
        if (!got.equals(expectedStr)) {
            errln("ERROR: Round() failed:  " + message + got + "  Expected : " + expectedStr);
        }
    }
}
//eof
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

package org.elasticsearch.xpack.ccr.action;

import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.single.shard.SingleShardRequest;
import org.elasticsearch.action.support.single.shard.TransportSingleShardAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.routing.ShardsIterator;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.engine.MissingHistoryOperationsException;
import org.elasticsearch.index.seqno.RetentionLease;
import org.elasticsearch.index.seqno.SeqNoStats;
import org.elasticsearch.index.shard.GlobalCheckpointListeners;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexShardNotStartedException;
import org.elasticsearch.index.shard.IndexShardState;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.ccr.Ccr;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import static org.elasticsearch.action.ValidateActions.addValidationError;
import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;

/**
 * Single-shard read action used by CCR (cross-cluster replication) to fetch
 * a batch of translog operations from a leader shard, starting at a given
 * sequence number. When the requested operations are not yet available the
 * transport handler waits (up to a poll timeout) for the shard's global
 * checkpoint to advance before responding.
 */
public class ShardChangesAction extends ActionType<ShardChangesAction.Response> {

    public static final ShardChangesAction INSTANCE = new ShardChangesAction();
    public static final String NAME = "indices:data/read/xpack/ccr/shard_changes";

    private ShardChangesAction() {
        super(NAME, ShardChangesAction.Response::new);
    }

    /**
     * Request for a range of operations from one shard. {@code fromSeqNo} and
     * {@code maxOperationCount} bound the range; {@code expectedHistoryUUID}
     * guards against reading from a shard whose history has diverged.
     */
    public static class Request extends SingleShardRequest<Request> {

        private long fromSeqNo;
        private int maxOperationCount;
        private final ShardId shardId;
        private final String expectedHistoryUUID;
        private TimeValue pollTimeout = TransportResumeFollowAction.DEFAULT_READ_POLL_TIMEOUT;
        private ByteSizeValue maxBatchSize = TransportResumeFollowAction.DEFAULT_MAX_READ_REQUEST_SIZE;

        // Set on deserialization; used to report took-time in the response.
        private long relativeStartNanos;

        public Request(ShardId shardId, String expectedHistoryUUID) {
            super(shardId.getIndexName());
            this.shardId = shardId;
            this.expectedHistoryUUID = expectedHistoryUUID;
        }

        Request(StreamInput in) throws IOException {
            super(in);
            fromSeqNo = in.readVLong();
            maxOperationCount = in.readVInt();
            shardId = new ShardId(in);
            expectedHistoryUUID = in.readString();
            pollTimeout = in.readTimeValue();
            maxBatchSize = new ByteSizeValue(in);

            // Starting the clock in order to know how much time is spent on fetching operations:
            relativeStartNanos = System.nanoTime();
        }

        public ShardId getShard() {
            return shardId;
        }

        public long getFromSeqNo() {
            return fromSeqNo;
        }

        public void setFromSeqNo(long fromSeqNo) {
            this.fromSeqNo = fromSeqNo;
        }

        public int getMaxOperationCount() {
            return maxOperationCount;
        }

        public void setMaxOperationCount(int maxOperationCount) {
            this.maxOperationCount = maxOperationCount;
        }

        public ByteSizeValue getMaxBatchSize() {
            return maxBatchSize;
        }

        public void setMaxBatchSize(ByteSizeValue maxBatchSize) {
            this.maxBatchSize = maxBatchSize;
        }

        public String getExpectedHistoryUUID() {
            return expectedHistoryUUID;
        }

        public TimeValue getPollTimeout() {
            return pollTimeout;
        }

        public void setPollTimeout(final TimeValue pollTimeout) {
            this.pollTimeout = Objects.requireNonNull(pollTimeout, "pollTimeout");
        }

        @Override
        public ActionRequestValidationException validate() {
            // Accumulates all violations rather than failing on the first one.
            ActionRequestValidationException validationException = null;
            if (fromSeqNo < 0) {
                validationException = addValidationError("fromSeqNo [" + fromSeqNo + "] cannot be lower than 0",
                        validationException);
            }
            if (maxOperationCount < 0) {
                validationException = addValidationError("maxOperationCount [" + maxOperationCount +
                        "] cannot be lower than 0", validationException);
            }
            if (maxBatchSize.compareTo(ByteSizeValue.ZERO) <= 0) {
                validationException =
                        addValidationError("maxBatchSize [" + maxBatchSize.getStringRep() + "] must be larger than 0",
                                validationException);
            }
            return validationException;
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            // NOTE: field order must mirror Request(StreamInput) exactly.
            super.writeTo(out);
            out.writeVLong(fromSeqNo);
            out.writeVInt(maxOperationCount);
            shardId.writeTo(out);
            out.writeString(expectedHistoryUUID);
            out.writeTimeValue(pollTimeout);
            maxBatchSize.writeTo(out);
        }

        @Override
        public boolean equals(final Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            final Request request = (Request) o;
            return fromSeqNo == request.fromSeqNo &&
                    maxOperationCount == request.maxOperationCount &&
                    Objects.equals(shardId, request.shardId) &&
                    Objects.equals(expectedHistoryUUID, request.expectedHistoryUUID) &&
                    Objects.equals(pollTimeout, request.pollTimeout) &&
                    maxBatchSize.equals(request.maxBatchSize);
        }

        @Override
        public int hashCode() {
            return Objects.hash(fromSeqNo, maxOperationCount, shardId, expectedHistoryUUID, pollTimeout, maxBatchSize);
        }

        @Override
        public String toString() {
            return "Request{" +
                    "fromSeqNo=" + fromSeqNo +
                    ", maxOperationCount=" + maxOperationCount +
                    ", shardId=" + shardId +
                    ", expectedHistoryUUID=" + expectedHistoryUUID +
                    ", pollTimeout=" + pollTimeout +
                    ", maxBatchSize=" + maxBatchSize.getStringRep() +
                    '}';
        }

    }

    /**
     * Response carrying the fetched operations along with the leader shard's
     * metadata versions and sequence-number watermarks at fetch time.
     */
    public static final class Response extends ActionResponse {

        private long mappingVersion;

        public long getMappingVersion() {
            return mappingVersion;
        }

        private long settingsVersion;

        public long getSettingsVersion() {
            return settingsVersion;
        }

        private long aliasesVersion;

        public long getAliasesVersion() {
            return aliasesVersion;
        }

        private long globalCheckpoint;

        public long getGlobalCheckpoint() {
            return globalCheckpoint;
        }

        private long maxSeqNo;

        public long getMaxSeqNo() {
            return maxSeqNo;
        }

        private long maxSeqNoOfUpdatesOrDeletes;

        public long getMaxSeqNoOfUpdatesOrDeletes() {
            return maxSeqNoOfUpdatesOrDeletes;
        }

        private Translog.Operation[] operations;

        public Translog.Operation[] getOperations() {
            return operations;
        }

        private long tookInMillis;

        public long getTookInMillis() {
            return tookInMillis;
        }

        Response() {
        }

        Response(StreamInput in) throws IOException {
            super(in);
            mappingVersion = in.readVLong();
            settingsVersion = in.readVLong();
            // aliasesVersion was added on the wire in 7.3.0; older peers send nothing.
            if (in.getVersion().onOrAfter(Version.V_7_3_0)) {
                aliasesVersion = in.readVLong();
            } else {
                aliasesVersion = 0;
            }
            globalCheckpoint = in.readZLong();
            maxSeqNo = in.readZLong();
            maxSeqNoOfUpdatesOrDeletes = in.readZLong();
            operations = in.readArray(Translog.Operation::readOperation, Translog.Operation[]::new);
            tookInMillis = in.readVLong();
        }

        Response(
                final long mappingVersion,
                final long settingsVersion,
                final long aliasesVersion,
                final long globalCheckpoint,
                final long maxSeqNo,
                final long maxSeqNoOfUpdatesOrDeletes,
                final Translog.Operation[] operations,
                final long tookInMillis) {
            this.mappingVersion = mappingVersion;
            this.settingsVersion = settingsVersion;
            this.aliasesVersion = aliasesVersion;
            this.globalCheckpoint = globalCheckpoint;
            this.maxSeqNo = maxSeqNo;
            this.maxSeqNoOfUpdatesOrDeletes = maxSeqNoOfUpdatesOrDeletes;
            this.operations = operations;
            this.tookInMillis = tookInMillis;
        }

        @Override
        public void writeTo(final StreamOutput out) throws IOException {
            // NOTE: field order must mirror Response(StreamInput) exactly.
            out.writeVLong(mappingVersion);
            out.writeVLong(settingsVersion);
            if (out.getVersion().onOrAfter(Version.V_7_3_0)) {
                out.writeVLong(aliasesVersion);
            }
            out.writeZLong(globalCheckpoint);
            out.writeZLong(maxSeqNo);
            out.writeZLong(maxSeqNoOfUpdatesOrDeletes);
            out.writeArray(Translog.Operation::writeOperation, operations);
            out.writeVLong(tookInMillis);
        }

        @Override
        public boolean equals(final Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            final Response that = (Response) o;
            return mappingVersion == that.mappingVersion &&
                    settingsVersion == that.settingsVersion &&
                    aliasesVersion == that.aliasesVersion &&
                    globalCheckpoint == that.globalCheckpoint &&
                    maxSeqNo == that.maxSeqNo &&
                    maxSeqNoOfUpdatesOrDeletes == that.maxSeqNoOfUpdatesOrDeletes &&
                    Arrays.equals(operations, that.operations) &&
                    tookInMillis == that.tookInMillis;
        }

        @Override
        public int hashCode() {
            return Objects.hash(
                    mappingVersion,
                    settingsVersion,
                    aliasesVersion,
                    globalCheckpoint,
                    maxSeqNo,
                    maxSeqNoOfUpdatesOrDeletes,
                    Arrays.hashCode(operations),
                    tookInMillis);
        }
    }

    /**
     * Transport-layer handler: resolves the shard, reads the operations, and
     * — when the requested range is beyond the current global checkpoint —
     * registers a global-checkpoint listener instead of answering immediately.
     */
    public static class TransportAction extends TransportSingleShardAction<Request, Response> {

        private final IndicesService indicesService;

        @Inject
        public TransportAction(ThreadPool threadPool,
                               ClusterService clusterService,
                               TransportService transportService,
                               ActionFilters actionFilters,
                               IndexNameExpressionResolver indexNameExpressionResolver,
                               IndicesService indicesService) {
            super(NAME, threadPool, clusterService, transportService, actionFilters,
                    indexNameExpressionResolver, Request::new, ThreadPool.Names.SEARCH);
            this.indicesService = indicesService;
        }

        @Override
        protected Response shardOperation(Request request, ShardId shardId) throws IOException {
            final IndexService indexService = indicesService.indexServiceSafe(request.getShard().getIndex());
            final IndexShard indexShard = indexService.getShard(request.getShard().id());
            final SeqNoStats seqNoStats = indexShard.seqNoStats();

            final Translog.Operation[] operations = getOperations(
                    indexShard,
                    seqNoStats.getGlobalCheckpoint(),
                    request.getFromSeqNo(),
                    request.getMaxOperationCount(),
                    request.getExpectedHistoryUUID(),
                    request.getMaxBatchSize());
            // must capture after snapshotting operations to ensure this MUS is at least the highest MUS of any of these operations.
            final long maxSeqNoOfUpdatesOrDeletes = indexShard.getMaxSeqNoOfUpdatesOrDeletes();
            // must capture IndexMetadata after snapshotting operations to ensure the returned mapping version is at least as up-to-date
            // as the mapping version that these operations used. Here we must not use IndexMetadata from ClusterService for we expose
            // a new cluster state to ClusterApplier(s) before exposing it in the ClusterService.
            final IndexMetadata indexMetadata = indexService.getMetadata();
            final long mappingVersion = indexMetadata.getMappingVersion();
            final long settingsVersion = indexMetadata.getSettingsVersion();
            final long aliasesVersion = indexMetadata.getAliasesVersion();
            return getResponse(
                    mappingVersion,
                    settingsVersion,
                    aliasesVersion,
                    seqNoStats,
                    maxSeqNoOfUpdatesOrDeletes,
                    operations,
                    request.relativeStartNanos);
        }

        @Override
        protected void asyncShardOperation(
                final Request request,
                final ShardId shardId,
                final ActionListener<Response> listener) throws IOException {
            final IndexService indexService = indicesService.indexServiceSafe(request.getShard().getIndex());
            final IndexShard indexShard = indexService.getShard(request.getShard().id());
            final SeqNoStats seqNoStats = indexShard.seqNoStats();

            if (request.getFromSeqNo() > seqNoStats.getGlobalCheckpoint()) {
                // The follower is ahead of what this shard can serve: wait (long-poll)
                // for the global checkpoint to reach fromSeqNo, or time out.
                logger.trace(
                        "{} waiting for global checkpoint advancement from [{}] to [{}]",
                        shardId,
                        seqNoStats.getGlobalCheckpoint(),
                        request.getFromSeqNo());
                indexShard.addGlobalCheckpointListener(
                        request.getFromSeqNo(),
                        new GlobalCheckpointListeners.GlobalCheckpointListener() {

                            @Override
                            public Executor executor() {
                                return threadPool.executor(Ccr.CCR_THREAD_POOL_NAME);
                            }

                            @Override
                            public void accept(final long g, final Exception e) {
                                if (g != UNASSIGNED_SEQ_NO) {
                                    assert request.getFromSeqNo() <= g
                                            : shardId + " only advanced to [" + g + "] while waiting for ["
                                            + request.getFromSeqNo() + "]";
                                    globalCheckpointAdvanced(shardId, g, request, listener);
                                } else {
                                    assert e != null;
                                    globalCheckpointAdvancementFailure(shardId, e, request, listener, indexShard);
                                }
                            }

                        },
                        request.getPollTimeout());
            } else {
                super.asyncShardOperation(request, shardId, listener);
            }
        }

        // Re-dispatches the read now that the checkpoint has caught up.
        private void globalCheckpointAdvanced(
                final ShardId shardId,
                final long globalCheckpoint,
                final Request request,
                final ActionListener<Response> listener) {
            logger.trace("{} global checkpoint advanced to [{}] after waiting for [{}]",
                    shardId, globalCheckpoint, request.getFromSeqNo());
            try {
                super.asyncShardOperation(request, shardId, listener);
            } catch (final IOException caught) {
                listener.onFailure(caught);
            }
        }

        // On timeout an empty-operations response is returned (a normal long-poll
        // outcome); any other failure is propagated to the listener.
        private void globalCheckpointAdvancementFailure(
                final ShardId shardId,
                final Exception e,
                final Request request,
                final ActionListener<Response> listener,
                final IndexShard indexShard) {
            logger.trace(
                    () -> new ParameterizedMessage(
                            "{} exception waiting for global checkpoint advancement to [{}]",
                            shardId,
                            request.getFromSeqNo()),
                    e);
            if (e instanceof TimeoutException) {
                try {
                    final IndexMetadata indexMetadata = clusterService.state().metadata().index(shardId.getIndex());
                    if (indexMetadata == null) {
                        listener.onFailure(new IndexNotFoundException(shardId.getIndex()));
                        return;
                    }
                    final long mappingVersion = indexMetadata.getMappingVersion();
                    final long settingsVersion = indexMetadata.getSettingsVersion();
                    final long aliasesVersion = indexMetadata.getAliasesVersion();
                    final SeqNoStats latestSeqNoStats = indexShard.seqNoStats();
                    final long maxSeqNoOfUpdatesOrDeletes = indexShard.getMaxSeqNoOfUpdatesOrDeletes();
                    listener.onResponse(
                            getResponse(
                                    mappingVersion,
                                    settingsVersion,
                                    aliasesVersion,
                                    latestSeqNoStats,
                                    maxSeqNoOfUpdatesOrDeletes,
                                    EMPTY_OPERATIONS_ARRAY,
                                    request.relativeStartNanos));
                } catch (final Exception caught) {
                    caught.addSuppressed(e);
                    listener.onFailure(caught);
                }
            } else {
                listener.onFailure(e);
            }
        }

        @Override
        protected boolean resolveIndex(Request request) {
            return false;
        }

        @Override
        protected ShardsIterator shards(ClusterState state, InternalRequest request) {
            return state
                    .routingTable()
                    .shardRoutingTable(request.concreteIndex(), request.request().getShard().id())
                    .activeInitializingShardsRandomIt();
        }

        @Override
        protected Writeable.Reader<Response> getResponseReader() {
            return Response::new;
        }

    }

    static final Translog.Operation[] EMPTY_OPERATIONS_ARRAY = new Translog.Operation[0];

    /**
     * Returns at most the specified maximum number of operations from the specified from sequence number. This method will never return
     * operations above the specified global checkpoint.
     *
     * Also if the sum of collected operations size is above the specified maximum batch size then this method stops collecting more
     * operations and returns what has been collected so far.
     *
     * @param indexShard the shard
     * @param globalCheckpoint the global checkpoint
     * @param fromSeqNo the starting sequence number
     * @param maxOperationCount the maximum number of operations
     * @param expectedHistoryUUID the expected history UUID for the shard
     * @param maxBatchSize the maximum batch size
     * @return the operations
     * @throws IOException if an I/O exception occurs reading the operations
     */
    static Translog.Operation[] getOperations(
            final IndexShard indexShard,
            final long globalCheckpoint,
            final long fromSeqNo,
            final int maxOperationCount,
            final String expectedHistoryUUID,
            final ByteSizeValue maxBatchSize) throws IOException {
        if (indexShard.state() != IndexShardState.STARTED) {
            throw new IndexShardNotStartedException(indexShard.shardId(), indexShard.state());
        }
        final String historyUUID = indexShard.getHistoryUUID();
        if (historyUUID.equals(expectedHistoryUUID) == false) {
            throw new IllegalStateException("unexpected history uuid, expected [" + expectedHistoryUUID +
                    "], actual [" + historyUUID + "]");
        }
        if (fromSeqNo > globalCheckpoint) {
            throw new IllegalStateException(
                    "not exposing operations from [" + fromSeqNo + "] greater than the global checkpoint [" + globalCheckpoint + "]");
        }
        int seenBytes = 0;
        // - 1 is needed, because toSeqNo is inclusive
        long toSeqNo = Math.min(globalCheckpoint, (fromSeqNo + maxOperationCount) - 1);
        assert fromSeqNo <= toSeqNo : "invalid range from_seqno[" + fromSeqNo + "] > to_seqno[" + toSeqNo + "]";
        final List<Translog.Operation> operations = new ArrayList<>();
        try (Translog.Snapshot snapshot = indexShard.newChangesSnapshot("ccr", fromSeqNo, toSeqNo, true)) {
            Translog.Operation op;
            while ((op = snapshot.next()) != null) {
                operations.add(op);
                seenBytes += op.estimateSize();
                // The byte cap is checked after adding, so at least one
                // operation is always returned even if it alone exceeds it.
                if (seenBytes > maxBatchSize.getBytes()) {
                    break;
                }
            }
        } catch (MissingHistoryOperationsException e) {
            final Collection<RetentionLease> retentionLeases = indexShard.getRetentionLeases().leases();
            final String message = "Operations are no longer available for replicating. " +
                    "Existing retention leases [" + retentionLeases + "]; maybe increase the retention lease period setting " +
                    "[" + IndexSettings.INDEX_SOFT_DELETES_RETENTION_LEASE_PERIOD_SETTING.getKey() + "]?";
            // Make it easy to detect this error in ShardFollowNodeTask:
            // (adding a metadata header instead of introducing a new exception that extends ElasticsearchException)
            ResourceNotFoundException wrapper = new ResourceNotFoundException(message, e);
            wrapper.addMetadata(Ccr.REQUESTED_OPS_MISSING_METADATA_KEY, Long.toString(fromSeqNo), Long.toString(toSeqNo));
            throw wrapper;
        }
        return operations.toArray(EMPTY_OPERATIONS_ARRAY);
    }

    // Builds a Response, converting the request's start time into a took-millis value.
    static Response getResponse(
            final long mappingVersion,
            final long settingsVersion,
            final long aliasesVersion,
            final SeqNoStats seqNoStats,
            final long maxSeqNoOfUpdates,
            final Translog.Operation[] operations,
            long relativeStartNanos) {
        long tookInNanos = System.nanoTime() - relativeStartNanos;
        long tookInMillis = TimeUnit.NANOSECONDS.toMillis(tookInNanos);
        return new Response(
                mappingVersion,
                settingsVersion,
                aliasesVersion,
                seqNoStats.getGlobalCheckpoint(),
                seqNoStats.getMaxSeqNo(),
                maxSeqNoOfUpdates,
                operations,
                tookInMillis);
    }

}
/* * Copyright 2017 StreamSets Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.streamsets.pipeline; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.net.URLClassLoader; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; /** * A {@link URLClassLoader} for application isolation. There are two * configuration types supported by StageClassLoader. The first * is System classes which are always delegated to the parent * and the second is application classes which are never delegated * to the parent. */ public class SDCClassLoader extends BlackListURLClassLoader { /* * Note: * if you update this, you must also update api-children-classloader.properties */ private static final String[] PACKAGES_BLACKLIST_FOR_STAGE_LIBRARIES = { "com.streamsets.pipeline.api.", "com.streamsets.pipeline.container.", "org.slf4j.", "org.apache.log4j." }; /** * Default value of the system classes if the user did not override them. * JDK classes, hadoop classes and resources, and some select third-party * classes are considered system classes, and are not loaded by the * application classloader. 
*/ static final List<String> SYSTEM_API_CLASSES; static final List<String> SYSTEM_API_CHILDREN_CLASSES; private static String API = "api"; private static String API_CHILDREN = "api-children"; private static final String[] CLASSLOADER_TYPES = new String[] { API, API_CHILDREN }; private static final String SYSTEM_CLASSES_DEFAULT_KEY = "system.classes.default"; private static boolean debug = false; public static void setDebug(boolean debug) { SDCClassLoader.debug = debug; } public static boolean isDebug() { return debug; } static { Map<String, String> systemClassesDefaultsMap = new HashMap<>(); for (String classLoaderType : CLASSLOADER_TYPES) { String propertiesFile = classLoaderType + "-classloader.properties"; try (InputStream is = SDCClassLoader.class.getClassLoader() .getResourceAsStream(propertiesFile);) { if (is == null) { throw new ExceptionInInitializerError("properties file " + propertiesFile + " is not found"); } Properties props = new Properties(); props.load(is); // get the system classes default String systemClassesDefault = props.getProperty(SYSTEM_CLASSES_DEFAULT_KEY); if (systemClassesDefault == null) { throw new ExceptionInInitializerError("property " + SYSTEM_CLASSES_DEFAULT_KEY + " is not found"); } systemClassesDefaultsMap.put(classLoaderType, systemClassesDefault); } catch (IOException e) { throw new ExceptionInInitializerError(e); } } SYSTEM_API_CLASSES = Collections.unmodifiableList(Arrays.asList(ClassLoaderUtil.getTrimmedStrings( ClassLoaderUtil.checkNotNull(systemClassesDefaultsMap.get(API), API)))); List<String> apiChildren = new ArrayList<>(Arrays.asList(ClassLoaderUtil.getTrimmedStrings( ClassLoaderUtil.checkNotNull(systemClassesDefaultsMap.get(API_CHILDREN), API_CHILDREN)))); apiChildren.addAll(SYSTEM_API_CLASSES); SYSTEM_API_CHILDREN_CLASSES = Collections.unmodifiableList(apiChildren); } private final ClassLoader parent; private final boolean parentIsAPIClassLoader; private final SystemPackage systemPackage; private final boolean 
isPrivate; private final ApplicationPackage applicationPackage; public SDCClassLoader(String type, String name, List<URL> urls, ClassLoader parent, String[] blacklistedPackages, SystemPackage systemPackage, ApplicationPackage applicationPackage, boolean isPrivate, boolean parentIsAPIClassLoader, boolean isStageLibClassLoader) { super(type, name, getOrderedURLsForClassLoader(urls, isStageLibClassLoader, name), parent, blacklistedPackages); if (debug) { System.err.println(getClass().getSimpleName() + " " + getName() + ": urls: " + Arrays.toString(urls.toArray())); System.err.println(getClass().getSimpleName() + " " + getName() + ": system classes: " + systemPackage); } this.parent = parent; this.parentIsAPIClassLoader = parentIsAPIClassLoader; if (parent == null) { throw new IllegalArgumentException("No parent classloader!"); } if (debug) { System.err.println(getClass().getSimpleName() + " " + getName() + ": parent classloader: " + parent); } if (systemPackage == null) { throw new IllegalArgumentException("System classes cannot be null"); } // if the caller-specified system classes are null or empty, use the default this.systemPackage = systemPackage; if(debug) { System.err.println(getClass().getSimpleName() + " " + getName() + ": system classes: " + this.systemPackage); } this.applicationPackage = applicationPackage; this.isPrivate = isPrivate; if(debug) { System.err.println(getClass().getSimpleName() + " " + getName() + ": application packages: " + this.applicationPackage); } } /** * Arranges the urls in the following order: * <ul> * <li>stage lib jars</li> * <li>protolib jars</li> * <li>non protolib jars</li> * </ul> * * @param stageLibName * @param urls * @return */ static List<URL> bringStageAndProtoLibsToFront(String stageLibName, List<URL> urls) { List<URL> otherJars = new ArrayList<>(); List<URL> protolibJars = new ArrayList<>(); List<URL> stageLibjars = new ArrayList<>(); for (URL url : urls) { String str = url.toExternalForm(); if (str.endsWith(".jar")) { 
int nameIdx = str.lastIndexOf("/"); if (nameIdx > -1) { String jarName = str.substring(nameIdx + 1); if (jarName.contains("-protolib-")) { // adding only protolib jars protolibJars.add(url); } else if (jarName.contains(stageLibName)) { stageLibjars.add(url); } else { otherJars.add(url); } } else { otherJars.add(url); } } else { otherJars.add(url); } } List<URL> allJars = new ArrayList<>(); if (stageLibjars.size() != 1) { throw new ExceptionInInitializerError("Expected exactly 1 stage lib jar but found " + stageLibjars.size() + " with name " + stageLibName); } allJars.addAll(stageLibjars); allJars.addAll(protolibJars); allJars.addAll(otherJars); return allJars; } @Override public URL getResource(String name) { URL url = null; boolean isSystemPackage = systemPackage.isSystem(name); if (!isSystemPackage) { url = findResource(name); if (url == null && name.startsWith("/")) { if (debug) { System.err.println(getClass().getSimpleName() + " " + getName() + ": Remove leading / off " + name); } url = findResource(name.substring(1)); } } if (url == null && (isSystemPackage || !applicationPackage.isApplication(name))) { url = parent.getResource(name); } if (url != null) { if (debug) { System.err.println(getClass().getSimpleName() + " " + getName() + ": getResource(" + name + ")=" + url); } } return url; } @Override public Enumeration<URL> getResources(String name) throws IOException { if (debug) { System.err.println("getResources(" + name + ")"); } Enumeration<URL> result = null; if (!systemPackage.isSystem(name)) { // Search local repositories if (debug) { System.err.println(" Searching local repositories"); } result = findResources(name); if (result != null && result.hasMoreElements()) { if (debug) { System.err.println(" --> Returning result from local"); } return result; } if (applicationPackage.isApplication(name)) { if (debug) { System.err.println(" --> application class, returning empty enumeration"); } return Collections.emptyEnumeration(); } } // Delegate to parent 
unconditionally if (debug) { System.err.println(" Delegating to parent classloader unconditionally " + parent); } result = parent.getResources(name); if (result != null && result.hasMoreElements()) { if (debug) { List<URL> resultList = Collections.list(result); result = Collections.enumeration(resultList); System.err.println(" --> Returning result from parent: " + resultList); } return result; } // (4) Resource was not found if (debug) { System.err.println(" --> Resource not found, returning empty enumeration"); } return Collections.emptyEnumeration(); } @Override public InputStream getResourceAsStream(String name) { if (debug) { System.err.println("getResourceAsStream(" + name + ")"); } InputStream stream = null; if (!systemPackage.isSystem(name)) { // Search local repositories if (debug) { System.err.println(" Searching local repositories"); } URL url = findResource(name); if (url != null) { if (debug) { System.err.println(" --> Returning stream from local"); } try { return url.openStream(); } catch (IOException e) { // Ignore } } if (applicationPackage.isApplication(name)) { if (debug) { System.err.println(" --> application class, returning null"); } return null; } } // Delegate to parent unconditionally if (debug) { System.err.println(" Delegating to parent classloader unconditionally " + parent); } stream = parent.getResourceAsStream(name); if (stream != null) { if (debug) { System.err.println(" --> Returning stream from parent"); } return stream; } // (4) Resource was not found if (debug) { System.err.println(" --> Resource not found, returning null"); } return null; } @Override public Class<?> loadClass(String name) throws ClassNotFoundException { return this.loadClass(name, false); } @Override protected synchronized Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException { if (debug) { System.err.println(getClass().getSimpleName() + " " + getName() + ": Loading class: " + name); } Class<?> c = findLoadedClass(name); 
ClassNotFoundException ex = null; boolean isSystemPackage = systemPackage.isSystem(name); if (c == null && !isSystemPackage) { // Try to load class from this classloader's URLs. Note that this is like // the servlet spec, not the usual Java 2 behaviour where we ask the // parent to attempt to load first. try { c = findClass(name); if (debug && c != null) { System.err.println(getClass().getSimpleName() + " " + getName() + ": Loaded class: " + name + " "); } } catch (ClassNotFoundException e) { if (debug) { System.err.println(getClass().getSimpleName() + " " + getName() + ": " + e); } ex = e; } } // try parent classloader in the following situations: // 1. Package has been marked system // 2. parent is the API classloader // under most circumstances we do not want to try the parent classloader // for application classes, however this is not true if the parent is the api // classloader since we load the api and codahale/dropwizard metrics from there // 3. Class is not an application class if (c == null && (isSystemPackage || parentIsAPIClassLoader || !applicationPackage.isApplication(name))) { c = parent.loadClass(name); if (debug && c != null) { System.err.println(getClass().getSimpleName() + " " + getName() + ": Loaded class from parent: " + name + " "); } } if (c == null) { throw ex != null ? 
ex : new ClassNotFoundException(name); } if (resolve) { resolveClass(c); } return c; } public static SDCClassLoader getAPIClassLoader(List<URL> apiURLs, ClassLoader parent) { return new SDCClassLoader("api-lib", "API", apiURLs, parent, null, new SystemPackage(SYSTEM_API_CLASSES), ApplicationPackage.get(parent), false, false, false); } public static SDCClassLoader getContainerCLassLoader(List<URL> containerURLs, ClassLoader apiCL) { return new ContainerClassLoader("container-lib", "Container", containerURLs, apiCL, null, new SystemPackage(SYSTEM_API_CHILDREN_CLASSES), ApplicationPackage.get(apiCL.getParent()), false, true, false); } public static SDCClassLoader getStageClassLoader(String type, String name, List<URL> libURLs, ClassLoader apiCL) { return getStageClassLoader(type, name, libURLs, apiCL, false); } public static SDCClassLoader getStageClassLoader(String type, String name, List<URL> libURLs, ClassLoader apiCL, boolean isPrivate) { return new SDCClassLoader(type, name, libURLs, apiCL, PACKAGES_BLACKLIST_FOR_STAGE_LIBRARIES, new SystemPackage(SYSTEM_API_CHILDREN_CLASSES), ApplicationPackage.get(apiCL.getParent()), isPrivate, true, true); } public SDCClassLoader duplicateStageClassLoader() { return getStageClassLoader(getType(), getName(), urls, parent, true); } private static List<URL> getOrderedURLsForClassLoader( List<URL> urls, boolean isStageLibClassLoader, String stageLibName ) { // only for stagelib classloaders, we force stagelib and protolib JARs to be first in the classpath, so they can // override classes from its dependencies. Usecase: Hadoop native compression codecs replacement return (isStageLibClassLoader) ? bringStageAndProtoLibsToFront(stageLibName, urls) : urls; } public boolean isPrivate() { return isPrivate; } public String toString() { return String.format("SDCClassLoader[type=%s name=%s private=%b]", getType(), getName(), isPrivate); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.kafka.common.network;

import java.io.IOException;
import java.net.InetAddress;
import java.net.Socket;
import java.nio.channels.SelectionKey;
import java.security.Principal;

import org.apache.kafka.common.utils.Utils;

public class KafkaChannel {

    private final String id;
    private final TransportLayer transportLayer;
    private final Authenticator authenticator;
    // Accumulated network-thread time; updated on the network thread, read and reset
    // after each response is sent.
    private long networkThreadTimeNanos;
    private final int maxReceiveSize;
    private NetworkReceive receive;
    private Send send;
    // Connection and mute state are tracked so that outstanding requests on a channel
    // can still be processed after the channel is disconnected.
    private boolean disconnected;
    private boolean muted;
    private ChannelState state;

    public KafkaChannel(String id, TransportLayer transportLayer, Authenticator authenticator,
                        int maxReceiveSize) throws IOException {
        this.id = id;
        this.transportLayer = transportLayer;
        this.authenticator = authenticator;
        this.maxReceiveSize = maxReceiveSize;
        this.networkThreadTimeNanos = 0L;
        this.disconnected = false;
        this.muted = false;
        this.state = ChannelState.NOT_CONNECTED;
    }

    public void close() throws IOException {
        disconnected = true;
        Utils.closeAll(transportLayer, authenticator);
    }

    /**
     * Returns the principal returned by `authenticator.principal()`.
     */
    public Principal principal() throws IOException {
        return authenticator.principal();
    }

    /**
     * Does handshake of transportLayer and authentication using configured authenticator
     */
    public void prepare() throws IOException {
        if (transportLayer.ready() == false) {
            transportLayer.handshake();
        }
        if (transportLayer.ready() && authenticator.complete() == false) {
            authenticator.authenticate();
        }
        if (ready()) {
            state = ChannelState.READY;
        }
    }

    public void disconnect() {
        disconnected = true;
        transportLayer.disconnect();
    }

    public void state(ChannelState state) {
        this.state = state;
    }

    public ChannelState state() {
        return this.state;
    }

    public boolean finishConnect() throws IOException {
        boolean connected = transportLayer.finishConnect();
        if (connected) {
            // Authentication may still be pending once the TCP connection completes.
            if (ready()) {
                state = ChannelState.READY;
            } else {
                state = ChannelState.AUTHENTICATE;
            }
        }
        return connected;
    }

    public boolean isConnected() {
        return transportLayer.isConnected();
    }

    public String id() {
        return id;
    }

    public void mute() {
        if (disconnected == false) {
            transportLayer.removeInterestOps(SelectionKey.OP_READ);
        }
        muted = true;
    }

    public void unmute() {
        if (disconnected == false) {
            transportLayer.addInterestOps(SelectionKey.OP_READ);
        }
        muted = false;
    }

    /**
     * Returns true if this channel has been explicitly muted using {@link KafkaChannel#mute()}
     */
    public boolean isMute() {
        return muted;
    }

    public boolean ready() {
        return transportLayer.ready() && authenticator.complete();
    }

    public boolean hasSend() {
        return send != null;
    }

    /**
     * Returns the address to which this channel's socket is connected or `null` if the socket has never been connected.
     *
     * If the socket was connected prior to being closed, then this method will continue to return the
     * connected address after the socket is closed.
     */
    public InetAddress socketAddress() {
        return transportLayer.socketChannel().socket().getInetAddress();
    }

    public String socketDescription() {
        Socket socket = transportLayer.socketChannel().socket();
        InetAddress remoteAddress = socket.getInetAddress();
        // Fall back to the local address when the socket was never connected.
        return remoteAddress == null ? socket.getLocalAddress().toString() : remoteAddress.toString();
    }

    public void setSend(Send send) {
        if (this.send != null) {
            throw new IllegalStateException("Attempt to begin a send operation with prior send operation still in progress.");
        }
        this.send = send;
        this.transportLayer.addInterestOps(SelectionKey.OP_WRITE);
    }

    /**
     * Reads from the transport layer into the in-progress receive, returning it (and
     * clearing the in-progress state) once complete, or null if more data is needed.
     */
    public NetworkReceive read() throws IOException {
        if (receive == null) {
            receive = new NetworkReceive(maxReceiveSize, id);
        }
        receive(receive);
        if (!receive.complete()) {
            return null;
        }
        receive.payload().rewind();
        NetworkReceive completedReceive = receive;
        receive = null;
        return completedReceive;
    }

    /**
     * Writes the in-progress send to the transport layer, returning it (and clearing the
     * in-progress state) once fully written, or null if there is nothing to write or the
     * send is not yet complete.
     */
    public Send write() throws IOException {
        if (send == null || !send(send)) {
            return null;
        }
        Send completedSend = send;
        send = null;
        return completedSend;
    }

    /**
     * Accumulates network thread time for this channel.
     */
    public void addNetworkThreadTimeNanos(long nanos) {
        networkThreadTimeNanos += nanos;
    }

    /**
     * Returns accumulated network thread time for this channel and resets
     * the value to zero.
     */
    public long getAndResetNetworkThreadTimeNanos() {
        long current = networkThreadTimeNanos;
        networkThreadTimeNanos = 0;
        return current;
    }

    private long receive(NetworkReceive receive) throws IOException {
        return receive.readFrom(transportLayer);
    }

    private boolean send(Send send) throws IOException {
        send.writeTo(transportLayer);
        if (send.completed()) {
            transportLayer.removeInterestOps(SelectionKey.OP_WRITE);
        }
        return send.completed();
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.hive;

import com.facebook.presto.hive.s3.S3FileSystemType;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.google.common.net.HostAndPort;
import io.airlift.configuration.Config;
import io.airlift.configuration.ConfigDescription;
import io.airlift.configuration.DefunctConfig;
import io.airlift.configuration.LegacyConfig;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
import io.airlift.units.MinDataSize;
import io.airlift.units.MinDuration;
import org.joda.time.DateTimeZone;

import javax.validation.constraints.Min;
import javax.validation.constraints.NotNull;

import java.util.List;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;

import static io.airlift.units.DataSize.Unit.MEGABYTE;

/**
 * Airlift-bound configuration for the Hive connector. Each setter is bound to a
 * {@code hive.*} configuration property via {@link Config}; getters expose the
 * effective values and carry the bean-validation constraints.
 */
@DefunctConfig({
        "hive.file-system-cache-ttl",
        "hive.max-global-split-iterator-threads",
        "hive.optimized-reader.enabled"})
public class HiveClientConfig
{
    // Used to split comma-separated config values (e.g. hive.config.resources).
    private static final Splitter SPLITTER = Splitter.on(',').trimResults().omitEmptyStrings();

    private String timeZone = TimeZone.getDefault().getID();

    // Split generation / scheduling defaults.
    private DataSize maxSplitSize = new DataSize(64, MEGABYTE);
    private int maxPartitionsPerScan = 100_000;
    private int maxOutstandingSplits = 1_000;
    private DataSize maxOutstandingSplitsSize = new DataSize(256, MEGABYTE);
    private int maxSplitIteratorThreads = 1_000;
    private int minPartitionBatchSize = 10;
    private int maxPartitionBatchSize = 100;
    private int maxInitialSplits = 200;
    private int splitLoaderConcurrency = 4;
    private DataSize maxInitialSplitSize;
    private int domainCompactionThreshold = 100;
    private boolean forceLocalScheduling;
    private boolean recursiveDirWalkerEnabled;
    private int maxConcurrentFileRenames = 20;

    private boolean allowCorruptWritesForTesting;

    // Metastore client / cache settings.
    private Duration metastoreCacheTtl = new Duration(0, TimeUnit.SECONDS);
    private Duration metastoreRefreshInterval = new Duration(0, TimeUnit.SECONDS);
    private long metastoreCacheMaximumSize = 10000;
    private long perTransactionMetastoreCacheMaximumSize = 1000;
    private int maxMetastoreRefreshThreads = 100;
    private HostAndPort metastoreSocksProxy;
    private Duration metastoreTimeout = new Duration(10, TimeUnit.SECONDS);

    // HDFS client settings.
    private Duration ipcPingInterval = new Duration(10, TimeUnit.SECONDS);
    private Duration dfsTimeout = new Duration(60, TimeUnit.SECONDS);
    private Duration dfsConnectTimeout = new Duration(500, TimeUnit.MILLISECONDS);
    private int dfsConnectMaxRetries = 5;
    private boolean verifyChecksum = true;
    private String domainSocketPath;

    private S3FileSystemType s3FileSystemType = S3FileSystemType.PRESTO;

    // Write path defaults.
    private HiveStorageFormat hiveStorageFormat = HiveStorageFormat.RCBINARY;
    private HiveCompressionCodec hiveCompressionCodec = HiveCompressionCodec.GZIP;
    private boolean respectTableFormat = true;
    private boolean immutablePartitions;
    private int maxPartitionsPerWriter = 100;
    private int writeValidationThreads = 16;

    private List<String> resourceConfigFiles;

    // File-format reader/writer options.
    private boolean useParquetColumnNames;
    private boolean parquetOptimizedReaderEnabled;
    private boolean parquetPredicatePushdownEnabled;

    private boolean assumeCanonicalPartitionKeys;

    private boolean useOrcColumnNames;
    private boolean orcBloomFiltersEnabled;
    private double orcDefaultBloomFilterFpp = 0.05;
    private DataSize orcMaxMergeDistance = new DataSize(1, MEGABYTE);
    private DataSize orcMaxBufferSize = new DataSize(8, MEGABYTE);
    private DataSize orcStreamBufferSize = new DataSize(8, MEGABYTE);
    private DataSize orcMaxReadBlockSize = new DataSize(16, MEGABYTE);
    private boolean orcLazyReadSmallRanges = true;
    private boolean orcOptimizedWriterEnabled;
    private boolean orcWriterValidate = true;

    private boolean rcfileOptimizedWriterEnabled = true;
    private boolean rcfileWriterValidate;

    // Security / authentication settings.
    private HiveMetastoreAuthenticationType hiveMetastoreAuthenticationType = HiveMetastoreAuthenticationType.NONE;
    private HdfsAuthenticationType hdfsAuthenticationType = HdfsAuthenticationType.NONE;
    private boolean hdfsImpersonationEnabled;

    private boolean skipDeletionForAlter;

    private boolean bucketExecutionEnabled = true;
    private boolean bucketWritingEnabled = true;

    private int fileSystemMaxCacheSize = 1000;

    private boolean writesToNonManagedTablesEnabled;
    private boolean createsOfNonManagedTablesEnabled = true;

    private boolean tableStatisticsEnabled = true;

    public int getMaxInitialSplits()
    {
        return maxInitialSplits;
    }

    @Config("hive.max-initial-splits")
    public HiveClientConfig setMaxInitialSplits(int maxInitialSplits)
    {
        this.maxInitialSplits = maxInitialSplits;
        return this;
    }

    public DataSize getMaxInitialSplitSize()
    {
        // Defaults to half the max split size when not configured explicitly.
        if (maxInitialSplitSize == null) {
            return new DataSize(maxSplitSize.getValue() / 2, maxSplitSize.getUnit());
        }
        return maxInitialSplitSize;
    }

    @Config("hive.max-initial-split-size")
    public HiveClientConfig setMaxInitialSplitSize(DataSize maxInitialSplitSize)
    {
        this.maxInitialSplitSize = maxInitialSplitSize;
        return this;
    }

    @Min(1)
    public int getSplitLoaderConcurrency()
    {
        return splitLoaderConcurrency;
    }

    @Config("hive.split-loader-concurrency")
    public HiveClientConfig setSplitLoaderConcurrency(int splitLoaderConcurrency)
    {
        this.splitLoaderConcurrency = splitLoaderConcurrency;
        return this;
    }

    @Min(1)
    public int getDomainCompactionThreshold()
    {
        return domainCompactionThreshold;
    }

    @Config("hive.domain-compaction-threshold")
    @ConfigDescription("Maximum ranges to allow in a tuple domain without compacting it")
    public HiveClientConfig setDomainCompactionThreshold(int domainCompactionThreshold)
    {
        this.domainCompactionThreshold = domainCompactionThreshold;
        return this;
    }

    public boolean isForceLocalScheduling()
    {
        return forceLocalScheduling;
    }

    @Config("hive.force-local-scheduling")
    public HiveClientConfig setForceLocalScheduling(boolean forceLocalScheduling)
    {
        this.forceLocalScheduling = forceLocalScheduling;
        return this;
    }

    @Min(1)
    public int getMaxConcurrentFileRenames()
    {
        return maxConcurrentFileRenames;
    }

    @Config("hive.max-concurrent-file-renames")
    public HiveClientConfig setMaxConcurrentFileRenames(int maxConcurrentFileRenames)
    {
        this.maxConcurrentFileRenames = maxConcurrentFileRenames;
        return this;
    }

    @Config("hive.recursive-directories")
    public HiveClientConfig setRecursiveDirWalkerEnabled(boolean recursiveDirWalkerEnabled)
    {
        this.recursiveDirWalkerEnabled = recursiveDirWalkerEnabled;
        return this;
    }

    public boolean getRecursiveDirWalkerEnabled()
    {
        return recursiveDirWalkerEnabled;
    }

    public DateTimeZone getDateTimeZone()
    {
        return DateTimeZone.forTimeZone(TimeZone.getTimeZone(timeZone));
    }

    @NotNull
    public String getTimeZone()
    {
        return timeZone;
    }

    @Config("hive.time-zone")
    public HiveClientConfig setTimeZone(String id)
    {
        // Null resets to the JVM default time zone.
        this.timeZone = (id != null) ? id : TimeZone.getDefault().getID();
        return this;
    }

    @NotNull
    public DataSize getMaxSplitSize()
    {
        return maxSplitSize;
    }

    @Config("hive.max-split-size")
    public HiveClientConfig setMaxSplitSize(DataSize maxSplitSize)
    {
        this.maxSplitSize = maxSplitSize;
        return this;
    }

    @Min(1)
    public int getMaxPartitionsPerScan()
    {
        return maxPartitionsPerScan;
    }

    @Config("hive.max-partitions-per-scan")
    @ConfigDescription("Maximum allowed partitions for a single table scan")
    public HiveClientConfig setMaxPartitionsPerScan(int maxPartitionsPerScan)
    {
        this.maxPartitionsPerScan = maxPartitionsPerScan;
        return this;
    }

    @Min(1)
    public int getMaxOutstandingSplits()
    {
        return maxOutstandingSplits;
    }

    @Config("hive.max-outstanding-splits")
    @ConfigDescription("Target number of buffered splits for each table scan in a query, before the scheduler tries to pause itself")
    public HiveClientConfig setMaxOutstandingSplits(int maxOutstandingSplits)
    {
        this.maxOutstandingSplits = maxOutstandingSplits;
        return this;
    }

    @MinDataSize("1MB")
    public DataSize getMaxOutstandingSplitsSize()
    {
        return maxOutstandingSplitsSize;
    }

    @Config("hive.max-outstanding-splits-size")
    @ConfigDescription("Maximum amount of memory allowed for split buffering for each table scan in a query, before the query is failed")
    public HiveClientConfig setMaxOutstandingSplitsSize(DataSize maxOutstandingSplits)
    {
        this.maxOutstandingSplitsSize = maxOutstandingSplits;
        return this;
    }

    @Min(1)
    public int getMaxSplitIteratorThreads()
    {
        return maxSplitIteratorThreads;
    }

    @Config("hive.max-split-iterator-threads")
    public HiveClientConfig setMaxSplitIteratorThreads(int maxSplitIteratorThreads)
    {
        this.maxSplitIteratorThreads = maxSplitIteratorThreads;
        return this;
    }

    @Deprecated
    public boolean getAllowCorruptWritesForTesting()
    {
        return allowCorruptWritesForTesting;
    }

    @Deprecated
    @Config("hive.allow-corrupt-writes-for-testing")
    @ConfigDescription("Allow Hive connector to write data even when data will likely be corrupt")
    public HiveClientConfig setAllowCorruptWritesForTesting(boolean allowCorruptWritesForTesting)
    {
        this.allowCorruptWritesForTesting = allowCorruptWritesForTesting;
        return this;
    }

    @NotNull
    public Duration getMetastoreCacheTtl()
    {
        return metastoreCacheTtl;
    }

    @MinDuration("0ms")
    @Config("hive.metastore-cache-ttl")
    public HiveClientConfig setMetastoreCacheTtl(Duration metastoreCacheTtl)
    {
        this.metastoreCacheTtl = metastoreCacheTtl;
        return this;
    }

    @NotNull
    public Duration getMetastoreRefreshInterval()
    {
        return metastoreRefreshInterval;
    }

    @MinDuration("1ms")
    @Config("hive.metastore-refresh-interval")
    public HiveClientConfig setMetastoreRefreshInterval(Duration metastoreRefreshInterval)
    {
        this.metastoreRefreshInterval = metastoreRefreshInterval;
        return this;
    }

    public long getMetastoreCacheMaximumSize()
    {
        return metastoreCacheMaximumSize;
    }

    @Min(1)
    @Config("hive.metastore-cache-maximum-size")
    public HiveClientConfig setMetastoreCacheMaximumSize(long metastoreCacheMaximumSize)
    {
        this.metastoreCacheMaximumSize = metastoreCacheMaximumSize;
        return this;
    }

    public long getPerTransactionMetastoreCacheMaximumSize()
    {
        return perTransactionMetastoreCacheMaximumSize;
    }

    @Min(1)
    @Config("hive.per-transaction-metastore-cache-maximum-size")
    public HiveClientConfig setPerTransactionMetastoreCacheMaximumSize(long perTransactionMetastoreCacheMaximumSize)
    {
        this.perTransactionMetastoreCacheMaximumSize = perTransactionMetastoreCacheMaximumSize;
        return this;
    }

    @Min(1)
    public int getMaxMetastoreRefreshThreads()
    {
        return maxMetastoreRefreshThreads;
    }

    @Config("hive.metastore-refresh-max-threads")
    public HiveClientConfig setMaxMetastoreRefreshThreads(int maxMetastoreRefreshThreads)
    {
        this.maxMetastoreRefreshThreads = maxMetastoreRefreshThreads;
        return this;
    }

    public HostAndPort getMetastoreSocksProxy()
    {
        return metastoreSocksProxy;
    }

    @Config("hive.metastore.thrift.client.socks-proxy")
    public HiveClientConfig setMetastoreSocksProxy(HostAndPort metastoreSocksProxy)
    {
        this.metastoreSocksProxy = metastoreSocksProxy;
        return this;
    }

    @NotNull
    public Duration getMetastoreTimeout()
    {
        return metastoreTimeout;
    }

    @Config("hive.metastore-timeout")
    public HiveClientConfig setMetastoreTimeout(Duration metastoreTimeout)
    {
        this.metastoreTimeout = metastoreTimeout;
        return this;
    }

    @Min(1)
    public int getMinPartitionBatchSize()
    {
        return minPartitionBatchSize;
    }

    @Config("hive.metastore.partition-batch-size.min")
    public HiveClientConfig setMinPartitionBatchSize(int minPartitionBatchSize)
    {
        this.minPartitionBatchSize = minPartitionBatchSize;
        return this;
    }

    @Min(1)
    public int getMaxPartitionBatchSize()
    {
        return maxPartitionBatchSize;
    }

    @Config("hive.metastore.partition-batch-size.max")
    public HiveClientConfig setMaxPartitionBatchSize(int maxPartitionBatchSize)
    {
        this.maxPartitionBatchSize = maxPartitionBatchSize;
        return this;
    }

    public List<String> getResourceConfigFiles()
    {
        return resourceConfigFiles;
    }

    @Config("hive.config.resources")
    public HiveClientConfig setResourceConfigFiles(String files)
    {
        // Comma-separated list of Hadoop config files.
        this.resourceConfigFiles = (files == null) ? null : SPLITTER.splitToList(files);
        return this;
    }

    public HiveClientConfig setResourceConfigFiles(List<String> files)
    {
        this.resourceConfigFiles = (files == null) ? null : ImmutableList.copyOf(files);
        return this;
    }

    @NotNull
    @MinDuration("1ms")
    public Duration getIpcPingInterval()
    {
        return ipcPingInterval;
    }

    @Config("hive.dfs.ipc-ping-interval")
    public HiveClientConfig setIpcPingInterval(Duration pingInterval)
    {
        this.ipcPingInterval = pingInterval;
        return this;
    }

    @NotNull
    @MinDuration("1ms")
    public Duration getDfsTimeout()
    {
        return dfsTimeout;
    }

    @Config("hive.dfs-timeout")
    public HiveClientConfig setDfsTimeout(Duration dfsTimeout)
    {
        this.dfsTimeout = dfsTimeout;
        return this;
    }

    @MinDuration("1ms")
    @NotNull
    public Duration getDfsConnectTimeout()
    {
        return dfsConnectTimeout;
    }

    @Config("hive.dfs.connect.timeout")
    public HiveClientConfig setDfsConnectTimeout(Duration dfsConnectTimeout)
    {
        this.dfsConnectTimeout = dfsConnectTimeout;
        return this;
    }

    @Min(0)
    public int getDfsConnectMaxRetries()
    {
        return dfsConnectMaxRetries;
    }

    @Config("hive.dfs.connect.max-retries")
    public HiveClientConfig setDfsConnectMaxRetries(int dfsConnectMaxRetries)
    {
        this.dfsConnectMaxRetries = dfsConnectMaxRetries;
        return this;
    }

    public HiveStorageFormat getHiveStorageFormat()
    {
        return hiveStorageFormat;
    }

    @Config("hive.storage-format")
    public HiveClientConfig setHiveStorageFormat(HiveStorageFormat hiveStorageFormat)
    {
        this.hiveStorageFormat = hiveStorageFormat;
        return this;
    }

    public HiveCompressionCodec getHiveCompressionCodec()
    {
        return hiveCompressionCodec;
    }

    @Config("hive.compression-codec")
    public HiveClientConfig setHiveCompressionCodec(HiveCompressionCodec hiveCompressionCodec)
    {
        this.hiveCompressionCodec = hiveCompressionCodec;
        return this;
    }

    public boolean isRespectTableFormat()
    {
        return respectTableFormat;
    }

    @Config("hive.respect-table-format")
    @ConfigDescription("Should new partitions be written using the existing table format or the default Presto format")
    public HiveClientConfig setRespectTableFormat(boolean respectTableFormat)
    {
        this.respectTableFormat = respectTableFormat;
        return this;
    }

    public boolean isImmutablePartitions()
    {
        return immutablePartitions;
    }

    @Config("hive.immutable-partitions")
    @ConfigDescription("Can new data be inserted into existing partitions or existing unpartitioned tables")
    public HiveClientConfig setImmutablePartitions(boolean immutablePartitions)
    {
        this.immutablePartitions = immutablePartitions;
        return this;
    }

    @Min(1)
    public int getMaxPartitionsPerWriter()
    {
        return maxPartitionsPerWriter;
    }

    @Config("hive.max-partitions-per-writers")
    @ConfigDescription("Maximum number of partitions per writer")
    public HiveClientConfig setMaxPartitionsPerWriter(int maxPartitionsPerWriter)
    {
        this.maxPartitionsPerWriter = maxPartitionsPerWriter;
        return this;
    }

    public int getWriteValidationThreads()
    {
        return writeValidationThreads;
    }

    @Config("hive.write-validation-threads")
    @ConfigDescription("Number of threads used for verifying data after a write")
    public HiveClientConfig setWriteValidationThreads(int writeValidationThreads)
    {
        this.writeValidationThreads = writeValidationThreads;
        return this;
    }

    public String getDomainSocketPath()
    {
        return domainSocketPath;
    }

    @Config("hive.dfs.domain-socket-path")
    @LegacyConfig("dfs.domain-socket-path")
    public HiveClientConfig setDomainSocketPath(String domainSocketPath)
    {
        this.domainSocketPath = domainSocketPath;
        return this;
    }

    @NotNull
    public S3FileSystemType getS3FileSystemType()
    {
        return s3FileSystemType;
    }

    @Config("hive.s3-file-system-type")
    public HiveClientConfig setS3FileSystemType(S3FileSystemType s3FileSystemType)
    {
        this.s3FileSystemType = s3FileSystemType;
        return this;
    }

    public boolean isVerifyChecksum()
    {
        return verifyChecksum;
    }

    @Config("hive.dfs.verify-checksum")
    public HiveClientConfig setVerifyChecksum(boolean verifyChecksum)
    {
        this.verifyChecksum = verifyChecksum;
        return this;
    }

    @Deprecated
    public boolean isParquetPredicatePushdownEnabled()
    {
        return parquetPredicatePushdownEnabled;
    }

    @Deprecated
    @Config("hive.parquet-predicate-pushdown.enabled")
    public HiveClientConfig setParquetPredicatePushdownEnabled(boolean parquetPredicatePushdownEnabled)
    {
        this.parquetPredicatePushdownEnabled = parquetPredicatePushdownEnabled;
        return this;
    }

    @Deprecated
    public boolean isParquetOptimizedReaderEnabled()
    {
        return parquetOptimizedReaderEnabled;
    }

    @Deprecated
    @Config("hive.parquet-optimized-reader.enabled")
    public HiveClientConfig setParquetOptimizedReaderEnabled(boolean parquetOptimizedReaderEnabled)
    {
        this.parquetOptimizedReaderEnabled = parquetOptimizedReaderEnabled;
        return this;
    }

    public boolean isUseOrcColumnNames()
    {
        return useOrcColumnNames;
    }

    @Config("hive.orc.use-column-names")
    @ConfigDescription("Access ORC columns using names from the file")
    public HiveClientConfig setUseOrcColumnNames(boolean useOrcColumnNames)
    {
        this.useOrcColumnNames = useOrcColumnNames;
        return this;
    }

    @NotNull
    public DataSize getOrcMaxMergeDistance()
    {
        return orcMaxMergeDistance;
    }

    @Config("hive.orc.max-merge-distance")
    public HiveClientConfig setOrcMaxMergeDistance(DataSize orcMaxMergeDistance)
    {
        this.orcMaxMergeDistance = orcMaxMergeDistance;
        return this;
    }

    @NotNull
    public DataSize getOrcMaxBufferSize()
    {
        return orcMaxBufferSize;
    }

    @Config("hive.orc.max-buffer-size")
    public HiveClientConfig setOrcMaxBufferSize(DataSize orcMaxBufferSize)
    {
        this.orcMaxBufferSize = orcMaxBufferSize;
        return this;
    }

    @NotNull
    public DataSize getOrcStreamBufferSize()
    {
        return orcStreamBufferSize;
    }

    @Config("hive.orc.stream-buffer-size")
    public HiveClientConfig setOrcStreamBufferSize(DataSize orcStreamBufferSize)
    {
        this.orcStreamBufferSize = orcStreamBufferSize;
        return this;
    }

    @NotNull
    public DataSize getOrcMaxReadBlockSize()
    {
        return orcMaxReadBlockSize;
    }

    @Config("hive.orc.max-read-block-size")
    public HiveClientConfig setOrcMaxReadBlockSize(DataSize orcMaxReadBlockSize)
    {
        this.orcMaxReadBlockSize = orcMaxReadBlockSize;
        return this;
    }

    @Deprecated
    public boolean isOrcLazyReadSmallRanges()
    {
        return orcLazyReadSmallRanges;
    }

    // TODO remove config option once efficacy is proven
    @Deprecated
    @Config("hive.orc.lazy-read-small-ranges")
@ConfigDescription("ORC read small disk ranges lazily") public HiveClientConfig setOrcLazyReadSmallRanges(boolean orcLazyReadSmallRanges) { this.orcLazyReadSmallRanges = orcLazyReadSmallRanges; return this; } public boolean isOrcBloomFiltersEnabled() { return orcBloomFiltersEnabled; } @Config("hive.orc.bloom-filters.enabled") public HiveClientConfig setOrcBloomFiltersEnabled(boolean orcBloomFiltersEnabled) { this.orcBloomFiltersEnabled = orcBloomFiltersEnabled; return this; } public double getOrcDefaultBloomFilterFpp() { return orcDefaultBloomFilterFpp; } @Config("hive.orc.default-bloom-filter-fpp") @ConfigDescription("ORC Bloom filter false positive probability") public HiveClientConfig setOrcDefaultBloomFilterFpp(double orcDefaultBloomFilterFpp) { this.orcDefaultBloomFilterFpp = orcDefaultBloomFilterFpp; return this; } @Deprecated public boolean isOrcOptimizedWriterEnabled() { return orcOptimizedWriterEnabled; } @Deprecated @Config("hive.orc.optimized-writer.enabled") public HiveClientConfig setOrcOptimizedWriterEnabled(boolean orcOptimizedWriterEnabled) { this.orcOptimizedWriterEnabled = orcOptimizedWriterEnabled; return this; } public boolean isOrcWriterValidate() { return orcWriterValidate; } @Config("hive.orc.writer.validate") @ConfigDescription("Validate ORC file after write by re-reading the whole file") public HiveClientConfig setOrcWriterValidate(boolean orcWriterValidate) { this.orcWriterValidate = orcWriterValidate; return this; } @Deprecated public boolean isRcfileOptimizedWriterEnabled() { return rcfileOptimizedWriterEnabled; } @Deprecated @Config("hive.rcfile-optimized-writer.enabled") public HiveClientConfig setRcfileOptimizedWriterEnabled(boolean rcfileOptimizedWriterEnabled) { this.rcfileOptimizedWriterEnabled = rcfileOptimizedWriterEnabled; return this; } public boolean isRcfileWriterValidate() { return rcfileWriterValidate; } @Config("hive.rcfile.writer.validate") @ConfigDescription("Validate RCFile after write by re-reading the whole file") 
public HiveClientConfig setRcfileWriterValidate(boolean rcfileWriterValidate) { this.rcfileWriterValidate = rcfileWriterValidate; return this; } public boolean isAssumeCanonicalPartitionKeys() { return assumeCanonicalPartitionKeys; } @Config("hive.assume-canonical-partition-keys") public HiveClientConfig setAssumeCanonicalPartitionKeys(boolean assumeCanonicalPartitionKeys) { this.assumeCanonicalPartitionKeys = assumeCanonicalPartitionKeys; return this; } public boolean isUseParquetColumnNames() { return useParquetColumnNames; } @Config("hive.parquet.use-column-names") @ConfigDescription("Access Parquet columns using names from the file") public HiveClientConfig setUseParquetColumnNames(boolean useParquetColumnNames) { this.useParquetColumnNames = useParquetColumnNames; return this; } public enum HiveMetastoreAuthenticationType { NONE, KERBEROS } @NotNull public HiveMetastoreAuthenticationType getHiveMetastoreAuthenticationType() { return hiveMetastoreAuthenticationType; } @Config("hive.metastore.authentication.type") @ConfigDescription("Hive Metastore authentication type") public HiveClientConfig setHiveMetastoreAuthenticationType(HiveMetastoreAuthenticationType hiveMetastoreAuthenticationType) { this.hiveMetastoreAuthenticationType = hiveMetastoreAuthenticationType; return this; } public enum HdfsAuthenticationType { NONE, KERBEROS, } @NotNull public HdfsAuthenticationType getHdfsAuthenticationType() { return hdfsAuthenticationType; } @Config("hive.hdfs.authentication.type") @ConfigDescription("HDFS authentication type") public HiveClientConfig setHdfsAuthenticationType(HdfsAuthenticationType hdfsAuthenticationType) { this.hdfsAuthenticationType = hdfsAuthenticationType; return this; } public boolean isHdfsImpersonationEnabled() { return hdfsImpersonationEnabled; } @Config("hive.hdfs.impersonation.enabled") @ConfigDescription("Should Presto user be impersonated when communicating with HDFS") public HiveClientConfig setHdfsImpersonationEnabled(boolean 
hdfsImpersonationEnabled) { this.hdfsImpersonationEnabled = hdfsImpersonationEnabled; return this; } public boolean isSkipDeletionForAlter() { return skipDeletionForAlter; } @Config("hive.skip-deletion-for-alter") @ConfigDescription("Skip deletion of old partition data when a partition is deleted and then inserted in the same transaction") public HiveClientConfig setSkipDeletionForAlter(boolean skipDeletionForAlter) { this.skipDeletionForAlter = skipDeletionForAlter; return this; } public boolean isBucketExecutionEnabled() { return bucketExecutionEnabled; } @Config("hive.bucket-execution") @ConfigDescription("Enable bucket-aware execution: only use a single worker per bucket") public HiveClientConfig setBucketExecutionEnabled(boolean bucketExecutionEnabled) { this.bucketExecutionEnabled = bucketExecutionEnabled; return this; } public boolean isBucketWritingEnabled() { return bucketWritingEnabled; } @Config("hive.bucket-writing") @ConfigDescription("Enable writing to bucketed tables") public HiveClientConfig setBucketWritingEnabled(boolean bucketWritingEnabled) { this.bucketWritingEnabled = bucketWritingEnabled; return this; } public int getFileSystemMaxCacheSize() { return fileSystemMaxCacheSize; } @Config("hive.fs.cache.max-size") @ConfigDescription("Hadoop FileSystem cache size") public HiveClientConfig setFileSystemMaxCacheSize(int fileSystemMaxCacheSize) { this.fileSystemMaxCacheSize = fileSystemMaxCacheSize; return this; } @Config("hive.non-managed-table-writes-enabled") @ConfigDescription("Enable writes to non-managed (external) tables") public HiveClientConfig setWritesToNonManagedTablesEnabled(boolean writesToNonManagedTablesEnabled) { this.writesToNonManagedTablesEnabled = writesToNonManagedTablesEnabled; return this; } public boolean getWritesToNonManagedTablesEnabled() { return writesToNonManagedTablesEnabled; } @Config("hive.non-managed-table-creates-enabled") @ConfigDescription("Enable non-managed (external) table creates") public HiveClientConfig 
setCreatesOfNonManagedTablesEnabled(boolean createsOfNonManagedTablesEnabled) { this.createsOfNonManagedTablesEnabled = createsOfNonManagedTablesEnabled; return this; } public boolean getCreatesOfNonManagedTablesEnabled() { return createsOfNonManagedTablesEnabled; } @Config("hive.table-statistics-enabled") @ConfigDescription("Enable use of table statistics") public HiveClientConfig setTableStatisticsEnabled(boolean tableStatisticsEnabled) { this.tableStatisticsEnabled = tableStatisticsEnabled; return this; } public boolean isTableStatisticsEnabled() { return tableStatisticsEnabled; } }
package lombok.javac;

import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.lang.reflect.Field;
import java.util.ArrayDeque;
import java.util.LinkedList;
import java.util.Map;
import java.util.Queue;

import javax.lang.model.type.TypeKind;
import javax.tools.DiagnosticListener;

import com.sun.tools.javac.code.BoundKind;
import com.sun.tools.javac.code.Symbol.TypeSymbol;
import com.sun.tools.javac.code.Symtab;
import com.sun.tools.javac.code.Type;
import com.sun.tools.javac.code.Type.ArrayType;
import com.sun.tools.javac.code.Type.CapturedType;
import com.sun.tools.javac.code.Type.ClassType;
import com.sun.tools.javac.code.Type.WildcardType;
import com.sun.tools.javac.code.TypeTags;
import com.sun.tools.javac.code.Types;
import com.sun.tools.javac.comp.Attr;
import com.sun.tools.javac.comp.AttrContext;
import com.sun.tools.javac.comp.Enter;
import com.sun.tools.javac.comp.Env;
import com.sun.tools.javac.comp.MemberEnter;
import com.sun.tools.javac.tree.JCTree;
import com.sun.tools.javac.tree.JCTree.JCBlock;
import com.sun.tools.javac.tree.JCTree.JCClassDecl;
import com.sun.tools.javac.tree.JCTree.JCCompilationUnit;
import com.sun.tools.javac.tree.JCTree.JCExpression;
import com.sun.tools.javac.tree.JCTree.JCMethodDecl;
import com.sun.tools.javac.tree.JCTree.JCVariableDecl;
import com.sun.tools.javac.tree.TreeMaker;
import com.sun.tools.javac.util.Context;
import com.sun.tools.javac.util.List;
import com.sun.tools.javac.util.ListBuffer;
import com.sun.tools.javac.util.Log;

/**
 * Runs javac's attribution ("resolution") phase on demand for a single AST node,
 * so that type information is available outside javac's normal pipeline.
 * Relies heavily on reflection against javac internals; every reflective failure
 * is deliberately swallowed so the worst case is noisier compiler output, not a crash.
 */
public class JavacResolution {
	private final Attr attr;
	private final LogDisabler logDisabler;
	
	public JavacResolution(Context context) {
		attr = Attr.instance(context);
		logDisabler = new LogDisabler(context);
	}
	
	/**
	 * During resolution, the resolver will emit resolution errors, but without appropriate file names and line numbers. If these resolution errors stick around
	 * then they will be generated AGAIN, this time with proper names and line numbers, at the end. Therefore, we want to suppress the logger.
	 */
	private static final class LogDisabler {
		private final Log log;
		// Reflective handles into javac's Log; null-ish / dontBother when the javac
		// version has different field names.
		private static final Field errWriterField, warnWriterField, noticeWriterField, dumpOnErrorField, promptOnErrorField, diagnosticListenerField;
		// Present on some javac versions only; fetched best-effort.
		private static final Field deferDiagnosticsField, deferredDiagnosticsField;
		// Saved originals so enableLoggers() can restore them; null means "not disabled".
		private PrintWriter errWriter, warnWriter, noticeWriter;
		private Boolean dumpOnError, promptOnError;
		private DiagnosticListener<?> contextDiagnosticListener, logDiagnosticListener;
		private final Context context;
		
		// If this is true, the fields changed. Better to print weird error messages than to fail outright.
		private static final boolean dontBother;
		
		// Per-thread stash of the deferred-diagnostics queue swapped out in disableLoggers().
		private static final ThreadLocal<Queue<?>> queueCache = new ThreadLocal<Queue<?>>();
		
		static {
			boolean z;
			Field a = null, b = null, c = null, d = null, e = null, f = null, g = null, h = null;
			try {
				a = Log.class.getDeclaredField("errWriter");
				b = Log.class.getDeclaredField("warnWriter");
				c = Log.class.getDeclaredField("noticeWriter");
				d = Log.class.getDeclaredField("dumpOnError");
				e = Log.class.getDeclaredField("promptOnError");
				f = Log.class.getDeclaredField("diagListener");
				z = false;
				a.setAccessible(true);
				b.setAccessible(true);
				c.setAccessible(true);
				d.setAccessible(true);
				e.setAccessible(true);
				f.setAccessible(true);
			} catch (Exception x) {
				// Any miss means this javac's Log layout is unknown: give up on suppression.
				z = true;
			}
			
			try {
				// Optional fields; absent on older javac versions, hence the separate try.
				g = Log.class.getDeclaredField("deferDiagnostics");
				h = Log.class.getDeclaredField("deferredDiagnostics");
				g.setAccessible(true);
				h.setAccessible(true);
			} catch (Exception x) {
			}
			
			errWriterField = a;
			warnWriterField = b;
			noticeWriterField = c;
			dumpOnErrorField = d;
			promptOnErrorField = e;
			diagnosticListenerField = f;
			deferDiagnosticsField = g;
			deferredDiagnosticsField = h;
			dontBother = z;
		}
		
		LogDisabler(Context context) {
			this.log = Log.instance(context);
			this.context = context;
		}
		
		// Swaps Log's writers/listeners for no-ops; returns true if suppression took
		// effect. On partial failure, everything already swapped is restored.
		boolean disableLoggers() {
			contextDiagnosticListener = context.get(DiagnosticListener.class);
			context.put(DiagnosticListener.class, (DiagnosticListener<?>) null);
			if (dontBother) return false;
			boolean dontBotherInstance = false;
			
			PrintWriter dummyWriter = new PrintWriter(new OutputStream() {
				@Override public void write(int b) throws IOException {
					// Do nothing on purpose
				}
			});
			
			if (deferDiagnosticsField != null) try {
				if (Boolean.TRUE.equals(deferDiagnosticsField.get(log))) {
					// Stash the real deferred queue and substitute an empty one.
					queueCache.set((Queue<?>) deferredDiagnosticsField.get(log));
					Queue<?> empty = new LinkedList<Object>();
					deferredDiagnosticsField.set(log, empty);
				}
			} catch (Exception e) {}
			
			if (!dontBotherInstance) try {
				errWriter = (PrintWriter) errWriterField.get(log);
				errWriterField.set(log, dummyWriter);
			} catch (Exception e) {
				dontBotherInstance = true;
			}
			
			if (!dontBotherInstance) try {
				warnWriter = (PrintWriter) warnWriterField.get(log);
				warnWriterField.set(log, dummyWriter);
			} catch (Exception e) {
				dontBotherInstance = true;
			}
			
			if (!dontBotherInstance) try {
				noticeWriter = (PrintWriter) noticeWriterField.get(log);
				noticeWriterField.set(log, dummyWriter);
			} catch (Exception e) {
				dontBotherInstance = true;
			}
			
			if (!dontBotherInstance) try {
				dumpOnError = (Boolean) dumpOnErrorField.get(log);
				dumpOnErrorField.set(log, false);
			} catch (Exception e) {
				dontBotherInstance = true;
			}
			
			if (!dontBotherInstance) try {
				promptOnError = (Boolean) promptOnErrorField.get(log);
				promptOnErrorField.set(log, false);
			} catch (Exception e) {
				dontBotherInstance = true;
			}
			
			if (!dontBotherInstance) try {
				logDiagnosticListener = (DiagnosticListener<?>) diagnosticListenerField.get(log);
				diagnosticListenerField.set(log, null);
			} catch (Exception e) {
				dontBotherInstance = true;
			}
			
			// Roll back the partial swap so Log is left in a consistent state.
			if (dontBotherInstance) enableLoggers();
			return !dontBotherInstance;
		}
		
		// Restores everything disableLoggers() changed; each saved field is nulled
		// after restore so repeated calls are harmless.
		void enableLoggers() {
			if (contextDiagnosticListener != null) {
				context.put(DiagnosticListener.class, contextDiagnosticListener);
				contextDiagnosticListener = null;
			}
			
			if (errWriter != null) try {
				errWriterField.set(log, errWriter);
				errWriter = null;
			} catch (Exception e) {}
			
			if (warnWriter != null) try {
				warnWriterField.set(log, warnWriter);
				warnWriter = null;
			} catch (Exception e) {}
			
			if (noticeWriter != null) try {
				noticeWriterField.set(log, noticeWriter);
				noticeWriter = null;
			} catch (Exception e) {}
			
			if (dumpOnError != null) try {
				dumpOnErrorField.set(log, dumpOnError);
				dumpOnError = null;
			} catch (Exception e) {}
			
			if (promptOnError != null) try {
				promptOnErrorField.set(log, promptOnError);
				promptOnError = null;
			} catch (Exception e) {}
			
			if (logDiagnosticListener != null) try {
				diagnosticListenerField.set(log, logDiagnosticListener);
				logDiagnosticListener = null;
			} catch (Exception e) {}
			
			if (deferDiagnosticsField != null && queueCache.get() != null) try {
				deferredDiagnosticsField.set(log, queueCache.get());
				queueCache.set(null);
			} catch (Exception e) {}
		}
	}
	
	// Walks a root-to-target chain of AST nodes (fed top-down via visit calls) and
	// builds the innermost attribution Env; copyAt marks the node attribution
	// should start from (method, var initializer, or block).
	private static final class EnvFinder extends JCTree.Visitor {
		private Env<AttrContext> env = null;
		private Enter enter;
		private MemberEnter memberEnter;
		private JCTree copyAt = null;
		
		EnvFinder(Context context) {
			this.enter = Enter.instance(context);
			this.memberEnter = MemberEnter.instance(context);
		}
		
		Env<AttrContext> get() {
			return env;
		}
		
		JCTree copyAt() {
			return copyAt;
		}
		
		@Override public void visitTopLevel(JCCompilationUnit tree) {
			if (copyAt != null) return;
			env = enter.getTopLevelEnv(tree);
		}
		
		@Override public void visitClassDef(JCClassDecl tree) {
			if (copyAt != null) return;
			// The commented out stuff requires reflection tricks to avoid leaving lint unset which causes NPEs during attrib. So, we use the other one, much less code.
//			env = enter.classEnv((JCClassDecl) tree, env);
//			try {
//				Field f = env.info.getClass().getDeclaredField("lint");
//				f.setAccessible(true);
//				Constructor<?> c = Lint.class.getDeclaredConstructor(Lint.class);
//				c.setAccessible(true);
//				f.set(env.info, c.newInstance(lint));
//			} catch (Exception e) {
//				throw Lombok.sneakyThrow(e);
//			}
			env = enter.getClassEnv(tree.sym);
		}
		
		@Override public void visitMethodDef(JCMethodDecl tree) {
			if (copyAt != null) return;
			env = memberEnter.getMethodEnv(tree, env);
			copyAt = tree;
		}
		
		// NOTE(review): no @Override in the original; preserved as-is.
		public void visitVarDef(JCVariableDecl tree) {
			if (copyAt != null) return;
			env = memberEnter.getInitEnv(tree, env);
			copyAt = tree;
		}
		
		@Override public void visitBlock(JCBlock tree) {
			if (copyAt != null) return;
			copyAt = tree;
		}
		
		@Override public void visitTree(JCTree that) {
		}
	}
	
	/**
	 * Attributes a copy of the subtree containing {@code node} (so the original tree
	 * stays untouched) and returns a map from original nodes to their attributed copies.
	 * Loggers are suppressed for the duration; see {@link LogDisabler}.
	 */
	public Map<JCTree, JCTree> resolveMethodMember(JavacNode node) {
		ArrayDeque<JCTree> stack = new ArrayDeque<JCTree>();
		
		{
			// Collect ancestors so they can be visited root-first.
			JavacNode n = node;
			while (n != null) {
				stack.push(n.get());
				n = n.up();
			}
		}
		
		logDisabler.disableLoggers();
		try {
			EnvFinder finder = new EnvFinder(node.getContext());
			while (!stack.isEmpty()) stack.pop().accept(finder);
			
			TreeMirrorMaker mirrorMaker = new TreeMirrorMaker(node);
			JCTree copy = mirrorMaker.copy(finder.copyAt());
			
			attrib(copy, finder.get());
			return mirrorMaker.getOriginalToCopyMap();
		} finally {
			logDisabler.enableLoggers();
		}
	}
	
	/**
	 * Attributes {@code node}'s tree in place (no copy). Loggers are suppressed
	 * for the duration.
	 */
	public void resolveClassMember(JavacNode node) {
		ArrayDeque<JCTree> stack = new ArrayDeque<JCTree>();
		
		{
			JavacNode n = node;
			while (n != null) {
				stack.push(n.get());
				n = n.up();
			}
		}
		
		logDisabler.disableLoggers();
		try {
			EnvFinder finder = new EnvFinder(node.getContext());
			while (!stack.isEmpty()) stack.pop().accept(finder);
			
			attrib(node.get(), finder.get());
		} finally {
			logDisabler.enableLoggers();
		}
	}
	
	// Dispatches to Attr for the three node shapes EnvFinder can select.
	private void attrib(JCTree tree, Env<AttrContext> env) {
		if (tree instanceof JCBlock) attr.attribStat(tree, env);
		else if (tree instanceof JCMethodDecl) attr.attribStat(((JCMethodDecl)tree).body, env);
		else if (tree instanceof JCVariableDecl) attr.attribStat(tree, env);
		else throw new IllegalStateException("Called with something that isn't a block, method decl, or variable decl");
	}
	
	// Thrown when a javac Type has no expressible JCTree form (anonymous classes,
	// compound types, error types, the null type).
	public static class TypeNotConvertibleException extends Exception {
		public TypeNotConvertibleException(String msg) {
			super(msg);
		}
	}
	
	/**
	 * If {@code type} is an array or {@code Iterable}, returns its element type;
	 * otherwise falls back to {@code java.lang.Object}.
	 */
	public static Type ifTypeIsIterableToComponent(Type type, JavacAST ast) {
		Types types = Types.instance(ast.getContext());
		Symtab syms = Symtab.instance(ast.getContext());
		Type boundType = types.upperBound(type);
		Type elemTypeIfArray = types.elemtype(boundType);
		if (elemTypeIfArray != null) return elemTypeIfArray;
		
		Type base = types.asSuper(boundType, syms.iterableType.tsym);
		if (base == null) return syms.objectType;
		
		List<Type> iterableParams = base.allparams();
		return iterableParams.isEmpty() ? syms.objectType : types.upperBound(iterableParams.head);
	}
	
	public static JCExpression typeToJCTree(Type type, TreeMaker maker, JavacAST ast, boolean allowVoid) throws TypeNotConvertibleException {
		return typeToJCTree(type, maker, ast, false, allowVoid);
	}
	
	// Builds the fully-qualified expression java.lang.Object.
	public static JCExpression createJavaLangObject(TreeMaker maker, JavacAST ast) {
		JCExpression out = maker.Ident(ast.toName("java"));
		out = maker.Select(out, ast.toName("lang"));
		out = maker.Select(out, ast.toName("Object"));
		return out;
	}
	
	private static JCExpression typeToJCTree(Type type, TreeMaker maker, JavacAST ast, boolean allowCompound, boolean allowVoid) throws TypeNotConvertibleException {
		// Peel array dimensions first, convert the element type, then re-wrap.
		int dims = 0;
		Type type0 = type;
		while (type0 instanceof ArrayType) {
			dims++;
			type0 = ((ArrayType)type0).elemtype;
		}
		
		JCExpression result = typeToJCTree0(type0, maker, ast, allowCompound, allowVoid);
		while (dims > 0) {
			result = maker.TypeArray(result);
			dims--;
		}
		return result;
	}
	
	private static JCExpression typeToJCTree0(Type type, TreeMaker maker, JavacAST ast, boolean allowCompound, boolean allowVoid) throws TypeNotConvertibleException {
		// NB: There's such a thing as maker.Type(type), but this doesn't work very well; it screws up anonymous classes, captures, and adds an extra prefix dot for some reason too.
		// -- so we write our own take on that here.
		
		// Null type ("BOT") has no denotable form; substitute Object.
		if (type.tag == Javac.getCtcInt(TypeTags.class, "BOT")) return createJavaLangObject(maker, ast);
		if (type.tag == Javac.getCtcInt(TypeTags.class, "VOID")) return allowVoid ? primitiveToJCTree(type.getKind(), maker) : createJavaLangObject(maker, ast);
		if (type.isPrimitive()) return primitiveToJCTree(type.getKind(), maker);
		if (type.isErroneous()) throw new TypeNotConvertibleException("Type cannot be resolved");
		
		TypeSymbol symbol = type.asElement();
		List<Type> generics = type.getTypeArguments();
		JCExpression replacement = null;
		
		if (symbol == null) throw new TypeNotConvertibleException("Null or compound type");
		
		if (symbol.name.length() == 0) {
			// Anonymous inner class
			if (type instanceof ClassType) {
				// Use the sole interface, or the supertype, as the denotable stand-in.
				List<Type> ifaces = ((ClassType)type).interfaces_field;
				Type supertype = ((ClassType)type).supertype_field;
				if (ifaces != null && ifaces.length() == 1) {
					return typeToJCTree(ifaces.get(0), maker, ast, allowCompound, allowVoid);
				}
				if (supertype != null) return typeToJCTree(supertype, maker, ast, allowCompound, allowVoid);
			}
			throw new TypeNotConvertibleException("Anonymous inner class");
		}
		
		if (type instanceof CapturedType || type instanceof WildcardType) {
			Type lower, upper;
			if (type instanceof WildcardType) {
				upper = ((WildcardType)type).getExtendsBound();
				lower = ((WildcardType)type).getSuperBound();
			} else {
				lower = type.getLowerBound();
				upper = type.getUpperBound();
			}
			if (allowCompound) {
				// Emit a wildcard: ? / ? extends X / ? super X as appropriate.
				if (lower == null || lower.tag == Javac.getCtcInt(TypeTags.class, "BOT")) {
					if (upper == null || upper.toString().equals("java.lang.Object")) {
						return maker.Wildcard(maker.TypeBoundKind(BoundKind.UNBOUND), null);
					}
					return maker.Wildcard(maker.TypeBoundKind(BoundKind.EXTENDS), typeToJCTree(upper, maker, ast, false, false));
				} else {
					return maker.Wildcard(maker.TypeBoundKind(BoundKind.SUPER), typeToJCTree(lower, maker, ast, false, false));
				}
			}
			// Wildcards aren't allowed here; approximate with the upper bound.
			if (upper != null) {
				return typeToJCTree(upper, maker, ast, allowCompound, allowVoid);
			}
			return createJavaLangObject(maker, ast);
		}
		
		String qName = symbol.getQualifiedName().toString();
		if (qName.isEmpty()) throw new TypeNotConvertibleException("unknown type");
		if (qName.startsWith("<")) throw new TypeNotConvertibleException(qName);
		// Build a dotted select chain: a.b.C
		String[] baseNames = symbol.getQualifiedName().toString().split("\\.");
		replacement = maker.Ident(ast.toName(baseNames[0]));
		for (int i = 1; i < baseNames.length; i++) {
			replacement = maker.Select(replacement, ast.toName(baseNames[i]));
		}
		
		if (generics != null && !generics.isEmpty()) {
			// Type arguments may themselves be wildcards, hence allowCompound = true.
			ListBuffer<JCExpression> args = ListBuffer.lb();
			for (Type t : generics) args.append(typeToJCTree(t, maker, ast, true, false));
			replacement = maker.TypeApply(replacement, args.toList());
		}
		
		return replacement;
	}
	
	// Maps a primitive TypeKind to javac's primitive type-ident tree.
	private static JCExpression primitiveToJCTree(TypeKind kind, TreeMaker maker) throws TypeNotConvertibleException {
		switch (kind) {
		case BYTE:
			return maker.TypeIdent(Javac.getCtcInt(TypeTags.class, "BYTE"));
		case CHAR:
			return maker.TypeIdent(Javac.getCtcInt(TypeTags.class, "CHAR"));
		case SHORT:
			return maker.TypeIdent(Javac.getCtcInt(TypeTags.class, "SHORT"));
		case INT:
			return maker.TypeIdent(Javac.getCtcInt(TypeTags.class, "INT"));
		case LONG:
			return maker.TypeIdent(Javac.getCtcInt(TypeTags.class, "LONG"));
		case FLOAT:
			return maker.TypeIdent(Javac.getCtcInt(TypeTags.class, "FLOAT"));
		case DOUBLE:
			return maker.TypeIdent(Javac.getCtcInt(TypeTags.class, "DOUBLE"));
		case BOOLEAN:
			return maker.TypeIdent(Javac.getCtcInt(TypeTags.class, "BOOLEAN"));
		case VOID:
			return maker.TypeIdent(Javac.getCtcInt(TypeTags.class, "VOID"));
		case NULL:
		case NONE:
		case OTHER:
		default:
			throw new TypeNotConvertibleException("Nulltype");
		}
	}
}
package com.google.android.play.drawer;

import android.accounts.Account;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.BitmapFactory;
import android.graphics.drawable.Drawable;
import android.support.v4.view.ViewCompat;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ListView;
import android.widget.Switch;
import android.widget.TextView;
import com.android.vending.R;
import com.android.volley.Response.ErrorListener;
import com.android.volley.Response.Listener;
import com.android.volley.VolleyError;
import com.google.android.finsky.protos.Common.Image;
import com.google.android.finsky.protos.DocV2;
import com.google.android.finsky.protos.PlusProfileResponse;
import com.google.android.play.R.color;
import com.google.android.play.R.dimen;
import com.google.android.play.R.drawable;
import com.google.android.play.R.layout;
import com.google.android.play.R.string;
import com.google.android.play.dfe.api.PlayDfeApi;
import com.google.android.play.dfe.api.PlayDfeApiProvider;
import com.google.android.play.image.BitmapLoader;
import com.google.android.play.image.FifeImageView;
import com.google.android.play.utils.DocV2Utils;
import com.google.android.play.utils.PlayUtils;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

// NOTE(review): this class is JD-Core decompiler output (see trailer comment at
// end of file). The control flow below contains decompiler artifacts: labelNNN
// jump targets with "break labelNNN", infinite "for (;;)" loops used as gotos,
// and a few constructs that are NOT valid Java source (e.g.
// "new Response.Listener()new Response.ErrorListener {}"). Do not expect this
// file to compile; recover the original sources before making logic changes.
// ListView adapter backing the Play Store navigation drawer: current account
// header, expandable secondary-account rows, primary/secondary actions, and an
// optional download-only toggle row.
public final class PlayDrawerAdapter extends BaseAdapter {
  // Per-account profile documents keyed by account name, filled in lazily by
  // the getPlusProfile() calls in getView().
  private final Map<String, DocV2> mAccountDocV2s = new HashMap();
  // True while the account list (rather than the action list) is showing.
  public boolean mAccountListExpanded;
  private final BitmapLoader mBitmapLoader;
  private Context mContext;
  public Account mCurrentAccount;
  boolean mCurrentAvatarClickable;
  // Mirrors the download-only switch state; flipped from the switch callback.
  public boolean mDownloadOnlyEnabled;
  public PlayDrawerLayout.PlayDrawerDownloadSwitchConfig mDownloadSwitchConfig;
  public boolean mHasAccounts;
  private final LayoutInflater mInflater;
  // Account names whose profile doc has finished loading.
  private final Set<String> mIsAccountDocLoaded = new HashSet();
  private boolean mIsMiniProfile;
  private ListView mListView;
  public Account[] mNonCurrentAccounts = new Account[0];
  private PlayDfeApiProvider mPlayDfeApiProvider;
  private PlayDrawerLayout.PlayDrawerContentClickListener mPlayDrawerContentClickListener;
  private PlayDrawerLayout mPlayDrawerLayout;
  public final List<PlayDrawerLayout.PlayDrawerPrimaryAction> mPrimaryActions = new ArrayList();
  // Adapter position of the profile header row; -1 until first bound.
  private int mProfileContainerPosition;
  public final List<PlayDrawerLayout.PlayDrawerSecondaryAction> mSecondaryActions = new ArrayList();
  public boolean mShowDownloadOnlyToggle;

  // Plain field-assignment constructor; paramBoolean1 = initial expanded state,
  // paramBoolean2 = mini-profile mode.
  public PlayDrawerAdapter(Context paramContext, boolean paramBoolean1, PlayDrawerLayout.PlayDrawerContentClickListener paramPlayDrawerContentClickListener, PlayDfeApiProvider paramPlayDfeApiProvider, BitmapLoader paramBitmapLoader, PlayDrawerLayout paramPlayDrawerLayout, ListView paramListView, boolean paramBoolean2) {
    this.mContext = paramContext;
    this.mInflater = LayoutInflater.from(paramContext);
    this.mPlayDfeApiProvider = paramPlayDfeApiProvider;
    this.mBitmapLoader = paramBitmapLoader;
    this.mPlayDrawerContentClickListener = paramPlayDrawerContentClickListener;
    this.mPlayDrawerLayout = paramPlayDrawerLayout;
    this.mListView = paramListView;
    this.mProfileContainerPosition = -1;
    this.mAccountListExpanded = paramBoolean1;
    this.mIsMiniProfile = paramBoolean2;
  }

  // Binds one primary-action row. paramBoolean1 = "active" styling,
  // paramBoolean2 = "disabled" styling (alpha 66 icons, grey text).
  // NOTE(review): decompiled goto-style control flow; labelNNN/break labelNNN
  // pairs below are jump artifacts, not valid Java.
  private View getPrimaryActionView(View paramView, ViewGroup paramViewGroup, final PlayDrawerLayout.PlayDrawerPrimaryAction paramPlayDrawerPrimaryAction, boolean paramBoolean1, boolean paramBoolean2) {
    Resources localResources = paramViewGroup.getResources();
    int i; View localView; label23: TextView localTextView; Drawable localDrawable1; label72: label84: Drawable localDrawable2; label101: label122: Drawable localDrawable3; label152: int j; label170: int k;
    if (paramBoolean1) {
      i = R.layout.play_drawer_primary_action_active;
      if (paramView == null) {
        localView = this.mInflater.inflate(i, paramViewGroup, false);
      }
      localView = paramView;
      localTextView = (TextView)localView;
      localTextView.setText(paramPlayDrawerPrimaryAction.actionText);
      if (paramPlayDrawerPrimaryAction.iconResId <= 0) { break label389; }
      if ((!paramBoolean1) || (paramPlayDrawerPrimaryAction.activeIconResId <= 0)) { break label321; }
      localDrawable1 = localResources.getDrawable(paramPlayDrawerPrimaryAction.activeIconResId);
      if (!paramBoolean2) { break label335; }
      // 66/255 alpha = disabled look.
      localDrawable1.setAlpha(66);
      if (!paramPlayDrawerPrimaryAction.hasNotifications) { break label346; }
      localDrawable2 = localResources.getDrawable(R.drawable.play_dot_notification);
      if (!PlayUtils.useLtr(this.mContext)) { break label352; }
      localTextView.setCompoundDrawablesWithIntrinsicBounds(localDrawable1, null, localDrawable2, null);
      if (paramPlayDrawerPrimaryAction.secondaryIconResId > 0) {
        localDrawable3 = localResources.getDrawable(paramPlayDrawerPrimaryAction.secondaryIconResId);
        if (!paramBoolean2) { break label366; }
        localDrawable3.setAlpha(66);
        Drawable[] arrayOfDrawable = localTextView.getCompoundDrawables();
        if (ViewCompat.getLayoutDirection(localTextView) != 0) { break label377; }
        j = 1;
        if (j != 0) { break label383; }
        k = 0;
        label178: arrayOfDrawable[k] = localDrawable3;
        localTextView.setCompoundDrawablesWithIntrinsicBounds(arrayOfDrawable[0], arrayOfDrawable[1], arrayOfDrawable[2], arrayOfDrawable[3]);
      }
      label206: if ((!paramBoolean1) || (paramPlayDrawerPrimaryAction.activeTextColorResId <= 0)) { break label401; }
      localTextView.setTextColor(localResources.getColor(paramPlayDrawerPrimaryAction.activeTextColorResId));
    }
    for (;;) {
      localTextView.setOnClickListener(new View.OnClickListener() {
        public final void onClick(View paramAnonymousView) {
          PlayDrawerAdapter.this.mPlayDrawerContentClickListener.onPrimaryActionClicked(paramPlayDrawerPrimaryAction);
          PlayDrawerAdapter.this.mPlayDrawerLayout.closeDrawer();
        }
      });
      PlayUtils.useLtr(this.mContext);
      // 8388627 = Gravity.START | Gravity.CENTER_VERTICAL.
      localTextView.setGravity(8388627);
      if (!paramPlayDrawerPrimaryAction.isChild) { break label438; }
      setPaddingStart(localTextView, localResources.getDimensionPixelSize(R.dimen.play_drawer_child_item_left_padding));
      return localTextView;
      if (paramBoolean2) { i = R.layout.play_drawer_primary_action_disabled; break; }
      i = R.layout.play_drawer_primary_action_regular;
      break;
      label305: break label23;
      label321: localDrawable1 = localResources.getDrawable(paramPlayDrawerPrimaryAction.iconResId); break label72;
      label335: localDrawable1.setAlpha(255); break label84;
      label346: localDrawable2 = null; break label101;
      label352: localTextView.setCompoundDrawablesWithIntrinsicBounds(localDrawable2, null, localDrawable1, null); break label122;
      label366: localDrawable3.setAlpha(255); break label152;
      label377: j = 0; break label170;
      label383: k = 2; break label178;
      label389: localTextView.setCompoundDrawablesWithIntrinsicBounds(null, null, null, null); break label206;
      label401: if (paramBoolean2) {
        localTextView.setTextColor(localResources.getColor(R.color.play_disabled_grey));
      } else {
        localTextView.setTextColor(localResources.getColor(R.color.play_fg_primary));
      }
    }
    label438: setPaddingStart(localTextView, localResources.getDimensionPixelSize(R.dimen.play_drawer_item_left_padding));
    return localTextView;
  }

  // RTL-safe start padding; preserves the other three paddings.
  private static void setPaddingStart(TextView paramTextView, int paramInt) {
    ViewCompat.setPaddingRelative(paramTextView, paramInt, paramTextView.getPaddingTop(), ViewCompat.getPaddingEnd(paramTextView), paramTextView.getPaddingBottom());
  }

  // Flips mAccountListExpanded and rebinds (decompiled ternary).
  private void toggleAccountsList() {
    if (!this.mAccountListExpanded) {}
    for (boolean bool = true;; bool = false) {
      this.mAccountListExpanded = bool;
      notifyDataSetChanged();
      return;
    }
  }

  // Separator rows exist, so not every item is enabled.
  public final boolean areAllItemsEnabled() { return false; }

  public final void collapseAccountListIfNeeded() {
    if ((this.mNonCurrentAccounts.length > 0) && (this.mAccountListExpanded)) {
      toggleAccountsList();
    }
  }

  // Row count: collapsed = header + spacer + primary actions (+ toggle)
  // + separator + secondary actions; expanded = header + other accounts.
  public final int getCount() {
    if (this.mHasAccounts) {}
    for (int i = 1; !this.mAccountListExpanded; i = 0) {
      int j = 1 + (i + 1 + this.mPrimaryActions.size());
      if (this.mShowDownloadOnlyToggle) { j++; }
      return j + this.mSecondaryActions.size();
    }
    return i + this.mNonCurrentAccounts.length;
  }

  // Maps a position to the account name / action / switch-config it shows.
  // NOTE(review): decompiled do/while goto chains; unreachable-looking
  // statements are jump targets in the original bytecode.
  public final Object getItem(int paramInt) {
    String str;
    if (this.mHasAccounts) {
      if (paramInt == 0) { str = this.mCurrentAccount.name; }
    }
    int k;
    do {
      do {
        return str;
        paramInt--;
        if (this.mAccountListExpanded) { break; }
        str = null;
      } while (paramInt == 0);
      int i = paramInt - 1;
      int j = this.mPrimaryActions.size();
      if (i < j) { return this.mPrimaryActions.get(i); }
      k = i - j;
      if (this.mShowDownloadOnlyToggle) {
        if (k == 0) { return this.mDownloadSwitchConfig; }
        k--;
      }
      str = null;
    } while (k == 0);
    int m = k - 1;
    return this.mSecondaryActions.get(m);
    return this.mNonCurrentAccounts[paramInt];
  }

  // Positions double as stable-ish ids.
  public final long getItemId(int paramInt) { return paramInt; }

  // View types: 0 profile header, 1 account row, 2 top spacing, 3 active /
  // 4 regular / 5 disabled primary action, 6 separator, 7 secondary action,
  // 8 download switch, 9 mini profile header, 10 mini account row.
  public final int getItemViewType(int paramInt) {
    int i = 6;
    if (this.mHasAccounts) {
      if (paramInt == 0) {
        if (this.mIsMiniProfile) { i = 9; }
      }
    }
    int k;
    do {
      int j;
      PlayDrawerLayout.PlayDrawerPrimaryAction localPlayDrawerPrimaryAction;
      do {
        return i;
        return 0;
        paramInt--;
        if (this.mAccountListExpanded) { break label162; }
        if (paramInt == 0) { return 2; }
        j = paramInt - 1;
        if (j >= this.mPrimaryActions.size()) { break; }
        localPlayDrawerPrimaryAction = (PlayDrawerLayout.PlayDrawerPrimaryAction)this.mPrimaryActions.get(j);
      } while (localPlayDrawerPrimaryAction.isSeparator);
      if ((this.mShowDownloadOnlyToggle) && (this.mDownloadOnlyEnabled) && (!localPlayDrawerPrimaryAction.isAvailableInDownloadOnly)) { return 5; }
      if (localPlayDrawerPrimaryAction.isActive) { return 3; }
      return 4;
      k = j - this.mPrimaryActions.size();
    } while (k == 0);
    int m = k - 1;
    if ((this.mShowDownloadOnlyToggle) && (m == 0)) { return 8; }
    return 7;
    label162: if (this.mIsMiniProfile) { return 10; }
    return 1;
  }

  // Inflates/recycles the row for each view type; see type legend above.
  // NOTE(review): the "new Response.Listener()new Response.ErrorListener {}"
  // fragments are decompilation failures of anonymous Volley listeners.
  public final View getView(int paramInt, View paramView, ViewGroup paramViewGroup) {
    int i = getItemViewType(paramInt);
    Object localObject = getItem(paramInt);
    switch (i) {
    default:
      throw new UnsupportedOperationException("View type " + i + " not supported");
    case 0:
      // Full profile header: bind current account, kick off profile loads for
      // every known account, wire avatar + account-toggler callbacks.
      View localView5; final PlayDrawerProfileInfoView localPlayDrawerProfileInfoView; final Account localAccount4; final String str3; FifeImageView localFifeImageView;
      if (paramView != null) {
        localView5 = paramView;
        localPlayDrawerProfileInfoView = (PlayDrawerProfileInfoView)localView5;
        this.mProfileContainerPosition = paramInt;
        localAccount4 = this.mCurrentAccount;
        str3 = this.mCurrentAccount.name;
        localPlayDrawerProfileInfoView.configure(localAccount4, this.mNonCurrentAccounts, this.mAccountDocV2s, this.mBitmapLoader);
        boolean bool = this.mCurrentAvatarClickable;
        localPlayDrawerProfileInfoView.mProfileAvatarImage.setEnabled(bool);
        localFifeImageView = localPlayDrawerProfileInfoView.mProfileAvatarImage;
        if (!bool) { break label344; }
      }
      for (int m = 1;; m = 2) {
        ViewCompat.setImportantForAccessibility(localFifeImageView, m);
        this.mPlayDfeApiProvider.getPlayDfeApi(localAccount4).getPlusProfile(new Response.Listener()new Response.ErrorListener {}, new Response.ErrorListener() {
          public final void onErrorResponse(VolleyError paramAnonymousVolleyError) {}
        }, true);
        for (int n = 0; n < this.mNonCurrentAccounts.length; n++) {
          Account localAccount5 = this.mNonCurrentAccounts[n];
          final String str4 = localAccount5.name;
          if (!this.mIsAccountDocLoaded.contains(str4)) {
            this.mPlayDfeApiProvider.getPlayDfeApi(localAccount5).getPlusProfile(new Response.Listener()new Response.ErrorListener {}, new Response.ErrorListener() {
              public final void onErrorResponse(VolleyError paramAnonymousVolleyError) {}
            }, true);
          }
        }
        localView5 = this.mInflater.inflate(R.layout.play_drawer_profile_info, paramViewGroup, false);
        break;
      }
      localPlayDrawerProfileInfoView.setAccountListExpanded(this.mAccountListExpanded);
      localPlayDrawerProfileInfoView.mOnAvatarClickedListener = new PlayDrawerProfileInfoView.OnAvatarClickedListener() {
        public final void onAvatarClicked(Account paramAnonymousAccount) {
          if (paramAnonymousAccount == PlayDrawerAdapter.this.mCurrentAccount) {
            boolean bool = PlayDrawerAdapter.this.mIsAccountDocLoaded.contains(PlayDrawerAdapter.this.mCurrentAccount.name);
            if (PlayDrawerAdapter.this.mPlayDrawerContentClickListener.onCurrentAccountClicked(bool, (DocV2)PlayDrawerAdapter.this.mAccountDocV2s.get(PlayDrawerAdapter.this.mCurrentAccount.name))) {
              PlayDrawerAdapter.this.mPlayDrawerLayout.closeDrawer();
            }
            return;
          }
          PlayDrawerAdapter.this.mPlayDrawerContentClickListener.onSecondaryAccountClicked(paramAnonymousAccount.name);
          PlayDrawerAdapter.this.mPlayDrawerLayout.closeDrawer();
        }
      };
      if (this.mNonCurrentAccounts.length > 0) {
        localPlayDrawerProfileInfoView.setAccountListEnabled(true);
        localPlayDrawerProfileInfoView.setAccountTogglerListener(new View.OnClickListener() {
          public final void onClick(View paramAnonymousView) {
            PlayDrawerLayout.PlayDrawerContentClickListener localPlayDrawerContentClickListener = PlayDrawerAdapter.this.mPlayDrawerContentClickListener;
            if (!PlayDrawerAdapter.this.mAccountListExpanded) {}
            for (boolean bool = true;; bool = false) {
              localPlayDrawerContentClickListener.onAccountListToggleButtonClicked(bool);
              PlayDrawerAdapter.this.toggleAccountsList();
              return;
            }
          }
        });
        return localPlayDrawerProfileInfoView;
      }
      localPlayDrawerProfileInfoView.setAccountListEnabled(false);
      localPlayDrawerProfileInfoView.setAccountTogglerListener(null);
      return localPlayDrawerProfileInfoView;
    case 1:
      // Secondary-account row with avatar (falls back to placeholder bitmap).
      Account localAccount3 = (Account)localObject;
      View localView4; PlayDrawerAccountRow localPlayDrawerAccountRow; final String str2; DocV2 localDocV2; BitmapLoader localBitmapLoader;
      if (paramView != null) {
        localView4 = paramView;
        localPlayDrawerAccountRow = (PlayDrawerAccountRow)localView4;
        str2 = localAccount3.name;
        localDocV2 = (DocV2)this.mAccountDocV2s.get(str2);
        localBitmapLoader = this.mBitmapLoader;
        localPlayDrawerAccountRow.mAccountName.setText(str2);
        localPlayDrawerAccountRow.setContentDescription(localPlayDrawerAccountRow.getResources().getString(R.string.play_drawer_content_description_switch_account, new Object[] { str2 }));
        if (localDocV2 != null) { break label564; }
        localPlayDrawerAccountRow.mAvatar.setLocalImageBitmap(BitmapFactory.decodeResource(localPlayDrawerAccountRow.getResources(), R.drawable.ic_profile_none));
      }
      for (;;) {
        localPlayDrawerAccountRow.setOnClickListener(new View.OnClickListener() {
          public final void onClick(View paramAnonymousView) {
            PlayDrawerAdapter.this.mPlayDrawerContentClickListener.onSecondaryAccountClicked(str2);
            PlayDrawerAdapter.this.mPlayDrawerLayout.closeDrawer();
          }
        });
        return localPlayDrawerAccountRow;
        localView4 = this.mInflater.inflate(R.layout.play_drawer_account_row, paramViewGroup, false);
        break;
        Common.Image localImage = DocV2Utils.getFirstImageOfType(localDocV2, 4);
        localPlayDrawerAccountRow.mAvatar.setImage(localImage.imageUrl, localImage.supportsFifeUrlOptions, localBitmapLoader);
      }
    case 10:
      // Mini (text-only) secondary-account row.
      Account localAccount2 = (Account)localObject;
      if (paramView != null) {}
      for (View localView3 = paramView;; localView3 = this.mInflater.inflate(R.layout.play_drawer_mini_account_row, paramViewGroup, false)) {
        PlayDrawerMiniAccountRow localPlayDrawerMiniAccountRow = (PlayDrawerMiniAccountRow)localView3;
        final String str1 = localAccount2.name;
        localPlayDrawerMiniAccountRow.mAccountName.setText(str1);
        localPlayDrawerMiniAccountRow.setContentDescription(localPlayDrawerMiniAccountRow.getResources().getString(R.string.play_drawer_content_description_switch_account, new Object[] { str1 }));
        localPlayDrawerMiniAccountRow.setOnClickListener(new View.OnClickListener() {
          public final void onClick(View paramAnonymousView) {
            PlayDrawerAdapter.this.mPlayDrawerContentClickListener.onSecondaryAccountClicked(str1);
            PlayDrawerAdapter.this.mPlayDrawerLayout.closeDrawer();
          }
        });
        return localPlayDrawerMiniAccountRow;
      }
    case 2:
      // Static spacing row above the primary actions.
      if (paramView != null) { return paramView; }
      return this.mInflater.inflate(R.layout.play_drawer_primary_actions_top_spacing, paramViewGroup, false);
    case 3:
      return getPrimaryActionView(paramView, paramViewGroup, (PlayDrawerLayout.PlayDrawerPrimaryAction)localObject, true, false);
    case 4:
      return getPrimaryActionView(paramView, paramViewGroup, (PlayDrawerLayout.PlayDrawerPrimaryAction)localObject, false, false);
    case 5:
      return getPrimaryActionView(paramView, paramViewGroup, (PlayDrawerLayout.PlayDrawerPrimaryAction)localObject, false, true);
    case 6:
      // Separator above secondary actions.
      if (paramView != null) { return paramView; }
      return this.mInflater.inflate(R.layout.play_drawer_secondary_actions_top_separator, paramViewGroup, false);
    case 8:
      // Download-only toggle row; styling comes from mDownloadSwitchConfig.
      PlayDrawerDownloadSwitchRow localPlayDrawerDownloadSwitchRow;
      if (paramView == null) {
        localPlayDrawerDownloadSwitchRow = (PlayDrawerDownloadSwitchRow)this.mInflater.inflate(R.layout.play_drawer_download_toggle, paramViewGroup, false);
        PlayDrawerLayout.PlayDrawerDownloadSwitchConfig localPlayDrawerDownloadSwitchConfig = this.mDownloadSwitchConfig;
        localPlayDrawerDownloadSwitchRow.mCheckedTextColor = localPlayDrawerDownloadSwitchConfig.checkedTextColor;
        int j = localPlayDrawerDownloadSwitchConfig.thumbDrawableId;
        int k = localPlayDrawerDownloadSwitchConfig.trackDrawableId;
        if (PlayDrawerDownloadSwitchRow.SUPPORTS_STYLED_SWITCH) {
          if (k != -1) { localPlayDrawerDownloadSwitchRow.mSwitch.setTrackResource(k); }
          if (j != -1) { localPlayDrawerDownloadSwitchRow.mSwitch.setThumbResource(j); }
          localPlayDrawerDownloadSwitchRow.mSwitch.setContentDescription(localPlayDrawerDownloadSwitchConfig.actionText);
        }
        localPlayDrawerDownloadSwitchRow.mActionTextView.setText(localPlayDrawerDownloadSwitchConfig.actionText);
        TextView localTextView2 = localPlayDrawerDownloadSwitchRow.mActionTextView;
        PlayUtils.useLtr(localPlayDrawerDownloadSwitchRow.getContext());
        localTextView2.setGravity(8388627);
        localPlayDrawerDownloadSwitchRow.mListener = new PlayDrawerDownloadSwitchRow.OnCheckedChangeListener() {
          // Obfuscated override name ($77cff6e2 suffix) from ProGuard.
          public final void onCheckedChanged$77cff6e2(boolean paramAnonymousBoolean) {
            PlayDrawerAdapter.access$1002(PlayDrawerAdapter.this, paramAnonymousBoolean);
            PlayDrawerAdapter.this.notifyDataSetChanged();
          }
        };
      }
      for (;;) {
        localPlayDrawerDownloadSwitchRow.setCheckedNoCallbacks(this.mDownloadOnlyEnabled);
        return localPlayDrawerDownloadSwitchRow;
        localPlayDrawerDownloadSwitchRow = (PlayDrawerDownloadSwitchRow)paramView;
      }
    case 7:
      // Secondary action row.
      label344: final PlayDrawerLayout.PlayDrawerSecondaryAction localPlayDrawerSecondaryAction = (PlayDrawerLayout.PlayDrawerSecondaryAction)localObject;
      label564: if (paramView != null) {}
      for (View localView2 = paramView;; localView2 = this.mInflater.inflate(R.layout.play_drawer_secondary_action, paramViewGroup, false)) {
        TextView localTextView1 = (TextView)localView2;
        localTextView1.setText(localPlayDrawerSecondaryAction.actionText);
        localTextView1.setOnClickListener(new View.OnClickListener() {
          public final void onClick(View paramAnonymousView) {
            PlayDrawerAdapter.this.mPlayDrawerContentClickListener.onSecondaryActionClicked(localPlayDrawerSecondaryAction);
            PlayDrawerAdapter.this.mPlayDrawerLayout.closeDrawer();
          }
        });
        PlayUtils.useLtr(this.mContext);
        localTextView1.setGravity(8388627);
        return localTextView1;
      }
    }
    // Fallthrough (view type 9): mini profile header.
    if (paramView != null) {}
    PlayDrawerMiniProfileInfoView localPlayDrawerMiniProfileInfoView;
    for (View localView1 = paramView;; localView1 = this.mInflater.inflate(R.layout.play_drawer_mini_profile_info_view, paramViewGroup, false)) {
      localPlayDrawerMiniProfileInfoView = (PlayDrawerMiniProfileInfoView)localView1;
      this.mProfileContainerPosition = paramInt;
      localPlayDrawerMiniProfileInfoView.setAccountListExpanded(this.mAccountListExpanded);
      Account localAccount1 = this.mCurrentAccount;
      localPlayDrawerMiniProfileInfoView.mDisplayName.setText(localAccount1.name);
      if (this.mNonCurrentAccounts.length <= 0) { break; }
      localPlayDrawerMiniProfileInfoView.setAccountListEnabled(true);
      localPlayDrawerMiniProfileInfoView.setAccountTogglerListener(new View.OnClickListener() {
        public final void onClick(View paramAnonymousView) {
          PlayDrawerLayout.PlayDrawerContentClickListener localPlayDrawerContentClickListener = PlayDrawerAdapter.this.mPlayDrawerContentClickListener;
          if (!PlayDrawerAdapter.this.mAccountListExpanded) {}
          for (boolean bool = true;; bool = false) {
            localPlayDrawerContentClickListener.onAccountListToggleButtonClicked(bool);
            PlayDrawerAdapter.this.toggleAccountsList();
            return;
          }
        }
      });
      return localPlayDrawerMiniProfileInfoView;
    }
    localPlayDrawerMiniProfileInfoView.setAccountListEnabled(false);
    localPlayDrawerMiniProfileInfoView.setAccountTogglerListener(null);
    return localPlayDrawerMiniProfileInfoView;
  }

  // Must cover every constant returned by getItemViewType (0..10).
  public final int getViewTypeCount() { return 11; }

  public final boolean hasStableIds() { return false; }

  // NOTE(review): decompiled switch lost its non-default cases; the trailing
  // "return false" is unreachable here but was reachable in the original.
  public final boolean isEnabled(int paramInt) {
    switch (getItemViewType(paramInt)) {
    default:
      return true;
    }
    return false;
  }
}

/* Location:              F:\apktool\apktool\Google_Play_Store6.0.5\classes-dex2jar.jar
 * Qualified Name:       com.google.android.play.drawer.PlayDrawerAdapter
 * JD-Core Version:      0.7.0.1
 */
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.io.network.partition; import org.apache.flink.core.memory.MemorySegment; import org.apache.flink.core.memory.MemorySegmentFactory; import org.apache.flink.core.memory.MemorySegmentProvider; import org.apache.flink.runtime.io.network.ConnectionID; import org.apache.flink.runtime.io.network.ConnectionManager; import org.apache.flink.runtime.io.network.PartitionRequestClient; import org.apache.flink.runtime.io.network.metrics.InputChannelMetrics; import org.apache.flink.runtime.io.network.partition.consumer.InputChannelBuilder; import org.apache.flink.runtime.io.network.partition.consumer.LocalInputChannel; import org.apache.flink.runtime.io.network.partition.consumer.RemoteInputChannel; import org.apache.flink.runtime.io.network.partition.consumer.SingleInputGate; import org.apache.flink.runtime.io.network.partition.consumer.SingleInputGateBuilder; import org.apache.flink.runtime.metrics.groups.UnregisteredMetricGroups; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import java.util.Collection; import java.util.Collections; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyInt; import static 
org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Some utility methods used for testing InputChannels and InputGates.
 */
public class InputChannelTestUtils {

	/**
	 * Creates a result partition manager that ignores all IDs, and simply returns the given
	 * subpartitions in sequence.
	 */
	public static ResultPartitionManager createResultPartitionManager(final ResultSubpartition[] sources) throws Exception {

		// Stateful answer: each createSubpartitionView() call serves the next
		// source in order, regardless of the requested partition/index.
		final Answer<ResultSubpartitionView> viewCreator = new Answer<ResultSubpartitionView>() {

			private int num = 0;

			@Override
			public ResultSubpartitionView answer(InvocationOnMock invocation) throws Throwable {
				// Argument 2 is the BufferAvailabilityListener in the mocked signature.
				BufferAvailabilityListener channel = (BufferAvailabilityListener) invocation.getArguments()[2];
				return sources[num++].createReadView(channel);
			}
		};

		ResultPartitionManager manager = mock(ResultPartitionManager.class);
		when(manager.createSubpartitionView(
				any(ResultPartitionID.class),
				anyInt(),
				any(BufferAvailabilityListener.class)))
				.thenAnswer(viewCreator);

		return manager;
	}

	/** Creates an input gate with the given number of channels and default settings otherwise. */
	public static SingleInputGate createSingleInputGate(int numberOfChannels) {
		return new SingleInputGateBuilder().setNumberOfChannels(numberOfChannels).build();
	}

	/**
	 * Creates a mock ConnectionManager that hands out the same mock
	 * PartitionRequestClient for every connection ID.
	 */
	public static ConnectionManager createDummyConnectionManager() throws Exception {
		final PartitionRequestClient mockClient = mock(PartitionRequestClient.class);

		final ConnectionManager connManager = mock(ConnectionManager.class);
		when(connManager.createPartitionRequestClient(any(ConnectionID.class))).thenReturn(mockClient);

		return connManager;
	}

	/** Creates a local channel (index 0, no backoff) and registers it with the gate. */
	public static LocalInputChannel createLocalInputChannel(
		SingleInputGate inputGate,
		ResultPartitionManager partitionManager) {

		return createLocalInputChannel(inputGate, partitionManager, 0, 0);
	}

	/** Creates a local channel at the given index and registers it with the gate. */
	public static LocalInputChannel createLocalInputChannel(
		SingleInputGate inputGate,
		int channelIndex,
		ResultPartitionManager partitionManager) {

		return InputChannelBuilder.newBuilder()
			.setChannelIndex(channelIndex)
			.setPartitionManager(partitionManager)
			.buildLocalAndSetToGate(inputGate);
	}

	/** Creates a local channel with the given request-backoff bounds and registers it with the gate. */
	public static LocalInputChannel createLocalInputChannel(
		SingleInputGate inputGate,
		ResultPartitionManager partitionManager,
		int initialBackoff,
		int maxBackoff) {

		return InputChannelBuilder.newBuilder()
			.setPartitionManager(partitionManager)
			.setInitialBackoff(initialBackoff)
			.setMaxBackoff(maxBackoff)
			.buildLocalAndSetToGate(inputGate);
	}

	/** Creates a remote channel at the given index and registers it with the gate. */
	public static RemoteInputChannel createRemoteInputChannel(
		SingleInputGate inputGate,
		int channelIndex,
		ConnectionManager connectionManager) {

		return InputChannelBuilder.newBuilder()
			.setChannelIndex(channelIndex)
			.setConnectionManager(connectionManager)
			.buildRemoteAndSetToGate(inputGate);
	}

	/**
	 * Creates a remote channel whose connection manager always returns the given
	 * client, using the supplied memory segment provider.
	 */
	public static RemoteInputChannel createRemoteInputChannel(
		SingleInputGate inputGate,
		PartitionRequestClient client,
		MemorySegmentProvider memorySegmentProvider) {

		return InputChannelBuilder.newBuilder()
			.setConnectionManager(mockConnectionManagerWithPartitionRequestClient(client))
			.setMemorySegmentProvider(memorySegmentProvider)
			.buildRemoteAndSetToGate(inputGate);
	}

	/** Hand-written stub (no Mockito) that returns {@code client} for every connection ID. */
	public static ConnectionManager mockConnectionManagerWithPartitionRequestClient(PartitionRequestClient client) {
		return new ConnectionManager() {
			@Override
			public int start() {
				// Port number is irrelevant for tests.
				return -1;
			}

			@Override
			public PartitionRequestClient createPartitionRequestClient(ConnectionID connectionId) {
				return client;
			}

			@Override
			public void closeOpenChannelConnections(ConnectionID connectionId) {
			}

			@Override
			public int getNumberOfActiveConnections() {
				return 0;
			}

			@Override
			public void shutdown() {
			}
		};
	}

	/** Creates metrics backed by an unregistered (throw-away) task metric group. */
	public static InputChannelMetrics newUnregisteredInputChannelMetrics() {
		return new InputChannelMetrics(UnregisteredMetricGroups.createUnregisteredTaskMetricGroup().getIOMetricGroup());
	}

	// ------------------------------------------------------------------------

	/** This class is not meant to be instantiated. */
	private InputChannelTestUtils() {}

	/**
	 * Test stub for {@link MemorySegmentProvider}.
	 */
	public static class StubMemorySegmentProvider implements MemorySegmentProvider {
		private static final MemorySegmentProvider INSTANCE = new StubMemorySegmentProvider();

		public static MemorySegmentProvider getInstance() {
			return INSTANCE;
		}

		private StubMemorySegmentProvider() {
		}

		@Override
		public Collection<MemorySegment> requestMemorySegments() {
			// Never provides memory.
			return Collections.emptyList();
		}

		@Override
		public void recycleMemorySegments(Collection<MemorySegment> segments) {
		}
	}

	/**
	 * {@link MemorySegmentProvider} that provides unpooled {@link MemorySegment}s.
	 */
	public static class UnpooledMemorySegmentProvider implements MemorySegmentProvider {
		private final int pageSize;

		public UnpooledMemorySegmentProvider(int pageSize) {
			this.pageSize = pageSize;
		}

		@Override
		public Collection<MemorySegment> requestMemorySegments() {
			// Allocates a fresh single segment per request; recycling is a no-op,
			// so segments are simply garbage-collected.
			return Collections.singletonList(MemorySegmentFactory.allocateUnpooledSegment(pageSize));
		}

		@Override
		public void recycleMemorySegments(Collection<MemorySegment> segments) {
		}
	}
}
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.core.parameters; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.pentaho.di.core.KettleEnvironment; import org.pentaho.di.core.RowMetaAndData; import org.pentaho.di.core.exception.KettleValueException; import org.pentaho.di.core.plugins.PluginRegistry; import org.pentaho.di.core.plugins.StepPluginType; import org.pentaho.di.core.row.RowMeta; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.row.value.ValueMetaString; import org.pentaho.di.trans.RowStepCollector; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransHopMeta; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.steps.dummytrans.DummyTransMeta; import org.pentaho.di.trans.steps.getvariable.GetVariableMeta; import org.pentaho.di.trans.steps.getvariable.GetVariableMeta.FieldDefinition; import junit.framework.TestCase; /** * Test class for parameters in transformations. 
 *
 * @author Sven Boden
 */
public class ParameterSimpleTransTest extends TestCase {

  /** Row meta for test 1: two string fields, PARAM1 and PARAM2. */
  public RowMetaInterface createResultRowMetaInterface1() {
    RowMetaInterface rm = new RowMeta();

    ValueMetaInterface[] valuesMeta = { new ValueMetaString( "PARAM1" ), new ValueMetaString( "PARAM2" ), };

    for ( int i = 0; i < valuesMeta.length; i++ ) {
      rm.addValueMeta( valuesMeta[i] );
    }

    return rm;
  }

  /** Expected output for test 1: both parameters carry the supplied values. */
  public List<RowMetaAndData> createResultData1() {
    List<RowMetaAndData> list = new ArrayList<RowMetaAndData>();

    RowMetaInterface rm = createResultRowMetaInterface1();

    Object[] r1 = new Object[] { "ParamValue1", "PARAMVALUE2" };

    list.add( new RowMetaAndData( rm, r1 ) );

    return list;
  }

  /** Row meta for test 2: same two string fields. */
  public RowMetaInterface createResultRowMetaInterface2() {
    RowMetaInterface rm = new RowMeta();

    ValueMetaInterface[] valuesMeta = { new ValueMetaString( "PARAM1" ), new ValueMetaString( "PARAM2" ), };

    for ( int i = 0; i < valuesMeta.length; i++ ) {
      rm.addValueMeta( valuesMeta[i] );
    }

    return rm;
  }

  /** Expected output for test 2: PARAM2 falls back to its default value. */
  public List<RowMetaAndData> createResultData2() {
    List<RowMetaAndData> list = new ArrayList<RowMetaAndData>();

    RowMetaInterface rm = createResultRowMetaInterface2();

    Object[] r1 = new Object[] { "ParamValue1", "default2" };

    list.add( new RowMetaAndData( rm, r1 ) );

    return list;
  }

  /** Row meta for test 3: an un-substituted ${JAVA_HOME} literal plus PARAM2. */
  public RowMetaInterface createResultRowMetaInterface3() {
    RowMetaInterface rm = new RowMeta();

    ValueMetaInterface[] valuesMeta = { new ValueMetaString( "${JAVA_HOME}" ), new ValueMetaString( "PARAM2" ), };

    for ( int i = 0; i < valuesMeta.length; i++ ) {
      rm.addValueMeta( valuesMeta[i] );
    }

    return rm;
  }

  /** Expected output for test 3: the variable reference passes through unresolved. */
  public List<RowMetaAndData> createResultData3() {
    List<RowMetaAndData> list = new ArrayList<RowMetaAndData>();

    RowMetaInterface rm = createResultRowMetaInterface3();

    Object[] r1 = new Object[] { "${JAVA_HOME}", "default2" };

    list.add( new RowMetaAndData( rm, r1 ) );

    return list;
  }

  /** Row meta for test 5: two string fields, PARAM1 and PARAM2. */
  public RowMetaInterface createResultRowMetaInterface5() {
    RowMetaInterface rm = new RowMeta();

    ValueMetaInterface[] valuesMeta = { new ValueMetaString( "PARAM1" ), new
ValueMetaString( "PARAM2" ), }; for ( int i = 0; i < valuesMeta.length; i++ ) { rm.addValueMeta( valuesMeta[i] ); } return rm; } public List<RowMetaAndData> createResultData5() { List<RowMetaAndData> list = new ArrayList<RowMetaAndData>(); RowMetaInterface rm = createResultRowMetaInterface5(); Object[] r1 = new Object[] { "default1", "PARAMVALUE2" }; list.add( new RowMetaAndData( rm, r1 ) ); return list; } public RowMetaInterface createResultRowMetaInterface6() { RowMetaInterface rm = new RowMeta(); ValueMetaInterface[] valuesMeta = { new ValueMetaString( "PARAM1" ), new ValueMetaString( "PARAM2" ), }; for ( int i = 0; i < valuesMeta.length; i++ ) { rm.addValueMeta( valuesMeta[i] ); } return rm; } public List<RowMetaAndData> createResultData6() { List<RowMetaAndData> list = new ArrayList<RowMetaAndData>(); RowMetaInterface rm = createResultRowMetaInterface5(); Object[] r1 = new Object[] { "", "PARAMVALUE2" }; list.add( new RowMetaAndData( rm, r1 ) ); return list; } /** * Check the 2 lists comparing the rows in order. If they are not the same fail the test. 
 *
 * @param rows1
 *          first row set to compare
 * @param rows2
 *          second row set to compare
 */
public void checkRows( List<RowMetaAndData> rows1, List<RowMetaAndData> rows2 ) {
  // 1-based row counter used only for failure messages.
  int idx = 1;

  if ( rows1.size() != rows2.size() ) {
    fail( "Number of rows is not the same: " + rows1.size() + " and " + rows2.size() );
  }
  Iterator<RowMetaAndData> it1 = rows1.iterator();
  Iterator<RowMetaAndData> it2 = rows2.iterator();

  while ( it1.hasNext() && it2.hasNext() ) {
    RowMetaAndData rm1 = it1.next();
    RowMetaAndData rm2 = it2.next();

    Object[] r1 = rm1.getData();
    Object[] r2 = rm2.getData();

    if ( rm1.size() != rm2.size() ) {
      fail( "row nr " + idx + " is not equal" );
    }
    // Compare on every field, in declared order.
    int[] fields = new int[rm1.size()];
    for ( int ydx = 0; ydx < rm1.size(); ydx++ ) {
      fields[ydx] = ydx;
    }
    try {
      if ( rm1.getRowMeta().compare( r1, r2, fields ) != 0 ) {
        fail( "row nr " + idx + " is not equal" );
      }
    } catch ( KettleValueException e ) {
      // Incomparable values also count as a mismatch.
      fail( "row nr " + idx + " is not equal" );
    }

    idx++;
  }
}

/**
 * Test case for parameters using a simple transformation.
 *
 * @throws Exception
 *           exception on any problem.
 */
public void testParameterSimpleTrans1() throws Exception {
  KettleEnvironment.init();

  //
  // Create a new transformation...
  //
  TransMeta transMeta = new TransMeta();
  transMeta.setName( "parameter_simple_trans1" );

  PluginRegistry registry = PluginRegistry.getInstance();

  //
  // create a get variables step...
  //
  String getVariablesStepname = "get variables step";
  GetVariableMeta gvm = new GetVariableMeta();

  // Set the information of the get variables step.
  String getVariablesPid = registry.getPluginId( StepPluginType.class, gvm );
  StepMeta getVariablesStep = new StepMeta( getVariablesPid, getVariablesStepname, gvm );
  transMeta.addStep( getVariablesStep );

  //
  // Generate 1 row
  //
  // Field PARAM1 reads ${Param1} (Kettle syntax); PARAM2 reads %%PARAM2%%
  // (legacy syntax) — both substitution styles are exercised.
  String[] fieldName = { "PARAM1", "PARAM2" };
  String[] varName = { "${Param1}", "%%PARAM2%%" };
  int[] fieldType = { ValueMetaInterface.TYPE_STRING, ValueMetaInterface.TYPE_STRING };
  int[] length = { -1, -1 };
  int[] precision = { -1, -1 };
  String[] format = { "", "" };
  String[] currency = { "", "" };
  String[] decimal = { "", "" };
  String[] grouping = { "", "" };
  int[] trimType = { ValueMetaInterface.TRIM_TYPE_NONE, ValueMetaInterface.TRIM_TYPE_NONE };

  FieldDefinition[] fields = new FieldDefinition[fieldName.length];
  for ( int i = 0; i < fields.length; i++ ) {
    FieldDefinition field = new FieldDefinition();
    field.setFieldName( fieldName[i] );
    field.setVariableString( varName[i] );
    field.setFieldType( fieldType[i] );
    field.setFieldLength( length[i] );
    field.setFieldPrecision( precision[i] );
    field.setFieldFormat( format[i] );
    field.setCurrency( currency[i] );
    field.setDecimal( decimal[i] );
    field.setGroup( grouping[i] );
    field.setTrimType( trimType[i] );
    fields[i] = field;
  }
  gvm.setFieldDefinitions( fields );

  //
  // Create a dummy step 1
  //
  // The dummy step only serves as a probe point for the RowStepCollector.
  String dummyStepname1 = "dummy step 1";
  DummyTransMeta dm1 = new DummyTransMeta();

  String dummyPid1 = registry.getPluginId( StepPluginType.class, dm1 );
  StepMeta dummyStep1 = new StepMeta( dummyPid1, dummyStepname1, dm1 );
  transMeta.addStep( dummyStep1 );

  TransHopMeta hi1 = new TransHopMeta( getVariablesStep, dummyStep1 );
  transMeta.addTransHop( hi1 );

  // Now execute the transformation...
  Trans trans = new Trans( transMeta );
  // Declare both parameters, then supply values for both.
  trans.addParameterDefinition( "Param1", "", "Parameter 1" );
  trans.addParameterDefinition( "PARAM2", "", "Parameter 2" );
  trans.setParameterValue( "Param1", "ParamValue1" );
  trans.setParameterValue( "PARAM2", "PARAMVALUE2" );

  trans.prepareExecution( null );

  StepInterface si = trans.getStepInterface( dummyStepname1, 0 );
  RowStepCollector endRc = new RowStepCollector();
  si.addRowListener( endRc );

  trans.startThreads();
  trans.waitUntilFinished();

  // Now check whether the output is still as we expect.
  List<RowMetaAndData> goldenImageRows = createResultData1();
  List<RowMetaAndData> resultRows1 = endRc.getRowsWritten();
  checkRows( resultRows1, goldenImageRows );
}

/**
 * Test case for parameters using a simple transformation. Here 1 parameter is not provided as value, so the default
 * will be used.
 *
 * @throws Exception
 *           exception on any problem.
 */
public void testParameterSimpleTrans2() throws Exception {
  KettleEnvironment.init();

  //
  // Create a new transformation...
  //
  TransMeta transMeta = new TransMeta();
  transMeta.setName( "parameter_simple_trans2" );

  PluginRegistry registry = PluginRegistry.getInstance();

  //
  // create a get variables step...
  //
  String getVariablesStepname = "get variables step";
  GetVariableMeta gvm = new GetVariableMeta();

  // Set the information of the get variables step.
String getVariablesPid = registry.getPluginId( StepPluginType.class, gvm ); StepMeta getVariablesStep = new StepMeta( getVariablesPid, getVariablesStepname, gvm ); transMeta.addStep( getVariablesStep ); // // Generate 1 row // String[] fieldName = { "Param1", "PARAM2" }; String[] varName = { "${Param1}", "%%PARAM2%%" }; int[] fieldType = { ValueMetaInterface.TYPE_STRING, ValueMetaInterface.TYPE_STRING }; int[] length = { -1, -1 }; int[] precision = { -1, -1 }; String[] format = { "", "" }; String[] currency = { "", "" }; String[] decimal = { "", "" }; String[] grouping = { "", "" }; int[] trimType = { ValueMetaInterface.TRIM_TYPE_NONE, ValueMetaInterface.TRIM_TYPE_NONE }; FieldDefinition[] fields = new FieldDefinition[fieldName.length]; for ( int i = 0; i < fields.length; i++ ) { FieldDefinition field = new FieldDefinition(); field.setFieldName( fieldName[i] ); field.setVariableString( varName[i] ); field.setFieldType( fieldType[i] ); field.setFieldLength( length[i] ); field.setFieldPrecision( precision[i] ); field.setFieldFormat( format[i] ); field.setCurrency( currency[i] ); field.setDecimal( decimal[i] ); field.setGroup( grouping[i] ); field.setTrimType( trimType[i] ); fields[i] = field; } gvm.setFieldDefinitions( fields ); // // Create a dummy step 1 // String dummyStepname1 = "dummy step 1"; DummyTransMeta dm1 = new DummyTransMeta(); String dummyPid1 = registry.getPluginId( StepPluginType.class, dm1 ); StepMeta dummyStep1 = new StepMeta( dummyPid1, dummyStepname1, dm1 ); transMeta.addStep( dummyStep1 ); TransHopMeta hi1 = new TransHopMeta( getVariablesStep, dummyStep1 ); transMeta.addTransHop( hi1 ); // Now execute the transformation... 
Trans trans = new Trans( transMeta ); trans.addParameterDefinition( "Param1", "default1", "Parameter 1" ); trans.addParameterDefinition( "PARAM2", "default2", "Parameter 2" ); trans.setParameterValue( "Param1", "ParamValue1" ); // PARAM2 is not set trans.prepareExecution( null ); StepInterface si = trans.getStepInterface( dummyStepname1, 0 ); RowStepCollector endRc = new RowStepCollector(); si.addRowListener( endRc ); trans.startThreads(); trans.waitUntilFinished(); // Now check whether the output is still as we expect. List<RowMetaAndData> goldenImageRows = createResultData2(); List<RowMetaAndData> resultRows1 = endRc.getRowsWritten(); checkRows( resultRows1, goldenImageRows ); } /** * Test case for parameters using a simple transformation. Here blocking some unwise usage of parameters. * * @throws Exception * exception on any problem. */ public void testParameterSimpleTrans3() throws Exception { KettleEnvironment.init(); // // Create a new transformation... // TransMeta transMeta = new TransMeta(); transMeta.setName( "parameter_simple_trans3" ); PluginRegistry registry = PluginRegistry.getInstance(); // // create a get variables step... // String getVariablesStepname = "get variables step"; GetVariableMeta gvm = new GetVariableMeta(); // Set the information of the get variables step. 
String getVariablesPid = registry.getPluginId( StepPluginType.class, gvm ); StepMeta getVariablesStep = new StepMeta( getVariablesPid, getVariablesStepname, gvm ); transMeta.addStep( getVariablesStep ); // // Generate 1 row // String[] fieldName = { "PARAM1", "PARAM2" }; String[] varName = { "${JAVA_HOME}", "%%PARAM2%%" }; int[] fieldType = { ValueMetaInterface.TYPE_STRING, ValueMetaInterface.TYPE_STRING }; int[] length = { -1, -1 }; int[] precision = { -1, -1 }; String[] format = { "", "" }; String[] currency = { "", "" }; String[] decimal = { "", "" }; String[] grouping = { "", "" }; int[] trimType = { ValueMetaInterface.TRIM_TYPE_NONE, ValueMetaInterface.TRIM_TYPE_NONE }; FieldDefinition[] fields = new FieldDefinition[fieldName.length]; for ( int i = 0; i < fields.length; i++ ) { FieldDefinition field = new FieldDefinition(); field.setFieldName( fieldName[i] ); field.setVariableString( varName[i] ); field.setFieldType( fieldType[i] ); field.setFieldLength( length[i] ); field.setFieldPrecision( precision[i] ); field.setFieldFormat( format[i] ); field.setCurrency( currency[i] ); field.setDecimal( decimal[i] ); field.setGroup( grouping[i] ); field.setTrimType( trimType[i] ); fields[i] = field; } gvm.setFieldDefinitions( fields ); // // Create a dummy step 1 // String dummyStepname1 = "dummy step 1"; DummyTransMeta dm1 = new DummyTransMeta(); String dummyPid1 = registry.getPluginId( StepPluginType.class, dm1 ); StepMeta dummyStep1 = new StepMeta( dummyPid1, dummyStepname1, dm1 ); transMeta.addStep( dummyStep1 ); TransHopMeta hi1 = new TransHopMeta( getVariablesStep, dummyStep1 ); transMeta.addTransHop( hi1 ); // Now execute the transformation... 
Trans trans = new Trans( transMeta ); trans.addParameterDefinition( "${JAVA_HOME}", "default1", "Parameter 1" ); trans.addParameterDefinition( "PARAM2", "default2", "Parameter 2" ); trans.setParameterValue( "${JAVA_HOME}", "param1" ); // PARAM2 is not set trans.prepareExecution( null ); StepInterface si = trans.getStepInterface( dummyStepname1, 0 ); RowStepCollector endRc = new RowStepCollector(); si.addRowListener( endRc ); trans.startThreads(); trans.waitUntilFinished(); // Now check whether the output is still as we expect. List<RowMetaAndData> goldenImageRows = createResultData3(); List<RowMetaAndData> resultRows1 = endRc.getRowsWritten(); checkRows( resultRows1, goldenImageRows ); } /** * Test case for parameters using a simple transformation. Check whether parameters override variables. * * @throws Exception * exception on any problem. */ public void testParameterSimpleTrans4() throws Exception { KettleEnvironment.init(); // // Create a new transformation... // TransMeta transMeta = new TransMeta(); transMeta.setName( "parameter_simple_trans4" ); PluginRegistry registry = PluginRegistry.getInstance(); // // create a get variables step... // String getVariablesStepname = "get variables step"; GetVariableMeta gvm = new GetVariableMeta(); // Set the information of the get variables step. 
String getVariablesPid = registry.getPluginId( StepPluginType.class, gvm ); StepMeta getVariablesStep = new StepMeta( getVariablesPid, getVariablesStepname, gvm ); transMeta.addStep( getVariablesStep ); // // Generate 1 row // String[] fieldName = { "PARAM1", "PARAM2" }; String[] varName = { "${Param1}", "%%PARAM2%%" }; int[] fieldType = { ValueMetaInterface.TYPE_STRING, ValueMetaInterface.TYPE_STRING }; int[] length = { -1, -1 }; int[] precision = { -1, -1 }; String[] format = { "", "" }; String[] currency = { "", "" }; String[] decimal = { "", "" }; String[] grouping = { "", "" }; int[] trimType = { ValueMetaInterface.TRIM_TYPE_NONE, ValueMetaInterface.TRIM_TYPE_NONE }; FieldDefinition[] fields = new FieldDefinition[fieldName.length]; for ( int i = 0; i < fields.length; i++ ) { FieldDefinition field = new FieldDefinition(); field.setFieldName( fieldName[i] ); field.setVariableString( varName[i] ); field.setFieldType( fieldType[i] ); field.setFieldLength( length[i] ); field.setFieldPrecision( precision[i] ); field.setFieldFormat( format[i] ); field.setCurrency( currency[i] ); field.setDecimal( decimal[i] ); field.setGroup( grouping[i] ); field.setTrimType( trimType[i] ); fields[i] = field; } gvm.setFieldDefinitions( fields ); // // Create a dummy step 1 // String dummyStepname1 = "dummy step 1"; DummyTransMeta dm1 = new DummyTransMeta(); String dummyPid1 = registry.getPluginId( StepPluginType.class, dm1 ); StepMeta dummyStep1 = new StepMeta( dummyPid1, dummyStepname1, dm1 ); transMeta.addStep( dummyStep1 ); TransHopMeta hi1 = new TransHopMeta( getVariablesStep, dummyStep1 ); transMeta.addTransHop( hi1 ); // Now execute the transformation... Trans trans = new Trans( transMeta ); trans.addParameterDefinition( "Param1", "", "Parameter 1" ); trans.addParameterDefinition( "PARAM2", "", "Parameter 2" ); trans.setParameterValue( "Param1", "ParamValue1" ); trans.setParameterValue( "PARAM2", "PARAMVALUE2" ); // See whether this variable overrides the parameter... 
it should NOT. trans.setVariable( "Param1", "Variable1" ); trans.prepareExecution( null ); StepInterface si = trans.getStepInterface( dummyStepname1, 0 ); RowStepCollector endRc = new RowStepCollector(); si.addRowListener( endRc ); trans.startThreads(); trans.waitUntilFinished(); // Now check whether the output is still as we expect. List<RowMetaAndData> goldenImageRows = createResultData1(); List<RowMetaAndData> resultRows1 = endRc.getRowsWritten(); checkRows( resultRows1, goldenImageRows ); } /** * Test case for parameters using a simple transformation. Check whether parameters override variables. * * @throws Exception * exception on any problem. */ public void testParameterSimpleTrans5() throws Exception { KettleEnvironment.init(); // // Create a new transformation... // TransMeta transMeta = new TransMeta(); transMeta.setName( "parameter_simple_trans4" ); PluginRegistry registry = PluginRegistry.getInstance(); // // create a get variables step... // String getVariablesStepname = "get variables step"; GetVariableMeta gvm = new GetVariableMeta(); // Set the information of the get variables step. 
String getVariablesPid = registry.getPluginId( StepPluginType.class, gvm ); StepMeta getVariablesStep = new StepMeta( getVariablesPid, getVariablesStepname, gvm ); transMeta.addStep( getVariablesStep ); // // Generate 1 row // String[] fieldName = { "PARAM1", "PARAM2" }; String[] varName = { "${Param1}", "%%PARAM2%%" }; int[] fieldType = { ValueMetaInterface.TYPE_STRING, ValueMetaInterface.TYPE_STRING }; int[] length = { -1, -1 }; int[] precision = { -1, -1 }; String[] format = { "", "" }; String[] currency = { "", "" }; String[] decimal = { "", "" }; String[] grouping = { "", "" }; int[] trimType = { ValueMetaInterface.TRIM_TYPE_NONE, ValueMetaInterface.TRIM_TYPE_NONE }; FieldDefinition[] fields = new FieldDefinition[fieldName.length]; for ( int i = 0; i < fields.length; i++ ) { FieldDefinition field = new FieldDefinition(); field.setFieldName( fieldName[i] ); field.setVariableString( varName[i] ); field.setFieldType( fieldType[i] ); field.setFieldLength( length[i] ); field.setFieldPrecision( precision[i] ); field.setFieldFormat( format[i] ); field.setCurrency( currency[i] ); field.setDecimal( decimal[i] ); field.setGroup( grouping[i] ); field.setTrimType( trimType[i] ); fields[i] = field; } gvm.setFieldDefinitions( fields ); // // Create a dummy step 1 // String dummyStepname1 = "dummy step 1"; DummyTransMeta dm1 = new DummyTransMeta(); String dummyPid1 = registry.getPluginId( StepPluginType.class, dm1 ); StepMeta dummyStep1 = new StepMeta( dummyPid1, dummyStepname1, dm1 ); transMeta.addStep( dummyStep1 ); TransHopMeta hi1 = new TransHopMeta( getVariablesStep, dummyStep1 ); transMeta.addTransHop( hi1 ); // Now execute the transformation... Trans trans = new Trans( transMeta ); trans.addParameterDefinition( "Param1", "default1", "Parameter 1" ); trans.addParameterDefinition( "PARAM2", "", "Parameter 2" ); trans.setParameterValue( "PARAM2", "PARAMVALUE2" ); // See whether this variable overrides the parameter... it should NOT. 
Param1 // is defined but not set, so defaults should kick in. trans.setVariable( "Param1", "Variable1" ); trans.prepareExecution( null ); StepInterface si = trans.getStepInterface( dummyStepname1, 0 ); RowStepCollector endRc = new RowStepCollector(); si.addRowListener( endRc ); trans.startThreads(); trans.waitUntilFinished(); // Now check whether the output is still as we expect. List<RowMetaAndData> goldenImageRows = createResultData5(); List<RowMetaAndData> resultRows1 = endRc.getRowsWritten(); checkRows( resultRows1, goldenImageRows ); } /** * Test case for parameters using a simple transformation. Check whether parameters override variables. * * @throws Exception * exception on any problem. */ public void testParameterSimpleTrans6() throws Exception { KettleEnvironment.init(); // // Create a new transformation... // TransMeta transMeta = new TransMeta(); transMeta.setName( "parameter_simple_trans4" ); PluginRegistry registry = PluginRegistry.getInstance(); // // create a get variables step... // String getVariablesStepname = "get variables step"; GetVariableMeta gvm = new GetVariableMeta(); // Set the information of the get variables step. 
String getVariablesPid = registry.getPluginId( StepPluginType.class, gvm ); StepMeta getVariablesStep = new StepMeta( getVariablesPid, getVariablesStepname, gvm ); transMeta.addStep( getVariablesStep ); // // Generate 1 row // String[] fieldName = { "PARAM1", "PARAM2" }; String[] varName = { "${Param1}", "%%PARAM2%%" }; int[] fieldType = { ValueMetaInterface.TYPE_STRING, ValueMetaInterface.TYPE_STRING }; int[] length = { -1, -1 }; int[] precision = { -1, -1 }; String[] format = { "", "" }; String[] currency = { "", "" }; String[] decimal = { "", "" }; String[] grouping = { "", "" }; int[] trimType = { ValueMetaInterface.TRIM_TYPE_NONE, ValueMetaInterface.TRIM_TYPE_NONE }; FieldDefinition[] fields = new FieldDefinition[fieldName.length]; for ( int i = 0; i < fields.length; i++ ) { FieldDefinition field = new FieldDefinition(); field.setFieldName( fieldName[i] ); field.setVariableString( varName[i] ); field.setFieldType( fieldType[i] ); field.setFieldLength( length[i] ); field.setFieldPrecision( precision[i] ); field.setFieldFormat( format[i] ); field.setCurrency( currency[i] ); field.setDecimal( decimal[i] ); field.setGroup( grouping[i] ); field.setTrimType( trimType[i] ); fields[i] = field; } gvm.setFieldDefinitions( fields ); // // Create a dummy step 1 // String dummyStepname1 = "dummy step 1"; DummyTransMeta dm1 = new DummyTransMeta(); String dummyPid1 = registry.getPluginId( StepPluginType.class, dm1 ); StepMeta dummyStep1 = new StepMeta( dummyPid1, dummyStepname1, dm1 ); transMeta.addStep( dummyStep1 ); TransHopMeta hi1 = new TransHopMeta( getVariablesStep, dummyStep1 ); transMeta.addTransHop( hi1 ); // Now execute the transformation... Trans trans = new Trans( transMeta ); trans.addParameterDefinition( "Param1", "", "Parameter 1" ); trans.addParameterDefinition( "PARAM2", "", "Parameter 2" ); trans.setParameterValue( "PARAM2", "PARAMVALUE2" ); // See whether this variable overrides the parameter... it should NOT. Param1 // is defined but not set. 
And no default... so the variable will be set to "". not // to "Variable1" trans.setVariable( "Param1", "Variable1" ); trans.prepareExecution( null ); StepInterface si = trans.getStepInterface( dummyStepname1, 0 ); RowStepCollector endRc = new RowStepCollector(); si.addRowListener( endRc ); trans.startThreads(); trans.waitUntilFinished(); // Now check whether the output is still as we expect. List<RowMetaAndData> goldenImageRows = createResultData6(); List<RowMetaAndData> resultRows1 = endRc.getRowsWritten(); checkRows( resultRows1, goldenImageRows ); } }
package com.github.lotsabackscatter.masonry; import static com.google.common.base.Objects.firstNonNull; import static com.google.common.base.Preconditions.checkNotNull; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.annotation.CheckForNull; import javax.annotation.Nonnull; import org.json.JSONArray; import org.json.JSONException; import com.vaadin.annotations.JavaScript; import com.vaadin.annotations.StyleSheet; import com.vaadin.ui.AbstractJavaScriptComponent; import com.vaadin.ui.JavaScriptFunction; /** * A Vaadin Component representing a Masonry Card Layout. * * @author watsond */ @JavaScript({ "vaadin://addons/masonry/js/thirdparty/jquery.min.js", "vaadin://addons/masonry/js/thirdparty/masonry.pkgd.min.js", "vaadin://addons/masonry/js/thirdparty/jquery.visible.js", "vaadin://addons/masonry/js/prototypes.js", "vaadin://addons/masonry/js/masonry_connector.js" }) @StyleSheet({ "vaadin://addons/masonry/css/animate.css", "vaadin://addons/masonry/css/hover.css", "vaadin://addons/masonry/css/styles.css" }) public class MasonryCards extends AbstractJavaScriptComponent { private static final long serialVersionUID = 1L; /** * Listeners mapped by id, fired when an image is clicked. */ private final Map<String, ClickListener> listeners = new HashMap<String, ClickListener>(); /** * Listeners mapped by id, fired when an edit button is clicked. */ private final Map<String, ClickListener> editClicklisteners = new HashMap<String, ClickListener>(); /** * Listeners mapped by id, fired when a reply button is clicked. */ private final Map<String, ClickListener> replyClicklisteners = new HashMap<String, ClickListener>(); /** * The {@link LoadRequester} called when more cards are required. */ private LoadRequester loadRequester = null; /** * Constructor of the class. 
*/ public MasonryCards() { addFunction("onClick", new ClickResponder(listeners)); addFunction("onEditClick", new ClickResponder(editClicklisteners)); addFunction("onReplyClick", new ClickResponder(replyClicklisteners)); addFunction("loadMore", new LoadMoreResponder()); } /** * Adds a card to the Masonry component. * * @param id * the unique id of the card * @param name * the name of the card * @param description * the description of the card * @param url * the url of the image * @param cardColour * the css colour of the card * @param clickListener * the listener, fired when the image is clicked */ public void addCard(@Nonnull String id, @Nonnull String name, @Nonnull String description, @CheckForNull String url, @CheckForNull String cardColour, @CheckForNull ClickListener clickListener) { addCard(id, name, description, url, cardColour, clickListener, Collections.<Comment> emptyList()); } /** * Adds a card to the Masonry component. * * @param id * the unique id of the card * @param name * the name of the card * @param description * the description of the card * @param url * the url of the image * @param cardColour * the css colour of the card * @param clickListener * the listener, fired when the image is clicked * @param comments * the comments of the card */ public void addCard(@Nonnull String id, @Nonnull String name, @Nonnull String description, @CheckForNull String url, @CheckForNull String cardColour, @CheckForNull ClickListener clickListener, @CheckForNull List<Comment> comments) { addCard(id, name, description, url, cardColour, clickListener, null, null, comments); } /** * Adds a card to the Masonry component. 
* * @param id * the unique id of the card * @param name * the name of the card * @param description * the description of the card * @param url * the url of the image * @param cardColour * the css colour of the card * @param clickListener * the listener, fired when the image is clicked * @param editClickListener * the listener, fired when the edit button is clicked * @param replyClickListener * the listener, fired when the reply button is clicked * @param comments * the comments of the card */ public void addCard(@Nonnull String id, @Nonnull String name, @Nonnull String description, @CheckForNull String url, @CheckForNull String cardColour, @CheckForNull ClickListener clickListener, @CheckForNull ClickListener editClickListener, @CheckForNull ClickListener replyClickListener, @CheckForNull List<Comment> comments) { checkNotNull(id); checkNotNull(name); checkNotNull(description); if (clickListener != null) { listeners.put(id, clickListener); } if (editClickListener != null) { editClicklisteners.put(id, editClickListener); } if (replyClickListener != null) { replyClicklisteners.put(id, replyClickListener); } List<Map<String, String>> commentsList = new ArrayList<Map<String, String>>(); if (comments != null) { for (Comment comment : comments) { commentsList.add(comment.toMap()); } } firstNonNull(cardColour, "white"); callFunction("addCard", id, name, description, url, cardColour, commentsList); } /** * Updates the card with the given id. 
* * @param id * the unique id of the card * @param name * the name of the card * @param description * the description of the card * @param url * the url of the image * @param cardColour * the css colour of the card * @param clickListener * the listener, fired when the image is clicked * @param editClickListener * the listener, fired when the edit button is clicked * @param replyClickListener * the listener, fired when the reply button is clicked * @param comments * the comments of the card */ public void updateCard(@Nonnull String id, @Nonnull String name, @Nonnull String description, @CheckForNull String url, @CheckForNull String cardColour, @CheckForNull ClickListener clickListener, @CheckForNull ClickListener editClickListener, @CheckForNull ClickListener replyClickListener, @CheckForNull List<Comment> comments) { checkNotNull(id); checkNotNull(name); checkNotNull(description); if (clickListener != null) { listeners.put(id, clickListener); } if (editClickListener != null) { editClicklisteners.put(id, editClickListener); } if (replyClickListener != null) { replyClicklisteners.put(id, replyClickListener); } List<Map<String, String>> commentsList = new ArrayList<Map<String, String>>(); if (comments != null) { for (Comment comment : comments) { commentsList.add(comment.toMap()); } } firstNonNull(cardColour, "white"); callFunction("updateCard", id, name, description, url, cardColour, commentsList); } /** * Forces a relayout of the Masonry component. */ public void relayout() { callFunction("reMasonry"); } /** * Sets the {@link LoadRequester}. * * @param loadRequester * called when Masonry component requests more cards.<br> * I.e When the component is scrolled to the bottom. */ public void setLoadRequester(@Nonnull LoadRequester loadRequester) { this.loadRequester = loadRequester; } /** * {@inheritDoc} */ @Override protected MasonryState getState() { return (MasonryState) super.getState(); } /** * A function (callable by Javascript) that when called, loads more cards. 
*/ private final class LoadMoreResponder implements JavaScriptFunction { private static final long serialVersionUID = 1L; @Override public void call(JSONArray arguments) throws JSONException { if (loadRequester != null) { loadRequester.loadMore(); } callFunction("onLoadComplete"); } } /** * A function (callable by Javascript) that when called with a given id, * fires the corresponding {@link ClickListener}. */ private final class ClickResponder implements JavaScriptFunction { private static final long serialVersionUID = 1L; @Nonnull private final Map<String, ClickListener> listeners; /** * Constructor of the class. * * @param listeners */ public ClickResponder(@Nonnull Map<String, ClickListener> listeners) { this.listeners = listeners; } /** * {@inheritDoc} */ @Override public void call(JSONArray arguments) throws JSONException { String id = arguments.getString(0); click(id); } /** * Fires the {@link ClickListener} that corresponds to the given id. * * @param id * the id */ private void click(String id) { final ClickListener clickListener = listeners.get(id); if (clickListener != null) { clickListener.onClick(); } } } }
package com.planet_ink.coffee_mud.core; import java.util.*; import com.planet_ink.coffee_mud.core.interfaces.CMObject; import com.planet_ink.coffee_mud.core.interfaces.Environmental; /* Copyright 2000-2010 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ @SuppressWarnings("unchecked") public class CMParms { private CMParms(){super();} private static CMParms inst=new CMParms(); public static CMParms instance(){return inst;} public static String combine(Vector commands, int startAt, int endAt) { StringBuffer Combined=new StringBuffer(""); if(commands!=null) for(int commandIndex=startAt;commandIndex<endAt;commandIndex++) Combined.append(commands.elementAt(commandIndex).toString()+" "); return Combined.toString().trim(); } public static String combineWithQuotes(Vector commands, int startAt, int endAt) { StringBuffer Combined=new StringBuffer(""); if(commands!=null) for(int commandIndex=startAt;commandIndex<endAt;commandIndex++) { String s=commands.elementAt(commandIndex).toString(); if(s.indexOf(" ")>=0) s="\""+s+"\""; Combined.append(s+" "); } return Combined.toString().trim(); } public static void sortVector(Vector V) { Vector V2=new Vector(new TreeSet(V)); V.clear(); V.addAll(V2); V.trimToSize(); } public static String combineAfterIndexWithQuotes(Vector commands, String match) { StringBuffer Combined=new StringBuffer(""); if(commands!=null) for(int commandIndex=0;commandIndex<0;commandIndex++) { String s=(String)commands.elementAt(commandIndex); if(s.indexOf(" ")>=0) s="\""+s+"\""; 
Combined.append(s+" "); } return Combined.toString().trim(); } public static String combineWithQuotes(Vector commands, int startAt) { StringBuffer Combined=new StringBuffer(""); if(commands!=null) for(int commandIndex=startAt;commandIndex<commands.size();commandIndex++) { String s=commands.elementAt(commandIndex).toString(); if(s.indexOf(" ")>=0) s="\""+s+"\""; Combined.append(s+" "); } return Combined.toString().trim(); } public static String combineWithTabs(Vector commands, int startAt) { return combineWithX(commands,"\t",startAt); } public static String combineWithX(Vector commands, String X, int startAt) { StringBuffer Combined=new StringBuffer(""); if(commands!=null) for(int commandIndex=startAt;commandIndex<commands.size();commandIndex++) { String s=commands.elementAt(commandIndex).toString(); Combined.append(s+X); } return Combined.toString().trim(); } public static String combine(Vector<?> commands, int startAt) { StringBuffer combined=new StringBuffer(""); if(commands!=null) for(int commandIndex=startAt;commandIndex<commands.size();commandIndex++) combined.append(commands.elementAt(commandIndex).toString()+" "); return combined.toString().trim(); } public static String combine(HashSet<?> flags, int startAt) { StringBuffer combined=new StringBuffer(""); if(flags!=null) for(Iterator<?> i=flags.iterator();i.hasNext();) combined.append(i.next().toString()+" "); return combined.toString().trim(); } public static Vector parse(String str) { return parse(str,-1); } public static Vector paramParse(String str) { Vector commands=parse(str); for(int i=0;i<commands.size();i++) { String s=(String)commands.elementAt(i); if(s.startsWith("=")&&(s.length()>1)&&(i>0)) { String prev=(String)commands.elementAt(i-1); commands.setElementAt(prev+s,i-1); commands.removeElementAt(i); i--; } else if(s.endsWith("=")&&(s.length()>1)&&(i<(commands.size()-1))) { String next=(String)commands.elementAt(i+1); commands.setElementAt(s+next,i); commands.removeElementAt(i+1); } else 
if(s.equals("=")&&((i>0)&&(i<(commands.size()-1)))) { String prev=(String)commands.elementAt(i-1); String next=(String)commands.elementAt(i+1); commands.setElementAt(prev+"="+next,i-1); commands.removeElementAt(i); commands.removeElementAt(i+1); i--; } } return commands; } public static Vector parse(String str, int upTo) { Vector commands=new Vector(); if(str==null) return commands; str=str.trim(); while(!str.equals("")) { int spaceIndex=str.indexOf(" "); int strIndex=str.indexOf("\""); String CMD=""; if((strIndex>=0)&&((strIndex<spaceIndex)||(spaceIndex<0))) { int endStrIndex=str.indexOf("\"",strIndex+1); if(endStrIndex>strIndex) { CMD=str.substring(strIndex+1,endStrIndex).trim(); str=str.substring(endStrIndex+1).trim(); } else { CMD=str.substring(strIndex+1).trim(); str=""; } } else if(spaceIndex>=0) { CMD=str.substring(0,spaceIndex).trim(); str=str.substring(spaceIndex+1).trim(); } else { CMD=str.trim(); str=""; } if(!CMD.equals("")) { commands.addElement(CMD); if((upTo>=0)&&(commands.size()>=upTo)) { if(str.length()>0) commands.addElement(str); break; } } } return commands; } public static Vector parseCommas(String s, boolean ignoreNulls) { Vector V=new Vector(); if((s==null)||(s.length()==0)) return V; int x=s.indexOf(","); while(x>=0) { String s2=s.substring(0,x).trim(); s=s.substring(x+1).trim(); if((!ignoreNulls)||(s2.length()>0)) V.addElement(s2); x=s.indexOf(","); } if((!ignoreNulls)||(s.trim().length()>0)) V.addElement(s.trim()); return V; } public static Vector parseCommadFlags(String s, String[] flags) { Vector V=new Vector(); if((s==null)||(s.length()==0)) return V; int x=s.indexOf(","); while(x>=0) { String s2=s.substring(0,x).trim(); s=s.substring(x+1).trim(); if((s2.length()>0)&&(CMParms.containsIgnoreCase(flags, s2))) V.addElement(flags[CMParms.indexOfIgnoreCase(flags, s2)]); x=s.indexOf(","); } if((s.length()>0)&&(CMParms.containsIgnoreCase(flags, s))) V.addElement(flags[CMParms.indexOfIgnoreCase(flags, s)]); return V; } public static Vector 
parseTabs(String s, boolean ignoreNulls) { Vector V=new Vector(); if((s==null)||(s.length()==0)) return V; int x=s.indexOf("\t"); while(x>=0) { String s2=s.substring(0,x).trim(); s=s.substring(x+1).trim(); if((!ignoreNulls)||(s2.length()>0)) V.addElement(s2); x=s.indexOf("\t"); } if((!ignoreNulls)||(s.trim().length()>0)) V.addElement(s.trim()); return V; } public static Vector parseAny(String s, String delimeter, boolean ignoreNulls) { Vector V=new Vector(); if((s==null)||(s.length()==0)) return V; int x=s.indexOf(delimeter); while(x>=0) { String s2=s.substring(0,x).trim(); s=s.substring(x+delimeter.length()).trim(); if((!ignoreNulls)||(s2.length()>0)) V.addElement(s2); x=s.indexOf(delimeter); } if((!ignoreNulls)||(s.trim().length()>0)) V.addElement(s.trim()); return V; } public static Vector parseAnyWords(String s, String delimeter, boolean ignoreNulls) { Vector V=new Vector(); if((s==null)||(s.length()==0)) return V; delimeter=delimeter.toUpperCase(); int x=s.toUpperCase().indexOf(delimeter); while(x>=0) { String s2=s.substring(0,x).trim(); s=s.substring(x+delimeter.length()).trim(); if((!ignoreNulls)||(s2.length()>0)) V.addElement(s2); x=s.indexOf(delimeter); } if((!ignoreNulls)||(s.trim().length()>0)) V.addElement(s.trim()); return V; } public static Vector parseSquiggles(String s) { Vector V=new Vector(); if((s==null)||(s.length()==0)) return V; int x=s.indexOf("~"); while(x>=0) { String s2=s.substring(0,x).trim(); s=s.substring(x+1).trim(); V.addElement(s2); x=s.indexOf("~"); } return V; } public static Vector parseSentences(String s) { Vector V=new Vector(); if((s==null)||(s.length()==0)) return V; int x=s.indexOf("."); while(x>=0) { String s2=s.substring(0,x+1); s=s.substring(x+1); V.addElement(s2); x=s.indexOf("."); } return V; } public static Vector parseSquiggleDelimited(String s, boolean ignoreNulls) { Vector V=new Vector(); if((s==null)||(s.length()==0)) return V; int x=s.indexOf("~"); while(x>=0) { String s2=s.substring(0,x).trim(); 
s=s.substring(x+1).trim(); if((s2.length()>0)||(!ignoreNulls)) V.addElement(s2); x=s.indexOf("~"); } if((s.length()>0)||(!ignoreNulls)) V.addElement(s); return V; } public static Vector parseSemicolons(String s, boolean ignoreNulls) { Vector V=new Vector(); if((s==null)||(s.length()==0)) return V; int x=s.indexOf(";"); while(x>=0) { String s2=s.substring(0,x).trim(); s=s.substring(x+1).trim(); if((!ignoreNulls)||(s2.length()>0)) V.addElement(s2); x=s.indexOf(";"); } if((!ignoreNulls)||(s.trim().length()>0)) V.addElement(s.trim()); return V; } public static Vector parseSpaces(String s, boolean ignoreNulls) { Vector V=new Vector(); if((s==null)||(s.length()==0)) return V; int x=s.indexOf(" "); while(x>=0) { String s2=s.substring(0,x).trim(); s=s.substring(x+1).trim(); if((!ignoreNulls)||(s2.length()>0)) V.addElement(s2); x=s.indexOf(" "); } if((!ignoreNulls)||(s.trim().length()>0)) V.addElement(s.trim()); return V; } public static int numBits(String s) { return ((Integer)getBitWork(s,Integer.MAX_VALUE,2)).intValue(); } public static String cleanBit(String s) { if(s.length()==0) return s; if((s.charAt(0)==' ')||(s.charAt(s.length()-1)==' ')) s=s.trim(); if(s.length()<2) return s.replace('\'','`'); if(s.charAt(0)=='\'') { if(s.charAt(s.length()-1)=='\'') return s.substring(1,s.length()-1).replace('\'','`'); return s.substring(1).replace('\'','`'); } if(s.charAt(0)=='`') { if(s.charAt(s.length()-1)=='`') return s.substring(1,s.length()-1).replace('\'','`'); return s.substring(1).replace('\'','`'); } return s.replace('\'','`'); } public static String getCleanBit(String s, int which) { return cleanBit(getBit(s,which));} public static String getPastBitClean(String s, int which) { return cleanBit(getPastBit(s,which));} public static String getPastBit(String s, int which) { return (String)getBitWork(s,which,1); } public static String getBit(String s, int which) { return (String)getBitWork(s,which,0); } public static Object getBitWork(String s, int which, int op) { int 
currOne=0; int start=-1; char q=' '; char[] cs=s.toCharArray(); for(int c=0;c<cs.length;c++) switch(start) { case -1: switch(cs[c]) { case ' ': case '\t': case '\n': case '\r': break; case '\'': case '`': q=cs[c]; start=c; break; default: q=' '; start=c; break; } break; default: if(cs[c]==q) { if((q!=' ') &&(c<cs.length-1) &&(!Character.isWhitespace(cs[c+1]))) break; if(which==currOne) { switch(op) { case 0: if(q==' ') return new String(cs,start,c-start); return new String(cs,start,c-start+1); case 1: return new String(cs,c+1,cs.length-c-1).trim(); } } currOne++; start=-1; } break; } switch(op) { case 0: if(start<0) return ""; return new String(cs,start,cs.length-start); case 1: return ""; default: if(start<0) return Integer.valueOf(currOne); return Integer.valueOf(currOne+1); } } public static String getParmStr(String text, String key, String defaultVal) { int x=text.toUpperCase().indexOf(key.toUpperCase()); while(x>=0) { if((x==0)||(!Character.isLetter(text.charAt(x-1)))) { while((x<text.length())&&(text.charAt(x)!='=')) { if((text.charAt(x)=='+')||(text.charAt(x)=='-')) return defaultVal; x++; } if(x<text.length()) { boolean endWithQuote=false; while((x<text.length())&&(!Character.isLetterOrDigit(text.charAt(x)))) { if(text.charAt(x)=='\"') { endWithQuote=true; x++; break; } x++; } if(x<text.length()) { text=text.substring(x); x=0; while((x<text.length()) &&((!endWithQuote)&&(!Character.isWhitespace(text.charAt(x)))&&(text.charAt(x)!=';')&&(text.charAt(x)!=',')) ||((endWithQuote)&&(text.charAt(x)!='\"'))) x++; return text.substring(0,x).trim(); } } x=-1; } else x=text.toUpperCase().indexOf(key.toUpperCase(),x+1); } return defaultVal; } private static int[] makeIntArray(int x, int y){ int[] xy=new int[2]; xy[0]=x;xy[1]=y;return xy;} public static int[] getParmCompare(String text, String key, int value) { int x=text.toUpperCase().indexOf(key.toUpperCase()); while(x>=0) { if((x==0)||(!Character.isLetter(text.charAt(x-1)))) { while((x<text.length()) 
&&(text.charAt(x)!='>')
&&(text.charAt(x)!='<')
&&(text.charAt(x)!='!')
&&(text.charAt(x)!='='))
	x++;
if(x<text.length()-1)
{
	final char comp=text.charAt(x);
	boolean andEqual=(text.charAt(x)=='=');
	if(text.charAt(x+1)=='=')
	{
		x++;
		andEqual=true;
	}
	if(x<text.length()-1)
	{
		// skip ahead to the numeric comparison value
		while((x<text.length())&&(!Character.isDigit(text.charAt(x))))
			x++;
		if(x<text.length())
		{
			text=text.substring(x);
			x=0;
			while((x<text.length())&&(Character.isDigit(text.charAt(x))))
				x++;
			final int found=CMath.s_int(text.substring(0,x));
			if(andEqual&&(found==value))
				return makeIntArray(comp,(comp=='!')?-1:1);
			switch(comp)
			{
			case '>': return makeIntArray(comp,(value>found)?1:-1);
			case '<': return makeIntArray(comp,(value<found)?1:-1);
			// BUGFIX: the result was computed but never returned, so a '!='
			// comparison on unequal values always fell through to the
			// failure result below.
			case '!': return makeIntArray(comp,1);
			}
			return makeIntArray(comp,0);
		}
	}
	x=-1;
}
else
	x=text.toUpperCase().indexOf(key.toUpperCase(),x+1);
}
return makeIntArray('\0',0);
}

/**
 * Finds the index of the given (possibly multi-word) string inside the
 * given Vector of words, beginning the search at the given index.
 * A multi-word needle matches only when all of its words appear
 * consecutively in the haystack Vector.
 * @param V the Vector of words to search
 * @param str the word (or space-separated phrase) to find
 * @param start the index to begin searching from
 * @return the index of the first word of the match, or -1 if not found
 */
private static int strIndex(Vector V, String str, int start)
{
	if(str.indexOf(' ')<0)
		return V.indexOf(str,start);
	Vector V2=CMParms.parse(str);
	if(V2.size()==0)
		return -1;
	int x=V.indexOf(V2.firstElement(),start);
	boolean found=false;
	while((x>=0)&&((x+V2.size())<=V.size())&&(!found))
	{
		found=true;
		for(int v2=1;v2<V2.size();v2++)
			if(!V.elementAt(x+v2).equals(V2.elementAt(v2)))
			{
				found=false;
				break;
			}
		if(!found)
			x=V.indexOf(V2.firstElement(),x+1);
	}
	if(found)
		return x;
	return -1;
}

/**
 * Applies one term of a boolean search expression against the word Vector,
 * combining the term's match position with the previous result according
 * to the combiner operator: '&' = and, '|' = or, '>' = must appear after
 * the previous match, '<' = must appear before the previous match.
 * The accumulated term buffer is consumed (cleared) by this call.
 * @param V the Vector of words being searched
 * @param combiner the boolean operator joining this term to the last result
 * @param buf the buffered search term; emptied on use
 * @param lastIndex the match index from the previous term, or -1
 * @return the new match index, or -1 on failure
 */
private static int stringContains(Vector V, char combiner, StringBuffer buf, int lastIndex)
{
	String str=buf.toString().trim();
	if(str.length()==0)
		return lastIndex;
	buf.setLength(0);
	switch(combiner)
	{
	case '&':
		lastIndex=strIndex(V,str,0);
		return lastIndex;
	case '|':
		if(lastIndex>=0)
			return lastIndex;
		return strIndex(V,str,0);
	case '>':
		if(lastIndex<0)
			return lastIndex;
		return strIndex(V,str,lastIndex<0?0:lastIndex+1);
	case '<':
	{
		if(lastIndex<0)
			return lastIndex;
		int newIndex=strIndex(V,str,0);
		if(newIndex<lastIndex)
			return newIndex;
		return -1;
	}
	}
	return -1;
}

private static int stringContains(Vector V, char[] str, int[] index, int depth)
{ StringBuffer buf=new StringBuffer(""); int lastIndex=0; boolean quoteMode=false; char combiner='&'; for(int i=index[0];i<str.length;i++) { switch(str[i]) { case ')': if((depth>0)&&(!quoteMode)) { index[0]=i; return stringContains(V,combiner,buf,lastIndex); } buf.append(str[i]); break; case ' ': buf.append(str[i]); break; case '&': case '|': case '>': case '<': if(quoteMode) buf.append(str[i]); else { lastIndex=stringContains(V,combiner,buf,lastIndex); combiner=str[i]; } break; case '(': if(!quoteMode) { lastIndex=stringContains(V,combiner,buf,lastIndex); index[0]=i+1; int newIndex=stringContains(V,str,index,depth+1); i=index[0]; switch(combiner) { case '&': if((lastIndex<0)||(newIndex<0)) lastIndex=-1; break; case '|': if(newIndex>=0) lastIndex=newIndex; break; case '>': if(newIndex<=lastIndex) lastIndex=-1; else lastIndex=newIndex; break; case '<': if((newIndex<0)||(newIndex>=lastIndex)) lastIndex=-1; else lastIndex=newIndex; break; } } else buf.append(str[i]); break; case '\"': quoteMode=(!quoteMode); break; case '\\': if(i<str.length-1) { buf.append(str[i+1]); i++; } break; default: if(Character.isLetter(str[i])) buf.append(Character.toLowerCase(str[i])); else buf.append(str[i]); break; } } return stringContains(V,combiner,buf,lastIndex); } public static Hashtable parseEQParms(String str, String[] parmList) { Hashtable h=new Hashtable(); int lastEQ=-1; String lastParm=null; for(int x=0;x<str.length();x++) { char c=Character.toUpperCase(str.charAt(x)); if(Character.isLetter(c)) for(int p=0;p<parmList.length;p++) if((Character.toUpperCase(parmList[p].charAt(0)) == c) &&((str.length()-x) >= parmList[p].length()) &&(str.substring(x,x+parmList[p].length()).equalsIgnoreCase(parmList[p]))) { int chkX=x+parmList[p].length(); while((chkX<str.length())&&(Character.isWhitespace(str.charAt(chkX)))) chkX++; if((chkX<str.length())&&(str.charAt(chkX)=='=')) { chkX++; if((lastParm!=null)&&(lastEQ>0)) { String val=str.substring(lastEQ,x).trim(); 
if(val.startsWith("\"")&&(val.endsWith("\""))) val=val.substring(1,val.length()-1).trim(); h.put(lastParm,val); } lastParm=parmList[p]; x=chkX; lastEQ=chkX; } } } if((lastParm!=null)&&(lastEQ>0)) { String val=str.substring(lastEQ).trim(); if(val.startsWith("\"")&&(val.endsWith("\""))) val=val.substring(1,val.length()-1).trim(); h.put(lastParm,val); } return h; } public static Hashtable parseEQParms(String parms) { Hashtable h=new Hashtable(); int state=0; int start=-1; String parmName=null; int lastPossibleStart=-1; boolean lastWasWhitespace=false; StringBuffer str=new StringBuffer(parms); for(int x=0;x<=str.length();x++) { char c=(x==str.length())?'\n':str.charAt(x); switch(state) { case 0: if((c=='_')||(Character.isLetter(c))) { start=x; state=1; parmName=null; } break; case 1: if(c=='=') { parmName=str.substring(start,x).toUpperCase().trim(); state=2; } else if(Character.isWhitespace(c)) { parmName=str.substring(start,x).toUpperCase().trim(); start=x; } break; case 2: if((c=='\"')||(c=='\n')) { state=3; start=x+1; lastPossibleStart=start; } else if(c=='=') { // do nothing, this is a do-over } else if(!Character.isWhitespace(c)) { lastWasWhitespace=false; state=4; start=x; lastPossibleStart=start; } break; case 3: if(c=='\\') str.deleteCharAt(x); else if(c=='\"') { state=0; h.put(parmName,str.substring(start,x)); parmName=null; } break; case 4: if(c=='\\') str.deleteCharAt(x); else if(c=='=') { String value=str.substring(start,x).trim(); if(value.length()==0) state=2; else { h.put(parmName,str.substring(start,lastPossibleStart).trim()); parmName=str.substring(lastPossibleStart,x).toUpperCase().trim(); state=2; } } else if(c=='\n') { state=0; h.put(parmName,str.substring(start,x)); parmName=null; } else if(Character.isWhitespace(c)) lastWasWhitespace=true; else if(lastWasWhitespace) { lastWasWhitespace=false; lastPossibleStart=x; } break; } } return h; } public static Hashtable parseEQParms(Vector parms, int start, int end) { Hashtable h=new Hashtable(); for(int 
x=0;x<parms.size();x++) h.putAll(parseEQParms((String)parms.elementAt(x))); return h; } public static int stringContains(String str1, String str2) { StringBuffer buf1=new StringBuffer(str1.toLowerCase()); for(int i=buf1.length()-1;i>=0;i--) if((buf1.charAt(i)!=' ') &&(buf1.charAt(i)!='\'') &&(buf1.charAt(i)!='\"') &&(buf1.charAt(i)!='`') &&(!Character.isLetterOrDigit(buf1.charAt(i)))) buf1.setCharAt(i,' '); Vector V=CMParms.parse(buf1.toString()); return stringContains(V,str2.toCharArray(),new int[]{0},0); } public static int getParmPlus(String text, String key) { int x=text.toUpperCase().indexOf(key.toUpperCase()); while(x>=0) { if((x==0)||(!Character.isLetter(text.charAt(x-1)))) { while((x<text.length())&&(text.charAt(x)!='+')&&(text.charAt(x)!='-')) { if(text.charAt(x)=='=') return 0; x++; } if(x<text.length()) { char pm=text.charAt(x); while((x<text.length())&&(!Character.isDigit(text.charAt(x)))) x++; if(x<text.length()) { text=text.substring(x); x=0; while((x<text.length())&&(Character.isDigit(text.charAt(x)))) x++; if(pm=='+') return CMath.s_int(text.substring(0,x)); return -CMath.s_int(text.substring(0,x)); } } x=-1; } else x=text.toUpperCase().indexOf(key.toUpperCase(),x+1); } return 0; } public static double getParmDoublePlus(String text, String key) { int x=text.toUpperCase().indexOf(key.toUpperCase()); while(x>=0) { if((x==0)||(!Character.isLetter(text.charAt(x-1)))) { while((x<text.length())&&(text.charAt(x)!='+')&&(text.charAt(x)!='-')) { if(text.charAt(x)=='=') return 0.0; x++; } if(x<text.length()) { char pm=text.charAt(x); while((x<text.length()) &&(!Character.isDigit(text.charAt(x))) &&(text.charAt(x)!='.')) x++; if(x<text.length()) { text=text.substring(x); x=0; while((x<text.length()) &&((Character.isDigit(text.charAt(x)))||(text.charAt(x)=='.'))) x++; if(text.substring(0,x).indexOf(".")<0) { if(pm=='+') return (double)CMath.s_int(text.substring(0,x)); return (double)(-CMath.s_int(text.substring(0,x))); } if(pm=='+') return 
CMath.s_double(text.substring(0,x)); return -CMath.s_double(text.substring(0,x)); } } x=-1; } else x=text.toUpperCase().indexOf(key.toUpperCase(),x+1); } return 0.0; } public static double getParmDouble(String text, String key, double defaultValue) { int x=text.toUpperCase().indexOf(key.toUpperCase()); while(x>=0) { if((x==0)||(!Character.isLetter(text.charAt(x-1)))) { while((x<text.length())&&(text.charAt(x)!='=')) x++; if(x<text.length()) { while((x<text.length()) &&(!Character.isDigit(text.charAt(x))) &&(text.charAt(x)!='.')) x++; if(x<text.length()) { text=text.substring(x); x=0; while((x<text.length()) &&((Character.isDigit(text.charAt(x)))||(text.charAt(x)=='.'))) x++; if(text.substring(0,x).indexOf(".")<0) return (double)CMath.s_long(text.substring(0,x)); return CMath.s_double(text.substring(0,x)); } } x=-1; } else x=text.toUpperCase().indexOf(key.toUpperCase(),x+1); } return defaultValue; } public static int getParmInt(String text, String key, int defaultValue) { int x=text.toUpperCase().indexOf(key.toUpperCase()); while(x>=0) { if((x==0)||(!Character.isLetter(text.charAt(x-1)))) { while((x<text.length())&&(text.charAt(x)!='=')&&(!Character.isDigit(text.charAt(x)))) { if((text.charAt(x)=='+')||(text.charAt(x)=='-')) return defaultValue; x++; } if((x<text.length())&&(text.charAt(x)=='=')) { while((x<text.length())&&(!Character.isDigit(text.charAt(x)))) x++; if(x<text.length()) { text=text.substring(x); x=0; while((x<text.length())&&(Character.isDigit(text.charAt(x)))) x++; return CMath.s_int(text.substring(0,x)); } } x=-1; } else x=text.toUpperCase().indexOf(key.toUpperCase(),x+1); } return defaultValue; } public static boolean getParmBool(String text, String key, boolean defaultValue) { int x=text.toUpperCase().indexOf(key.toUpperCase()); while(x>=0) { if((x==0)||(!Character.isLetter(text.charAt(x-1)))) { while((x<text.length())&&(text.charAt(x)!='=')) x++; if((x<text.length())&&(text.charAt(x)=='=')) { String s=text.substring(x+1).trim(); 
if(Character.toUpperCase(s.charAt(0))=='T') return true; if(Character.toUpperCase(s.charAt(0))=='T') return false; } } x=text.toUpperCase().indexOf(key.toUpperCase(),x+1); } return defaultValue; } public static String[] toStringArray(Vector V) { if((V==null)||(V.size()==0)){ String[] s=new String[0]; return s; } String[] s=new String[V.size()]; for(int v=0;v<V.size();v++) s[v]=V.elementAt(v).toString(); return s; } public static String[] toStringArray(Object[] O) { if(O==null) return new String[0]; String[] s=new String[O.length]; for(int o=0;o<O.length;o++) s[o]=(O[o]!=null)?O[o].toString():""; return s; } public static long[] toLongArray(Vector V) { if((V==null)||(V.size()==0)){ long[] s=new long[0]; return s; } long[] s=new long[V.size()]; for(int v=0;v<V.size();v++) s[v]=CMath.s_long(V.elementAt(v).toString()); return s; } public static int[] toIntArray(Vector V) { if((V==null)||(V.size()==0)){ int[] s=new int[0]; return s; } int[] s=new int[V.size()]; for(int v=0;v<V.size();v++) s[v]=CMath.s_int(V.elementAt(v).toString()); return s; } public static String toSemicolonList(byte[] bytes) { StringBuffer str=new StringBuffer(""); for(int b=0;b<bytes.length;b++) str.append(Byte.toString(bytes[b])+(b<(bytes.length-1)?";":"")); return str.toString(); } public static String toSemicolonList(String[] bytes) { StringBuffer str=new StringBuffer(""); for(int b=0;b<bytes.length;b++) str.append(bytes[b]+(b<(bytes.length-1)?";":"")); return str.toString(); } public static String toSemicolonList(Object[] bytes) { StringBuffer str=new StringBuffer(""); for(int b=0;b<bytes.length;b++) str.append(bytes[b]+(b<(bytes.length-1)?";":"")); return str.toString(); } public static String toSemicolonList(Enumeration bytes) { StringBuffer str=new StringBuffer(""); Object o; for(;bytes.hasMoreElements();) { o=(Object)bytes.nextElement(); str.append(o.toString()+(bytes.hasMoreElements()?";":"")); } return str.toString(); } public static String toSemicolonList(Vector bytes) { StringBuffer 
str=new StringBuffer(""); for(int b=0;b<bytes.size();b++) str.append(bytes.elementAt(b)+(b<(bytes.size()-1)?";":"")); return str.toString(); } public static String toSafeSemicolonList(Vector list) { return toSafeSemicolonList(list.toArray()); } public static String toSafeSemicolonList(Object[] list) { StringBuffer buf1=new StringBuffer(""); StringBuffer s=null; for(int l=0;l<list.length;l++) { s=new StringBuffer(list[l].toString()); for(int i=0;i<s.length();i++) switch(s.charAt(i)) { case '\\': case ';': s.insert(i,'\\'); i++; break; } buf1.append(s.toString()); if(l<list.length-1) buf1.append(';'); } return buf1.toString(); } public static Vector parseSafeSemicolonList(String list, boolean ignoreNulls) { if(list==null) return new Vector(); StringBuffer buf1=new StringBuffer(list); int lastDex=0; Vector V=new Vector(); for(int l=0;l<buf1.length();l++) switch(buf1.charAt(l)) { case '\\': buf1.delete(l,l+1); break; case ';': if((!ignoreNulls)||(lastDex<l)) V.addElement(buf1.substring(lastDex,l)); lastDex=l+1; break; } if((!ignoreNulls)||(lastDex<buf1.length())); V.addElement(buf1.substring(lastDex,buf1.length())); return V; } public static byte[] fromByteList(String str) { Vector V=CMParms.parseSemicolons(str,true); if(V.size()>0) { byte[] bytes=new byte[V.size()]; for(int b=0;b<V.size();b++) bytes[b]=Byte.parseByte((String)V.elementAt(b)); return bytes; } return new byte[0]; } public static String[] toStringArray(HashSet V) { if((V==null)||(V.size()==0)){ String[] s=new String[0]; return s; } String[] s=new String[V.size()]; int v=0; for(Iterator i=V.iterator();i.hasNext();) s[v++]=(i.next()).toString(); return s; } public static String toStringList(String[] V) { if((V==null)||(V.length==0)){ return ""; } StringBuffer s=new StringBuffer(""); for(int v=0;v<V.length;v++) s.append(", "+V[v]); if(s.length()==0) return ""; return s.toString().substring(2); } public static String toStringList(Object[] V) { if((V==null)||(V.length==0)){ return ""; } StringBuffer s=new 
StringBuffer(""); for(int v=0;v<V.length;v++) s.append(", "+V[v]); if(s.length()==0) return ""; return s.toString().substring(2); } public static String toStringList(Enumeration e) { if(!e.hasMoreElements()) return ""; StringBuffer s=new StringBuffer(""); Object o=null; for(;e.hasMoreElements();) { o=e.nextElement(); s.append(", "+o); } if(s.length()==0) return ""; return s.toString().substring(2); } public static String toEnvironmentalStringList(Enumeration e) { if(!e.hasMoreElements()) return ""; StringBuffer s=new StringBuffer(""); Environmental o=null; for(;e.hasMoreElements();) { o=(Environmental)e.nextElement(); s.append(", "+o.name()); } if(s.length()==0) return ""; return s.toString().substring(2); } public static String toCMObjectStringList(Enumeration e) { if(!e.hasMoreElements()) return ""; StringBuffer s=new StringBuffer(""); CMObject o=null; for(;e.hasMoreElements();) { o=(CMObject)e.nextElement(); s.append(", "+o.ID()); } if(s.length()==0) return ""; return s.toString().substring(2); } public static String toStringList(long[] V) { if((V==null)||(V.length==0)){ return ""; } StringBuffer s=new StringBuffer(""); for(int v=0;v<V.length;v++) s.append(", "+V[v]); if(s.length()==0) return ""; return s.toString().substring(2); } public static String toStringList(boolean[] V) { if((V==null)||(V.length==0)){ return ""; } StringBuffer s=new StringBuffer(""); for(int v=0;v<V.length;v++) s.append(", "+V[v]); if(s.length()==0) return ""; return s.toString().substring(2); } public static String toStringList(byte[] V) { if((V==null)||(V.length==0)){ return ""; } StringBuffer s=new StringBuffer(""); for(int v=0;v<V.length;v++) s.append(", "+((int)V[v])); if(s.length()==0) return ""; return s.toString().substring(2); } public static String toStringList(char[] V) { if((V==null)||(V.length==0)){ return ""; } StringBuffer s=new StringBuffer(""); for(int v=0;v<V.length;v++) s.append(", "+((long)V[v])); if(s.length()==0) return ""; return s.toString().substring(2); } public 
static String toStringList(int[] V) { if((V==null)||(V.length==0)){ return ""; } StringBuffer s=new StringBuffer(""); for(int v=0;v<V.length;v++) s.append(", "+V[v]); if(s.length()==0) return ""; return s.toString().substring(2); } public static String toStringList(double[] V) { if((V==null)||(V.length==0)){ return ""; } StringBuffer s=new StringBuffer(""); for(int v=0;v<V.length;v++) s.append(", "+V[v]); if(s.length()==0) return ""; return s.toString().substring(2); } public static String toStringList(Vector V) { if((V==null)||(V.size()==0)){ return ""; } StringBuffer s=new StringBuffer(""); for(int v=0;v<V.size();v++) s.append(", "+V.elementAt(v).toString()); if(s.length()==0) return ""; return s.toString().substring(2); } public static String toStringList(HashSet V) { if((V==null)||(V.size()==0)){ return ""; } StringBuffer s=new StringBuffer(""); for(Iterator i=V.iterator();i.hasNext();) s.append(", "+i.next().toString()); if(s.length()==0) return ""; return s.toString().substring(2); } public static boolean equalVectors(Vector V1, Vector V2) { if((V1==null)&&(V2==null)) return true; if((V1==null)||(V2==null)) return false; if(V1.size()!=V2.size()) return false; for(int v=0;v<V1.size();v++) if(!V1.elementAt(v).equals(V2.elementAt(v))) return false; return true; } public static Hashtable makeHashtable(String[][] O) { Hashtable H =new Hashtable(O!=null?O.length:0); if(O!=null) for(int o=0;o<O.length;o++) H.put(O[o][0].toUpperCase().trim(),O[o][1]); return H; } public static Hashtable makeHashtable(Object[][] O) { Hashtable H =new Hashtable(O!=null?O.length:0); if(O!=null) for(int o=0;o<O.length;o++) H.put(O[o][0],O[o][1]); return H; } public static Vector makeVector(Object[] O) { Vector V=new Vector(O!=null?O.length:0); if(O!=null) for(int s=0;s<O.length;s++) V.addElement(O[s]); return V; } public static Vector makeVector(Enumeration e) { Vector V=new Vector(); if(e!=null) for(;e.hasMoreElements();) V.addElement(e.nextElement()); return V; } public static Vector 
makeVector(String[] O) { Vector V=new Vector(O!=null?O.length:0); if(O!=null) for(int s=0;s<O.length;s++) V.addElement(O[s]); return V; } public static HashSet makeHashSet(Object[] O) { HashSet V=new HashSet(); if(O!=null) for(int s=0;s<O.length;s++) V.add(O[s]); return V; } public static HashSet makeHashSet(String[] O) { HashSet V=new HashSet(); if(O!=null) for(int s=0;s<O.length;s++) V.add(O[s]); return V; } public static HashSet makeHashSet(Vector O) { HashSet V=new HashSet(); if(O!=null) for(int s=0;s<O.size();s++) V.add(O.elementAt(s)); return V; } public static HashSet makeHashSet(Enumeration E) { HashSet V=new HashSet(); if(E!=null) for(;E.hasMoreElements();) V.add(E.nextElement()); return V; } public static Vector makeVector() { return new Vector();} public static Vector makeVector(Object O) { Vector V=new Vector(); V.addElement(O); return V;} public static Vector makeVector(Object O, Object O2) { Vector V=new Vector(); V.addElement(O); V.addElement(O2); return V;} public static Vector makeVector(Object O, Object O2, Object O3) { Vector V=new Vector(); V.addElement(O); V.addElement(O2); V.addElement(O3); return V;} public static Vector makeVector(Object O, Object O2, Object O3, Object O4) { Vector V=new Vector(); V.addElement(O); V.addElement(O2); V.addElement(O3); V.addElement(O4); return V;} public static HashSet makeHashSet(){return new HashSet();} public static HashSet makeHashSet(Object O) {HashSet H=new HashSet(); H.add(O); return H;} public static HashSet makeHashSet(Object O, Object O2) {HashSet H=new HashSet(); H.add(O); H.add(O2); return H;} public static HashSet makeHashSet(Object O, Object O2, Object O3) {HashSet H=new HashSet(); H.add(O); H.add(O2); H.add(O3); return H;} public static HashSet makeHashSet(Object O, Object O2, Object O3, Object O4) {HashSet H=new HashSet(); H.add(O); H.add(O2); H.add(O3); H.add(O4); return H;} public static String[] toStringArray(Hashtable V) { if((V==null)||(V.size()==0)){ String[] s=new String[0]; return s; } 
String[] s=new String[V.size()]; int v=0; for(Enumeration e=V.keys();e.hasMoreElements();) { String KEY=(String)e.nextElement(); s[v]=(String)V.get(KEY); v++; } return s; } public static String[] appendToArray(String[] front, String[] back) { if(back==null) return front; if(front==null) return back; if(back.length==0) return front; String[] newa = Arrays.copyOf(front, front.length + back.length); for(int i=0;i<back.length;i++) newa[newa.length-1-i]=back[back.length-1-i]; return newa; } public static void addToVector(Vector from, Vector to) { if(from!=null) for(int i=0;i<from.size();i++) to.addElement(from.elementAt(i)); } public static void delFromVector(Vector del, Vector from) { if(del!=null) for(int i=0;i<del.size();i++) from.removeElement(del.elementAt(i)); } public static boolean vectorOfStringContainsIgnoreCase(Vector V, String s) { for(int v=0;v<V.size();v++) if(s.equalsIgnoreCase((String)V.elementAt(v))) return true; return false; } public static String toStringList(Hashtable V) { if((V==null)||(V.size()==0)){ return ""; } StringBuffer s=new StringBuffer(""); for(Enumeration e=V.keys();e.hasMoreElements();) { String KEY=(String)e.nextElement(); s.append(KEY+"="+(V.get(KEY).toString())+"/"); } return s.toString(); } public static Vector copyVector(Vector V) { Vector V2=new Vector(); for(int v=0;v<V.size();v++) { Object h=V.elementAt(v); if(h instanceof Vector) V2.addElement(copyVector((Vector)h)); else V2.addElement(h); } return V2; } public static int indexOf(String[] supported, String expertise) { if(supported==null) return -1; if(expertise==null) return -1; for(int i=0;i<supported.length;i++) if(supported[i].equals(expertise)) return i; return -1; } public static int indexOfIgnoreCase(Enumeration supported, String key) { if(supported==null) return -1; int index = -1; for(;supported.hasMoreElements();) { if(supported.nextElement().toString().equalsIgnoreCase(key)) return index; index++; } return -1; } public static int indexOf(int[] supported, int x) { 
if(supported==null) return -1; for(int i=0;i<supported.length;i++) if(supported[i]==x) return i; return -1; } public static int indexOf(long[] supported, long x) { if(supported==null) return -1; for(int i=0;i<supported.length;i++) if(supported[i]==x) return i; return -1; } public static int indexOf(Enumeration supported, Object key) { if(supported==null) return -1; int index = -1; for(;supported.hasMoreElements();) { if(supported.nextElement().equals(key)) return index; index++; } return -1; } public static int indexOfIgnoreCase(Iterator supported, String key) { if(supported==null) return -1; int index = -1; for(;supported.hasNext();) { if(supported.next().toString().equalsIgnoreCase(key)) return index; index++; } return -1; } public static int indexOf(Iterator supported, Object key) { if(supported==null) return -1; int index = -1; for(;supported.hasNext();) { if(supported.next().equals(key)) return index; index++; } return -1; } public static int indexOfIgnoreCase(String[] supported, String expertise) { if(supported==null) return -1; if(expertise==null) return -1; for(int i=0;i<supported.length;i++) if(supported[i].equalsIgnoreCase(expertise)) return i; return -1; } public static boolean contains(String[] supported, String expertise) { return indexOf(supported,expertise)>=0;} public static boolean containsIgnoreCase(String[] supported, String expertise) { return indexOfIgnoreCase(supported,expertise)>=0;} public static int indexOf(Object[] supported, Object expertise) { if(supported==null) return -1; if(expertise==null) return -1; for(int i=0;i<supported.length;i++) if(supported[i].equals(expertise)) return i; return -1; } public static boolean contains(Object[] supported, Object expertise) { return indexOf(supported,expertise)>=0;} public static boolean contains(int[] supported, int x) { return indexOf(supported,x)>=0;} public static int startsWith(String[] supported, String expertise) { if(supported==null) return 0; if(expertise==null) return -1; for(int 
i=0;i<supported.length;i++) if(supported[i].startsWith(expertise)) return i; return -1; } public static int startsWithIgnoreCase(String[] supported, String expertise) { if(supported==null) return 0; if(expertise==null) return -1; for(int i=0;i<supported.length;i++) if(supported[i].toUpperCase().startsWith(expertise.toUpperCase())) return i; return -1; } public static boolean startsAnyWith(String[] supported, String expertise) { return startsWith(supported,expertise)>=0; } public static boolean startsAnyWithIgnoreCase(String[] supported, String expertise) { return startsWithIgnoreCase(supported,expertise)>=0; } public static int endsWith(String[] supported, String expertise) { if(supported==null) return 0; if(expertise==null) return -1; for(int i=0;i<supported.length;i++) if(supported[i].endsWith(expertise)) return i; return -1; } public static int endsWithIgnoreCase(String[] supported, String expertise) { if(supported==null) return 0; if(expertise==null) return -1; for(int i=0;i<supported.length;i++) if(supported[i].toUpperCase().endsWith(expertise.toUpperCase())) return i; return -1; } public static boolean endsAnyWith(String[] supported, String expertise) { return endsWith(supported,expertise)>=0; } public static boolean endsAnyWithIgnoreCase(String[] supported, String expertise) { return endsWithIgnoreCase(supported,expertise)>=0; } public static Vector denumerate(Enumeration e) { Vector V=new Vector(); for(;e.hasMoreElements();) V.addElement(e.nextElement()); return V; } /** constant value representing an undefined/unimplemented miscText/parms format.*/ public static final String FORMAT_UNDEFINED="{UNDEFINED}"; /** constant value representing an always empty miscText/parms format.*/ public static final String FORMAT_EMPTY="{EMPTY}"; }
/* * ConnectBot: simple, powerful, open-source SSH client for Android * Copyright 2007 Kenny Root, Jeffrey Sharkey * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package sk.vx.connectbot.bean; import de.mud.terminal.VDUBuffer; /** * @author Kenny Root * Keep track of a selection area for the terminal copying mechanism. * If the orientation is flipped one way, swap the bottom and top or * left and right to keep it in the correct orientation. */ public class SelectionArea { private int top; private int bottom; private int left; private int right; private int maxColumns; private int maxRows; private boolean selectingOrigin; public SelectionArea() { reset(); } public final void reset() { top = left = bottom = right = 0; selectingOrigin = true; } /** * @param columns * @param rows */ public void setBounds(int columns, int rows) { maxColumns = columns - 1; maxRows = rows - 1; } private int checkBounds(int value, int max) { if (value < 0) return 0; else if (value > max) return max; else return value; } public boolean isSelectingOrigin() { return selectingOrigin; } public void finishSelectingOrigin() { selectingOrigin = false; } public void decrementRow() { if (selectingOrigin) setTop(top - 1); else setBottom(bottom - 1); } public void incrementRow() { if (selectingOrigin) setTop(top + 1); else setBottom(bottom + 1); } public void setRow(int row) { if (selectingOrigin) setTop(row); else setBottom(row); } private void setTop(int top) { this.top = bottom = checkBounds(top, maxRows); } 
public int getTop() { return Math.min(top, bottom); } private void setBottom(int bottom) { this.bottom = checkBounds(bottom, maxRows); } public int getBottom() { return Math.max(top, bottom); } public void decrementColumn() { if (selectingOrigin) setLeft(left - 1); else setRight(right - 1); } public void incrementColumn() { if (selectingOrigin) setLeft(left + 1); else setRight(right + 1); } public void setColumn(int column) { if (selectingOrigin) setLeft(column); else setRight(column); } private void setLeft(int left) { this.left = right = checkBounds(left, maxColumns); } public int getLeft() { return Math.min(left, right); } private void setRight(int right) { this.right = checkBounds(right, maxColumns); } public int getRight() { return Math.max(left, right); } public String copyFrom(VDUBuffer vb) { int size = (getRight() - getLeft() + 1) * (getBottom() - getTop() + 1); StringBuffer buffer = new StringBuffer(size); for(int y = getTop(); y <= getBottom(); y++) { int lastNonSpace = buffer.length(); for (int x = getLeft(); x <= getRight(); x++) { // only copy printable chars char c = vb.getChar(x, y); if (!Character.isDefined(c) || (Character.isISOControl(c) && c != '\t')) c = ' '; if (c != ' ') lastNonSpace = buffer.length(); buffer.append(c); } // Don't leave a bunch of spaces in our copy buffer. if (buffer.length() > lastNonSpace) buffer.delete(lastNonSpace + 1, buffer.length()); if (y != bottom) buffer.append("\n"); } return buffer.toString(); } @Override public String toString() { StringBuilder buffer = new StringBuilder(); buffer.append("SelectionArea[top="); buffer.append(top); buffer.append(", bottom="); buffer.append(bottom); buffer.append(", left="); buffer.append(left); buffer.append(", right="); buffer.append(right); buffer.append(", maxColumns="); buffer.append(maxColumns); buffer.append(", maxRows="); buffer.append(maxRows); buffer.append(", isSelectingOrigin="); buffer.append(isSelectingOrigin()); buffer.append("]"); return buffer.toString(); } }
/**
 */
package gluemodel.substationStandard.LNNodes.LNGroupZ.util;

import gluemodel.substationStandard.LNNodes.DomainLNs.DomainLN;

import gluemodel.substationStandard.LNNodes.LNGroupZ.*;

import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EPackage;

import org.eclipse.emf.ecore.util.Switch;

// NOTE(review): EMF-generated dispatch class; members tagged @generated are
// overwritten on regeneration -- change the model/genmodel, not this file.
/**
 * <!-- begin-user-doc -->
 * The <b>Switch</b> for the model's inheritance hierarchy.
 * It supports the call {@link #doSwitch(EObject) doSwitch(object)}
 * to invoke the <code>caseXXX</code> method for each class of the model,
 * starting with the actual class of the object
 * and proceeding up the inheritance hierarchy
 * until a non-null result is returned,
 * which is the result of the switch.
 * <!-- end-user-doc -->
 * @see gluemodel.substationStandard.LNNodes.LNGroupZ.LNGroupZPackage
 * @generated
 */
public class LNGroupZSwitch<T> extends Switch<T> {
	/**
	 * The cached model package
	 * @generated
	 */
	protected static LNGroupZPackage modelPackage;

	/**
	 * Creates an instance of the switch.
	 * @generated
	 */
	public LNGroupZSwitch() {
		if (modelPackage == null) {
			modelPackage = LNGroupZPackage.eINSTANCE;
		}
	}

	/**
	 * Checks whether this is a switch for the given package.
	 * @param ePackage the package in question.
	 * @return whether this is a switch for the given package.
	 * @generated
	 */
	@Override
	protected boolean isSwitchFor(EPackage ePackage) {
		return ePackage == modelPackage;
	}

	/**
	 * Calls <code>caseXXX</code> for each class of the model until one returns a non null result; it yields that result.
	 * Each concrete case falls back to caseGroupZ, then caseDomainLN, then defaultCase,
	 * mirroring the model's inheritance hierarchy.
	 * @return the first non-null result returned by a <code>caseXXX</code> call.
	 * @generated
	 */
	@Override
	protected T doSwitch(int classifierID, EObject theEObject) {
		switch (classifierID) {
			case LNGroupZPackage.GROUP_Z: { GroupZ groupZ = (GroupZ)theEObject; T result = caseGroupZ(groupZ); if (result == null) result = caseDomainLN(groupZ); if (result == null) result = defaultCase(theEObject); return result; }
			case LNGroupZPackage.ZAXN: { ZAXN zaxn = (ZAXN)theEObject; T result = caseZAXN(zaxn); if (result == null) result = caseGroupZ(zaxn); if (result == null) result = caseDomainLN(zaxn); if (result == null) result = defaultCase(theEObject); return result; }
			case LNGroupZPackage.ZGIL: { ZGIL zgil = (ZGIL)theEObject; T result = caseZGIL(zgil); if (result == null) result = caseGroupZ(zgil); if (result == null) result = caseDomainLN(zgil); if (result == null) result = defaultCase(theEObject); return result; }
			case LNGroupZPackage.ZBAT: { ZBAT zbat = (ZBAT)theEObject; T result = caseZBAT(zbat); if (result == null) result = caseGroupZ(zbat); if (result == null) result = caseDomainLN(zbat); if (result == null) result = defaultCase(theEObject); return result; }
			case LNGroupZPackage.ZLIN: { ZLIN zlin = (ZLIN)theEObject; T result = caseZLIN(zlin); if (result == null) result = caseGroupZ(zlin); if (result == null) result = caseDomainLN(zlin); if (result == null) result = defaultCase(theEObject); return result; }
			case LNGroupZPackage.ZMOT: { ZMOT zmot = (ZMOT)theEObject; T result = caseZMOT(zmot); if (result == null) result = caseGroupZ(zmot); if (result == null) result = caseDomainLN(zmot); if (result == null) result = defaultCase(theEObject); return result; }
			case LNGroupZPackage.ZREA: { ZREA zrea = (ZREA)theEObject; T result = caseZREA(zrea); if (result == null) result = caseGroupZ(zrea); if (result == null) result = caseDomainLN(zrea); if (result == null) result = defaultCase(theEObject); return result; }
			case LNGroupZPackage.ZBSH: { ZBSH zbsh = (ZBSH)theEObject; T result = caseZBSH(zbsh); if (result == null) result = caseGroupZ(zbsh); if (result == null) result = caseDomainLN(zbsh); if (result == null) result = defaultCase(theEObject); return result; }
			case LNGroupZPackage.ZRRC: { ZRRC zrrc = (ZRRC)theEObject; T result = caseZRRC(zrrc); if (result == null) result = caseGroupZ(zrrc); if (result == null) result = caseDomainLN(zrrc); if (result == null) result = defaultCase(theEObject); return result; }
			case LNGroupZPackage.ZSAR: { ZSAR zsar = (ZSAR)theEObject; T result = caseZSAR(zsar); if (result == null) result = caseGroupZ(zsar); if (result == null) result = caseDomainLN(zsar); if (result == null) result = defaultCase(theEObject); return result; }
			case LNGroupZPackage.ZTCF: { ZTCF ztcf = (ZTCF)theEObject; T result = caseZTCF(ztcf); if (result == null) result = caseGroupZ(ztcf); if (result == null) result = caseDomainLN(ztcf); if (result == null) result = defaultCase(theEObject); return result; }
			case LNGroupZPackage.ZTCR: { ZTCR ztcr = (ZTCR)theEObject; T result = caseZTCR(ztcr); if (result == null) result = caseGroupZ(ztcr); if (result == null) result = caseDomainLN(ztcr); if (result == null) result = defaultCase(theEObject); return result; }
			case LNGroupZPackage.ZCAB: { ZCAB zcab = (ZCAB)theEObject; T result = caseZCAB(zcab); if (result == null) result = caseGroupZ(zcab); if (result == null) result = caseDomainLN(zcab); if (result == null) result = defaultCase(theEObject); return result; }
			case LNGroupZPackage.ZCAP: { ZCAP zcap = (ZCAP)theEObject; T result = caseZCAP(zcap); if (result == null) result = caseGroupZ(zcap); if (result == null) result = caseDomainLN(zcap); if (result == null) result = defaultCase(theEObject); return result; }
			case LNGroupZPackage.ZCON: { ZCON zcon = (ZCON)theEObject; T result = caseZCON(zcon); if (result == null) result = caseGroupZ(zcon); if (result == null) result = caseDomainLN(zcon); if (result == null) result = defaultCase(theEObject); return result; }
			case LNGroupZPackage.ZGEN: { ZGEN zgen = (ZGEN)theEObject; T result = caseZGEN(zgen); if (result == null) result = caseGroupZ(zgen); if (result == null) result = caseDomainLN(zgen); if (result == null) result = defaultCase(theEObject); return result; }
			default: return defaultCase(theEObject);
		}
	}

	// Each caseXXX below returns null by default; a non-null result terminates the switch.
	// Override in a subclass to handle the corresponding model class.

	/** Returns the result of interpreting the object as an instance of '<em>Group Z</em>'. @param object the target of the switch. @return the result, or null to continue the switch. @generated */
	public T caseGroupZ(GroupZ object) { return null; }

	/** Returns the result of interpreting the object as an instance of '<em>ZAXN</em>'. @param object the target of the switch. @return the result, or null to continue the switch. @generated */
	public T caseZAXN(ZAXN object) { return null; }

	/** Returns the result of interpreting the object as an instance of '<em>ZGIL</em>'. @param object the target of the switch. @return the result, or null to continue the switch. @generated */
	public T caseZGIL(ZGIL object) { return null; }

	/** Returns the result of interpreting the object as an instance of '<em>ZBAT</em>'. @param object the target of the switch. @return the result, or null to continue the switch. @generated */
	public T caseZBAT(ZBAT object) { return null; }

	/** Returns the result of interpreting the object as an instance of '<em>ZLIN</em>'. @param object the target of the switch. @return the result, or null to continue the switch. @generated */
	public T caseZLIN(ZLIN object) { return null; }

	/** Returns the result of interpreting the object as an instance of '<em>ZMOT</em>'. @param object the target of the switch. @return the result, or null to continue the switch. @generated */
	public T caseZMOT(ZMOT object) { return null; }

	/** Returns the result of interpreting the object as an instance of '<em>ZREA</em>'. @param object the target of the switch. @return the result, or null to continue the switch. @generated */
	public T caseZREA(ZREA object) { return null; }

	/** Returns the result of interpreting the object as an instance of '<em>ZBSH</em>'. @param object the target of the switch. @return the result, or null to continue the switch. @generated */
	public T caseZBSH(ZBSH object) { return null; }

	/** Returns the result of interpreting the object as an instance of '<em>ZRRC</em>'. @param object the target of the switch. @return the result, or null to continue the switch. @generated */
	public T caseZRRC(ZRRC object) { return null; }

	/** Returns the result of interpreting the object as an instance of '<em>ZSAR</em>'. @param object the target of the switch. @return the result, or null to continue the switch. @generated */
	public T caseZSAR(ZSAR object) { return null; }

	/** Returns the result of interpreting the object as an instance of '<em>ZTCF</em>'. @param object the target of the switch. @return the result, or null to continue the switch. @generated */
	public T caseZTCF(ZTCF object) { return null; }

	/** Returns the result of interpreting the object as an instance of '<em>ZTCR</em>'. @param object the target of the switch. @return the result, or null to continue the switch. @generated */
	public T caseZTCR(ZTCR object) { return null; }

	/** Returns the result of interpreting the object as an instance of '<em>ZCAB</em>'. @param object the target of the switch. @return the result, or null to continue the switch. @generated */
	public T caseZCAB(ZCAB object) { return null; }

	/** Returns the result of interpreting the object as an instance of '<em>ZCAP</em>'. @param object the target of the switch. @return the result, or null to continue the switch. @generated */
	public T caseZCAP(ZCAP object) { return null; }

	/** Returns the result of interpreting the object as an instance of '<em>ZCON</em>'. @param object the target of the switch. @return the result, or null to continue the switch. @generated */
	public T caseZCON(ZCON object) { return null; }

	/** Returns the result of interpreting the object as an instance of '<em>ZGEN</em>'. @param object the target of the switch. @return the result, or null to continue the switch. @generated */
	public T caseZGEN(ZGEN object) { return null; }

	/** Returns the result of interpreting the object as an instance of '<em>Domain LN</em>'. @param object the target of the switch. @return the result, or null to continue the switch. @generated */
	public T caseDomainLN(DomainLN object) { return null; }

	/** Returns the result of interpreting the object as an instance of '<em>EObject</em>'; this is the last case tried. @param object the target of the switch. @return the result of interpreting the object as an instance of '<em>EObject</em>'. @generated */
	@Override
	public T defaultCase(EObject object) { return null; }

} //LNGroupZSwitch
/**
 * This class is generated by jOOQ
 */
package edu.kit.ipd.crowdcontrol.workerservice.database.model.tables.records;


import edu.kit.ipd.crowdcontrol.workerservice.database.model.tables.Rating;

import java.sql.Timestamp;

import javax.annotation.Generated;

import org.jooq.Field;
import org.jooq.Record1;
import org.jooq.Record9;
import org.jooq.Row9;
import org.jooq.impl.UpdatableRecordImpl;


// NOTE(review): generated by jOOQ 3.7.3; do not hand-edit -- regenerate from the schema.
// Column order (indices 0..8): id_rating, experiment, answer_r, timestamp, rating,
// reservation, feedback, worker_id, quality.
/**
 * This class is generated by jOOQ.
 */
@Generated(
	value = {
		"http://www.jooq.org",
		"jOOQ version:3.7.3"
	},
	comments = "This class is generated by jOOQ"
)
@SuppressWarnings({ "all", "unchecked", "rawtypes" })
public class RatingRecord extends UpdatableRecordImpl<RatingRecord> implements Record9<Integer, Integer, Integer, Timestamp, Integer, Integer, String, Integer, Integer> {

	private static final long serialVersionUID = 1645104049;

	/** Setter for <code>crowdcontrol.Rating.id_rating</code>. */
	public void setIdRating(Integer value) { setValue(0, value); }

	/** Getter for <code>crowdcontrol.Rating.id_rating</code>. */
	public Integer getIdRating() { return (Integer) getValue(0); }

	/** Setter for <code>crowdcontrol.Rating.experiment</code>. */
	public void setExperiment(Integer value) { setValue(1, value); }

	/** Getter for <code>crowdcontrol.Rating.experiment</code>. */
	public Integer getExperiment() { return (Integer) getValue(1); }

	/** Setter for <code>crowdcontrol.Rating.answer_r</code>. */
	public void setAnswerR(Integer value) { setValue(2, value); }

	/** Getter for <code>crowdcontrol.Rating.answer_r</code>. */
	public Integer getAnswerR() { return (Integer) getValue(2); }

	/** Setter for <code>crowdcontrol.Rating.timestamp</code>. */
	public void setTimestamp(Timestamp value) { setValue(3, value); }

	/** Getter for <code>crowdcontrol.Rating.timestamp</code>. */
	public Timestamp getTimestamp() { return (Timestamp) getValue(3); }

	/** Setter for <code>crowdcontrol.Rating.rating</code>. */
	public void setRating(Integer value) { setValue(4, value); }

	/** Getter for <code>crowdcontrol.Rating.rating</code>. */
	public Integer getRating() { return (Integer) getValue(4); }

	/** Setter for <code>crowdcontrol.Rating.reservation</code>. */
	public void setReservation(Integer value) { setValue(5, value); }

	/** Getter for <code>crowdcontrol.Rating.reservation</code>. */
	public Integer getReservation() { return (Integer) getValue(5); }

	/** Setter for <code>crowdcontrol.Rating.feedback</code>. */
	public void setFeedback(String value) { setValue(6, value); }

	/** Getter for <code>crowdcontrol.Rating.feedback</code>. */
	public String getFeedback() { return (String) getValue(6); }

	/** Setter for <code>crowdcontrol.Rating.worker_id</code>. */
	public void setWorkerId(Integer value) { setValue(7, value); }

	/** Getter for <code>crowdcontrol.Rating.worker_id</code>. */
	public Integer getWorkerId() { return (Integer) getValue(7); }

	/** Setter for <code>crowdcontrol.Rating.quality</code>. */
	public void setQuality(Integer value) { setValue(8, value); }

	/** Getter for <code>crowdcontrol.Rating.quality</code>. */
	public Integer getQuality() { return (Integer) getValue(8); }

	// -------------------------------------------------------------------------
	// Primary key information
	// -------------------------------------------------------------------------

	/** {@inheritDoc} */
	@Override
	public Record1<Integer> key() { return (Record1) super.key(); }

	// -------------------------------------------------------------------------
	// Record9 type implementation
	// -------------------------------------------------------------------------

	/** {@inheritDoc} */
	@Override
	public Row9<Integer, Integer, Integer, Timestamp, Integer, Integer, String, Integer, Integer> fieldsRow() { return (Row9) super.fieldsRow(); }

	/** {@inheritDoc} */
	@Override
	public Row9<Integer, Integer, Integer, Timestamp, Integer, Integer, String, Integer, Integer> valuesRow() { return (Row9) super.valuesRow(); }

	/** {@inheritDoc} */
	@Override
	public Field<Integer> field1() { return Rating.RATING.ID_RATING; }

	/** {@inheritDoc} */
	@Override
	public Field<Integer> field2() { return Rating.RATING.EXPERIMENT; }

	/** {@inheritDoc} */
	@Override
	public Field<Integer> field3() { return Rating.RATING.ANSWER_R; }

	/** {@inheritDoc} */
	@Override
	public Field<Timestamp> field4() { return Rating.RATING.TIMESTAMP; }

	// RATING_ has a trailing underscore because the column name clashes with the table name.
	/** {@inheritDoc} */
	@Override
	public Field<Integer> field5() { return Rating.RATING.RATING_; }

	/** {@inheritDoc} */
	@Override
	public Field<Integer> field6() { return Rating.RATING.RESERVATION; }

	/** {@inheritDoc} */
	@Override
	public Field<String> field7() { return Rating.RATING.FEEDBACK; }

	/** {@inheritDoc} */
	@Override
	public Field<Integer> field8() { return Rating.RATING.WORKER_ID; }

	/** {@inheritDoc} */
	@Override
	public Field<Integer> field9() { return Rating.RATING.QUALITY; }

	/** {@inheritDoc} */
	@Override
	public Integer value1() { return getIdRating(); }

	/** {@inheritDoc} */
	@Override
	public Integer value2() { return getExperiment(); }

	/** {@inheritDoc} */
	@Override
	public Integer value3() { return getAnswerR(); }

	/** {@inheritDoc} */
	@Override
	public Timestamp value4() { return getTimestamp(); }

	/** {@inheritDoc} */
	@Override
	public Integer value5() { return getRating(); }

	/** {@inheritDoc} */
	@Override
	public Integer value6() { return getReservation(); }

	/** {@inheritDoc} */
	@Override
	public String value7() { return getFeedback(); }

	/** {@inheritDoc} */
	@Override
	public Integer value8() { return getWorkerId(); }

	/** {@inheritDoc} */
	@Override
	public Integer value9() { return getQuality(); }

	/** {@inheritDoc} */
	@Override
	public RatingRecord value1(Integer value) { setIdRating(value); return this; }

	/** {@inheritDoc} */
	@Override
	public RatingRecord value2(Integer value) { setExperiment(value); return this; }

	/** {@inheritDoc} */
	@Override
	public RatingRecord value3(Integer value) { setAnswerR(value); return this; }

	/** {@inheritDoc} */
	@Override
	public RatingRecord value4(Timestamp value) { setTimestamp(value); return this; }

	/** {@inheritDoc} */
	@Override
	public RatingRecord value5(Integer value) { setRating(value); return this; }

	/** {@inheritDoc} */
	@Override
	public RatingRecord value6(Integer value) { setReservation(value); return this; }

	/** {@inheritDoc} */
	@Override
	public RatingRecord value7(String value) { setFeedback(value); return this; }

	/** {@inheritDoc} */
	@Override
	public RatingRecord value8(Integer value) { setWorkerId(value); return this; }

	/** {@inheritDoc} */
	@Override
	public RatingRecord value9(Integer value) { setQuality(value); return this; }

	/** {@inheritDoc} */
	@Override
	public RatingRecord values(Integer value1, Integer value2, Integer value3, Timestamp value4, Integer value5, Integer value6, String value7, Integer value8, Integer value9) {
		value1(value1);
		value2(value2);
		value3(value3);
		value4(value4);
		value5(value5);
		value6(value6);
		value7(value7);
		value8(value8);
		value9(value9);
		return this;
	}

	// -------------------------------------------------------------------------
	// Constructors
	// -------------------------------------------------------------------------

	/** Create a detached RatingRecord */
	public RatingRecord() {
		super(Rating.RATING);
	}

	/** Create a detached, initialised RatingRecord */
	public RatingRecord(Integer idRating, Integer experiment, Integer answerR, Timestamp timestamp, Integer rating, Integer reservation, String feedback, Integer workerId, Integer quality) {
		super(Rating.RATING);

		setValue(0, idRating);
		setValue(1, experiment);
		setValue(2, answerR);
		setValue(3, timestamp);
		setValue(4, rating);
		setValue(5, reservation);
		setValue(6, feedback);
		setValue(7, workerId);
		setValue(8, quality);
	}
}
/*
 * Copyright 2009-2013 by The Regents of the University of California
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * you may obtain a copy of the License from
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;

import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;

import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.io.FileReference;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
import edu.uci.ics.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexFileManager;
import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
import edu.uci.ics.hyracks.storage.am.lsm.common.impls.TreeIndexFactory;
import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;

/**
 * File manager for LSM R-tree components. Each on-disk component consists of
 * three files sharing one timestamp-range base name: an R-tree file
 * (suffix "r"), a BTree file (suffix "b"), and a bloom filter file.
 */
public class LSMRTreeFileManager extends AbstractLSMIndexFileManager {
    // Suffixes appended (after SPLIT_STRING) to a component's base name.
    private static final String RTREE_STRING = "r";
    private static final String BTREE_STRING = "b";

    private final TreeIndexFactory<? extends ITreeIndex> rtreeFactory;
    private final TreeIndexFactory<? extends ITreeIndex> btreeFactory;

    // Accepts BTree component files (name ends with "b"); skips hidden files.
    private static FilenameFilter btreeFilter = new FilenameFilter() {
        public boolean accept(File dir, String name) {
            return !name.startsWith(".") && name.endsWith(BTREE_STRING);
        }
    };

    // Accepts R-tree component files (name ends with "r"); skips hidden files.
    private static FilenameFilter rtreeFilter = new FilenameFilter() {
        public boolean accept(File dir, String name) {
            return !name.startsWith(".") && name.endsWith(RTREE_STRING);
        }
    };

    public LSMRTreeFileManager(IFileMapProvider fileMapProvider, FileReference file,
            TreeIndexFactory<? extends ITreeIndex> rtreeFactory, TreeIndexFactory<? extends ITreeIndex> btreeFactory) {
        super(fileMapProvider, file, null);
        this.rtreeFactory = rtreeFactory;
        this.btreeFactory = btreeFactory;
    }

    /**
     * Builds the three file references for a new flush component, named
     * "<ts>_<ts>_<suffix>" (timestamp format and SPLIT_STRING come from the base class).
     */
    @Override
    public LSMComponentFileReferences getRelFlushFileReference() {
        Date date = new Date();
        String ts = formatter.format(date);
        String baseName = baseDir + ts + SPLIT_STRING + ts;
        // Begin timestamp and end timestamp are identical since it is a flush
        return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + RTREE_STRING),
                createFlushFile(baseName + SPLIT_STRING + BTREE_STRING), createFlushFile(baseName + SPLIT_STRING
                        + BLOOM_FILTER_STRING));
    }

    /**
     * Builds the three file references for a merge component covering the range
     * from the first component's begin timestamp to the last component's end timestamp.
     */
    @Override
    public LSMComponentFileReferences getRelMergeFileReference(String firstFileName, String lastFileName)
            throws HyracksDataException {
        String[] firstTimestampRange = firstFileName.split(SPLIT_STRING);
        String[] lastTimestampRange = lastFileName.split(SPLIT_STRING);
        String baseName = baseDir + firstTimestampRange[0] + SPLIT_STRING + lastTimestampRange[1];
        // Get the range of timestamps by taking the earliest and the latest
        // timestamps
        return new LSMComponentFileReferences(createMergeFile(baseName + SPLIT_STRING + RTREE_STRING),
                createMergeFile(baseName + SPLIT_STRING + BTREE_STRING), createMergeFile(baseName + SPLIT_STRING
                        + BLOOM_FILTER_STRING));
    }

    /**
     * Scans the index directory, deletes files belonging to invalid components
     * (those whose timestamp interval is contained in an already-accepted one),
     * and returns the surviving components ordered newest-first.
     */
    @Override
    public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException, IndexException {
        List<LSMComponentFileReferences> validFiles = new ArrayList<LSMComponentFileReferences>();
        ArrayList<ComparableFileName> allRTreeFiles = new ArrayList<ComparableFileName>();
        ArrayList<ComparableFileName> allBTreeFiles = new ArrayList<ComparableFileName>();
        ArrayList<ComparableFileName> allBloomFilterFiles = new ArrayList<ComparableFileName>();

        // Create a transaction filter to hide in-flight transaction components.
        FilenameFilter transactionFilter = getTransactionFileFilter(false);

        // Gather files. BTree files are collected first; their base names
        // (everything before the last SPLIT_STRING) anchor validation of the
        // matching R-tree and bloom filter files.
        cleanupAndGetValidFilesInternal(getCompoundFilter(transactionFilter, btreeFilter), btreeFactory, allBTreeFiles);
        HashSet<String> btreeFilesSet = new HashSet<String>();
        for (ComparableFileName cmpFileName : allBTreeFiles) {
            int index = cmpFileName.fileName.lastIndexOf(SPLIT_STRING);
            btreeFilesSet.add(cmpFileName.fileName.substring(0, index));
        }
        validateFiles(btreeFilesSet, allRTreeFiles, getCompoundFilter(transactionFilter, rtreeFilter), rtreeFactory);
        validateFiles(btreeFilesSet, allBloomFilterFiles, getCompoundFilter(transactionFilter, bloomFilterFilter), null);

        // Sanity check.
        if (allRTreeFiles.size() != allBTreeFiles.size() || allBTreeFiles.size() != allBloomFilterFiles.size()) {
            throw new HyracksDataException(
                    "Unequal number of valid RTree, BTree, and Bloom Filter files found. Aborting cleanup.");
        }

        // Trivial cases.
        if (allRTreeFiles.isEmpty() || allBTreeFiles.isEmpty() || allBloomFilterFiles.isEmpty()) {
            return validFiles;
        }

        if (allRTreeFiles.size() == 1 && allBTreeFiles.size() == 1 && allBloomFilterFiles.size() == 1) {
            validFiles.add(new LSMComponentFileReferences(allRTreeFiles.get(0).fileRef, allBTreeFiles.get(0).fileRef,
                    allBloomFilterFiles.get(0).fileRef));
            return validFiles;
        }

        // Sorts files names from earliest to latest timestamp.
        Collections.sort(allRTreeFiles);
        Collections.sort(allBTreeFiles);
        Collections.sort(allBloomFilterFiles);

        List<ComparableFileName> validComparableRTreeFiles = new ArrayList<ComparableFileName>();
        ComparableFileName lastRTree = allRTreeFiles.get(0);
        validComparableRTreeFiles.add(lastRTree);

        List<ComparableFileName> validComparableBTreeFiles = new ArrayList<ComparableFileName>();
        ComparableFileName lastBTree = allBTreeFiles.get(0);
        validComparableBTreeFiles.add(lastBTree);

        List<ComparableFileName> validComparableBloomFilterFiles = new ArrayList<ComparableFileName>();
        ComparableFileName lastBloomFilter = allBloomFilterFiles.get(0);
        validComparableBloomFilterFiles.add(lastBloomFilter);

        // Walk the timestamp-sorted lists in lockstep; intervals must either be
        // strictly after the last accepted interval (valid) or fully contained
        // in it (superseded by a merge -- delete the files).
        for (int i = 1; i < allRTreeFiles.size(); i++) {
            ComparableFileName currentRTree = allRTreeFiles.get(i);
            ComparableFileName currentBTree = allBTreeFiles.get(i);
            ComparableFileName currentBloomFilter = allBloomFilterFiles.get(i);
            // Current start timestamp is greater than last stop timestamp.
            if (currentRTree.interval[0].compareTo(lastRTree.interval[1]) > 0
                    && currentBTree.interval[0].compareTo(lastBTree.interval[1]) > 0
                    && currentBloomFilter.interval[0].compareTo(lastBloomFilter.interval[1]) > 0) {
                validComparableRTreeFiles.add(currentRTree);
                validComparableBTreeFiles.add(currentBTree);
                validComparableBloomFilterFiles.add(currentBloomFilter);
                lastRTree = currentRTree;
                lastBTree = currentBTree;
                lastBloomFilter = currentBloomFilter;
            } else if (currentRTree.interval[0].compareTo(lastRTree.interval[0]) >= 0
                    && currentRTree.interval[1].compareTo(lastRTree.interval[1]) <= 0
                    && currentBTree.interval[0].compareTo(lastBTree.interval[0]) >= 0
                    && currentBTree.interval[1].compareTo(lastBTree.interval[1]) <= 0
                    && currentBloomFilter.interval[0].compareTo(lastBloomFilter.interval[0]) >= 0
                    && currentBloomFilter.interval[1].compareTo(lastBloomFilter.interval[1]) <= 0) {
                // Invalid files are completely contained in last interval.
                File invalidRTreeFile = new File(currentRTree.fullPath);
                invalidRTreeFile.delete();
                File invalidBTreeFile = new File(currentBTree.fullPath);
                invalidBTreeFile.delete();
                File invalidBloomFilterFile = new File(currentBloomFilter.fullPath);
                invalidBloomFilterFile.delete();
            } else {
                // This scenario should not be possible.
                throw new HyracksDataException("Found LSM files with overlapping but not contained timetamp intervals.");
            }
        }

        // Sort valid files in reverse lexicographical order, such that newer
        // files come first.
        Collections.sort(validComparableRTreeFiles, recencyCmp);
        Collections.sort(validComparableBTreeFiles, recencyCmp);
        Collections.sort(validComparableBloomFilterFiles, recencyCmp);

        // The three lists have equal sizes here (sanity check above), so
        // iterating on the first two drains the bloom filter iterator as well.
        Iterator<ComparableFileName> rtreeFileIter = validComparableRTreeFiles.iterator();
        Iterator<ComparableFileName> btreeFileIter = validComparableBTreeFiles.iterator();
        Iterator<ComparableFileName> bloomFilterFileIter = validComparableBloomFilterFiles.iterator();
        while (rtreeFileIter.hasNext() && btreeFileIter.hasNext()) {
            ComparableFileName cmpRTreeFileName = rtreeFileIter.next();
            ComparableFileName cmpBTreeFileName = btreeFileIter.next();
            ComparableFileName cmpBloomFilterFileName = bloomFilterFileIter.next();
            validFiles.add(new LSMComponentFileReferences(cmpRTreeFileName.fileRef, cmpBTreeFileName.fileRef,
                    cmpBloomFilterFileName.fileRef));
        }

        return validFiles;
    }

    /**
     * Creates a transaction lock file marking an in-flight transaction, then
     * returns flush-style file references for the transaction's component.
     */
    @Override
    public LSMComponentFileReferences getNewTransactionFileReference() throws IOException {
        Date date = new Date();
        String ts = formatter.format(date);
        // Create transaction lock file
        Files.createFile(Paths.get(baseDir + TRANSACTION_PREFIX + ts));
        String baseName = baseDir + ts + SPLIT_STRING + ts;
        return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + RTREE_STRING),
                createFlushFile(baseName + SPLIT_STRING + BTREE_STRING), createFlushFile(baseName + SPLIT_STRING
                        + BLOOM_FILTER_STRING));
    }

    /**
     * Finds the single pending transaction (via its lock file), deletes the
     * lock, and returns references to the transaction's three component files.
     * Returns null when no transaction lock exists.
     */
    @Override
    public LSMComponentFileReferences getTransactionFileReferenceForCommit() throws HyracksDataException {
        FilenameFilter transactionFilter;
        File dir = new File(baseDir);
        String[] files = dir.list(transactionFileNameFilter);
        if (files.length == 0)
            return null;
        if (files.length != 1) {
            throw new HyracksDataException("More than one transaction lock found:" + files.length);
        } else {
            transactionFilter = getTransactionFileFilter(true);
            String txnFileName = dir.getPath() + File.separator + files[0];
            // get the actual transaction files
            files = dir.list(transactionFilter);
            if (files.length < 3) {
                throw new HyracksDataException("LSM Rtree transaction has less than 3 files :" + files.length);
            }
            try {
                Files.delete(Paths.get(txnFileName));
            } catch (IOException e) {
                throw new HyracksDataException("Failed to delete transaction lock :" + txnFileName);
            }
        }
        File rTreeFile = null;
        File bTreeFile = null;
        File bloomFilterFile = null;
        // Classify the transaction files by suffix; BTREE_STRING is checked
        // before RTREE_STRING (order matters only if suffixes overlapped).
        for (String fileName : files) {
            if (fileName.endsWith(BTREE_STRING)) {
                bTreeFile = new File(dir.getPath() + File.separator + fileName);
            } else if (fileName.endsWith(RTREE_STRING)) {
                rTreeFile = new File(dir.getPath() + File.separator + fileName);
            } else if (fileName.endsWith(BLOOM_FILTER_STRING)) {
                bloomFilterFile = new File(dir.getPath() + File.separator + fileName);
            } else {
                throw new HyracksDataException("unrecognized file found = " + fileName);
            }
        }
        FileReference rTreeFileRef = new FileReference(rTreeFile);
        FileReference bTreeFileRef = new FileReference(bTreeFile);
        FileReference bloomFilterFileRef = new FileReference(bloomFilterFile);
        return new LSMComponentFileReferences(rTreeFileRef, bTreeFileRef, bloomFilterFileRef);
    }
}
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package org.plugin.dot.preview; import com.intellij.codeHighlighting.BackgroundEditorHighlighter; import com.intellij.icons.AllIcons; import com.intellij.openapi.Disposable; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.event.DocumentEvent; import com.intellij.openapi.editor.event.DocumentListener; import com.intellij.openapi.fileChooser.FileChooserFactory; import com.intellij.openapi.fileChooser.FileSaverDescriptor; import com.intellij.openapi.fileChooser.FileSaverDialog; import com.intellij.openapi.fileEditor.FileDocumentManager; import com.intellij.openapi.fileEditor.FileEditor; import com.intellij.openapi.fileEditor.FileEditorLocation; import com.intellij.openapi.fileEditor.FileEditorState; import com.intellij.openapi.ide.CopyPasteManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.IconLoader; import com.intellij.openapi.util.UserDataHolderBase; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.vfs.VirtualFileWrapper; import com.intellij.ui.JBColor; import com.intellij.ui.colorpicker.CommonButton; import com.intellij.ui.components.JBPanel; import com.intellij.util.Alarm; import guru.nidi.graphviz.engine.Format; import guru.nidi.graphviz.engine.Graphviz; import guru.nidi.graphviz.engine.GraphvizException; import guru.nidi.graphviz.model.MutableGraph; import guru.nidi.graphviz.parse.Parser; import guru.nidi.graphviz.parse.ParserException; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.plugin.dot.DotIcons; import javax.imageio.ImageIO; import javax.swing.*; import java.awt.*; import java.awt.datatransfer.DataFlavor; import java.awt.datatransfer.Transferable; import java.awt.datatransfer.UnsupportedFlavorException; import java.awt.event.ComponentAdapter; import 
java.awt.event.ComponentEvent;
import java.awt.image.BufferedImage;
import java.beans.PropertyChangeListener;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import static com.intellij.openapi.util.text.StringUtil.trimStart;

/**
 * A {@link FileEditor} that renders a live Graphviz (DOT) preview of the edited
 * file. The preview is re-rendered after a quiet period of
 * {@link #PARSING_CALL_TIMEOUT_MS} following a document change or a resize.
 */
public class GraphPreviewFileEditor extends UserDataHolderBase implements FileEditor {

    /** Debounce delay before re-parsing/re-rendering after an edit or resize. */
    private final static long PARSING_CALL_TIMEOUT_MS = 1000L;

    // Debouncing alarm; requests run on a pooled (non-EDT) thread and are
    // disposed together with this editor.
    @NotNull
    private final Alarm myPooledAlarm = new Alarm(Alarm.ThreadToUse.POOLED_THREAD, this);
    @NotNull
    private final ImagePanel myPanel;

    public GraphPreviewFileEditor(@NotNull VirtualFile file, Project project) {
        myPanel = new ImagePanel(project);
        Document myDocument = FileDocumentManager.getInstance().getDocument(file);
        if (myDocument != null) {
            myPanel.addImage(myDocument);
            // Re-render (debounced) whenever the underlying document changes.
            myDocument.addDocumentListener(new DocumentListener() {
                @Override
                public void documentChanged(@NotNull final DocumentEvent e) {
                    myPooledAlarm.cancelAllRequests();
                    // NOTE(review): this paints directly onto getGraphics() from a
                    // pooled thread; Swing painting is supposed to happen on the EDT
                    // (e.g. via repaint()) — confirm this threading is intentional.
                    myPooledAlarm.addRequest(() -> myPanel.paintGraph(myPanel.getGraphics()), PARSING_CALL_TIMEOUT_MS);
                }
            }, this);
        }
        // On resize, drop the cached image (it was rendered for the old size)
        // and schedule a re-render.
        myPanel.addComponentListener(new ComponentAdapter() {
            @Override
            public void componentResized(ComponentEvent componentEvent) {
                myPanel.bufferedImage = null;
                myPooledAlarm.cancelAllRequests();
                myPooledAlarm.addRequest(() -> myPanel.paintGraph(myPanel.getGraphics()), PARSING_CALL_TIMEOUT_MS);
            }
        });
    }

    @NotNull
    @Override
    public JComponent getComponent() {
        return myPanel;
    }

    @Nullable
    @Override
    public JComponent getPreferredFocusedComponent() {
        return myPanel;
    }

    @NotNull
    @Override
    public String getName() {
        return "Graph Preview";
    }

    @Override
    public void setState(@NotNull FileEditorState state) {
    }

    // The preview is read-only: it never modifies the file.
    @Override
    public boolean isModified() {
        return false;
    }

    @Override
    public boolean isValid() {
        return true;
    }

    @Override
    public void deselectNotify() {
    }

    @Override
    public void addPropertyChangeListener(@NotNull PropertyChangeListener listener) {
    }

    @Override
    public void removePropertyChangeListener(@NotNull PropertyChangeListener listener) {
    }

    @Nullable
    @Override
    public BackgroundEditorHighlighter getBackgroundHighlighter() {
        return null;
    }

    @Nullable
    @Override
    public FileEditorLocation getCurrentLocation() {
        return null;
    }

    @Override
    public void dispose() {
        myPanel.dispose();
    }

    @Override
    public void selectNotify() {
    }

    /**
     * Panel that parses the DOT document, renders it to a PNG via graphviz-java,
     * caches the resulting image, and hosts a small toolbar with
     * copy-to-clipboard and save-as-PNG actions.
     */
    public static class ImagePanel extends JBPanel implements Disposable {
        // Cached rendered image; null means "needs re-render" (also gates the
        // toolbar buttons' enabled state).
        public BufferedImage bufferedImage;
        private Document document;
        // Shown instead of the image when parsing/rendering fails.
        final JLabel noPreviewReason;
        // Last successfully parsed graph, used to skip re-rendering when the
        // document text parses to an equal graph.
        private MutableGraph current;

        public ImagePanel(Project project) {
            final GridBagLayout layout = new GridBagLayout();
            setLayout(layout);
            final JToolBar toolBar = new JToolBar();
            toolBar.setLayout(new FlowLayout(FlowLayout.LEFT));
            toolBar.setMaximumSize(new Dimension(100, 50));
            final JPanel actionsToolBar = new JPanel();
            toolBar.add(actionsToolBar);
            // Copy the rendered image to the clipboard; enabled only once an
            // image has been rendered.
            final CommonButton copyToClipboard = new CommonButton(AllIcons.Actions.Copy) {
                @Override
                public boolean isEnabled() {
                    return bufferedImage != null;
                }
            };
            copyToClipboard.setDisabledIcon(IconLoader.getDisabledIcon(AllIcons.Actions.Copy));
            copyToClipboard.addActionListener(it -> {
                if (bufferedImage != null) {
                    CopyPasteManager.getInstance().setContents(new ImageTransferable(bufferedImage));
                }
            });
            copyToClipboard.setToolTipText("Copy graph preview image to clipboard");
            actionsToolBar.add(copyToClipboard);
            // Save the rendered image to a user-chosen PNG file.
            final CommonButton saveAs = new CommonButton(DotIcons.SAVE) {
                @Override
                public boolean isEnabled() {
                    return bufferedImage != null;
                }
            };
            saveAs.setDisabledIcon(IconLoader.getDisabledIcon(DotIcons.SAVE));
            saveAs.addActionListener(it -> {
                if (bufferedImage != null) {
                    FileSaverDescriptor descriptor = new FileSaverDescriptor("Save Graph Preview Image", "", "png");
                    final FileSaverDialog saveFileDialog =
                            FileChooserFactory.getInstance().createSaveFileDialog(descriptor, project);
                    final VirtualFileWrapper save = saveFileDialog.save((VirtualFile) null, null);
                    if (save != null) {
                        try {
                            ImageIO.write(bufferedImage, "png", save.getFile());
                        } catch (IOException e) {
                            // NOTE(review): failure is only printed to stderr; the
                            // user gets no feedback — consider surfacing the error.
                            e.printStackTrace();
                        }
                    }
                }
            });
            saveAs.setToolTipText("Save graph preview image to PNG file");
            actionsToolBar.add(saveAs);
            noPreviewReason = new JLabel("");
            noPreviewReason.setHorizontalAlignment(SwingConstants.CENTER);
            GridBagConstraints c = new GridBagConstraints();
            c.gridx = 0;
            c.gridy = 0;
            c.anchor = GridBagConstraints.NORTH;
            c.fill = GridBagConstraints.HORIZONTAL;
            c.weightx = 1;
            c.weighty = 1;
            add(toolBar, c);
            layout.setConstraints(toolBar, c);
            c.weightx = 2;
            c.weighty = 2;
            c.anchor = GridBagConstraints.LAST_LINE_END;
            c.fill = GridBagConstraints.HORIZONTAL;
            c.gridy = 2;
            add(noPreviewReason, c);
            noPreviewReason.setVisible(false);
        }

        /**
         * Binds the panel to a document and invalidates any cached image.
         */
        public synchronized void addImage(@NotNull Document document) {
            this.document = document;
            bufferedImage = null;
        }

        /**
         * Parses the current document text as DOT and draws the rendered graph
         * (re-rendering only when the text changed or the cache was cleared).
         * On any parse/render failure the cached image is dropped and the
         * failure reason is shown in {@link #noPreviewReason}.
         */
        private void paintGraph(Graphics g) {
            String text = document.getText();
            // Strip a leading shebang line ("#!...") before handing the text to
            // the DOT parser.
            if (text.startsWith("#!")) {
                text = trimStart(text, text.substring(0, text.indexOf("\n")));
            }
            try (InputStream dot = new ByteArrayInputStream(text.getBytes(StandardCharsets.UTF_8))) {
                final MutableGraph mutableGraph = new Parser().read(dot);
                if (bufferedImage == null || !mutableGraph.equals(current)) {
                    Graphviz graphviz = Graphviz.fromGraph(mutableGraph);
                    bufferedImage = graphviz.width(this.getWidth() - 100).height(this.getHeight() - 100)
                            .render(Format.PNG).toImage();
                    current = mutableGraph;
                }
                noPreviewReason.setVisible(false);
                if (bufferedImage != null) {
                    // NOTE(review): redundant second setVisible(false) — harmless.
                    noPreviewReason.setVisible(false);
                    g.setColor(JBColor.WHITE);
                    g.fillRect(50, 75, this.getWidth() - 100, this.getHeight() - 100);
                    g.drawImage(bufferedImage, 50, 75, this.getWidth() - 100, this.getHeight() - 100, this);
                }
            } catch (IOException | ParserException | GraphvizException | NoClassDefFoundError e) {
                // NoClassDefFoundError is caught because the graphviz engine's
                // optional dependencies may be absent at runtime — presumably;
                // TODO confirm which engine backends are bundled.
                noPreviewReason.setText("<html><font color='grey'>Problem: " + e.getMessage() + "</font></html>");
                noPreviewReason.setVisible(true);
                bufferedImage = null;
                // g may be null when invoked from the Alarm with getGraphics()
                // before the panel is displayable.
                if (g != null) {
                    g.setColor(JBColor.WHITE);
                    g.fillRect(50, 75, this.getWidth() - 100, this.getHeight() - 100);
                }
            }
        }

        @Override
        protected void paintComponent(Graphics g) {
            super.paintComponent(g);
            paintGraph(g);
        }

        @Override
        public void dispose() {
        }
    }

    /**
     * Clipboard payload exposing a {@link BufferedImage} under
     * {@link DataFlavor#imageFlavor}.
     */
    private static class ImageTransferable implements Transferable {
        private final BufferedImage myImage;

        public ImageTransferable(@NotNull BufferedImage image) {
            myImage = image;
        }

        @Override
        public DataFlavor[] getTransferDataFlavors() {
            return new DataFlavor[]{DataFlavor.imageFlavor};
        }

        @Override
        public boolean isDataFlavorSupported(DataFlavor dataFlavor) {
            return DataFlavor.imageFlavor.equals(dataFlavor);
        }

        @Override
        public Object getTransferData(DataFlavor dataFlavor) throws UnsupportedFlavorException {
            if (!DataFlavor.imageFlavor.equals(dataFlavor)) {
                throw new UnsupportedFlavorException(dataFlavor);
            }
            return myImage;
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.tinkerpop.gremlin.process; import org.apache.tinkerpop.gremlin.AbstractGremlinSuite; import org.apache.tinkerpop.gremlin.process.traversal.CoreTraversalTest; import org.apache.tinkerpop.gremlin.process.traversal.TraversalEngine; import org.apache.tinkerpop.gremlin.process.traversal.step.branch.BranchTest; import org.apache.tinkerpop.gremlin.process.traversal.step.branch.ChooseTest; import org.apache.tinkerpop.gremlin.process.traversal.step.branch.LocalTest; import org.apache.tinkerpop.gremlin.process.traversal.step.branch.RepeatTest; import org.apache.tinkerpop.gremlin.process.traversal.step.branch.UnionTest; import org.apache.tinkerpop.gremlin.process.traversal.step.filter.AndTest; import org.apache.tinkerpop.gremlin.process.traversal.step.filter.CoinTest; import org.apache.tinkerpop.gremlin.process.traversal.step.filter.CyclicPathTest; import org.apache.tinkerpop.gremlin.process.traversal.step.filter.DedupTest; import org.apache.tinkerpop.gremlin.process.traversal.step.filter.DropTest; import org.apache.tinkerpop.gremlin.process.traversal.step.filter.FilterTest; import org.apache.tinkerpop.gremlin.process.traversal.step.filter.HasTest; import 
org.apache.tinkerpop.gremlin.process.traversal.step.filter.IsTest; import org.apache.tinkerpop.gremlin.process.traversal.step.filter.OrTest; import org.apache.tinkerpop.gremlin.process.traversal.step.filter.RangeTest; import org.apache.tinkerpop.gremlin.process.traversal.step.filter.SampleTest; import org.apache.tinkerpop.gremlin.process.traversal.step.filter.SimplePathTest; import org.apache.tinkerpop.gremlin.process.traversal.step.filter.TailTest; import org.apache.tinkerpop.gremlin.process.traversal.step.filter.WhereTest; import org.apache.tinkerpop.gremlin.process.traversal.step.map.AddEdgeTest; import org.apache.tinkerpop.gremlin.process.traversal.step.map.AddVertexTest; import org.apache.tinkerpop.gremlin.process.traversal.step.map.CoalesceTest; import org.apache.tinkerpop.gremlin.process.traversal.step.map.ConstantTest; import org.apache.tinkerpop.gremlin.process.traversal.step.map.CountTest; import org.apache.tinkerpop.gremlin.process.traversal.step.map.FlatMapTest; import org.apache.tinkerpop.gremlin.process.traversal.step.map.FoldTest; import org.apache.tinkerpop.gremlin.process.traversal.step.map.GraphTest; import org.apache.tinkerpop.gremlin.process.traversal.step.map.LoopsTest; import org.apache.tinkerpop.gremlin.process.traversal.step.map.MapKeysTest; import org.apache.tinkerpop.gremlin.process.traversal.step.map.MapTest; import org.apache.tinkerpop.gremlin.process.traversal.step.map.MapValuesTest; import org.apache.tinkerpop.gremlin.process.traversal.step.map.MatchTest; import org.apache.tinkerpop.gremlin.process.traversal.step.map.MaxTest; import org.apache.tinkerpop.gremlin.process.traversal.step.map.MeanTest; import org.apache.tinkerpop.gremlin.process.traversal.step.map.MinTest; import org.apache.tinkerpop.gremlin.process.traversal.step.map.OrderTest; import org.apache.tinkerpop.gremlin.process.traversal.step.map.PathTest; import org.apache.tinkerpop.gremlin.process.traversal.step.map.ProfileTest; import 
org.apache.tinkerpop.gremlin.process.traversal.step.map.PropertiesTest; import org.apache.tinkerpop.gremlin.process.traversal.step.map.SelectTest; import org.apache.tinkerpop.gremlin.process.traversal.step.map.SumTest; import org.apache.tinkerpop.gremlin.process.traversal.step.map.UnfoldTest; import org.apache.tinkerpop.gremlin.process.traversal.step.map.ValueMapTest; import org.apache.tinkerpop.gremlin.process.traversal.step.map.VertexTest; import org.apache.tinkerpop.gremlin.process.traversal.step.sideEffect.AggregateTest; import org.apache.tinkerpop.gremlin.process.traversal.step.sideEffect.ExplainTest; import org.apache.tinkerpop.gremlin.process.traversal.step.sideEffect.GroupCountTest; import org.apache.tinkerpop.gremlin.process.traversal.step.sideEffect.GroupTest; import org.apache.tinkerpop.gremlin.process.traversal.step.sideEffect.GroupTestV3d0; import org.apache.tinkerpop.gremlin.process.traversal.step.sideEffect.InjectTest; import org.apache.tinkerpop.gremlin.process.traversal.step.sideEffect.SackTest; import org.apache.tinkerpop.gremlin.process.traversal.step.sideEffect.SideEffectCapTest; import org.apache.tinkerpop.gremlin.process.traversal.step.sideEffect.SideEffectTest; import org.apache.tinkerpop.gremlin.process.traversal.step.sideEffect.StoreTest; import org.apache.tinkerpop.gremlin.process.traversal.step.sideEffect.SubgraphTest; import org.apache.tinkerpop.gremlin.process.traversal.step.sideEffect.TreeTest; import org.apache.tinkerpop.gremlin.process.traversal.strategy.decoration.ElementIdStrategyProcessTest; import org.apache.tinkerpop.gremlin.process.traversal.strategy.decoration.EventStrategyProcessTest; import org.apache.tinkerpop.gremlin.process.traversal.strategy.decoration.PartitionStrategyProcessTest; import org.apache.tinkerpop.gremlin.process.traversal.strategy.decoration.SubgraphStrategyProcessTest; import org.apache.tinkerpop.gremlin.process.traversal.strategy.verification.ReadOnlyStrategyProcessTest; import 
org.apache.tinkerpop.gremlin.structure.Graph;
import org.apache.tinkerpop.gremlin.structure.StructureStandardSuite;
import org.junit.runners.model.InitializationError;
import org.junit.runners.model.RunnerBuilder;

/**
 * The {@code ProcessStandardSuite} is a JUnit test runner that executes the Gremlin Test Suite over a
 * {@link Graph} implementation. This test suite covers traversal operations and should be implemented by vendors
 * to validate that their implementations are compliant with the Gremlin language.
 * <p/>
 * For more information on the usage of this suite, please see {@link StructureStandardSuite}.
 *
 * @author Stephen Mallette (http://stephen.genoprime.com)
 */
public class ProcessStandardSuite extends AbstractGremlinSuite {

    /**
     * This list of tests in the suite that will be executed as part of this suite.
     * Note: the array order is the execution order — do not reorder casually.
     */
    private static final Class<?>[] allTests = new Class<?>[]{
            // branch
            BranchTest.Traversals.class,
            ChooseTest.Traversals.class,
            LocalTest.Traversals.class,
            RepeatTest.Traversals.class,
            UnionTest.Traversals.class,

            // filter
            AndTest.Traversals.class,
            CoinTest.Traversals.class,
            CyclicPathTest.Traversals.class,
            DedupTest.Traversals.class,
            DropTest.Traversals.class,
            FilterTest.Traversals.class,
            HasTest.Traversals.class,
            IsTest.Traversals.class,
            OrTest.Traversals.class,
            RangeTest.Traversals.class,
            SampleTest.Traversals.class,
            SimplePathTest.Traversals.class,
            TailTest.Traversals.class,
            WhereTest.Traversals.class,

            // map
            AddEdgeTest.Traversals.class,
            AddVertexTest.Traversals.class,
            CoalesceTest.Traversals.class,
            ConstantTest.Traversals.class,
            CountTest.Traversals.class,
            FlatMapTest.Traversals.class,
            FoldTest.Traversals.class,
            GraphTest.Traversals.class,
            LoopsTest.Traversals.class,
            MapTest.Traversals.class,
            MapKeysTest.Traversals.class,
            MapValuesTest.Traversals.class,
            MatchTest.CountMatchTraversals.class,
            MatchTest.GreedyMatchTraversals.class,
            MaxTest.Traversals.class,
            MeanTest.Traversals.class,
            MinTest.Traversals.class,
            SumTest.Traversals.class,
            OrderTest.Traversals.class,
            PathTest.Traversals.class,
            ProfileTest.Traversals.class,
            PropertiesTest.Traversals.class,
            SelectTest.Traversals.class,
            VertexTest.Traversals.class,
            UnfoldTest.Traversals.class,
            ValueMapTest.Traversals.class,

            // sideEffect
            AggregateTest.Traversals.class,
            ExplainTest.Traversals.class,
            GroupTest.Traversals.class,
            GroupTestV3d0.Traversals.class,
            GroupCountTest.Traversals.class,
            InjectTest.Traversals.class,
            SackTest.Traversals.class,
            SideEffectCapTest.Traversals.class,
            SideEffectTest.Traversals.class,
            StoreTest.Traversals.class,
            SubgraphTest.Traversals.class,
            TreeTest.Traversals.class,

            // compliance
            CoreTraversalTest.class,

            // decorations
            ElementIdStrategyProcessTest.class,
            EventStrategyProcessTest.class,
            ReadOnlyStrategyProcessTest.class,
            PartitionStrategyProcessTest.class,
            SubgraphStrategyProcessTest.class
    };

    /**
     * A list of the minimum set of base tests that Gremlin flavors should implement to be compliant with Gremlin.
     * NOTE(review): this set is intentionally smaller than {@link #allTests}
     * (e.g. ProfileTest, ExplainTest and the strategy tests are not enforced) —
     * presumably because those depend on provider-specific behavior; confirm
     * before adding entries.
     */
    private static final Class<?>[] testsToEnforce = new Class<?>[]{
            // branch
            BranchTest.class,
            ChooseTest.class,
            LocalTest.class,
            RepeatTest.class,
            UnionTest.class,

            // filter
            AndTest.class,
            CoinTest.class,
            CyclicPathTest.class,
            DedupTest.class,
            DropTest.class,
            FilterTest.class,
            HasTest.class,
            IsTest.class,
            OrTest.class,
            RangeTest.class,
            SampleTest.class,
            SimplePathTest.class,
            TailTest.class,
            WhereTest.class,

            // map
            AddEdgeTest.class,
            AddVertexTest.class,
            CoalesceTest.class,
            ConstantTest.class,
            CountTest.class,
            FlatMapTest.class,
            FoldTest.class,
            LoopsTest.class,
            MapTest.class,
            MapKeysTest.class,
            MapValuesTest.class,
            MatchTest.class,
            MaxTest.class,
            MeanTest.class,
            MinTest.class,
            SumTest.class,
            OrderTest.class,
            PathTest.class,
            PropertiesTest.class,
            SelectTest.class,
            VertexTest.class,
            UnfoldTest.class,
            ValueMapTest.class,

            // sideEffect
            AggregateTest.class,
            GroupTest.class,
            GroupCountTest.class,
            InjectTest.class,
            SackTest.class,
            SideEffectCapTest.class,
            SideEffectTest.class,
            StoreTest.class,
            SubgraphTest.class,
            TreeTest.class,
    };

    /**
     * This constructor is used by JUnit and will run this suite with its concrete implementations of the
     * {@code testsToEnforce}.
     */
    public ProcessStandardSuite(final Class<?> klass, final RunnerBuilder builder) throws InitializationError {
        super(klass, builder, allTests, testsToEnforce, false, TraversalEngine.Type.STANDARD);
    }

    /**
     * This constructor is used by Gremlin flavor implementers who supply their own implementations of the
     * {@code testsToEnforce}.
     */
    public ProcessStandardSuite(final Class<?> klass, final RunnerBuilder builder, final Class<?>[] testsToExecute)
            throws InitializationError {
        super(klass, builder, testsToExecute, testsToEnforce, true, TraversalEngine.Type.STANDARD);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.zeppelin.notebook; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.io.FileUtils; import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars; import org.apache.zeppelin.dep.DependencyResolver; import org.apache.zeppelin.display.AngularObjectRegistry; import org.apache.zeppelin.interpreter.InterpreterFactory; import org.apache.zeppelin.interpreter.InterpreterOption; import org.apache.zeppelin.interpreter.InterpreterOutput; import org.apache.zeppelin.interpreter.InterpreterSetting; import org.apache.zeppelin.interpreter.mock.MockInterpreter1; import org.apache.zeppelin.interpreter.mock.MockInterpreter2; import org.apache.zeppelin.notebook.repo.NotebookRepo; import org.apache.zeppelin.notebook.repo.VFSNotebookRepo; 
import org.apache.zeppelin.scheduler.Job;
import org.apache.zeppelin.scheduler.Job.Status;
import org.apache.zeppelin.scheduler.JobListener;
import org.apache.zeppelin.scheduler.SchedulerFactory;
import org.apache.zeppelin.search.SearchService;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.quartz.SchedulerException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.sonatype.aether.RepositoryException;

/**
 * Integration tests for {@link Notebook}: note creation/reload/persistence,
 * paragraph execution with mock interpreters, cron scheduling, cloning, and
 * AngularObject registry lifecycle. Each test runs against a throwaway
 * temp-directory ZEPPELIN_HOME built in {@link #setUp()}.
 */
public class NotebookTest implements JobListenerFactory{
  private static final Logger logger = LoggerFactory.getLogger(NotebookTest.class);

  private File tmpDir;            // per-test scratch ZEPPELIN_HOME
  private ZeppelinConfiguration conf;
  private SchedulerFactory schedulerFactory;
  private File notebookDir;       // notebook persistence root under tmpDir
  private Notebook notebook;
  private NotebookRepo notebookRepo;
  private InterpreterFactory factory;
  private DependencyResolver depResolver;

  // Builds an isolated Zeppelin environment: temp dirs, system properties
  // pointing at them, two mock interpreters, and a VFS-backed notebook repo.
  @Before
  public void setUp() throws Exception {
    tmpDir = new File(System.getProperty("java.io.tmpdir")+"/ZeppelinLTest_"+System.currentTimeMillis());
    tmpDir.mkdirs();
    new File(tmpDir, "conf").mkdirs();
    notebookDir = new File(tmpDir + "/notebook");
    notebookDir.mkdirs();

    System.setProperty(ConfVars.ZEPPELIN_HOME.getVarName(), tmpDir.getAbsolutePath());
    System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), notebookDir.getAbsolutePath());
    System.setProperty(ConfVars.ZEPPELIN_INTERPRETERS.getVarName(), "org.apache.zeppelin.interpreter.mock.MockInterpreter1,org.apache.zeppelin.interpreter.mock.MockInterpreter2");

    conf = ZeppelinConfiguration.create();

    this.schedulerFactory = new SchedulerFactory();

    MockInterpreter1.register("mock1", "org.apache.zeppelin.interpreter.mock.MockInterpreter1");
    MockInterpreter2.register("mock2", "org.apache.zeppelin.interpreter.mock.MockInterpreter2");

    depResolver = new DependencyResolver(tmpDir.getAbsolutePath() + "/local-repo");
    factory = new InterpreterFactory(conf, new InterpreterOption(false), null, null, depResolver);

    SearchService search = mock(SearchService.class);
    notebookRepo = new VFSNotebookRepo(conf);
    notebook = new Notebook(conf, notebookRepo, schedulerFactory, factory, this, search);
  }

  @After
  public void tearDown() throws Exception {
    delete(tmpDir);
  }

  // A paragraph without an interpreter prefix runs on the default repl; a
  // "%mock2" prefix selects the second registered interpreter.
  @Test
  public void testSelectingReplImplementation() throws IOException {
    Note note = notebook.createNote();
    note.getNoteReplLoader().setInterpreters(factory.getDefaultInterpreterSettingList());

    // run with default repl
    Paragraph p1 = note.addParagraph();
    Map config = p1.getConfig();
    config.put("enabled", true);
    p1.setConfig(config);
    p1.setText("hello world");
    note.run(p1.getId());
    // busy-wait until the paragraph finishes and its result is published
    while(p1.isTerminated()==false || p1.getResult()==null) Thread.yield();
    assertEquals("repl1: hello world", p1.getResult().message());

    // run with specific repl
    Paragraph p2 = note.addParagraph();
    p2.setConfig(config);
    p2.setText("%mock2 hello world");
    note.run(p2.getId());
    while(p2.isTerminated()==false || p2.getResult()==null) Thread.yield();
    assertEquals("repl2: hello world", p2.getResult().message());
  }

  // reloadAllNotes() must pick up notes added to (and removed from) the
  // notebook directory behind Zeppelin's back.
  @Test
  public void testReloadAllNotes() throws IOException {
    File srcDir = new File("src/test/resources/2A94M5J1Z");
    File destDir = new File(notebookDir.getAbsolutePath() + "/2A94M5J1Z");

    // copy the notebook
    try {
      FileUtils.copyDirectory(srcDir, destDir);
    } catch (IOException e) {
      logger.error(e.toString(), e);
    }

    // doesn't have copied notebook in memory before reloading
    List<Note> notes = notebook.getAllNotes();
    assertEquals(notes.size(), 0);

    // load copied notebook on memory when reloadAllNotes() is called
    Note copiedNote = notebookRepo.get("2A94M5J1Z");
    notebook.reloadAllNotes();
    notes = notebook.getAllNotes();
    assertEquals(notes.size(), 1);
    assertEquals(notes.get(0).id(), copiedNote.id());
    assertEquals(notes.get(0).getName(), copiedNote.getName());
    assertEquals(notes.get(0).getParagraphs(), copiedNote.getParagraphs());

    // delete the notebook
    FileUtils.deleteDirectory(destDir);

    // keep notebook in memory before reloading
    notes = notebook.getAllNotes();
    assertEquals(notes.size(), 1);

    // delete notebook from notebook list when reloadAllNotes() is called
    notebook.reloadAllNotes();
    notes = notebook.getAllNotes();
    assertEquals(notes.size(), 0);
  }

  // A persisted note must be visible to a second Notebook instance backed by
  // the same repo.
  @Test
  public void testPersist() throws IOException, SchedulerException, RepositoryException {
    Note note = notebook.createNote();

    // run with default repl
    Paragraph p1 = note.addParagraph();
    Map config = p1.getConfig();
    config.put("enabled", true);
    p1.setConfig(config);
    p1.setText("hello world");
    note.persist();

    Notebook notebook2 = new Notebook(
        conf, notebookRepo, schedulerFactory,
        new InterpreterFactory(conf, null, null, null, depResolver), this, null);
    assertEquals(1, notebook2.getAllNotes().size());
  }

  @Test
  public void testClearParagraphOutput() throws IOException, SchedulerException{
    Note note = notebook.createNote();
    Paragraph p1 = note.addParagraph();
    Map config = p1.getConfig();
    config.put("enabled", true);
    p1.setConfig(config);
    p1.setText("hello world");
    note.run(p1.getId());

    while(p1.isTerminated()==false || p1.getResult()==null) Thread.yield();
    assertEquals("repl1: hello world", p1.getResult().message());

    // clear paragraph output/result
    note.clearParagraphOutput(p1.getId());
    assertNull(p1.getResult());
  }

  // runAll() must execute every paragraph of the note in order.
  @Test
  public void testRunAll() throws IOException {
    Note note = notebook.createNote();
    note.getNoteReplLoader().setInterpreters(factory.getDefaultInterpreterSettingList());
    Paragraph p1 = note.addParagraph();
    Map config = p1.getConfig();
    config.put("enabled", true);
    p1.setConfig(config);
    p1.setText("p1");
    Paragraph p2 = note.addParagraph();
    Map config1 = p2.getConfig();
    p2.setConfig(config1);
    p2.setText("p2");
    assertEquals(null, p2.getResult());
    note.runAll();

    while(p2.isTerminated()==false || p2.getResult()==null) Thread.yield();
    assertEquals("repl1: p2", p2.getResult().message());
  }

  // A once-per-second cron runs the note; removing the cron stops further runs
  // (the finish timestamp must stay unchanged afterwards).
  @Test
  public void testSchedule() throws InterruptedException, IOException{
    // create a note and a paragraph
    Note note = notebook.createNote();
    note.getNoteReplLoader().setInterpreters(factory.getDefaultInterpreterSettingList());
    Paragraph p = note.addParagraph();
    Map config = new HashMap<String, Object>();
    p.setConfig(config);
    p.setText("p1");
    Date dateFinished = p.getDateFinished();
    assertNull(dateFinished);

    // set cron scheduler, once a second
    config = note.getConfig();
    config.put("enabled", true);
    config.put("cron", "* * * * * ?");
    note.setConfig(config);
    notebook.refreshCron(note.id());
    Thread.sleep(1*1000);

    // remove cron scheduler.
    config.put("cron", null);
    note.setConfig(config);
    notebook.refreshCron(note.id());
    Thread.sleep(1000);
    dateFinished = p.getDateFinished();
    assertNotNull(dateFinished);
    Thread.sleep(1*1000);
    assertEquals(dateFinished, p.getDateFinished());
  }

  // With "releaseresource"=true a scheduled note must keep running even after
  // its interpreter is restarted (new finish timestamp after restart).
  @Test
  public void testAutoRestartInterpreterAfterSchedule() throws InterruptedException, IOException{
    // create a note and a paragraph
    Note note = notebook.createNote();
    note.getNoteReplLoader().setInterpreters(factory.getDefaultInterpreterSettingList());
    Paragraph p = note.addParagraph();
    Map config = new HashMap<String, Object>();
    p.setConfig(config);
    p.setText("p1");

    // set cron scheduler, once a second
    config = note.getConfig();
    config.put("enabled", true);
    config.put("cron", "* * * * * ?");
    config.put("releaseresource", "true");
    note.setConfig(config);
    notebook.refreshCron(note.id());

    while (p.getStatus() != Status.FINISHED) {
      Thread.sleep(100);
    }
    Date dateFinished = p.getDateFinished();
    assertNotNull(dateFinished);

    // restart interpreter
    for (InterpreterSetting setting : note.getNoteReplLoader().getInterpreterSettings()) {
      notebook.getInterpreterFactory().restart(setting.id());
    }

    Thread.sleep(1000);
    while (p.getStatus() != Status.FINISHED) {
      Thread.sleep(100);
    }
    assertNotEquals(dateFinished, p.getDateFinished());

    // remove cron scheduler.
    config.put("cron", null);
    note.setConfig(config);
    notebook.refreshCron(note.id());
  }

  // Cloning copies text and results but resets status and assigns new
  // paragraph ids.
  @Test
  public void testCloneNote() throws IOException, CloneNotSupportedException,
      InterruptedException {
    Note note = notebook.createNote();
    note.getNoteReplLoader().setInterpreters(factory.getDefaultInterpreterSettingList());
    final Paragraph p = note.addParagraph();
    p.setText("hello world");
    note.runAll();
    while(p.isTerminated()==false || p.getResult()==null) Thread.yield();

    p.setStatus(Status.RUNNING);
    Note cloneNote = notebook.cloneNote(note.getId(), "clone note");
    Paragraph cp = cloneNote.paragraphs.get(0);
    assertEquals(cp.getStatus(), Status.READY);
    assertNotEquals(cp.getId(), p.getId());
    assertEquals(cp.text, p.text);
    assertEquals(cp.getResult().message(), p.getResult().message());
  }

  // Removing a note must purge its paragraph- and notebook-scoped Angular
  // objects while leaving global ones intact.
  @Test
  public void testAngularObjectRemovalOnNotebookRemove() throws InterruptedException,
      IOException {
    // create a note and a paragraph
    Note note = notebook.createNote();
    note.getNoteReplLoader().setInterpreters(factory.getDefaultInterpreterSettingList());
    AngularObjectRegistry registry = note.getNoteReplLoader()
        .getInterpreterSettings().get(0).getInterpreterGroup()
        .getAngularObjectRegistry();

    Paragraph p1 = note.addParagraph();

    // add paragraph scope object
    registry.add("o1", "object1", note.id(), p1.getId());

    // add notebook scope object
    registry.add("o2", "object2", note.id(), null);

    // add global scope object
    registry.add("o3", "object3", null, null);

    // remove notebook
    notebook.removeNote(note.id());

    // notebook scope or paragraph scope object should be removed
    // NOTE(review): the lookup scopes look swapped relative to how o1/o2 were
    // added (o1 is paragraph-scoped but queried with a null paragraph id, and
    // vice versa for o2) — confirm whether this is intentional.
    assertNull(registry.get("o1", note.id(), null));
    assertNull(registry.get("o2", note.id(), p1.getId()));

    // global object should remain
    assertNotNull(registry.get("o3", null, null));
  }

  // Removing a paragraph must purge only that paragraph's Angular objects.
  @Test
  public void testAngularObjectRemovalOnParagraphRemove() throws InterruptedException,
      IOException {
    // create a note and a paragraph
    Note note = notebook.createNote();
    note.getNoteReplLoader().setInterpreters(factory.getDefaultInterpreterSettingList());
    AngularObjectRegistry registry = note.getNoteReplLoader()
        .getInterpreterSettings().get(0).getInterpreterGroup()
        .getAngularObjectRegistry();

    Paragraph p1 = note.addParagraph();

    // add paragraph scope object
    registry.add("o1", "object1", note.id(), p1.getId());

    // add notebook scope object
    registry.add("o2", "object2", note.id(), null);

    // add global scope object
    registry.add("o3", "object3", null, null);

    // remove paragraph
    note.removeParagraph(p1.getId());

    // paragraph scope should be removed
    assertNull(registry.get("o1", note.id(), null));

    // notebook scope and global object should remain
    assertNotNull(registry.get("o2", note.id(), null));
    assertNotNull(registry.get("o3", null, null));
  }

  // Restarting an interpreter wipes its Angular object registry entirely
  // (both note-local and global scope).
  @Test
  public void testAngularObjectRemovalOnInterpreterRestart() throws InterruptedException,
      IOException {
    // create a note and a paragraph
    Note note = notebook.createNote();
    note.getNoteReplLoader().setInterpreters(factory.getDefaultInterpreterSettingList());
    AngularObjectRegistry registry = note.getNoteReplLoader()
        .getInterpreterSettings().get(0).getInterpreterGroup()
        .getAngularObjectRegistry();

    // add local scope object
    registry.add("o1", "object1", note.id(), null);
    // add global scope object
    registry.add("o2", "object2", null, null);

    // restart interpreter
    factory.restart(note.getNoteReplLoader().getInterpreterSettings().get(0).id());
    registry = note.getNoteReplLoader()
        .getInterpreterSettings().get(0).getInterpreterGroup()
        .getAngularObjectRegistry();

    // local and global scope object should be removed
    assertNull(registry.get("o1", note.id(), null));
    assertNull(registry.get("o2", null, null));
    notebook.removeNote(note.id());
  }

  // After restarting mid-run, all paragraphs from the first ABORT onward must
  // be aborted (none may slip back to another status).
  @Test
  public void testAbortParagraphStatusOnInterpreterRestart() throws InterruptedException,
      IOException {
    Note note = notebook.createNote();
    note.getNoteReplLoader().setInterpreters(factory.getDefaultInterpreterSettingList());

    ArrayList<Paragraph> paragraphs = new ArrayList<>();
    for (int i = 0; i < 100; i++) {
      Paragraph tmp = note.addParagraph();
      tmp.setText("p" + tmp.getId());
      paragraphs.add(tmp);
    }

    for (Paragraph p : paragraphs) {
      assertEquals(Job.Status.READY, p.getStatus());
    }

    note.runAll();

    while (paragraphs.get(0).getStatus() != Status.FINISHED) Thread.yield();

    factory.restart(note.getNoteReplLoader().getInterpreterSettings().get(0).id());

    boolean isAborted = false;
    for (Paragraph p : paragraphs) {
      logger.debug(p.getStatus().name());
      if (isAborted) {
        assertEquals(Job.Status.ABORT, p.getStatus());
      }
      if (p.getStatus() == Status.ABORT) {
        isAborted = true;
      }
    }
    assertTrue(isAborted);
  }

  // Recursively deletes a file or directory tree (test cleanup helper).
  private void delete(File file){
    if(file.isFile()) file.delete();
    else if(file.isDirectory()){
      File [] files = file.listFiles();
      if(files!=null && files.length>0){
        for(File f : files){
          delete(f);
        }
      }
      file.delete();
    }
  }

  // No-op listener; the tests only poll paragraph status directly.
  @Override
  public ParagraphJobListener getParagraphJobListener(Note note) {
    return new ParagraphJobListener(){

      @Override
      public void onOutputAppend(Paragraph paragraph, InterpreterOutput out, String output) {
      }

      @Override
      public void onOutputUpdate(Paragraph paragraph, InterpreterOutput out, String output) {
      }

      @Override
      public void onProgressUpdate(Job job, int progress) {
      }

      @Override
      public void beforeStatusChange(Job job, Status before, Status after) {
      }

      @Override
      public void afterStatusChange(Job job, Status before, Status after) {
      }
    };
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.carbondata.processing.datatypes;

import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

import org.apache.carbondata.core.devapi.DictionaryGenerationException;
import org.apache.carbondata.core.keygenerator.KeyGenException;
import org.apache.carbondata.core.keygenerator.KeyGenerator;
import org.apache.carbondata.processing.newflow.complexobjects.StructObject;

/**
 * Struct complex-type node used during data loading.
 *
 * <p>A struct is serialized as a single int (the child count) followed by each
 * child's serialized form in declaration order; {@link #writeByteArray} produces
 * that framing and {@link #parseAndBitPack} consumes it. Instances are stateful
 * only through {@link #outputArrayIndex} and {@link #dataCounter}.
 */
public class StructDataType implements GenericDataType<StructObject> {

  /** Child columns of this struct, in declaration order. */
  private List<GenericDataType> children = new ArrayList<GenericDataType>();
  /** Name of this column. */
  private String name;
  /** Name of the parent column (used to route {@link #addChildren}). */
  private String parentname;
  /** Column unique id. */
  private String columnId;
  /** Index of this node's metadata column in the columnar output arrays. */
  private int outputArrayIndex;
  /** Number of rows processed by {@link #getColumnarDataForComplexType}. */
  private int dataCounter;

  /** Private constructor used only by {@link #deepCopy()}. */
  private StructDataType(List<GenericDataType> children, int outputArrayIndex, int dataCounter) {
    this.children = children;
    this.outputArrayIndex = outputArrayIndex;
    this.dataCounter = dataCounter;
  }

  /**
   * Constructor.
   *
   * @param name column name
   * @param parentname parent column name
   * @param columnId column unique id
   */
  public StructDataType(String name, String parentname, String columnId) {
    this.name = name;
    this.parentname = parentname;
    this.columnId = columnId;
  }

  /**
   * Add a child dimension: directly if this struct is its parent, otherwise
   * delegate recursively so the child finds its parent anywhere in the tree.
   */
  @Override
  public void addChildren(GenericDataType newChild) {
    if (this.getName().equals(newChild.getParentname())) {
      this.children.add(newChild);
    } else {
      for (GenericDataType child : this.children) {
        child.addChildren(newChild);
      }
    }
  }

  /** @return column name */
  @Override
  public String getName() {
    return name;
  }

  /** @return parent column name */
  @Override
  public String getParentname() {
    return parentname;
  }

  /** @return column unique id */
  @Override
  public String getColumnId() {
    return columnId;
  }

  /** Collect all primitive leaf columns of this complex column, depth-first. */
  @Override
  public void getAllPrimitiveChildren(List<GenericDataType> primitiveChild) {
    for (int i = 0; i < children.size(); i++) {
      GenericDataType child = children.get(i);
      if (child instanceof PrimitiveDataType) {
        primitiveChild.add(child);
      } else {
        child.getAllPrimitiveChildren(primitiveChild);
      }
    }
  }

  /** A struct node itself carries no surrogate key; always 0. */
  @Override
  public int getSurrogateIndex() {
    return 0;
  }

  /** No-op: surrogate indexes apply only to primitive leaves. */
  @Override
  public void setSurrogateIndex(int surrIndex) {
  }

  /**
   * Serialize a struct value: the child count, then each child's serialized
   * form. A {@code null} input serializes every child as null so both branches
   * emit identical framing — exactly one leading int, matching the single
   * {@code getInt()} consumed by {@link #parseAndBitPack}.
   */
  @Override
  public void writeByteArray(StructObject input, DataOutputStream dataOutputStream)
      throws IOException, DictionaryGenerationException {
    dataOutputStream.writeInt(children.size());
    if (input == null) {
      // BUG FIX: the child count was previously written a second time here,
      // desynchronizing the stream relative to the non-null branch and to the
      // single int that parseAndBitPack reads back.
      for (int i = 0; i < children.size(); i++) {
        children.get(i).writeByteArray(null, dataOutputStream);
      }
    } else {
      Object[] data = input.getData();
      for (int i = 0; i < data.length && i < children.size(); i++) {
        children.get(i).writeByteArray(data[i], dataOutputStream);
      }
      // Children beyond the provided data get an explicit empty/null entry so
      // the reader always sees children.size() child payloads.
      for (int i = data.length; i < children.size(); i++) {
        children.get(i).writeByteArray(null, dataOutputStream);
      }
    }
  }

  /** Struct metadata column contributes cardinality 0; children follow. */
  @Override
  public void fillCardinality(List<Integer> dimCardWithComplex) {
    dimCardWithComplex.add(0);
    for (int i = 0; i < children.size(); i++) {
      children.get(i).fillCardinality(dimCardWithComplex);
    }
  }

  /**
   * Parse the byte stream written by {@link #writeByteArray} and bit-pack it.
   * For each primitive child, the generated key size (in bytes) is emitted
   * before the child's packed payload so the columnar splitter can skip it.
   */
  @Override
  public void parseAndBitPack(ByteBuffer byteArrayInput, DataOutputStream dataOutputStream,
      KeyGenerator[] generator) throws IOException, KeyGenException {
    int childElement = byteArrayInput.getInt();
    dataOutputStream.writeInt(childElement);
    // NOTE(review): childElement is assumed to equal children.size() here —
    // the writer always emits children.size(); verify for mixed-version data.
    for (int i = 0; i < childElement; i++) {
      if (children.get(i) instanceof PrimitiveDataType) {
        dataOutputStream.writeInt(generator[children.get(i).getSurrogateIndex()]
            .getKeySizeInBytes());
      }
      children.get(i).parseAndBitPack(byteArrayInput, dataOutputStream, generator);
    }
  }

  /** @return total column count: this metadata column plus all descendants */
  @Override
  public int getColsCount() {
    int colsCount = 1;
    for (int i = 0; i < children.size(); i++) {
      colsCount += children.get(i).getColsCount();
    }
    return colsCount;
  }

  /**
   * Assign output array indexes depth-first: this node takes the given index,
   * each child takes the next one, and after a complex child the counter jumps
   * past everything that child's subtree consumed.
   */
  @Override
  public void setOutputArrayIndex(int outputArrayIndex) {
    this.outputArrayIndex = outputArrayIndex++;
    for (int i = 0; i < children.size(); i++) {
      GenericDataType child = children.get(i);
      child.setOutputArrayIndex(outputArrayIndex++);
      if (!(child instanceof PrimitiveDataType)) {
        // Skip over the indexes consumed by the complex child's subtree.
        outputArrayIndex = getMaxOutputArrayIndex() + 1;
      }
    }
  }

  /** @return the maximum output array index used by this node or any child */
  @Override
  public int getMaxOutputArrayIndex() {
    int currentMax = outputArrayIndex;
    for (int i = 0; i < children.size(); i++) {
      int childMax = children.get(i).getMaxOutputArrayIndex();
      if (childMax > currentMax) {
        currentMax = childMax;
      }
    }
    return currentMax;
  }

  /**
   * Split one serialized struct row into columnar arrays: an 8-byte metadata
   * entry (child count + first child's data counter) for this node, followed
   * by each child's own columnar split.
   */
  @Override
  public void getColumnarDataForComplexType(List<ArrayList<byte[]>> columnsArray,
      ByteBuffer inputArray) {
    ByteBuffer b = ByteBuffer.allocate(8);
    int childElement = inputArray.getInt();
    b.putInt(childElement);
    if (childElement == 0) {
      b.putInt(0);
    } else {
      b.putInt(children.get(0).getDataCounter());
    }
    columnsArray.get(this.outputArrayIndex).add(b.array());
    for (int i = 0; i < childElement; i++) {
      if (children.get(i) instanceof PrimitiveDataType) {
        // Consume the key size emitted by parseAndBitPack for primitive leaves.
        ((PrimitiveDataType) children.get(i)).setKeySize(inputArray.getInt());
      }
      children.get(i).getColumnarDataForComplexType(columnsArray, inputArray);
    }
    this.dataCounter++;
  }

  /** @return number of rows processed so far */
  @Override
  public int getDataCounter() {
    return this.dataCounter;
  }

  /** Struct metadata column is never an aggregate key block. */
  @Override
  public void fillAggKeyBlock(List<Boolean> aggKeyBlockWithComplex, boolean[] aggKeyBlock) {
    aggKeyBlockWithComplex.add(false);
    for (int i = 0; i < children.size(); i++) {
      children.get(i).fillAggKeyBlock(aggKeyBlockWithComplex, aggKeyBlock);
    }
  }

  /** The 8-byte fixed size matches the metadata entry written above. */
  @Override
  public void fillBlockKeySize(List<Integer> blockKeySizeWithComplex,
      int[] primitiveBlockKeySize) {
    blockKeySizeWithComplex.add(8);
    for (int i = 0; i < children.size(); i++) {
      children.get(i).fillBlockKeySize(blockKeySizeWithComplex, primitiveBlockKeySize);
    }
  }

  /** Struct metadata column contributes cardinality 0 after load as well. */
  @Override
  public void fillCardinalityAfterDataLoad(List<Integer> dimCardWithComplex,
      int[] maxSurrogateKeyArray) {
    dimCardWithComplex.add(0);
    for (int i = 0; i < children.size(); i++) {
      children.get(i).fillCardinalityAfterDataLoad(dimCardWithComplex, maxSurrogateKeyArray);
    }
  }

  /** @return a deep copy: children are cloned recursively, counters preserved */
  @Override
  public GenericDataType<StructObject> deepCopy() {
    List<GenericDataType> childrenClone = new ArrayList<>();
    for (GenericDataType child : children) {
      childrenClone.add(child.deepCopy());
    }
    return new StructDataType(childrenClone, this.outputArrayIndex, this.dataCounter);
  }
}
package cbs.repository.code.model; import java.util.ArrayList; import java.util.Date; import java.util.List; public class ActtscExample { /** * This field was generated by MyBatis Generator. * This field corresponds to the database table ACTTSC * * @mbggenerated Sun Nov 21 21:36:06 CST 2010 */ protected String orderByClause; /** * This field was generated by MyBatis Generator. * This field corresponds to the database table ACTTSC * * @mbggenerated Sun Nov 21 21:36:06 CST 2010 */ protected boolean distinct; /** * This field was generated by MyBatis Generator. * This field corresponds to the database table ACTTSC * * @mbggenerated Sun Nov 21 21:36:06 CST 2010 */ protected List<Criteria> oredCriteria; /** * This method was generated by MyBatis Generator. * This method corresponds to the database table ACTTSC * * @mbggenerated Sun Nov 21 21:36:06 CST 2010 */ public ActtscExample() { oredCriteria = new ArrayList<Criteria>(); } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table ACTTSC * * @mbggenerated Sun Nov 21 21:36:06 CST 2010 */ public void setOrderByClause(String orderByClause) { this.orderByClause = orderByClause; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table ACTTSC * * @mbggenerated Sun Nov 21 21:36:06 CST 2010 */ public String getOrderByClause() { return orderByClause; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table ACTTSC * * @mbggenerated Sun Nov 21 21:36:06 CST 2010 */ public void setDistinct(boolean distinct) { this.distinct = distinct; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table ACTTSC * * @mbggenerated Sun Nov 21 21:36:06 CST 2010 */ public boolean isDistinct() { return distinct; } /** * This method was generated by MyBatis Generator. 
* This method corresponds to the database table ACTTSC * * @mbggenerated Sun Nov 21 21:36:06 CST 2010 */ public List<Criteria> getOredCriteria() { return oredCriteria; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table ACTTSC * * @mbggenerated Sun Nov 21 21:36:06 CST 2010 */ public void or(Criteria criteria) { oredCriteria.add(criteria); } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table ACTTSC * * @mbggenerated Sun Nov 21 21:36:06 CST 2010 */ public Criteria or() { Criteria criteria = createCriteriaInternal(); oredCriteria.add(criteria); return criteria; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table ACTTSC * * @mbggenerated Sun Nov 21 21:36:06 CST 2010 */ public Criteria createCriteria() { Criteria criteria = createCriteriaInternal(); if (oredCriteria.size() == 0) { oredCriteria.add(criteria); } return criteria; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table ACTTSC * * @mbggenerated Sun Nov 21 21:36:06 CST 2010 */ protected Criteria createCriteriaInternal() { Criteria criteria = new Criteria(); return criteria; } /** * This method was generated by MyBatis Generator. * This method corresponds to the database table ACTTSC * * @mbggenerated Sun Nov 21 21:36:06 CST 2010 */ public void clear() { oredCriteria.clear(); orderByClause = null; distinct = false; } /** * This class was generated by MyBatis Generator. 
* This class corresponds to the database table ACTTSC * * @mbggenerated Sun Nov 21 21:36:06 CST 2010 */ protected abstract static class GeneratedCriteria { protected List<Criterion> criteria; protected GeneratedCriteria() { super(); criteria = new ArrayList<Criterion>(); } public boolean isValid() { return criteria.size() > 0; } public List<Criterion> getCriteria() { return criteria; } protected void addCriterion(String condition) { if (condition == null) { throw new RuntimeException("Value for condition cannot be null"); } criteria.add(new Criterion(condition)); } protected void addCriterion(String condition, Object value, String property) { if (value == null) { throw new RuntimeException("Value for " + property + " cannot be null"); } criteria.add(new Criterion(condition, value)); } protected void addCriterion(String condition, Object value1, Object value2, String property) { if (value1 == null || value2 == null) { throw new RuntimeException("Between values for " + property + " cannot be null"); } criteria.add(new Criterion(condition, value1, value2)); } public Criteria andTddtdcIsNull() { addCriterion("TDDTDC is null"); return (Criteria) this; } public Criteria andTddtdcIsNotNull() { addCriterion("TDDTDC is not null"); return (Criteria) this; } public Criteria andTddtdcEqualTo(String value) { addCriterion("TDDTDC =", value, "tddtdc"); return (Criteria) this; } public Criteria andTddtdcNotEqualTo(String value) { addCriterion("TDDTDC <>", value, "tddtdc"); return (Criteria) this; } public Criteria andTddtdcGreaterThan(String value) { addCriterion("TDDTDC >", value, "tddtdc"); return (Criteria) this; } public Criteria andTddtdcGreaterThanOrEqualTo(String value) { addCriterion("TDDTDC >=", value, "tddtdc"); return (Criteria) this; } public Criteria andTddtdcLessThan(String value) { addCriterion("TDDTDC <", value, "tddtdc"); return (Criteria) this; } public Criteria andTddtdcLessThanOrEqualTo(String value) { addCriterion("TDDTDC <=", value, "tddtdc"); return 
(Criteria) this; } public Criteria andTddtdcLike(String value) { addCriterion("TDDTDC like", value, "tddtdc"); return (Criteria) this; } public Criteria andTddtdcNotLike(String value) { addCriterion("TDDTDC not like", value, "tddtdc"); return (Criteria) this; } public Criteria andTddtdcIn(List<String> values) { addCriterion("TDDTDC in", values, "tddtdc"); return (Criteria) this; } public Criteria andTddtdcNotIn(List<String> values) { addCriterion("TDDTDC not in", values, "tddtdc"); return (Criteria) this; } public Criteria andTddtdcBetween(String value1, String value2) { addCriterion("TDDTDC between", value1, value2, "tddtdc"); return (Criteria) this; } public Criteria andTddtdcNotBetween(String value1, String value2) { addCriterion("TDDTDC not between", value1, value2, "tddtdc"); return (Criteria) this; } public Criteria andTddnamIsNull() { addCriterion("TDDNAM is null"); return (Criteria) this; } public Criteria andTddnamIsNotNull() { addCriterion("TDDNAM is not null"); return (Criteria) this; } public Criteria andTddnamEqualTo(String value) { addCriterion("TDDNAM =", value, "tddnam"); return (Criteria) this; } public Criteria andTddnamNotEqualTo(String value) { addCriterion("TDDNAM <>", value, "tddnam"); return (Criteria) this; } public Criteria andTddnamGreaterThan(String value) { addCriterion("TDDNAM >", value, "tddnam"); return (Criteria) this; } public Criteria andTddnamGreaterThanOrEqualTo(String value) { addCriterion("TDDNAM >=", value, "tddnam"); return (Criteria) this; } public Criteria andTddnamLessThan(String value) { addCriterion("TDDNAM <", value, "tddnam"); return (Criteria) this; } public Criteria andTddnamLessThanOrEqualTo(String value) { addCriterion("TDDNAM <=", value, "tddnam"); return (Criteria) this; } public Criteria andTddnamLike(String value) { addCriterion("TDDNAM like", value, "tddnam"); return (Criteria) this; } public Criteria andTddnamNotLike(String value) { addCriterion("TDDNAM not like", value, "tddnam"); return (Criteria) this; } 
public Criteria andTddnamIn(List<String> values) { addCriterion("TDDNAM in", values, "tddnam"); return (Criteria) this; } public Criteria andTddnamNotIn(List<String> values) { addCriterion("TDDNAM not in", values, "tddnam"); return (Criteria) this; } public Criteria andTddnamBetween(String value1, String value2) { addCriterion("TDDNAM between", value1, value2, "tddnam"); return (Criteria) this; } public Criteria andTddnamNotBetween(String value1, String value2) { addCriterion("TDDNAM not between", value1, value2, "tddnam"); return (Criteria) this; } public Criteria andTddcn1IsNull() { addCriterion("TDDCN1 is null"); return (Criteria) this; } public Criteria andTddcn1IsNotNull() { addCriterion("TDDCN1 is not null"); return (Criteria) this; } public Criteria andTddcn1EqualTo(String value) { addCriterion("TDDCN1 =", value, "tddcn1"); return (Criteria) this; } public Criteria andTddcn1NotEqualTo(String value) { addCriterion("TDDCN1 <>", value, "tddcn1"); return (Criteria) this; } public Criteria andTddcn1GreaterThan(String value) { addCriterion("TDDCN1 >", value, "tddcn1"); return (Criteria) this; } public Criteria andTddcn1GreaterThanOrEqualTo(String value) { addCriterion("TDDCN1 >=", value, "tddcn1"); return (Criteria) this; } public Criteria andTddcn1LessThan(String value) { addCriterion("TDDCN1 <", value, "tddcn1"); return (Criteria) this; } public Criteria andTddcn1LessThanOrEqualTo(String value) { addCriterion("TDDCN1 <=", value, "tddcn1"); return (Criteria) this; } public Criteria andTddcn1Like(String value) { addCriterion("TDDCN1 like", value, "tddcn1"); return (Criteria) this; } public Criteria andTddcn1NotLike(String value) { addCriterion("TDDCN1 not like", value, "tddcn1"); return (Criteria) this; } public Criteria andTddcn1In(List<String> values) { addCriterion("TDDCN1 in", values, "tddcn1"); return (Criteria) this; } public Criteria andTddcn1NotIn(List<String> values) { addCriterion("TDDCN1 not in", values, "tddcn1"); return (Criteria) this; } public 
Criteria andTddcn1Between(String value1, String value2) { addCriterion("TDDCN1 between", value1, value2, "tddcn1"); return (Criteria) this; } public Criteria andTddcn1NotBetween(String value1, String value2) { addCriterion("TDDCN1 not between", value1, value2, "tddcn1"); return (Criteria) this; } public Criteria andTddcn2IsNull() { addCriterion("TDDCN2 is null"); return (Criteria) this; } public Criteria andTddcn2IsNotNull() { addCriterion("TDDCN2 is not null"); return (Criteria) this; } public Criteria andTddcn2EqualTo(String value) { addCriterion("TDDCN2 =", value, "tddcn2"); return (Criteria) this; } public Criteria andTddcn2NotEqualTo(String value) { addCriterion("TDDCN2 <>", value, "tddcn2"); return (Criteria) this; } public Criteria andTddcn2GreaterThan(String value) { addCriterion("TDDCN2 >", value, "tddcn2"); return (Criteria) this; } public Criteria andTddcn2GreaterThanOrEqualTo(String value) { addCriterion("TDDCN2 >=", value, "tddcn2"); return (Criteria) this; } public Criteria andTddcn2LessThan(String value) { addCriterion("TDDCN2 <", value, "tddcn2"); return (Criteria) this; } public Criteria andTddcn2LessThanOrEqualTo(String value) { addCriterion("TDDCN2 <=", value, "tddcn2"); return (Criteria) this; } public Criteria andTddcn2Like(String value) { addCriterion("TDDCN2 like", value, "tddcn2"); return (Criteria) this; } public Criteria andTddcn2NotLike(String value) { addCriterion("TDDCN2 not like", value, "tddcn2"); return (Criteria) this; } public Criteria andTddcn2In(List<String> values) { addCriterion("TDDCN2 in", values, "tddcn2"); return (Criteria) this; } public Criteria andTddcn2NotIn(List<String> values) { addCriterion("TDDCN2 not in", values, "tddcn2"); return (Criteria) this; } public Criteria andTddcn2Between(String value1, String value2) { addCriterion("TDDCN2 between", value1, value2, "tddcn2"); return (Criteria) this; } public Criteria andTddcn2NotBetween(String value1, String value2) { addCriterion("TDDCN2 not between", value1, value2, 
"tddcn2"); return (Criteria) this; } public Criteria andTddcn3IsNull() { addCriterion("TDDCN3 is null"); return (Criteria) this; } public Criteria andTddcn3IsNotNull() { addCriterion("TDDCN3 is not null"); return (Criteria) this; } public Criteria andTddcn3EqualTo(String value) { addCriterion("TDDCN3 =", value, "tddcn3"); return (Criteria) this; } public Criteria andTddcn3NotEqualTo(String value) { addCriterion("TDDCN3 <>", value, "tddcn3"); return (Criteria) this; } public Criteria andTddcn3GreaterThan(String value) { addCriterion("TDDCN3 >", value, "tddcn3"); return (Criteria) this; } public Criteria andTddcn3GreaterThanOrEqualTo(String value) { addCriterion("TDDCN3 >=", value, "tddcn3"); return (Criteria) this; } public Criteria andTddcn3LessThan(String value) { addCriterion("TDDCN3 <", value, "tddcn3"); return (Criteria) this; } public Criteria andTddcn3LessThanOrEqualTo(String value) { addCriterion("TDDCN3 <=", value, "tddcn3"); return (Criteria) this; } public Criteria andTddcn3Like(String value) { addCriterion("TDDCN3 like", value, "tddcn3"); return (Criteria) this; } public Criteria andTddcn3NotLike(String value) { addCriterion("TDDCN3 not like", value, "tddcn3"); return (Criteria) this; } public Criteria andTddcn3In(List<String> values) { addCriterion("TDDCN3 in", values, "tddcn3"); return (Criteria) this; } public Criteria andTddcn3NotIn(List<String> values) { addCriterion("TDDCN3 not in", values, "tddcn3"); return (Criteria) this; } public Criteria andTddcn3Between(String value1, String value2) { addCriterion("TDDCN3 between", value1, value2, "tddcn3"); return (Criteria) this; } public Criteria andTddcn3NotBetween(String value1, String value2) { addCriterion("TDDCN3 not between", value1, value2, "tddcn3"); return (Criteria) this; } public Criteria andTddcn4IsNull() { addCriterion("TDDCN4 is null"); return (Criteria) this; } public Criteria andTddcn4IsNotNull() { addCriterion("TDDCN4 is not null"); return (Criteria) this; } public Criteria 
andTddcn4EqualTo(String value) { addCriterion("TDDCN4 =", value, "tddcn4"); return (Criteria) this; } public Criteria andTddcn4NotEqualTo(String value) { addCriterion("TDDCN4 <>", value, "tddcn4"); return (Criteria) this; } public Criteria andTddcn4GreaterThan(String value) { addCriterion("TDDCN4 >", value, "tddcn4"); return (Criteria) this; } public Criteria andTddcn4GreaterThanOrEqualTo(String value) { addCriterion("TDDCN4 >=", value, "tddcn4"); return (Criteria) this; } public Criteria andTddcn4LessThan(String value) { addCriterion("TDDCN4 <", value, "tddcn4"); return (Criteria) this; } public Criteria andTddcn4LessThanOrEqualTo(String value) { addCriterion("TDDCN4 <=", value, "tddcn4"); return (Criteria) this; } public Criteria andTddcn4Like(String value) { addCriterion("TDDCN4 like", value, "tddcn4"); return (Criteria) this; } public Criteria andTddcn4NotLike(String value) { addCriterion("TDDCN4 not like", value, "tddcn4"); return (Criteria) this; } public Criteria andTddcn4In(List<String> values) { addCriterion("TDDCN4 in", values, "tddcn4"); return (Criteria) this; } public Criteria andTddcn4NotIn(List<String> values) { addCriterion("TDDCN4 not in", values, "tddcn4"); return (Criteria) this; } public Criteria andTddcn4Between(String value1, String value2) { addCriterion("TDDCN4 between", value1, value2, "tddcn4"); return (Criteria) this; } public Criteria andTddcn4NotBetween(String value1, String value2) { addCriterion("TDDCN4 not between", value1, value2, "tddcn4"); return (Criteria) this; } public Criteria andAmdtlrIsNull() { addCriterion("AMDTLR is null"); return (Criteria) this; } public Criteria andAmdtlrIsNotNull() { addCriterion("AMDTLR is not null"); return (Criteria) this; } public Criteria andAmdtlrEqualTo(String value) { addCriterion("AMDTLR =", value, "amdtlr"); return (Criteria) this; } public Criteria andAmdtlrNotEqualTo(String value) { addCriterion("AMDTLR <>", value, "amdtlr"); return (Criteria) this; } public Criteria 
andAmdtlrGreaterThan(String value) { addCriterion("AMDTLR >", value, "amdtlr"); return (Criteria) this; } public Criteria andAmdtlrGreaterThanOrEqualTo(String value) { addCriterion("AMDTLR >=", value, "amdtlr"); return (Criteria) this; } public Criteria andAmdtlrLessThan(String value) { addCriterion("AMDTLR <", value, "amdtlr"); return (Criteria) this; } public Criteria andAmdtlrLessThanOrEqualTo(String value) { addCriterion("AMDTLR <=", value, "amdtlr"); return (Criteria) this; } public Criteria andAmdtlrLike(String value) { addCriterion("AMDTLR like", value, "amdtlr"); return (Criteria) this; } public Criteria andAmdtlrNotLike(String value) { addCriterion("AMDTLR not like", value, "amdtlr"); return (Criteria) this; } public Criteria andAmdtlrIn(List<String> values) { addCriterion("AMDTLR in", values, "amdtlr"); return (Criteria) this; } public Criteria andAmdtlrNotIn(List<String> values) { addCriterion("AMDTLR not in", values, "amdtlr"); return (Criteria) this; } public Criteria andAmdtlrBetween(String value1, String value2) { addCriterion("AMDTLR between", value1, value2, "amdtlr"); return (Criteria) this; } public Criteria andAmdtlrNotBetween(String value1, String value2) { addCriterion("AMDTLR not between", value1, value2, "amdtlr"); return (Criteria) this; } public Criteria andUpddatIsNull() { addCriterion("UPDDAT is null"); return (Criteria) this; } public Criteria andUpddatIsNotNull() { addCriterion("UPDDAT is not null"); return (Criteria) this; } public Criteria andUpddatEqualTo(Date value) { addCriterion("UPDDAT =", value, "upddat"); return (Criteria) this; } public Criteria andUpddatNotEqualTo(Date value) { addCriterion("UPDDAT <>", value, "upddat"); return (Criteria) this; } public Criteria andUpddatGreaterThan(Date value) { addCriterion("UPDDAT >", value, "upddat"); return (Criteria) this; } public Criteria andUpddatGreaterThanOrEqualTo(Date value) { addCriterion("UPDDAT >=", value, "upddat"); return (Criteria) this; } public Criteria 
andUpddatLessThan(Date value) { addCriterion("UPDDAT <", value, "upddat"); return (Criteria) this; } public Criteria andUpddatLessThanOrEqualTo(Date value) { addCriterion("UPDDAT <=", value, "upddat"); return (Criteria) this; } public Criteria andUpddatIn(List<Date> values) { addCriterion("UPDDAT in", values, "upddat"); return (Criteria) this; } public Criteria andUpddatNotIn(List<Date> values) { addCriterion("UPDDAT not in", values, "upddat"); return (Criteria) this; } public Criteria andUpddatBetween(Date value1, Date value2) { addCriterion("UPDDAT between", value1, value2, "upddat"); return (Criteria) this; } public Criteria andUpddatNotBetween(Date value1, Date value2) { addCriterion("UPDDAT not between", value1, value2, "upddat"); return (Criteria) this; } } /** * This class was generated by MyBatis Generator. * This class corresponds to the database table ACTTSC * * @mbggenerated do_not_delete_during_merge Sun Nov 21 21:36:06 CST 2010 */ public static class Criteria extends GeneratedCriteria { protected Criteria() { super(); } } /** * This class was generated by MyBatis Generator. 
* This class corresponds to the database table ACTTSC * * @mbggenerated Sun Nov 21 21:36:06 CST 2010 */ public static class Criterion { private String condition; private Object value; private Object secondValue; private boolean noValue; private boolean singleValue; private boolean betweenValue; private boolean listValue; public String getCondition() { return condition; } public Object getValue() { return value; } public Object getSecondValue() { return secondValue; } public boolean isNoValue() { return noValue; } public boolean isSingleValue() { return singleValue; } public boolean isBetweenValue() { return betweenValue; } public boolean isListValue() { return listValue; } protected Criterion(String condition) { super(); this.condition = condition; this.noValue = true; } protected Criterion(String condition, Object value) { super(); this.condition = condition; this.value = value; if (value instanceof List<?>) { this.listValue = true; } else { this.singleValue = true; } } protected Criterion(String condition, Object value, Object secondValue) { super(); this.condition = condition; this.value = value; this.secondValue = secondValue; this.betweenValue = true; } } }
/*
 * JBoss, Home of Professional Open Source
 *
 * Copyright 2010 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.xnio.streams;

import java.io.Flushable;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import org.xnio.BrokenPipeException;
import org.xnio.Buffers;
import org.xnio.Pooled;

/**
 * An {@code OutputStream} implementation which writes out {@code ByteBuffer}s to a consumer.
 * <p>
 * All state is guarded by the monitor of this instance: bytes are accumulated into
 * {@link #buffer} and handed off to the {@link BufferWriter} whenever the buffer fills,
 * the stream is flushed, or the stream is closed (with the {@code eof} flag set).
 */
public class BufferPipeOutputStream extends OutputStream {

    // internal buffer currently being filled; null when no buffer is pending (guarded by this)
    private Pooled<ByteBuffer> buffer;

    // indicates this stream is closed (guarded by this)
    private boolean closed;

    // consumer that supplies fresh buffers and accepts filled ones
    private final BufferWriter bufferWriterTask;

    /**
     * Construct a new instance.  The internal buffers will have a capacity of {@code bufferSize}.  The
     * given {@code bufferWriterTask} will be called to send buffers, flush the output stream, and handle the
     * end-of-file condition.  The first buffer is eagerly acquired here (with
     * {@code firstBuffer == true}) so a header-prepending writer can prepare it up front.
     *
     * @param bufferWriterTask the writer task
     * @throws IOException if an error occurs while initializing the stream
     */
    public BufferPipeOutputStream(final BufferWriter bufferWriterTask) throws IOException {
        this.bufferWriterTask = bufferWriterTask;
        synchronized (this) {
            buffer = bufferWriterTask.getBuffer(true);
        }
    }

    // factory for the exception thrown on use-after-close
    private static IOException closed() {
        return new IOException("Stream is closed");
    }

    // must be called with the monitor held; throws if the stream was closed
    private void checkClosed() throws IOException {
        assert Thread.holdsLock(this);
        if (closed) {
            throw closed();
        }
    }

    // Must be called with the monitor held.  Returns the current buffer if it still has
    // room; otherwise sends it downstream (non-eof) and acquires a fresh one.
    private Pooled<ByteBuffer> getBuffer() throws IOException {
        assert Thread.holdsLock(this);
        final Pooled<ByteBuffer> buffer = this.buffer;
        if (buffer != null && buffer.getResource().hasRemaining()) {
            return buffer;
        } else {
            if (buffer != null) send(false);
            return this.buffer = bufferWriterTask.getBuffer(false);
        }
    }

    /** {@inheritDoc} */
    public void write(final int b) throws IOException {
        synchronized (this) {
            checkClosed();
            getBuffer().getResource().put((byte) b);
        }
    }

    /** {@inheritDoc} */
    public void write(final byte[] b, int off, int len) throws IOException {
        synchronized (this) {
            checkClosed();
            // copy in chunks, rolling to a new buffer each time the current one fills
            while (len > 0) {
                final ByteBuffer buffer = getBuffer().getResource();
                final int cnt = Math.min(len, buffer.remaining());
                buffer.put(b, off, cnt);
                len -= cnt;
                off += cnt;
            }
        }
    }

    // Call with lock held.  Detaches the pending buffer and, if it holds any data,
    // flips and sends it.  On eof with no pending data, an empty buffer is acquired
    // and sent anyway so the consumer always observes the eof flag.
    private void send(boolean eof) throws IOException {
        assert Thread.holdsLock(this);
        assert !closed;
        final Pooled<ByteBuffer> pooledBuffer = buffer;
        final ByteBuffer buffer = pooledBuffer == null ? null : pooledBuffer.getResource();
        // detach before sending so a failing accept() cannot leave a half-sent buffer behind
        this.buffer = null;
        if (buffer != null && buffer.position() > 0) {
            buffer.flip();
            send(pooledBuffer, eof);
        } else if (eof) {
            Pooled<ByteBuffer> pooledBuffer1 = getBuffer();
            final ByteBuffer buffer1 = pooledBuffer1.getResource();
            buffer1.flip();
            send(pooledBuffer1, eof);
        }
    }

    // call with lock held; hands the buffer to the consumer, marking the stream closed on failure
    private void send(Pooled<ByteBuffer> buffer, boolean eof) throws IOException {
        assert Thread.holdsLock(this);
        try {
            bufferWriterTask.accept(buffer, eof);
        } catch (IOException e) {
            this.closed = true;
            throw e;
        }
    }

    /** {@inheritDoc} */
    public void flush() throws IOException {
        flush(false);
    }

    // Flushes pending data downstream; with eof set this is the close path.
    // A flush failure poisons the stream (closed, pending buffer dropped).
    private void flush(boolean eof) throws IOException {
        synchronized (this) {
            if (closed) {
                // flushing a closed stream is a no-op rather than an error
                return;
            }
            send(eof);
            try {
                bufferWriterTask.flush();
            } catch (IOException e) {
                closed = true;
                buffer = null;
                throw e;
            }
        }
    }

    /** {@inheritDoc} */
    public void close() throws IOException {
        synchronized (this) {
            if (closed) {
                // idempotent close
                return;
            }
            try {
                flush(true);
            } finally {
                // mark closed even if the final flush failed
                closed = true;
            }
        }
    }

    /**
     * Break the pipe and return any filling pooled buffer.  Sets the stream to an EOF condition.  Callers to this
     * method should ensure that any threads blocked on {@link BufferWriter#accept(org.xnio.Pooled, boolean)} are
     * unblocked, preferably with a {@link BrokenPipeException}.
     *
     * @return the current pooled buffer, or {@code null} if none was pending
     */
    public Pooled<ByteBuffer> breakPipe() {
        synchronized (this) {
            if (closed) {
                return null;
            }
            closed = true;
            try {
                return buffer;
            } finally {
                buffer = null;
            }
        }
    }

    /**
     * A buffer writer for an {@link BufferPipeOutputStream}.
     */
    public interface BufferWriter extends Flushable {

        /**
         * Get a new buffer to be filled.  The new buffer may, for example, include a prepended header.  This method
         * may block until a buffer is available or until some other condition, such as flow control, is met.
         *
         * @param firstBuffer {@code true} if this is the first buffer in the stream, {@code false} otherwise
         * @return the new buffer
         * @throws IOException if an I/O error occurs
         */
        Pooled<ByteBuffer> getBuffer(boolean firstBuffer) throws IOException;

        /**
         * Accept a buffer.  If this is the last buffer that will be sent, the {@code eof} flag will be set to {@code true}.
         * This method should block until the entire buffer is consumed, or an error occurs.  This method may also block
         * until some other condition, such as flow control, is met.
         *
         * @param pooledBuffer the buffer to send
         * @param eof {@code true} if this is the last buffer which will be sent
         * @throws IOException if an I/O error occurs
         */
        void accept(Pooled<ByteBuffer> pooledBuffer, boolean eof) throws IOException;

        /**
         * Flushes this stream by writing any buffered output to the underlying stream.  This method should block until
         * the data is fully flushed.  This method may also block until some other condition, such as flow control, is
         * met.
         *
         * @throws IOException If an I/O error occurs
         */
        void flush() throws IOException;
    }
}
/* ### * IP: GHIDRA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package db; import java.util.ArrayList; import ghidra.util.ObjectStorage; /** * <code>ObjectStorageAdapterDB</code> provides an ObjectStorage * implementation for use by Saveable objects. This allows Saveable objects * to save or restore their state using a fixed set of primitives and primitive arrays. * This implementation provides various data access methods for storing/retrieving data. * In addition, support is provided for utilizing a Record object for data storage * using a suitable schema. */ public class ObjectStorageAdapterDB implements ObjectStorage { private ArrayList<Field> fieldList = new ArrayList<Field>(); private int col = 0; private boolean readOnly = false; /** * Construct an empty writable storage adapter. */ public ObjectStorageAdapterDB() { } /** * Construct a read-only storage adapter from an * existing record. 
* @param rec data record */ public ObjectStorageAdapterDB(DBRecord rec) { readOnly = true; Field[] fields = rec.getFields(); for (int i = 0; i < fields.length; i++) { fieldList.add(fields[i]); } } @Override public void putInt(int value) { if (readOnly) throw new IllegalStateException(); fieldList.add(new IntField(value)); } @Override public void putByte(byte value) { if (readOnly) throw new IllegalStateException(); fieldList.add(new ByteField(value)); } @Override public void putShort(short value) { if (readOnly) throw new IllegalStateException(); fieldList.add(new ShortField(value)); } @Override public void putLong(long value) { if (readOnly) throw new IllegalStateException(); fieldList.add(new LongField(value)); } @Override public void putString(String value) { if (readOnly) throw new IllegalStateException(); fieldList.add(new StringField(value)); } @Override public void putBoolean(boolean value) { if (readOnly) throw new IllegalStateException(); fieldList.add(new BooleanField(value)); } @Override public void putFloat(float value) { if (readOnly) throw new IllegalStateException(); fieldList.add(new BinaryCodedField(value)); } @Override public void putDouble(double value) { if (readOnly) throw new IllegalStateException(); fieldList.add(new BinaryCodedField(value)); } @Override public int getInt() { try { return fieldList.get(col++).getIntValue(); } catch (IndexOutOfBoundsException e) { throw new IllegalFieldAccessException(); } } @Override public byte getByte() { try { return fieldList.get(col++).getByteValue(); } catch (IndexOutOfBoundsException e) { throw new IllegalFieldAccessException(); } } @Override public short getShort() { try { return fieldList.get(col++).getShortValue(); } catch (IndexOutOfBoundsException e) { throw new IllegalFieldAccessException(); } } @Override public long getLong() { try { return fieldList.get(col++).getLongValue(); } catch (IndexOutOfBoundsException e) { throw new IllegalFieldAccessException(); } } @Override public boolean 
getBoolean() { try { return fieldList.get(col++).getBooleanValue(); } catch (IndexOutOfBoundsException e) { throw new IllegalFieldAccessException(); } } @Override public String getString() { try { return fieldList.get(col++).getString(); } catch (IndexOutOfBoundsException e) { throw new IllegalFieldAccessException(); } } @Override public float getFloat() { try { BinaryCodedField codedField = new BinaryCodedField((BinaryField) fieldList.get(col++)); return codedField.getFloatValue(); } catch (IndexOutOfBoundsException e) { throw new IllegalFieldAccessException(); } } @Override public double getDouble() { try { BinaryCodedField codedField = new BinaryCodedField((BinaryField) fieldList.get(col++)); return codedField.getDoubleValue(); } catch (IndexOutOfBoundsException e) { throw new IllegalFieldAccessException(); } } @Override public void putInts(int[] value) { if (readOnly) throw new IllegalStateException(); fieldList.add(new BinaryCodedField(value)); } @Override public void putBytes(byte[] value) { if (readOnly) throw new IllegalStateException(); fieldList.add(new BinaryCodedField(value)); } @Override public void putShorts(short[] value) { if (readOnly) throw new IllegalStateException(); fieldList.add(new BinaryCodedField(value)); } @Override public void putLongs(long[] value) { if (readOnly) throw new IllegalStateException(); fieldList.add(new BinaryCodedField(value)); } @Override public void putFloats(float[] value) { if (readOnly) throw new IllegalStateException(); fieldList.add(new BinaryCodedField(value)); } @Override public void putDoubles(double[] value) { if (readOnly) throw new IllegalStateException(); fieldList.add(new BinaryCodedField(value)); } @Override public void putStrings(String[] value) { if (readOnly) throw new IllegalStateException(); fieldList.add(new BinaryCodedField(value)); } @Override public int[] getInts() { try { BinaryCodedField codedField = new BinaryCodedField((BinaryField) fieldList.get(col++)); return codedField.getIntArray(); } catch 
(IndexOutOfBoundsException e) { throw new IllegalFieldAccessException(); } } @Override public byte[] getBytes() { try { BinaryCodedField codedField = new BinaryCodedField((BinaryField) fieldList.get(col++)); return codedField.getByteArray(); } catch (IndexOutOfBoundsException e) { throw new IllegalFieldAccessException(); } } @Override public short[] getShorts() { try { BinaryCodedField codedField = new BinaryCodedField((BinaryField) fieldList.get(col++)); return codedField.getShortArray(); } catch (IndexOutOfBoundsException e) { throw new IllegalFieldAccessException(); } } @Override public long[] getLongs() { try { BinaryCodedField codedField = new BinaryCodedField((BinaryField) fieldList.get(col++)); return codedField.getLongArray(); } catch (IndexOutOfBoundsException e) { throw new IllegalFieldAccessException(); } } @Override public float[] getFloats() { try { BinaryCodedField codedField = new BinaryCodedField((BinaryField) fieldList.get(col++)); return codedField.getFloatArray(); } catch (IndexOutOfBoundsException e) { throw new IllegalFieldAccessException(); } } @Override public double[] getDoubles() { try { BinaryCodedField codedField = new BinaryCodedField((BinaryField) fieldList.get(col++)); return codedField.getDoubleArray(); } catch (IndexOutOfBoundsException e) { throw new IllegalFieldAccessException(); } } @Override public String[] getStrings() { try { BinaryCodedField codedField = new BinaryCodedField((BinaryField) fieldList.get(col++)); return codedField.getStringArray(); } catch (IndexOutOfBoundsException e) { throw new IllegalFieldAccessException(); } } /** * Get the Schema associated with the stored data. 
* @param version version to be assigned to schema instance * @return Schema */ public Schema getSchema(int version) { Field[] fields = new Field[fieldList.size()]; String[] fieldNames = new String[fields.length]; for (int i = 0; i < fields.length; i++) { fields[i] = fieldList.get(i).newField(); fieldNames[i] = Integer.toString(i); } return new Schema(version, "key", fields, fieldNames); } /** * Save data into a Record. * @param rec database record. */ public void save(DBRecord rec) { int cnt = fieldList.size(); for (int i = 0; i < cnt; i++) { rec.setField(i, fieldList.get(i)); } } }
package jp.sourceforge.ea2ddl.dao.bsentity; import java.io.Serializable; import java.util.*; import org.seasar.dbflute.Entity; import org.seasar.dbflute.dbmeta.DBMeta; import jp.sourceforge.ea2ddl.dao.allcommon.DBMetaInstanceHandler; /** * The entity of t_constants that the type is TABLE. <br /> * <pre> * [primary-key] * * * [column] * ConstantName, ConstantValue * * [sequence] * * * [identity] * * * [version-no] * * * [foreign-table] * * * [referrer-table] * * * [foreign-property] * * * [referrer-property] * * </pre> * @author DBFlute(AutoGenerator) */ public abstract class BsTConstants implements Entity, Serializable { // =================================================================================== // Definition // ========== /** Serial version UID. (Default) */ private static final long serialVersionUID = 1L; // =================================================================================== // Attribute // ========= // ----------------------------------------------------- // Column // ------ /** ConstantName: {UQ : VARCHAR(50)} */ protected String _constantname; /** ConstantValue: {VARCHAR(255)} */ protected String _constantvalue; // ----------------------------------------------------- // Internal // -------- /** The attribute of entity modified properties. 
(for S2Dao) */ protected EntityModifiedProperties _modifiedProperties = newEntityModifiedProperties(); // =================================================================================== // Table Name // ========== public String getTableDbName() { return "t_constants"; } public String getTablePropertyName() { // as JavaBeansRule return "TConstants"; } // =================================================================================== // DBMeta // ====== public DBMeta getDBMeta() { return DBMetaInstanceHandler.findDBMeta(getTableDbName()); } // =================================================================================== // Classification Classifying // ========================== // =================================================================================== // Classification Determination // ============================ // =================================================================================== // Classification Name/Alias // ========================= // =================================================================================== // Foreign Property // ================ // =================================================================================== // Referrer Property // ================= // =================================================================================== // Determination // ============= public boolean hasPrimaryKeyValue() { return false; } // =================================================================================== // Modified Properties // =================== public Set<String> getModifiedPropertyNames() { return _modifiedProperties.getPropertyNames(); } protected EntityModifiedProperties newEntityModifiedProperties() { return new EntityModifiedProperties(); } public void clearModifiedPropertyNames() { _modifiedProperties.clear(); } public boolean hasModification() { return !_modifiedProperties.isEmpty(); } // 
=================================================================================== // Basic Override // ============== /** * If the all-column value of the other is same as this one, returns true. * @param other Other entity. (Nullable) * @return Comparing result. If other is null, returns false. */ public boolean equals(Object other) { if (other == null || !(other instanceof BsTConstants)) { return false; } final BsTConstants otherEntity = (BsTConstants)other; if (!helpComparingValue(getConstantname(), otherEntity.getConstantname())) { return false; } if (!helpComparingValue(getConstantvalue(), otherEntity.getConstantvalue())) { return false; } return true; } protected boolean helpComparingValue(Object value1, Object value2) { if (value1 == null && value2 == null) { return true; } return value1 != null && value2 != null && value1.equals(value2); } /** * Calculates hash-code from all columns. * @return Hash-code from all-columns. */ public int hashCode() { int result = 17; if (this.getConstantname() != null) { result = result + this.getConstantname().hashCode(); } if (this.getConstantvalue() != null) { result = result + this.getConstantvalue().hashCode(); } return result; } /** * @return The view string of columns. (NotNull) */ public String toString() { String delimiter = ","; StringBuilder sb = new StringBuilder(); sb.append(delimiter).append(getConstantname()); sb.append(delimiter).append(getConstantvalue()); if (sb.length() > 0) { sb.delete(0, delimiter.length()); } sb.insert(0, "{").append("}"); return sb.toString(); } // =================================================================================== // Accessor // ======== /** * ConstantName: {UQ : VARCHAR(50)} <br /> * @return The value of the column 'ConstantName'. (Nullable) */ public String getConstantname() { return _constantname; } /** * ConstantName: {UQ : VARCHAR(50)} <br /> * @param constantname The value of the column 'ConstantName'. 
(Nullable) */ public void setConstantname(String constantname) { _modifiedProperties.addPropertyName("constantname"); this._constantname = constantname; } /** * ConstantValue: {VARCHAR(255)} <br /> * @return The value of the column 'ConstantValue'. (Nullable) */ public String getConstantvalue() { return _constantvalue; } /** * ConstantValue: {VARCHAR(255)} <br /> * @param constantvalue The value of the column 'ConstantValue'. (Nullable) */ public void setConstantvalue(String constantvalue) { _modifiedProperties.addPropertyName("constantvalue"); this._constantvalue = constantvalue; } }
/*
 * Copyright (C) 2015 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.google.cloud.dataflow.sdk.util;

import com.google.common.base.Joiner;
import com.google.common.base.Supplier;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multimaps;
import com.google.common.collect.Sets;
import com.google.common.reflect.ClassPath;
import com.google.common.reflect.ClassPath.ClassInfo;
import com.google.common.reflect.Invokable;
import com.google.common.reflect.Parameter;
import com.google.common.reflect.TypeToken;

import java.io.IOException;
import java.lang.annotation.Annotation;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.GenericArrayType;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.lang.reflect.TypeVariable;
import java.lang.reflect.WildcardType;

import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.regex.Pattern;

/**
 * Represents the API surface of a package prefix. Used for accessing public classes,
 * methods, and the types they reference, to control what dependencies are re-exported.
 *
 * <p>For the purposes of calculating the public API surface, exposure includes any public
 * or protected occurrence of:
 *
 * <ul>
 * <li>superclasses
 * <li>interfaces implemented
 * <li>actual type arguments to generic types
 * <li>array component types
 * <li>method return types
 * <li>method parameter types
 * <li>type variable bounds
 * <li>wildcard bounds
 * </ul>
 *
 * <p>Exposure is a transitive property. The resulting map excludes primitives
 * and array classes themselves.
 *
 * <p>It is prudent (though not required) to prune prefixes like "java" via the builder
 * method {@link #pruningPrefix} to halt the traversal so it does not uselessly catalog references
 * that are not interesting.
 */
@SuppressWarnings("rawtypes")
public class ApiSurface {

  /**
   * Returns an empty {@link ApiSurface}.
   */
  public static ApiSurface empty() {
    return new ApiSurface(Collections.<Class<?>>emptySet(), Collections.<Pattern>emptySet());
  }

  /**
   * Returns an {@link ApiSurface} object representing the given package and all subpackages.
   */
  public static ApiSurface ofPackage(String packageName) throws IOException {
    return ApiSurface.empty().includingPackage(packageName);
  }

  /**
   * Returns an {@link ApiSurface} object representing just the surface of the given class.
   */
  public static ApiSurface ofClass(Class<?> clazz) {
    return ApiSurface.empty().includingClass(clazz);
  }

  /**
   * Returns an {@link ApiSurface} like this one, but also including the named
   * package and all of its subpackages. Only top-level classes with public or
   * protected access become root classes.
   */
  public ApiSurface includingPackage(String packageName) throws IOException {
    ClassPath classPath = ClassPath.from(ClassLoader.getSystemClassLoader());
    Set<Class<?>> newRootClasses = Sets.newHashSet();
    newRootClasses.addAll(rootClasses);
    for (ClassInfo classInfo : classPath.getTopLevelClassesRecursive(packageName)) {
      Class clazz = classInfo.load();
      if (exposed(clazz.getModifiers())) {
        newRootClasses.add(clazz);
      }
    }
    // ApiSurface is immutable: every builder method returns a new instance.
    return new ApiSurface(newRootClasses, patternsToPrune);
  }

  /**
   * Returns an {@link ApiSurface} like this one, but also including the given class.
   */
  public ApiSurface includingClass(Class<?> clazz) {
    Set<Class<?>> newRootClasses = Sets.newHashSet();
    newRootClasses.addAll(rootClasses);
    newRootClasses.add(clazz);
    return new ApiSurface(newRootClasses, patternsToPrune);
  }

  /**
   * Returns an {@link ApiSurface} like this one, but pruning transitive
   * references from classes whose full name (including package) begins with the provided prefix.
   */
  public ApiSurface pruningPrefix(String prefix) {
    return pruningPattern(Pattern.compile(Pattern.quote(prefix) + ".*"));
  }

  /**
   * Returns an {@link ApiSurface} like this one, but pruning references from the named
   * class.
   */
  public ApiSurface pruningClassName(String className) {
    return pruningPattern(Pattern.compile(Pattern.quote(className)));
  }

  /**
   * Returns an {@link ApiSurface} like this one, but pruning references from the
   * provided class.
   */
  public ApiSurface pruningClass(Class<?> clazz) {
    return pruningClassName(clazz.getName());
  }

  /**
   * Returns an {@link ApiSurface} like this one, but pruning transitive
   * references from classes whose full name (including package) matches the provided pattern.
   */
  public ApiSurface pruningPattern(Pattern pattern) {
    Set<Pattern> newPatterns = Sets.newHashSet();
    newPatterns.addAll(patternsToPrune);
    newPatterns.add(pattern);
    return new ApiSurface(rootClasses, newPatterns);
  }

  /**
   * See {@link #pruningPattern(Pattern)}.
   */
  public ApiSurface pruningPattern(String patternString) {
    return pruningPattern(Pattern.compile(patternString));
  }

  /**
   * Returns all public classes originally belonging to the package
   * in the {@link ApiSurface}.
   */
  public Set<Class<?>> getRootClasses() {
    return rootClasses;
  }

  /**
   * Returns exposed types in this set, including arrays and primitives as
   * specified.
   */
  public Set<Class<?>> getExposedClasses() {
    return getExposedToExposers().keySet();
  }

  /**
   * Returns a path from an exposed class to a root class. There may be many, but this
   * gives only one.
   *
   * <p>If there are only cycles, with no path back to a root class, throws
   * IllegalStateException.
   */
  public List<Class<?>> getAnyExposurePath(Class<?> exposedClass) {
    Set<Class<?>> excluded = Sets.newHashSet();
    excluded.add(exposedClass);
    List<Class<?>> path = getAnyExposurePath(exposedClass, excluded);
    if (path == null) {
      throw new IllegalArgumentException(
          "Class " + exposedClass + " has no path back to any root class."
          + " It should never have been considered exposed.");
    } else {
      return path;
    }
  }

  /**
   * Returns a path from an exposed class to a root class. There may be many, but this
   * gives only one. It will not return a path that crosses the excluded classes.
   *
   * <p>If there are only cycles or paths through the excluded classes, returns null.
   *
   * <p>If the class is not actually in the exposure map, throws IllegalArgumentException
   */
  private List<Class<?>> getAnyExposurePath(Class<?> exposedClass, Set<Class<?>> excluded) {
    List<Class<?>> exposurePath = Lists.newArrayList();
    exposurePath.add(exposedClass);

    Collection<Class<?>> exposers = getExposedToExposers().get(exposedClass);
    if (exposers.isEmpty()) {
      throw new IllegalArgumentException("Class " + exposedClass + " is not exposed.");
    }

    for (Class<?> exposer : exposers) {
      if (excluded.contains(exposer)) {
        continue;
      }

      // A null exposer means this is already a root class.
      if (exposer == null) {
        return exposurePath;
      }

      // Recurse, adding the current exposer to the excluded set so the search
      // cannot loop through it again.
      List<Class<?>> restOfPath = getAnyExposurePath(
          exposer, Sets.union(excluded, Sets.newHashSet(exposer)));

      if (restOfPath != null) {
        exposurePath.addAll(restOfPath);
        return exposurePath;
      }
    }

    return null;
  }

  ////////////////////////////////////////////////////////////////////

  // Fields initialized upon construction
  private final Set<Class<?>> rootClasses;
  private final Set<Pattern> patternsToPrune;

  // Fields computed on-demand
  private Multimap<Class<?>, Class<?>> exposedToExposers = null;
  private Pattern prunedPattern = null;
  private Set<Type> visited = null;

  private ApiSurface(Set<Class<?>> rootClasses, Set<Pattern> patternsToPrune) {
    this.rootClasses = rootClasses;
    this.patternsToPrune = patternsToPrune;
  }

  /**
   * A map from exposed types to places where they are exposed, in the sense of being a part
   * of a public-facing API surface.
   *
   * <p>This map is the adjacency list representation of a directed graph, where an edge from type
   * {@code T1} to type {@code T2} indicates that {@code T2} directly exposes {@code T1} in its API
   * surface.
   *
   * <p>The traversal methods in this class are designed to avoid repeatedly processing types, since
   * there will almost always be cyclic references.
   */
  private Multimap<Class<?>, Class<?>> getExposedToExposers() {
    if (exposedToExposers == null) {
      constructExposedToExposers();
    }
    return exposedToExposers;
  }

  /**
   * See {@link #getExposedToExposers}.
   */
  private void constructExposedToExposers() {
    visited = Sets.newHashSet();
    exposedToExposers = Multimaps.newSetMultimap(
        Maps.<Class<?>, Collection<Class<?>>>newHashMap(),
        new Supplier<Set<Class<?>>>() {
          @Override
          public Set<Class<?>> get() {
            return Sets.newHashSet();
          }
        });

    // Root classes are recorded with a null cause; see getAnyExposurePath.
    for (Class<?> clazz : rootClasses) {
      addExposedTypes(clazz, null);
    }
  }

  /**
   * A combined {@code Pattern} that implements all the pruning specified.
   */
  private Pattern getPrunedPattern() {
    if (prunedPattern == null) {
      constructPrunedPattern();
    }
    return prunedPattern;
  }

  /**
   * See {@link #getPrunedPattern}.
   */
  private void constructPrunedPattern() {
    Set<String> prunedPatternStrings = Sets.newHashSet();
    for (Pattern patternToPrune : patternsToPrune) {
      prunedPatternStrings.add(patternToPrune.pattern());
    }
    // Union of all pruning patterns as alternated groups: (p1)|(p2)|...
    prunedPattern = Pattern.compile("(" + Joiner.on(")|(").join(prunedPatternStrings) + ")");
  }

  /**
   * Whether a type and all that it references should be pruned from the graph.
   */
  private boolean pruned(Type type) {
    return pruned(TypeToken.of(type).getRawType());
  }

  /**
   * Whether a class and all that it references should be pruned from the graph.
   */
  private boolean pruned(Class<?> clazz) {
    return clazz.isPrimitive()
        || clazz.isArray()
        || getPrunedPattern().matcher(clazz.getName()).matches();
  }

  /**
   * Whether a type has already been sufficiently processed.
   */
  private boolean done(Type type) {
    return visited.contains(type);
  }

  private void recordExposure(Class<?> exposed, Class<?> cause) {
    exposedToExposers.put(exposed, cause);
  }

  private void recordExposure(Type exposed, Class<?> cause) {
    exposedToExposers.put(TypeToken.of(exposed).getRawType(), cause);
  }

  private void visit(Type type) {
    visited.add(type);
  }

  /**
   * See {@link #addExposedTypes(Type, Class)}.
   */
  private void addExposedTypes(TypeToken type, Class<?> cause) {
    addExposedTypes(type.getType(), cause);
  }

  /**
   * Adds any references learned by following a link from {@code cause} to {@code type}.
   * This will dispatch according to the concrete {@code Type} implementation. See the
   * other overloads of {@code addExposedTypes} for their details.
   */
  private void addExposedTypes(Type type, Class<?> cause) {
    if (type instanceof TypeVariable) {
      addExposedTypes((TypeVariable) type, cause);
    } else if (type instanceof WildcardType) {
      addExposedTypes((WildcardType) type, cause);
    } else if (type instanceof GenericArrayType) {
      addExposedTypes((GenericArrayType) type, cause);
    } else if (type instanceof ParameterizedType) {
      addExposedTypes((ParameterizedType) type, cause);
    } else if (type instanceof Class) {
      addExposedTypes((Class) type, cause);
    } else {
      throw new IllegalArgumentException("Unknown implementation of Type");
    }
  }

  /**
   * Adds any types exposed to this set. These will
   * come from the (possibly absent) bounds on the
   * type variable.
   */
  private void addExposedTypes(TypeVariable type, Class<?> cause) {
    if (done(type)) {
      return;
    }
    visit(type);
    for (Type bound : type.getBounds()) {
      addExposedTypes(bound, cause);
    }
  }

  /**
   * Adds any types exposed to this set. These will come from the (possibly absent) bounds on the
   * wildcard.
   *
   * <p>NOTE(review): unlike the other overloads there is no done(type) short-circuit here;
   * termination appears to rely on the done-checks of the bound types themselves — confirm.
   */
  private void addExposedTypes(WildcardType type, Class<?> cause) {
    visit(type);
    for (Type lowerBound : type.getLowerBounds()) {
      addExposedTypes(lowerBound, cause);
    }
    for (Type upperBound : type.getUpperBounds()) {
      addExposedTypes(upperBound, cause);
    }
  }

  /**
   * Adds any types exposed from the given array type. The array type itself is not added. The
   * cause of the exposure of the underlying type is considered whatever type exposed the array
   * type.
   */
  private void addExposedTypes(GenericArrayType type, Class<?> cause) {
    if (done(type)) {
      return;
    }
    visit(type);
    addExposedTypes(type.getGenericComponentType(), cause);
  }

  /**
   * Adds any types exposed to this set. Even if the
   * root type is to be pruned, the actual type arguments
   * are processed.
   */
  private void addExposedTypes(ParameterizedType type, Class<?> cause) {
    // Even if the type is already done, this link to it may be new
    boolean alreadyDone = done(type);
    if (!pruned(type)) {
      visit(type);
      recordExposure(type, cause);
    }
    if (alreadyDone) {
      return;
    }

    // For a parameterized type, pruning does not take place
    // here, only for the raw class.
    // The type parameters themselves may not be pruned,
    // for example with List<MyApiType> probably the
    // standard List is pruned, but MyApiType is not.
    addExposedTypes(type.getRawType(), cause);
    for (Type typeArg : type.getActualTypeArguments()) {
      addExposedTypes(typeArg, cause);
    }
  }

  /**
   * Adds a class and all of the types it exposes. The cause
   * of the class being exposed is given, and the cause
   * of everything within the class is that class itself.
   */
  private void addExposedTypes(Class<?> clazz, Class<?> cause) {
    if (pruned(clazz)) {
      return;
    }
    // Even if `clazz` has been visited, the link from `cause` may be new
    boolean alreadyDone = done(clazz);
    visit(clazz);
    recordExposure(clazz, cause);
    if (alreadyDone || pruned(clazz)) {
      return;
    }

    // Supertypes (classes and interfaces), excluding the class itself.
    TypeToken<?> token = TypeToken.of(clazz);
    for (TypeToken<?> superType : token.getTypes()) {
      if (!superType.equals(token)) {
        addExposedTypes(superType, clazz);
      }
    }

    for (Class innerClass : clazz.getDeclaredClasses()) {
      if (exposed(innerClass.getModifiers())) {
        addExposedTypes(innerClass, clazz);
      }
    }

    for (Field field : clazz.getDeclaredFields()) {
      if (exposed(field.getModifiers())) {
        addExposedTypes(field, clazz);
      }
    }

    for (Invokable invokable : getExposedInvokables(token)) {
      addExposedTypes(invokable, clazz);
    }
  }

  // Exposes an invokable's return type, annotations, parameters, and declared exceptions.
  private void addExposedTypes(Invokable<?, ?> invokable, Class<?> cause) {
    addExposedTypes(invokable.getReturnType(), cause);
    for (Annotation annotation : invokable.getAnnotations()) {
      addExposedTypes(annotation.annotationType(), cause);
    }

    for (Parameter parameter : invokable.getParameters()) {
      addExposedTypes(parameter, cause);
    }

    for (TypeToken<?> exceptionType : invokable.getExceptionTypes()) {
      addExposedTypes(exceptionType, cause);
    }
  }

  // Exposes a parameter's type and its annotation types.
  private void addExposedTypes(Parameter parameter, Class<?> cause) {
    addExposedTypes(parameter.getType(), cause);
    for (Annotation annotation : parameter.getAnnotations()) {
      addExposedTypes(annotation.annotationType(), cause);
    }
  }

  // Exposes a field's generic type and its declared annotation types.
  private void addExposedTypes(Field field, Class<?> cause) {
    addExposedTypes(field.getGenericType(), cause);
    for (Annotation annotation : field.getDeclaredAnnotations()) {
      addExposedTypes(annotation.annotationType(), cause);
    }
  }

  /**
   * Returns an {@link Invokable} for each public or protected method or constructor of a type.
   */
  private Set<Invokable> getExposedInvokables(TypeToken type) {
    Set<Invokable> invokables = Sets.newHashSet();

    for (Constructor constructor : type.getRawType().getConstructors()) {
      if (0 != (constructor.getModifiers() & (Modifier.PUBLIC | Modifier.PROTECTED))) {
        invokables.add(type.constructor(constructor));
      }
    }

    for (Method method : type.getRawType().getMethods()) {
      if (0 != (method.getModifiers() & (Modifier.PUBLIC | Modifier.PROTECTED))) {
        invokables.add(type.method(method));
      }
    }

    return invokables;
  }

  /**
   * Returns true if the given modifier bitmap indicates exposure (public or protected access).
   */
  private boolean exposed(int modifiers) {
    return 0 != (modifiers & (Modifier.PUBLIC | Modifier.PROTECTED));
  }

  ////////////////////////////////////////////////////////////////////////////

  // The SDK's own API surface with test classes and known external dependencies pruned.
  public static ApiSurface getSdkApiSurface() throws IOException {
    return ApiSurface.ofPackage("com.google.cloud.dataflow")
        .pruningPattern("com[.]google[.]cloud[.]dataflow.*Test")
        .pruningPrefix("java")
        .pruningPrefix("com.google.api")
        .pruningPrefix("com.google.protobuf")
        .pruningPrefix("org.joda.time")
        .pruningPrefix("org.apache.avro")
        .pruningPrefix("org.junit")
        .pruningPrefix("com.fasterxml.jackson.annotation");
  }

  // Prints the SDK's exposed class names, sorted, one per line.
  public static void main(String[] args) throws Exception {
    List<String> names = Lists.newArrayList();
    for (Class clazz : getSdkApiSurface().getExposedClasses()) {
      names.add(clazz.getName());
    }
    List<String> sortedNames = Lists.newArrayList(names);
    Collections.sort(sortedNames);

    for (String name : sortedNames) {
      System.out.println(name);
    }
  }
}
/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.thrift2;

import static org.apache.hadoop.hbase.util.Bytes.getBytes;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.commons.collections.MapUtils;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.OperationWithAttributes;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Scan.ReadType;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.ParseFilter;
import org.apache.hadoop.hbase.security.visibility.Authorizations;
import org.apache.hadoop.hbase.security.visibility.CellVisibility;
import org.apache.hadoop.hbase.thrift2.generated.TAppend;
import org.apache.hadoop.hbase.thrift2.generated.TColumn;
import org.apache.hadoop.hbase.thrift2.generated.TColumnIncrement;
import org.apache.hadoop.hbase.thrift2.generated.TColumnValue;
import org.apache.hadoop.hbase.thrift2.generated.TCompareOp;
import org.apache.hadoop.hbase.thrift2.generated.TDelete;
import org.apache.hadoop.hbase.thrift2.generated.TDeleteType;
import org.apache.hadoop.hbase.thrift2.generated.TDurability;
import org.apache.hadoop.hbase.thrift2.generated.TGet;
import org.apache.hadoop.hbase.thrift2.generated.THRegionInfo;
import org.apache.hadoop.hbase.thrift2.generated.THRegionLocation;
import org.apache.hadoop.hbase.thrift2.generated.TIncrement;
import org.apache.hadoop.hbase.thrift2.generated.TMutation;
import org.apache.hadoop.hbase.thrift2.generated.TPut;
import org.apache.hadoop.hbase.thrift2.generated.TReadType;
import org.apache.hadoop.hbase.thrift2.generated.TResult;
import org.apache.hadoop.hbase.thrift2.generated.TRowMutations;
import org.apache.hadoop.hbase.thrift2.generated.TScan;
import org.apache.hadoop.hbase.thrift2.generated.TServerName;
import org.apache.hadoop.hbase.thrift2.generated.TTimeRange;
import org.apache.hadoop.hbase.util.Bytes;

// Static converter methods between thrift2-generated types and HBase client types.
@InterfaceAudience.Private
public class ThriftUtilities {

  // Utility class: not instantiable.
  private ThriftUtilities() {
    throw new UnsupportedOperationException("Can't initialize class");
  }

  /**
   * Creates a {@link Get} (HBase) from a {@link TGet} (Thrift).
   *
   * This ignores any timestamps set on {@link TColumn} objects.
   *
   * @param in the <code>TGet</code> to convert
   *
   * @return <code>Get</code> object
   *
   * @throws IOException if an invalid time range or max version parameter is given
   */
  public static Get getFromThrift(TGet in) throws IOException {
    Get out = new Get(in.getRow());

    // Timestamp overwrites time range if both are set
    if (in.isSetTimestamp()) {
      out.setTimeStamp(in.getTimestamp());
    } else if (in.isSetTimeRange()) {
      out.setTimeRange(in.getTimeRange().getMinStamp(), in.getTimeRange().getMaxStamp());
    }

    if (in.isSetMaxVersions()) {
      out.setMaxVersions(in.getMaxVersions());
    }

    if (in.isSetFilterString()) {
      ParseFilter parseFilter = new ParseFilter();
      out.setFilter(parseFilter.parseFilterString(in.getFilterString()));
    }

    if (in.isSetAttributes()) {
      // addAttributes is a helper defined elsewhere in this class (outside this view).
      addAttributes(out, in.getAttributes());
    }

    if (in.isSetAuthorizations()) {
      out.setAuthorizations(new Authorizations(in.getAuthorizations().getLabels()));
    }

    // No explicit column selection: fetch all families.
    if (!in.isSetColumns()) {
      return out;
    }

    // Family + qualifier selects one column; family alone selects the whole family.
    for (TColumn column : in.getColumns()) {
      if (column.isSetQualifier()) {
        out.addColumn(column.getFamily(), column.getQualifier());
      } else {
        out.addFamily(column.getFamily());
      }
    }

    return out;
  }

  /**
   * Converts multiple {@link TGet}s (Thrift) into a list of {@link Get}s (HBase).
   *
   * @param in list of <code>TGet</code>s to convert
   *
   * @return list of <code>Get</code> objects
   *
   * @throws IOException if an invalid time range or max version parameter is given
   * @see #getFromThrift(TGet)
   */
  public static List<Get> getsFromThrift(List<TGet> in) throws IOException {
    List<Get> out = new ArrayList<>(in.size());
    for (TGet get : in) {
      out.add(getFromThrift(get));
    }
    return out;
  }

  /**
   * Creates a {@link TResult} (Thrift) from a {@link Result} (HBase).
* * @param in the <code>Result</code> to convert * * @return converted result, returns an empty result if the input is <code>null</code> */ public static TResult resultFromHBase(Result in) { Cell[] raw = in.rawCells(); TResult out = new TResult(); byte[] row = in.getRow(); if (row != null) { out.setRow(in.getRow()); } List<TColumnValue> columnValues = new ArrayList<>(raw.length); for (Cell kv : raw) { TColumnValue col = new TColumnValue(); col.setFamily(CellUtil.cloneFamily(kv)); col.setQualifier(CellUtil.cloneQualifier(kv)); col.setTimestamp(kv.getTimestamp()); col.setValue(CellUtil.cloneValue(kv)); if (kv.getTagsLength() > 0) { col.setTags(CellUtil.getTagArray(kv)); } columnValues.add(col); } out.setColumnValues(columnValues); return out; } /** * Converts multiple {@link Result}s (HBase) into a list of {@link TResult}s (Thrift). * * @param in array of <code>Result</code>s to convert * * @return list of converted <code>TResult</code>s * * @see #resultFromHBase(Result) */ public static List<TResult> resultsFromHBase(Result[] in) { List<TResult> out = new ArrayList<>(in.length); for (Result result : in) { out.add(resultFromHBase(result)); } return out; } /** * Creates a {@link Put} (HBase) from a {@link TPut} (Thrift) * * @param in the <code>TPut</code> to convert * * @return converted <code>Put</code> */ public static Put putFromThrift(TPut in) { Put out; if (in.isSetTimestamp()) { out = new Put(in.getRow(), in.getTimestamp()); } else { out = new Put(in.getRow()); } if (in.isSetDurability()) { out.setDurability(durabilityFromThrift(in.getDurability())); } for (TColumnValue columnValue : in.getColumnValues()) { if (columnValue.isSetTimestamp()) { out.addImmutable( columnValue.getFamily(), columnValue.getQualifier(), columnValue.getTimestamp(), columnValue.getValue()); } else { out.addImmutable( columnValue.getFamily(), columnValue.getQualifier(), columnValue.getValue()); } } if (in.isSetAttributes()) { addAttributes(out,in.getAttributes()); } if 
(in.getCellVisibility() != null) { out.setCellVisibility(new CellVisibility(in.getCellVisibility().getExpression())); } return out; } /** * Converts multiple {@link TPut}s (Thrift) into a list of {@link Put}s (HBase). * * @param in list of <code>TPut</code>s to convert * * @return list of converted <code>Put</code>s * * @see #putFromThrift(TPut) */ public static List<Put> putsFromThrift(List<TPut> in) { List<Put> out = new ArrayList<>(in.size()); for (TPut put : in) { out.add(putFromThrift(put)); } return out; } /** * Creates a {@link Delete} (HBase) from a {@link TDelete} (Thrift). * * @param in the <code>TDelete</code> to convert * * @return converted <code>Delete</code> */ public static Delete deleteFromThrift(TDelete in) { Delete out; if (in.isSetColumns()) { out = new Delete(in.getRow()); for (TColumn column : in.getColumns()) { if (column.isSetQualifier()) { if (column.isSetTimestamp()) { if (in.isSetDeleteType() && in.getDeleteType().equals(TDeleteType.DELETE_COLUMNS)) out.addColumns(column.getFamily(), column.getQualifier(), column.getTimestamp()); else out.addColumn(column.getFamily(), column.getQualifier(), column.getTimestamp()); } else { if (in.isSetDeleteType() && in.getDeleteType().equals(TDeleteType.DELETE_COLUMNS)) out.addColumns(column.getFamily(), column.getQualifier()); else out.addColumn(column.getFamily(), column.getQualifier()); } } else { if (column.isSetTimestamp()) { out.addFamily(column.getFamily(), column.getTimestamp()); } else { out.addFamily(column.getFamily()); } } } } else { if (in.isSetTimestamp()) { out = new Delete(in.getRow(), in.getTimestamp()); } else { out = new Delete(in.getRow()); } } if (in.isSetAttributes()) { addAttributes(out,in.getAttributes()); } if (in.isSetDurability()) { out.setDurability(durabilityFromThrift(in.getDurability())); } return out; } /** * Converts multiple {@link TDelete}s (Thrift) into a list of {@link Delete}s (HBase). 
* * @param in list of <code>TDelete</code>s to convert * * @return list of converted <code>Delete</code>s * * @see #deleteFromThrift(TDelete) */ public static List<Delete> deletesFromThrift(List<TDelete> in) { List<Delete> out = new ArrayList<>(in.size()); for (TDelete delete : in) { out.add(deleteFromThrift(delete)); } return out; } public static TDelete deleteFromHBase(Delete in) { TDelete out = new TDelete(ByteBuffer.wrap(in.getRow())); List<TColumn> columns = new ArrayList<>(in.getFamilyCellMap().entrySet().size()); long rowTimestamp = in.getTimeStamp(); if (rowTimestamp != HConstants.LATEST_TIMESTAMP) { out.setTimestamp(rowTimestamp); } // Map<family, List<KeyValue>> for (Map.Entry<byte[], List<org.apache.hadoop.hbase.Cell>> familyEntry: in.getFamilyCellMap().entrySet()) { TColumn column = new TColumn(ByteBuffer.wrap(familyEntry.getKey())); for (org.apache.hadoop.hbase.Cell cell: familyEntry.getValue()) { byte[] family = CellUtil.cloneFamily(cell); byte[] qualifier = CellUtil.cloneQualifier(cell); long timestamp = cell.getTimestamp(); if (family != null) { column.setFamily(family); } if (qualifier != null) { column.setQualifier(qualifier); } if (timestamp != HConstants.LATEST_TIMESTAMP) { column.setTimestamp(timestamp); } } columns.add(column); } out.setColumns(columns); return out; } /** * Creates a {@link RowMutations} (HBase) from a {@link TRowMutations} (Thrift) * * @param in the <code>TRowMutations</code> to convert * * @return converted <code>RowMutations</code> */ public static RowMutations rowMutationsFromThrift(TRowMutations in) throws IOException { List<TMutation> mutations = in.getMutations(); RowMutations out = new RowMutations(in.getRow(), mutations.size()); for (TMutation mutation : mutations) { if (mutation.isSetPut()) { out.add(putFromThrift(mutation.getPut())); } if (mutation.isSetDeleteSingle()) { out.add(deleteFromThrift(mutation.getDeleteSingle())); } } return out; } public static Scan scanFromThrift(TScan in) throws IOException { Scan out 
= new Scan(); if (in.isSetStartRow()) out.setStartRow(in.getStartRow()); if (in.isSetStopRow()) out.setStopRow(in.getStopRow()); if (in.isSetCaching()) out.setCaching(in.getCaching()); if (in.isSetMaxVersions()) { out.setMaxVersions(in.getMaxVersions()); } if (in.isSetColumns()) { for (TColumn column : in.getColumns()) { if (column.isSetQualifier()) { out.addColumn(column.getFamily(), column.getQualifier()); } else { out.addFamily(column.getFamily()); } } } TTimeRange timeRange = in.getTimeRange(); if (timeRange != null && timeRange.isSetMinStamp() && timeRange.isSetMaxStamp()) { out.setTimeRange(timeRange.getMinStamp(), timeRange.getMaxStamp()); } if (in.isSetBatchSize()) { out.setBatch(in.getBatchSize()); } if (in.isSetFilterString()) { ParseFilter parseFilter = new ParseFilter(); out.setFilter(parseFilter.parseFilterString(in.getFilterString())); } if (in.isSetAttributes()) { addAttributes(out,in.getAttributes()); } if (in.isSetAuthorizations()) { out.setAuthorizations(new Authorizations(in.getAuthorizations().getLabels())); } if (in.isSetReversed()) { out.setReversed(in.isReversed()); } if (in.isSetCacheBlocks()) { out.setCacheBlocks(in.isCacheBlocks()); } if (in.isSetColFamTimeRangeMap()) { Map<ByteBuffer, TTimeRange> colFamTimeRangeMap = in.getColFamTimeRangeMap(); if (MapUtils.isNotEmpty(colFamTimeRangeMap)) { for (Map.Entry<ByteBuffer, TTimeRange> entry : colFamTimeRangeMap.entrySet()) { out.setColumnFamilyTimeRange(Bytes.toBytes(entry.getKey()), entry.getValue().getMinStamp(), entry.getValue().getMaxStamp()); } } } if (in.isSetReadType()) { out.setReadType(readTypeFromThrift(in.getReadType())); } if (in.isSetLimit()) { out.setLimit(in.getLimit()); } return out; } public static Increment incrementFromThrift(TIncrement in) throws IOException { Increment out = new Increment(in.getRow()); for (TColumnIncrement column : in.getColumns()) { out.addColumn(column.getFamily(), column.getQualifier(), column.getAmount()); } if (in.isSetAttributes()) { 
addAttributes(out,in.getAttributes()); } if (in.isSetDurability()) { out.setDurability(durabilityFromThrift(in.getDurability())); } if(in.getCellVisibility() != null) { out.setCellVisibility(new CellVisibility(in.getCellVisibility().getExpression())); } return out; } public static Append appendFromThrift(TAppend append) throws IOException { Append out = new Append(append.getRow()); for (TColumnValue column : append.getColumns()) { out.addColumn(column.getFamily(), column.getQualifier(), column.getValue()); } if (append.isSetAttributes()) { addAttributes(out, append.getAttributes()); } if (append.isSetDurability()) { out.setDurability(durabilityFromThrift(append.getDurability())); } if(append.getCellVisibility() != null) { out.setCellVisibility(new CellVisibility(append.getCellVisibility().getExpression())); } return out; } public static THRegionLocation regionLocationFromHBase(HRegionLocation hrl) { HRegionInfo hri = hrl.getRegionInfo(); ServerName serverName = hrl.getServerName(); THRegionInfo thRegionInfo = new THRegionInfo(); THRegionLocation thRegionLocation = new THRegionLocation(); TServerName tServerName = new TServerName(); tServerName.setHostName(serverName.getHostname()); tServerName.setPort(serverName.getPort()); tServerName.setStartCode(serverName.getStartcode()); thRegionInfo.setTableName(hri.getTable().getName()); thRegionInfo.setEndKey(hri.getEndKey()); thRegionInfo.setStartKey(hri.getStartKey()); thRegionInfo.setOffline(hri.isOffline()); thRegionInfo.setSplit(hri.isSplit()); thRegionInfo.setReplicaId(hri.getReplicaId()); thRegionLocation.setRegionInfo(thRegionInfo); thRegionLocation.setServerName(tServerName); return thRegionLocation; } public static List<THRegionLocation> regionLocationsFromHBase(List<HRegionLocation> locations) { List<THRegionLocation> tlocations = new ArrayList<>(locations.size()); for (HRegionLocation hrl:locations) { tlocations.add(regionLocationFromHBase(hrl)); } return tlocations; } /** * Adds all the attributes into the 
Operation object */ private static void addAttributes(OperationWithAttributes op, Map<ByteBuffer, ByteBuffer> attributes) { if (attributes == null || attributes.isEmpty()) { return; } for (Map.Entry<ByteBuffer, ByteBuffer> entry : attributes.entrySet()) { String name = Bytes.toStringBinary(getBytes(entry.getKey())); byte[] value = getBytes(entry.getValue()); op.setAttribute(name, value); } } private static Durability durabilityFromThrift(TDurability tDurability) { switch (tDurability.getValue()) { case 1: return Durability.SKIP_WAL; case 2: return Durability.ASYNC_WAL; case 3: return Durability.SYNC_WAL; case 4: return Durability.FSYNC_WAL; default: return null; } } public static CompareOp compareOpFromThrift(TCompareOp tCompareOp) { switch (tCompareOp.getValue()) { case 0: return CompareOp.LESS; case 1: return CompareOp.LESS_OR_EQUAL; case 2: return CompareOp.EQUAL; case 3: return CompareOp.NOT_EQUAL; case 4: return CompareOp.GREATER_OR_EQUAL; case 5: return CompareOp.GREATER; case 6: return CompareOp.NO_OP; default: return null; } } private static ReadType readTypeFromThrift(TReadType tReadType) { switch (tReadType.getValue()) { case 1: return ReadType.DEFAULT; case 2: return ReadType.STREAM; case 3: return ReadType.PREAD; default: return null; } } }
/** */ package es.um.nosql.s13e.entitydifferentiation.DecisionTree; import org.eclipse.emf.ecore.EAttribute; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.EPackage; import org.eclipse.emf.ecore.EReference; /** * <!-- begin-user-doc --> * The <b>Package</b> for the model. * It contains accessors for the meta objects to represent * <ul> * <li>each class,</li> * <li>each feature of each class,</li> * <li>each operation of each class,</li> * <li>each enum,</li> * <li>and each data type</li> * </ul> * <!-- end-user-doc --> * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.DecisionTreeFactory * @model kind="package" * @generated */ public interface DecisionTreePackage extends EPackage { /** * The package name. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ String eNAME = "DecisionTree"; /** * The package namespace URI. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ String eNS_URI = "http://www.modelum.es/DecisionTree"; /** * The package namespace name. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ String eNS_PREFIX = "DecisionTree"; /** * The singleton instance of the package. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ DecisionTreePackage eINSTANCE = es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.DecisionTreePackageImpl.init(); /** * The meta object id for the '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.DecisionTreeNodeImpl <em>Node</em>}' class. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.DecisionTreeNodeImpl * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.DecisionTreePackageImpl#getDecisionTreeNode() * @generated */ int DECISION_TREE_NODE = 0; /** * The feature id for the '<em><b>Yes Branch</b></em>' containment reference. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ int DECISION_TREE_NODE__YES_BRANCH = 0; /** * The feature id for the '<em><b>No Branch</b></em>' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ int DECISION_TREE_NODE__NO_BRANCH = 1; /** * The number of structural features of the '<em>Node</em>' class. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ int DECISION_TREE_NODE_FEATURE_COUNT = 2; /** * The number of operations of the '<em>Node</em>' class. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ int DECISION_TREE_NODE_OPERATION_COUNT = 0; /** * The meta object id for the '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.LeafNodeImpl <em>Leaf Node</em>}' class. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.LeafNodeImpl * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.DecisionTreePackageImpl#getLeafNode() * @generated */ int LEAF_NODE = 1; /** * The feature id for the '<em><b>Yes Branch</b></em>' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ int LEAF_NODE__YES_BRANCH = DECISION_TREE_NODE__YES_BRANCH; /** * The feature id for the '<em><b>No Branch</b></em>' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ int LEAF_NODE__NO_BRANCH = DECISION_TREE_NODE__NO_BRANCH; /** * The feature id for the '<em><b>Identified Variation</b></em>' reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ int LEAF_NODE__IDENTIFIED_VARIATION = DECISION_TREE_NODE_FEATURE_COUNT + 0; /** * The number of structural features of the '<em>Leaf Node</em>' class. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ int LEAF_NODE_FEATURE_COUNT = DECISION_TREE_NODE_FEATURE_COUNT + 1; /** * The number of operations of the '<em>Leaf Node</em>' class. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ int LEAF_NODE_OPERATION_COUNT = DECISION_TREE_NODE_OPERATION_COUNT + 0; /** * The meta object id for the '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.IntermediateNodeImpl <em>Intermediate Node</em>}' class. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.IntermediateNodeImpl * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.DecisionTreePackageImpl#getIntermediateNode() * @generated */ int INTERMEDIATE_NODE = 2; /** * The feature id for the '<em><b>Yes Branch</b></em>' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ int INTERMEDIATE_NODE__YES_BRANCH = DECISION_TREE_NODE__YES_BRANCH; /** * The feature id for the '<em><b>No Branch</b></em>' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ int INTERMEDIATE_NODE__NO_BRANCH = DECISION_TREE_NODE__NO_BRANCH; /** * The feature id for the '<em><b>Checked Property</b></em>' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ int INTERMEDIATE_NODE__CHECKED_PROPERTY = DECISION_TREE_NODE_FEATURE_COUNT + 0; /** * The number of structural features of the '<em>Intermediate Node</em>' class. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ int INTERMEDIATE_NODE_FEATURE_COUNT = DECISION_TREE_NODE_FEATURE_COUNT + 1; /** * The number of operations of the '<em>Intermediate Node</em>' class. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ int INTERMEDIATE_NODE_OPERATION_COUNT = DECISION_TREE_NODE_OPERATION_COUNT + 0; /** * The meta object id for the '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.DecisionTreeForEntityImpl <em>For Entity</em>}' class. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.DecisionTreeForEntityImpl * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.DecisionTreePackageImpl#getDecisionTreeForEntity() * @generated */ int DECISION_TREE_FOR_ENTITY = 3; /** * The feature id for the '<em><b>Root</b></em>' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ int DECISION_TREE_FOR_ENTITY__ROOT = 0; /** * The feature id for the '<em><b>Entity</b></em>' reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ int DECISION_TREE_FOR_ENTITY__ENTITY = 1; /** * The number of structural features of the '<em>For Entity</em>' class. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ int DECISION_TREE_FOR_ENTITY_FEATURE_COUNT = 2; /** * The number of operations of the '<em>For Entity</em>' class. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ int DECISION_TREE_FOR_ENTITY_OPERATION_COUNT = 0; /** * The meta object id for the '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.DecisionTreesImpl <em>Decision Trees</em>}' class. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.DecisionTreesImpl * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.DecisionTreePackageImpl#getDecisionTrees() * @generated */ int DECISION_TREES = 4; /** * The feature id for the '<em><b>Trees</b></em>' containment reference list. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ int DECISION_TREES__TREES = 0; /** * The feature id for the '<em><b>Name</b></em>' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ int DECISION_TREES__NAME = 1; /** * The number of structural features of the '<em>Decision Trees</em>' class. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ int DECISION_TREES_FEATURE_COUNT = 2; /** * The number of operations of the '<em>Decision Trees</em>' class. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ int DECISION_TREES_OPERATION_COUNT = 0; /** * The meta object id for the '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.PropertySpec2Impl <em>Property Spec2</em>}' class. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.PropertySpec2Impl * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.DecisionTreePackageImpl#getPropertySpec2() * @generated */ int PROPERTY_SPEC2 = 5; /** * The feature id for the '<em><b>Needs Type Check</b></em>' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ int PROPERTY_SPEC2__NEEDS_TYPE_CHECK = 0; /** * The feature id for the '<em><b>Property</b></em>' reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ int PROPERTY_SPEC2__PROPERTY = 1; /** * The number of structural features of the '<em>Property Spec2</em>' class. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ int PROPERTY_SPEC2_FEATURE_COUNT = 2; /** * The number of operations of the '<em>Property Spec2</em>' class. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated * @ordered */ int PROPERTY_SPEC2_OPERATION_COUNT = 0; /** * Returns the meta object for class '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.DecisionTreeNode <em>Node</em>}'. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for class '<em>Node</em>'. * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.DecisionTreeNode * @generated */ EClass getDecisionTreeNode(); /** * Returns the meta object for the containment reference '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.DecisionTreeNode#getYesBranch <em>Yes Branch</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for the containment reference '<em>Yes Branch</em>'. * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.DecisionTreeNode#getYesBranch() * @see #getDecisionTreeNode() * @generated */ EReference getDecisionTreeNode_YesBranch(); /** * Returns the meta object for the containment reference '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.DecisionTreeNode#getNoBranch <em>No Branch</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for the containment reference '<em>No Branch</em>'. * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.DecisionTreeNode#getNoBranch() * @see #getDecisionTreeNode() * @generated */ EReference getDecisionTreeNode_NoBranch(); /** * Returns the meta object for class '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.LeafNode <em>Leaf Node</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for class '<em>Leaf Node</em>'. * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.LeafNode * @generated */ EClass getLeafNode(); /** * Returns the meta object for the reference '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.LeafNode#getIdentifiedVariation <em>Identified Variation</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for the reference '<em>Identified Variation</em>'. 
* @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.LeafNode#getIdentifiedVariation() * @see #getLeafNode() * @generated */ EReference getLeafNode_IdentifiedVariation(); /** * Returns the meta object for class '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.IntermediateNode <em>Intermediate Node</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for class '<em>Intermediate Node</em>'. * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.IntermediateNode * @generated */ EClass getIntermediateNode(); /** * Returns the meta object for the containment reference '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.IntermediateNode#getCheckedProperty <em>Checked Property</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for the containment reference '<em>Checked Property</em>'. * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.IntermediateNode#getCheckedProperty() * @see #getIntermediateNode() * @generated */ EReference getIntermediateNode_CheckedProperty(); /** * Returns the meta object for class '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.DecisionTreeForEntity <em>For Entity</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for class '<em>For Entity</em>'. * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.DecisionTreeForEntity * @generated */ EClass getDecisionTreeForEntity(); /** * Returns the meta object for the containment reference '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.DecisionTreeForEntity#getRoot <em>Root</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for the containment reference '<em>Root</em>'. 
* @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.DecisionTreeForEntity#getRoot() * @see #getDecisionTreeForEntity() * @generated */ EReference getDecisionTreeForEntity_Root(); /** * Returns the meta object for the reference '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.DecisionTreeForEntity#getEntity <em>Entity</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for the reference '<em>Entity</em>'. * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.DecisionTreeForEntity#getEntity() * @see #getDecisionTreeForEntity() * @generated */ EReference getDecisionTreeForEntity_Entity(); /** * Returns the meta object for class '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.DecisionTrees <em>Decision Trees</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for class '<em>Decision Trees</em>'. * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.DecisionTrees * @generated */ EClass getDecisionTrees(); /** * Returns the meta object for the containment reference list '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.DecisionTrees#getTrees <em>Trees</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for the containment reference list '<em>Trees</em>'. * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.DecisionTrees#getTrees() * @see #getDecisionTrees() * @generated */ EReference getDecisionTrees_Trees(); /** * Returns the meta object for the attribute '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.DecisionTrees#getName <em>Name</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for the attribute '<em>Name</em>'. 
* @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.DecisionTrees#getName()
 * @see #getDecisionTrees()
 * @generated
 */
EAttribute getDecisionTrees_Name();

// NOTE(review): everything below is EMF-generated code (see the @generated tags).
// Do not hand-edit; regenerate from the model, or remove the @generated tag on a
// member before customizing it so JMerge preserves the change.

/**
 * Returns the meta object for class '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.PropertySpec2 <em>Property Spec2</em>}'.
 * @return the meta object for class '<em>Property Spec2</em>'.
 * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.PropertySpec2
 * @generated
 */
EClass getPropertySpec2();

/**
 * Returns the meta object for the attribute '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.PropertySpec2#isNeedsTypeCheck <em>Needs Type Check</em>}'.
 * @return the meta object for the attribute '<em>Needs Type Check</em>'.
 * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.PropertySpec2#isNeedsTypeCheck()
 * @see #getPropertySpec2()
 * @generated
 */
EAttribute getPropertySpec2_NeedsTypeCheck();

/**
 * Returns the meta object for the reference '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.PropertySpec2#getProperty <em>Property</em>}'.
 * @return the meta object for the reference '<em>Property</em>'.
 * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.PropertySpec2#getProperty()
 * @see #getPropertySpec2()
 * @generated
 */
EReference getPropertySpec2_Property();

/**
 * Returns the factory that creates the instances of the model.
 * @return the factory that creates the instances of the model.
 * @generated
 */
DecisionTreeFactory getDecisionTreeFactory();

/**
 * Defines literals for the meta objects that represent each class, each feature
 * of each class, each operation of each class, each enum, and each data type.
 * @generated
 */
interface Literals {
  /**
   * The meta object literal for the '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.DecisionTreeNodeImpl <em>Node</em>}' class.
   * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.DecisionTreeNodeImpl
   * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.DecisionTreePackageImpl#getDecisionTreeNode()
   * @generated
   */
  EClass DECISION_TREE_NODE = eINSTANCE.getDecisionTreeNode();

  /**
   * The meta object literal for the '<em><b>Yes Branch</b></em>' containment reference feature.
   * @generated
   */
  EReference DECISION_TREE_NODE__YES_BRANCH = eINSTANCE.getDecisionTreeNode_YesBranch();

  /**
   * The meta object literal for the '<em><b>No Branch</b></em>' containment reference feature.
   * @generated
   */
  EReference DECISION_TREE_NODE__NO_BRANCH = eINSTANCE.getDecisionTreeNode_NoBranch();

  /**
   * The meta object literal for the '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.LeafNodeImpl <em>Leaf Node</em>}' class.
   * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.LeafNodeImpl
   * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.DecisionTreePackageImpl#getLeafNode()
   * @generated
   */
  EClass LEAF_NODE = eINSTANCE.getLeafNode();

  /**
   * The meta object literal for the '<em><b>Identified Variation</b></em>' reference feature.
   * @generated
   */
  EReference LEAF_NODE__IDENTIFIED_VARIATION = eINSTANCE.getLeafNode_IdentifiedVariation();

  /**
   * The meta object literal for the '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.IntermediateNodeImpl <em>Intermediate Node</em>}' class.
   * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.IntermediateNodeImpl
   * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.DecisionTreePackageImpl#getIntermediateNode()
   * @generated
   */
  EClass INTERMEDIATE_NODE = eINSTANCE.getIntermediateNode();

  /**
   * The meta object literal for the '<em><b>Checked Property</b></em>' containment reference feature.
   * @generated
   */
  EReference INTERMEDIATE_NODE__CHECKED_PROPERTY = eINSTANCE.getIntermediateNode_CheckedProperty();

  /**
   * The meta object literal for the '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.DecisionTreeForEntityImpl <em>For Entity</em>}' class.
   * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.DecisionTreeForEntityImpl
   * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.DecisionTreePackageImpl#getDecisionTreeForEntity()
   * @generated
   */
  EClass DECISION_TREE_FOR_ENTITY = eINSTANCE.getDecisionTreeForEntity();

  /**
   * The meta object literal for the '<em><b>Root</b></em>' containment reference feature.
   * @generated
   */
  EReference DECISION_TREE_FOR_ENTITY__ROOT = eINSTANCE.getDecisionTreeForEntity_Root();

  /**
   * The meta object literal for the '<em><b>Entity</b></em>' reference feature.
   * @generated
   */
  EReference DECISION_TREE_FOR_ENTITY__ENTITY = eINSTANCE.getDecisionTreeForEntity_Entity();

  /**
   * The meta object literal for the '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.DecisionTreesImpl <em>Decision Trees</em>}' class.
   * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.DecisionTreesImpl
   * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.DecisionTreePackageImpl#getDecisionTrees()
   * @generated
   */
  EClass DECISION_TREES = eINSTANCE.getDecisionTrees();

  /**
   * The meta object literal for the '<em><b>Trees</b></em>' containment reference list feature.
   * @generated
   */
  EReference DECISION_TREES__TREES = eINSTANCE.getDecisionTrees_Trees();

  /**
   * The meta object literal for the '<em><b>Name</b></em>' attribute feature.
   * @generated
   */
  EAttribute DECISION_TREES__NAME = eINSTANCE.getDecisionTrees_Name();

  /**
   * The meta object literal for the '{@link es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.PropertySpec2Impl <em>Property Spec2</em>}' class.
   * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.PropertySpec2Impl
   * @see es.um.nosql.s13e.entitydifferentiation.DecisionTree.impl.DecisionTreePackageImpl#getPropertySpec2()
   * @generated
   */
  EClass PROPERTY_SPEC2 = eINSTANCE.getPropertySpec2();

  /**
   * The meta object literal for the '<em><b>Needs Type Check</b></em>' attribute feature.
   * @generated
   */
  EAttribute PROPERTY_SPEC2__NEEDS_TYPE_CHECK = eINSTANCE.getPropertySpec2_NeedsTypeCheck();

  /**
   * The meta object literal for the '<em><b>Property</b></em>' reference feature.
   * @generated
   */
  EReference PROPERTY_SPEC2__PROPERTY = eINSTANCE.getPropertySpec2_Property();
}

} //DecisionTreePackage
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ package org.elasticsearch.index.mapper; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.common.util.LocaleUtils; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.plain.BinaryIndexFieldData; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; import org.elasticsearch.search.lookup.SearchLookup; import java.io.IOException; import java.net.InetAddress; import java.net.UnknownHostException; import java.time.ZoneId; import java.time.ZoneOffset; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.function.Supplier; import static org.elasticsearch.index.query.RangeQueryBuilder.GTE_FIELD; import static org.elasticsearch.index.query.RangeQueryBuilder.GT_FIELD; import static org.elasticsearch.index.query.RangeQueryBuilder.LTE_FIELD; import 
static org.elasticsearch.index.query.RangeQueryBuilder.LT_FIELD;

/** A {@link FieldMapper} for indexing numeric and date ranges, and creating queries */
public class RangeFieldMapper extends FieldMapper {
    public static final boolean DEFAULT_INCLUDE_UPPER = true;
    public static final boolean DEFAULT_INCLUDE_LOWER = true;

    // Defaults shared by all range variants; DATE_FORMATTER only applies to date ranges.
    public static class Defaults {
        public static final Explicit<Boolean> COERCE = new Explicit<>(true, false);
        public static final DateFormatter DATE_FORMATTER = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER;
    }

    // this is private since it has a different default
    static final Setting<Boolean> COERCE_SETTING =
        Setting.boolSetting("index.mapping.coerce", true, Setting.Property.IndexScope);

    // Narrowing cast used by the Parameter accessors below; safe because the
    // Parameter callbacks are only ever invoked with RangeFieldMapper instances.
    private static RangeFieldMapper toType(FieldMapper in) {
        return (RangeFieldMapper) in;
    }

    /** Mapping builder for range fields. {@code format}/{@code locale} are only meaningful for DATE ranges. */
    public static class Builder extends FieldMapper.Builder {

        private final Parameter<Boolean> index = Parameter.indexParam(m -> toType(m).index, true);
        private final Parameter<Boolean> hasDocValues = Parameter.docValuesParam(m -> toType(m).hasDocValues, true);
        private final Parameter<Boolean> store = Parameter.storeParam(m -> toType(m).store, false);
        private final Parameter<Explicit<Boolean>> coerce;
        private final Parameter<String> format =
            Parameter.stringParam("format", false, m -> toType(m).format, Defaults.DATE_FORMATTER.pattern());
        private final Parameter<Locale> locale = new Parameter<>(
            "locale", false, () -> Locale.ROOT, (n, c, o) -> LocaleUtils.parse(o.toString()), m -> toType(m).locale);
        private final Parameter<Map<String, String>> meta = Parameter.metaParam();

        private final RangeType type;

        public Builder(String name, RangeType type, Settings settings) {
            this(name, type, COERCE_SETTING.get(settings));
        }

        public Builder(String name, RangeType type, boolean coerceByDefault) {
            super(name);
            this.type = type;
            this.coerce = Parameter.explicitBoolParam("coerce", true, m -> toType(m).coerce, coerceByDefault);
            if (this.type != RangeType.DATE) {
                // format/locale are date-only; suppress them from serialized mappings of other range types
                format.neverSerialize();
                locale.neverSerialize();
            }
        }

        public void docValues(boolean hasDocValues) {
            this.hasDocValues.setValue(hasDocValues);
        }

        Builder format(String format) {
            this.format.setValue(format);
            return this;
        }

        @Override
        protected List<Parameter<?>> getParameters() {
            return List.of(index, hasDocValues, store, coerce, format, locale, meta);
        }

        /**
         * Builds the field type, rejecting an explicit "format" on non-date ranges
         * and falling back to the default date formatter for date ranges.
         */
        protected RangeFieldType setupFieldType(ContentPath contentPath) {
            if (format.isConfigured()) {
                if (type != RangeType.DATE) {
                    throw new IllegalArgumentException("field [" + name() + "] of type [range]"
                        + " should not define a dateTimeFormatter unless it is a " + RangeType.DATE + " type");
                }
                return new RangeFieldType(buildFullName(contentPath), index.getValue(), store.getValue(),
                    hasDocValues.getValue(), DateFormatter.forPattern(format.getValue()).withLocale(locale.getValue()),
                    coerce.getValue().value(), meta.getValue());
            }
            if (type == RangeType.DATE) {
                return new RangeFieldType(buildFullName(contentPath), index.getValue(), store.getValue(),
                    hasDocValues.getValue(), Defaults.DATE_FORMATTER, coerce.getValue().value(), meta.getValue());
            }
            return new RangeFieldType(buildFullName(contentPath), type, index.getValue(), store.getValue(),
                hasDocValues.getValue(), coerce.getValue().value(), meta.getValue());
        }

        @Override
        public RangeFieldMapper build(ContentPath contentPath) {
            RangeFieldType ft = setupFieldType(contentPath);
            return new RangeFieldMapper(name, ft, multiFieldsBuilder.build(this, contentPath), copyTo.build(), type, this);
        }
    }

    /** Field type for range fields; the DATE flavor additionally carries a formatter/date-math parser. */
    public static final class RangeFieldType extends MappedFieldType {
        protected final RangeType rangeType;
        // null for non-date ranges (see the non-date constructor below)
        protected final DateFormatter dateTimeFormatter;
        protected final DateMathParser dateMathParser;
        protected final boolean coerce;

        // Non-date constructor: formatter and date-math parser are intentionally left null.
        public RangeFieldType(String name, RangeType type, boolean indexed, boolean stored, boolean hasDocValues,
                              boolean coerce, Map<String, String> meta) {
            super(name, indexed, stored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta);
            assert type != RangeType.DATE;
            this.rangeType = Objects.requireNonNull(type);
            dateTimeFormatter = null;
            dateMathParser = null;
            this.coerce = coerce;
        }

        public RangeFieldType(String name, RangeType type) {
            this(name, type, true, false, true, false, Collections.emptyMap());
        }

        // Date constructor: rangeType is fixed to DATE and the formatter is required.
        public RangeFieldType(String name, boolean indexed, boolean stored, boolean hasDocValues, DateFormatter formatter,
                              boolean coerce, Map<String, String> meta) {
            super(name, indexed, stored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta);
            this.rangeType = RangeType.DATE;
            this.dateTimeFormatter = Objects.requireNonNull(formatter);
            this.dateMathParser = dateTimeFormatter.toDateMathParser();
            this.coerce = coerce;
        }

        public RangeFieldType(String name, DateFormatter formatter) {
            this(name, true, false, true, formatter, false, Collections.emptyMap());
        }

        public RangeType rangeType() {
            return rangeType;
        }

        @Override
        public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier<SearchLookup> searchLookup) {
            failIfNoDocValues();
            return new BinaryIndexFieldData.Builder(name(), CoreValuesSourceType.RANGE);
        }

        /**
         * Fetches range values from _source. IP ranges may appear as a plain CIDR
         * string; all other ranges are objects of endpoint -> value.
         * NOTE(review): {@code defaultFormatter} is null for non-date ranges, so a
         * non-null {@code format} argument on a non-date range would NPE on
         * {@code defaultFormatter.locale()} — presumably callers only pass a format
         * for date ranges; confirm upstream.
         */
        @Override
        public ValueFetcher valueFetcher(SearchExecutionContext context, String format) {
            DateFormatter defaultFormatter = dateTimeFormatter();
            DateFormatter formatter = format != null
                ? DateFormatter.forPattern(format).withLocale(defaultFormatter.locale())
                : defaultFormatter;

            return new SourceValueFetcher(name(), context) {
                @Override
                @SuppressWarnings("unchecked")
                protected Object parseSourceValue(Object value) {
                    RangeType rangeType = rangeType();
                    if ((value instanceof Map) == false) {
                        // only IP ranges may be represented as a single CIDR string
                        assert rangeType == RangeType.IP;
                        Tuple<InetAddress, Integer> ipRange = InetAddresses.parseCidr(value.toString());
                        return InetAddresses.toCidrString(ipRange.v1(), ipRange.v2());
                    }

                    // re-parse and re-format each endpoint so the fetched value is normalized
                    Map<String, Object> range = (Map<String, Object>) value;
                    Map<String, Object> parsedRange = new HashMap<>();
                    for (Map.Entry<String, Object> entry : range.entrySet()) {
                        Object parsedValue = rangeType.parseValue(entry.getValue(), coerce, dateMathParser);
                        Object formattedValue = rangeType.formatValue(parsedValue, formatter);
                        parsedRange.put(entry.getKey(), formattedValue);
                    }
                    return parsedRange;
                }
            };
        }

        @Override
        public String typeName() {
            return rangeType.name;
        }

        /** The configured formatter; null unless this is a DATE range. */
        public DateFormatter dateTimeFormatter() {
            return dateTimeFormatter;
        }

        protected DateMathParser dateMathParser() {
            return dateMathParser;
        }

        @Override
        public DocValueFormat docValueFormat(String format, ZoneId timeZone) {
            if (rangeType == RangeType.DATE) {
                DateFormatter dateTimeFormatter = this.dateTimeFormatter;
                if (format != null) {
                    dateTimeFormatter = DateFormatter.forPattern(format).withLocale(dateTimeFormatter.locale());
                }
                if (timeZone == null) {
                    timeZone = ZoneOffset.UTC;
                }
                // the resolution here is always set to milliseconds, as aggregations use this formatter mainly and
                // those are always in milliseconds. The only special case here is docvalue fields, which are handled
                // somewhere else
                return new DocValueFormat.DateTime(dateTimeFormatter, timeZone, DateFieldMapper.Resolution.MILLISECONDS);
            }
            return super.docValueFormat(format, timeZone);
        }

        // A "term" query on a range field is a degenerate range query that must intersect the single value.
        @Override
        public Query termQuery(Object value, SearchExecutionContext context) {
            return rangeQuery(value, value, true, true, ShapeRelation.INTERSECTS, null, null, context);
        }

        @Override
        public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,
                                ShapeRelation relation, ZoneId timeZone, DateMathParser parser,
                                SearchExecutionContext context) {
            failIfNotIndexed();
            if (parser == null) {
                // fall back to the field's own date-math parser (null for non-date ranges)
                parser = dateMathParser();
            }
            return rangeType.rangeQuery(name(), hasDocValues(), lowerTerm, upperTerm, includeLower, includeUpper,
                relation, timeZone, parser, context);
        }
    }

    private final RangeType type;
    private final boolean index;
    private final boolean hasDocValues;
    private final boolean store;
    private final Explicit<Boolean> coerce;
    private final String format;
    private final Locale locale;
    // remembered so getMergeBuilder() can recreate a Builder with the same coerce default
    private final boolean coerceByDefault;

    private RangeFieldMapper(
        String simpleName,
        MappedFieldType mappedFieldType,
        MultiFields multiFields,
        CopyTo copyTo,
        RangeType type,
        Builder builder) {
        super(simpleName, mappedFieldType, multiFields, copyTo);
        this.type = type;
        this.index = builder.index.getValue();
        this.hasDocValues = builder.hasDocValues.getValue();
        this.store = builder.store.getValue();
        this.coerce = builder.coerce.getValue();
        this.format = builder.format.getValue();
        this.locale = builder.locale.getValue();
        this.coerceByDefault = builder.coerce.getDefaultValue().value();
    }

    boolean coerce() {
        return coerce.value();
    }

    @Override
    public FieldMapper.Builder getMergeBuilder() {
        return new Builder(simpleName(), type, coerceByDefault).init(this);
    }

    @Override
    public RangeFieldType fieldType() {
        return (RangeFieldType) super.fieldType();
    }

    @Override
    protected String contentType() {
        return fieldType().typeName();
    }

    /**
     * Parses one document value. Accepts either an object with gt/gte/lt/lte keys
     * (missing/null endpoints default to the type's min/max, i.e. an open bound),
     * or — for IP ranges only — a CIDR string.
     */
    @Override
    protected void parseCreateField(ParseContext context) throws IOException {
        Range range;
        XContentParser parser = context.parser();
        final XContentParser.Token start = parser.currentToken();
        if (start == XContentParser.Token.VALUE_NULL) {
            return;
        } else if (start == XContentParser.Token.START_OBJECT) {
            RangeFieldType fieldType = fieldType();
            RangeType rangeType = fieldType.rangeType;
            String fieldName = null;
            Object from = rangeType.minValue();
            Object to = rangeType.maxValue();
            boolean includeFrom = DEFAULT_INCLUDE_LOWER;
            boolean includeTo = DEFAULT_INCLUDE_UPPER;
            XContentParser.Token token;
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    fieldName = parser.currentName();
                } else {
                    if (fieldName.equals(GT_FIELD.getPreferredName())) {
                        includeFrom = false;
                        if (parser.currentToken() != XContentParser.Token.VALUE_NULL) {
                            from = rangeType.parseFrom(fieldType, parser, coerce.value(), includeFrom);
                        }
                    } else if (fieldName.equals(GTE_FIELD.getPreferredName())) {
                        includeFrom = true;
                        if (parser.currentToken() != XContentParser.Token.VALUE_NULL) {
                            from = rangeType.parseFrom(fieldType, parser, coerce.value(), includeFrom);
                        }
                    } else if (fieldName.equals(LT_FIELD.getPreferredName())) {
                        includeTo = false;
                        if (parser.currentToken() != XContentParser.Token.VALUE_NULL) {
                            to = rangeType.parseTo(fieldType, parser, coerce.value(), includeTo);
                        }
                    } else if (fieldName.equals(LTE_FIELD.getPreferredName())) {
                        includeTo = true;
                        if (parser.currentToken() != XContentParser.Token.VALUE_NULL) {
                            to = rangeType.parseTo(fieldType, parser, coerce.value(), includeTo);
                        }
                    } else {
                        throw new MapperParsingException("error parsing field ["
                            + name() + "], with unknown parameter [" + fieldName + "]");
                    }
                }
            }
            range = new Range(rangeType, from, to, includeFrom, includeTo);
        } else if (fieldType().rangeType == RangeType.IP && start == XContentParser.Token.VALUE_STRING) {
            range = parseIpRangeFromCidr(parser);
        } else {
            throw new MapperParsingException("error parsing field ["
                + name() + "], expected an object but got " + parser.currentName());
        }
        context.doc().addAll(fieldType().rangeType.createFields(context, name(), range, index, hasDocValues, store));

        if (hasDocValues == false && (index || store)) {
            context.addToFieldNames(fieldType().name());
        }
    }

    /** Converts a CIDR string (e.g. "10.0.0.0/8") into an inclusive [network, broadcast] IP range. */
    private static Range parseIpRangeFromCidr(final XContentParser parser) throws IOException {
        final Tuple<InetAddress, Integer> cidr = InetAddresses.parseCidr(parser.text());
        // create the lower value by zeroing out the host portion, upper value by filling it with all ones.
        byte[] lower = cidr.v1().getAddress();
        byte[] upper = lower.clone();
        for (int i = cidr.v2(); i < 8 * lower.length; i++) {
            // m selects bit (7 - i%8) within byte i/8, i.e. all host bits below the prefix
            int m = 1 << 7 - (i & 7);
            lower[i >> 3] &= ~m;
            upper[i >> 3] |= m;
        }
        try {
            return new Range(RangeType.IP, InetAddress.getByAddress(lower), InetAddress.getByAddress(upper), true, true);
        } catch (UnknownHostException bogus) {
            // cannot happen: getByAddress only throws for an illegal byte-array length
            throw new AssertionError(bogus);
        }
    }

    /** Class defining a range */
    public static class Range {
        RangeType type;
        Object from;
        Object to;
        private final boolean includeFrom;
        private final boolean includeTo;

        public Range(RangeType type, Object from, Object to, boolean includeFrom, boolean includeTo) {
            this.type = type;
            this.from = from;
            this.to = to;
            this.includeFrom = includeFrom;
            this.includeTo = includeTo;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            Range range = (Range) o;
            return includeFrom == range.includeFrom
                && includeTo == range.includeTo
                && type == range.type
                && from.equals(range.from)
                && to.equals(range.to);
        }

        @Override
        public int hashCode() {
            return Objects.hash(type, from, to, includeFrom, includeTo);
        }

        @Override
        public String toString() {
            StringBuilder sb = new StringBuilder();
            sb.append(includeFrom ? '[' : '(');
            // exclusive bounds are printed as the adjacent representable value, unless already at the type's min/max
            Object f = includeFrom || from.equals(type.minValue()) ? from : type.nextDown(from);
            Object t = includeTo || to.equals(type.maxValue()) ? to : type.nextUp(to);
            sb.append(type == RangeType.IP ? InetAddresses.toAddrString((InetAddress)f) : f.toString());
            sb.append(" : ");
            sb.append(type == RangeType.IP ? InetAddresses.toAddrString((InetAddress)t) : t.toString());
            sb.append(includeTo ? ']' : ')');
            return sb.toString();
        }

        public Object getFrom() {
            return from;
        }

        public Object getTo() {
            return to;
        }
    }

    /** Doc-values field that accumulates all ranges of a document and encodes them as one binary value. */
    static class BinaryRangesDocValuesField extends CustomDocValuesField {

        private final Set<Range> ranges;
        private final RangeType rangeType;

        BinaryRangesDocValuesField(String name, Range range, RangeType rangeType) {
            super(name);
            this.rangeType = rangeType;
            ranges = new HashSet<>();
            add(range);
        }

        void add(Range range) {
            ranges.add(range);
        }

        @Override
        public BytesRef binaryValue() {
            try {
                return rangeType.encodeRanges(ranges);
            } catch (IOException e) {
                throw new ElasticsearchException("failed to encode ranges", e);
            }
        }
    }
}
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.util.fileIndex; import com.intellij.ide.startup.CacheUpdater; import com.intellij.ide.startup.FileContent; import com.intellij.ide.startup.StartupManagerEx; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.PathManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.fileTypes.FileType; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.roots.ContentIterator; import com.intellij.openapi.roots.ProjectFileIndex; import com.intellij.openapi.roots.ProjectRootManager; import com.intellij.openapi.roots.ex.ProjectRootManagerEx; import com.intellij.openapi.startup.StartupManager; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiManager; import gnu.trove.THashSet; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.Nullable; import java.io.*; import java.util.*; /** * @author nik */ public abstract class AbstractFileIndex<IndexEntry extends FileIndexEntry> implements FileIndex<IndexEntry> { private static final Logger LOG = Logger.getInstance("#com.intellij.util.fileIndex.AbstractFileIndex"); private final Map<String, IndexEntry> 
myFileUrl2IndexEntry = new HashMap<String, IndexEntry>();
  private final ProjectFileIndex myProjectFileIndex;
  // set when the on-disk cache version differs from getCurrentVersion(); cleared after a full rebuild
  private boolean myFormatChanged;
  private final Project myProject;
  private FileIndexCacheUpdater myRootsChangeCacheUpdater;
  private final StartupManagerEx myStartupManager;
  private FileIndexRefreshCacheUpdater myRefreshCacheUpdater;
  // guards all access to myFileUrl2IndexEntry
  private final Object myIndexLock = new Object();

  protected AbstractFileIndex(final Project project) {
    myProject = project;
    myProjectFileIndex = ProjectRootManager.getInstance(project).getFileIndex();
    myStartupManager = StartupManagerEx.getInstanceEx(project);
  }

  /** Deserializes one entry from the cache stream; counterpart of {@code IndexEntry.write}. */
  protected abstract IndexEntry createIndexEntry(DataInputStream input) throws IOException;

  protected abstract String getLoadingIndicesMessage();

  protected abstract String getBuildingIndicesMessage(boolean formatChanged);

  /** @return true if the given file should be indexed by this index. */
  public abstract boolean belongs(VirtualFile file);

  /** Cache format version; bumping it invalidates previously saved caches (see loadCache). */
  public abstract byte getCurrentVersion();

  @NonNls
  public abstract String getCachesDirName();

  public abstract void queueEntryUpdate(final VirtualFile file);

  protected abstract void doUpdateIndexEntry(final VirtualFile file);

  public ProjectFileIndex getProjectFileIndex() {
    return myProjectFileIndex;
  }

  // Cache file lives under the IDE system path, keyed by project name + location hash.
  protected File getCacheLocation(final String dirName) {
    final String cacheFileName = myProject.getName() + "." + myProject.getLocationHash();
    return new File(PathManager.getSystemPath() + File.separator + dirName + File.separator + cacheFileName);
  }

  public final void updateIndexEntry(final VirtualFile file) {
    // skip updates until startup activities ran (initial build handles them) and for ignored files
    if (!myStartupManager.startupActivityPassed() || myProjectFileIndex.isIgnored(file)) {
      return;
    }
    doUpdateIndexEntry(file);
  }

  public final void removeIndexEntry(final VirtualFile file) {
    if (myProjectFileIndex.isIgnored(file)) {
      return;
    }
    removeIndexEntry(file.getUrl());
  }

  // Hooks for subclasses; intentionally empty here.
  protected void onEntryAdded(String url, IndexEntry entry) {
  }

  protected void onEntryRemoved(String url, IndexEntry entry) {
  }

  /**
   * Persists the index: version byte, subclass header, entry count, then url/entry pairs.
   * On write failure the partial cache file is deleted so a broken cache is never loaded.
   */
  private void saveCache() {
    final File cacheFile = getCacheLocation(getCachesDirName());
    FileUtil.createParentDirs(cacheFile);
    DataOutputStream output = null;
    try {
      output = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(cacheFile)));
      output.writeByte(getCurrentVersion());
      writeHeader(output);
      synchronized (myIndexLock) {
        output.writeInt(myFileUrl2IndexEntry.size());
        for (final Map.Entry<String, IndexEntry> entry : myFileUrl2IndexEntry.entrySet()) {
          output.writeUTF(entry.getKey());
          entry.getValue().write(output);
        }
      }
      output.close();
    }
    catch (IOException e) {
      LOG.debug(e);
      if (output != null) {
        try {
          output.close();
          // null out so the finally block does not close a second time
          output = null;
        }
        catch (IOException e1) {
          // best-effort close of a stream that already failed; nothing useful to do
        }
      }
      cacheFile.delete();
    }
    finally {
      if (output != null) {
        try {
          output.close();
        }
        catch (IOException e1) {
          // best-effort close; original failure (if any) was already logged
        }
      }
    }
  }

  protected void readHeader(DataInputStream input) throws IOException {
  }

  protected void writeHeader(final DataOutputStream output) throws IOException {
  }

  /**
   * Loads the persisted index. Returns false (and flags a format change when the
   * version byte differs) if the cache is missing, stale, or unreadable.
   */
  private boolean loadCache() {
    final File cacheFile = getCacheLocation(getCachesDirName());
    if (!cacheFile.exists()) return false;

    clearMaps();
    DataInputStream input = null;
    final ProgressIndicator indicator = getProgressIndicator();
    try {
      input = new DataInputStream(new BufferedInputStream(new FileInputStream(cacheFile)));
      int version = input.readByte();
      if (version != getCurrentVersion()) {
        myFormatChanged = true;
        return false;
      }
      if (indicator != null) {
        indicator.pushState();
        indicator.setText(getLoadingIndicesMessage());
      }
      readHeader(input);
      int size = input.readInt();
      for (int i = 0; i < size; i++) {
        if (indicator != null) {
          indicator.setFraction(((double)i) / size);
        }
        final String url = input.readUTF();
        putIndexEntry(url, createIndexEntry(input));
      }
      if (indicator != null) {
        indicator.popState();
      }
      input.close();
      return true;
    }
    catch (IOException e) {
      LOG.debug(e);
    }
    finally {
      if (input != null) {
        try {
          input.close();
        }
        catch (IOException e1) {
          // best-effort close; a read failure was already logged above
        }
      }
    }
    return false;
  }

  public final void putIndexEntry(final String url, final IndexEntry entry) {
    synchronized (myIndexLock) {
      myFileUrl2IndexEntry.put(url, entry);
    }
    // callback deliberately outside the lock to avoid alien code under myIndexLock
    onEntryAdded(url, entry);
  }

  public final IndexEntry getIndexEntry(final String url) {
    synchronized (myIndexLock) {
      return myFileUrl2IndexEntry.get(url);
    }
  }

  @Nullable
  public final IndexEntry removeIndexEntry(final String url) {
    final IndexEntry entry;
    synchronized (myIndexLock) {
      entry = myFileUrl2IndexEntry.remove(url);
    }
    if (entry != null) {
      onEntryRemoved(url, entry);
    }
    return entry;
  }

  protected void clearMaps() {
    synchronized (myIndexLock) {
      myFileUrl2IndexEntry.clear();
    }
  }

  /**
   * Loads the cache and rebuilds stale entries. In production this is deferred to a
   * startup activity; in unit-test mode it runs synchronously (and the refresh
   * updater is created up front).
   */
  public void initialize() {
    final Runnable loadCacheRunnable = new Runnable() {
      public void run() {
        myRootsChangeCacheUpdater = new FileIndexCacheUpdater();
        final ProjectRootManagerEx rootManager = ProjectRootManagerEx.getInstanceEx(myProject);
        rootManager.registerChangeUpdater(myRootsChangeCacheUpdater);
        loadCache();
        buildIndex();
        if (!ApplicationManager.getApplication().isUnitTestMode()) {
          myRefreshCacheUpdater = new FileIndexRefreshCacheUpdater(AbstractFileIndex.this);
        }
      }
    };
    if (ApplicationManager.getApplication().isUnitTestMode()) {
      myRefreshCacheUpdater = new FileIndexRefreshCacheUpdater(this);
      loadCacheRunnable.run();
    }
    else {
      StartupManager.getInstance(myProject).registerStartupActivity(loadCacheRunnable);
    }
  }

  @Nullable
  private static ProgressIndicator getProgressIndicator() {
    return ProgressManager.getInstance().getProgressIndicator();
  }

  /** Unregisters updaters and persists the cache (tests delete it instead of saving). */
  public void dispose() {
    if (myRefreshCacheUpdater != null) {
      Disposer.dispose(myRefreshCacheUpdater);
    }
    if (myRootsChangeCacheUpdater != null) {
      ProjectRootManagerEx.getInstanceEx(myProject).unregisterChangeUpdater(myRootsChangeCacheUpdater);
    }
    if (ApplicationManager.getApplication().isUnitTestMode()) {
      getCacheLocation(getCachesDirName()).delete();
    }
    else {
      saveCache();
    }
    clearMaps();
  }

  /** Re-indexes every file reported stale by queryNeededFiles, with progress reporting. */
  private void buildIndex() {
    final ProgressIndicator indicator = getProgressIndicator();
    if (indicator != null) {
      indicator.pushState();
      indicator.setIndeterminate(false);
      indicator.setText(getBuildingIndicesMessage(myFormatChanged));
      myFormatChanged = false;
    }

    PsiManager.getInstance(myProject).startBatchFilesProcessingMode();
    try {
      final VirtualFile[] files = queryNeededFiles(true, getFileTypesToRefresh());
      for (int i = 0; i < files.length; i++) {
        if (indicator != null) {
          indicator.setFraction(((double)i)/ files.length);
        }
        doUpdateIndexEntry(files[i]);
      }
    }
    finally {
      PsiManager.getInstance(myProject).finishBatchFilesProcessingMode();
    }

    if (indicator != null) {
      indicator.popState();
    }
  }

  @Nullable
  protected Set<FileType> getFileTypesToRefresh() {
    return null;
  }

  protected void setFormatChanged() {
    myFormatChanged = true;
  }

  /**
   * Walks project content, drops index entries whose files are gone, and returns
   * the files that are new, changed (by timestamp, when includeChangedFiles), or
   * whose file type was requested for refresh.
   */
  private VirtualFile[] queryNeededFiles(final boolean includeChangedFiles, @Nullable Set<FileType> fileTypesToRefresh) {
    final List<VirtualFile> files = new ArrayList<VirtualFile>();
    myProjectFileIndex.iterateContent(new ContentIterator() {
      public boolean processFile(VirtualFile fileOrDir) {
        if (belongs(fileOrDir)) {
          files.add(fileOrDir);
        }
        return true;
      }
    });
    List<VirtualFile> toUpdate = new ArrayList<VirtualFile>();
    Set<String> toRemove;
    synchronized (myIndexLock) {
      // start from all known urls; everything still present is removed from this set below
      toRemove = new THashSet<String>(myFileUrl2IndexEntry.keySet());
      final int size = files.size();
      for (int i = 0; i < size; i++) {
        final VirtualFile file = files.get(i);
        final String url = file.getUrl();
        final IndexEntry entry = myFileUrl2IndexEntry.get(url);
        toRemove.remove(url);
        if (entry == null
            || includeChangedFiles && entry.getTimeStamp() != file.getTimeStamp()
            || fileTypesToRefresh != null && fileTypesToRefresh.contains(file.getFileType())) {
          toUpdate.add(file);
        }
      }
    }
    for (String url : toRemove) {
      removeIndexEntry(url);
    }
    return toUpdate.toArray(new VirtualFile[toUpdate.size()]);
  }

  /** Roots-change updater: re-queries needed files (without timestamp comparison) and reindexes them. */
  private class FileIndexCacheUpdater implements CacheUpdater {
    public VirtualFile[] queryNeededFiles() {
      return AbstractFileIndex.this.queryNeededFiles(false, null);
    }

    public void processFile(FileContent fileContent) {
      updateIndexEntry(fileContent.getVirtualFile());
    }

    public void updatingDone() {
    }

    public void canceled() {
    }
  }
}
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2015 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.core.injection.bean; import java.lang.reflect.Array; import java.lang.reflect.Constructor; import java.util.ArrayList; import java.util.List; import org.pentaho.di.core.RowMetaAndData; import org.pentaho.di.core.exception.KettleException; /** * Engine for get/set metadata injection properties from bean. 
*/
public class BeanInjector {
  private final BeanInjectionInfo info;

  public BeanInjector( BeanInjectionInfo info ) {
    this.info = info;
  }

  /**
   * Reads the current value of the injection property from the bean, walking the
   * property path via reflection. Returns null as soon as any step of the path is
   * null or an indexed element is missing/empty.
   *
   * NOTE(review): extractedIndexes is never populated here, so any ARRAY/LIST level
   * in the path would throw IndexOutOfBoundsException on extractedIndexes.get(...).
   * Presumably indexes were meant to be extracted from propName — confirm against
   * upstream before relying on indexed paths.
   */
  public Object getProperty( Object root, String propName ) throws Exception {
    List<Integer> extractedIndexes = new ArrayList<>();

    BeanInjectionInfo.Property prop = info.getProperties().get( propName );
    if ( prop == null ) {
      throw new RuntimeException( "Property not found" );
    }

    Object obj = root;
    // path element 0 is the root itself; traversal starts at 1
    for ( int i = 1, arrIndex = 0; i < prop.path.size(); i++ ) {
      BeanLevelInfo s = prop.path.get( i );
      obj = s.field.get( obj );
      if ( obj == null ) {
        return null; // some value in path is null - return empty
      }
      switch ( s.dim ) {
        case ARRAY:
          int indexArray = extractedIndexes.get( arrIndex++ );
          if ( Array.getLength( obj ) <= indexArray ) {
            return null;
          }
          obj = Array.get( obj, indexArray );
          if ( obj == null ) {
            return null; // element is empty
          }
          break;
        case LIST:
          int indexList = extractedIndexes.get( arrIndex++ );
          List<?> list = (List<?>) obj;
          if ( list.size() <= indexList ) {
            return null;
          }
          obj = list.get( indexList );
          if ( obj == null ) {
            return null; // element is empty
          }
          break;
        case NONE:
          break;
      }
    }
    return obj;
  }

  /**
   * @return true if this injector knows the named property.
   *         Note: only the metadata is consulted; the root bean is not inspected.
   */
  public boolean hasProperty( Object root, String propName ) {
    BeanInjectionInfo.Property prop = info.getProperties().get( propName );
    return prop != null;
  }

  /**
   * Injects values into the bean's property. A path without arrays consumes only
   * the first row; a path with exactly one array consumes one row per element.
   * More than one array in the path is not supported.
   */
  public void setProperty( Object root, String propName, List<RowMetaAndData> data, String dataName )
    throws KettleException {
    BeanInjectionInfo.Property prop = info.getProperties().get( propName );
    if ( prop == null ) {
      throw new KettleException( "Property '" + propName + "' not found for injection to " + root.getClass() );
    }

    if ( prop.pathArraysCount == 0 ) {
      // no arrays in path
      try {
        setProperty( root, prop, 0, data.get( 0 ), dataName );
      } catch ( Exception ex ) {
        throw new KettleException( "Error inject property '" + propName + "' into " + root.getClass(), ex );
      }
    } else if ( prop.pathArraysCount == 1 ) {
      // one array in path
      try {
        for ( int i = 0; i < data.size(); i++ ) {
          setProperty( root, prop, i, data.get( i ), dataName );
        }
      } catch ( Exception ex ) {
        throw new KettleException( "Error inject property '" + propName + "' into " + root.getClass(), ex );
      }
    } else {
      if ( prop.pathArraysCount > 1 ) {
        throw new KettleException( "Property '" + propName + "' has more than one array in path for injection to "
          + root.getClass() );
      }
    }
  }

  /**
   * Injects a single row value at the given array index, materializing intermediate
   * path objects (arrays, lists, plain fields) on demand via reflection.
   */
  private void setProperty( Object root, BeanInjectionInfo.Property prop, int index, RowMetaAndData data,
      String dataName ) throws Exception {
    Object obj = root;
    for ( int i = 1; i < prop.path.size(); i++ ) {
      BeanLevelInfo s = prop.path.get( i );
      if ( i < prop.path.size() - 1 ) {
        // get path
        Object next;
        switch ( s.dim ) {
          case ARRAY:
            // array
            Object existArray = extendArray( s, obj, index + 1 );
            next = Array.get( existArray, index ); // get specific element
            if ( next == null ) {
              next = createObject( s.leafClass, root );
              Array.set( existArray, index, next );
            }
            obj = next;
            break;
          case LIST:
            // list
            List<Object> existList = extendList( s, obj, index + 1 );
            next = existList.get( index ); // get specific element
            if ( next == null ) {
              next = createObject( s.leafClass, root );
              existList.set( index, next );
            }
            obj = next;
            break;
          case NONE:
            // plain field
            if ( s.field != null ) {
              next = s.field.get( obj );
              if ( next == null ) {
                next = createObject( s.leafClass, root );
                s.field.set( obj, next );
              }
              obj = next;
            } else if ( s.getter != null ) {
              next = s.getter.invoke( obj );
              if ( next == null ) {
                if ( s.setter == null ) {
                  throw new KettleException( "No setter defined for " + root.getClass() );
                }
                next = s.leafClass.newInstance();
                s.setter.invoke( obj, next );
              }
              obj = next;
            } else {
              throw new KettleException( "No field or getter defined for " + root.getClass() );
            }
            break;
        }
      } else {
        // set to latest field
        if ( !s.convertEmpty && data.isEmptyValue( dataName ) ) {
          return;
        }
        if ( s.setter != null ) {
          Object value = data.getAsJavaType( dataName, s.leafClass, s.converter );
          // usual setter
          s.setter.invoke( obj, value );
        } else if
( s.field != null ) { Object value; switch ( s.dim ) { case ARRAY: Object existArray = extendArray( s, obj, index + 1 ); value = data.getAsJavaType( dataName, s.leafClass, s.converter ); Array.set( existArray, index, value ); break; case LIST: List<Object> existList = extendList( s, obj, index + 1 ); value = data.getAsJavaType( dataName, s.leafClass, s.converter ); existList.set( index, value ); break; case NONE: value = data.getAsJavaType( dataName, s.leafClass, s.converter ); s.field.set( obj, value ); break; } } else { throw new KettleException( "No field or setter defined for " + root.getClass() ); } } } } private Object createObject( Class<?> clazz, Object root ) throws KettleException { try { // Object can be inner of metadata class. In this case constructor will require parameter for ( Constructor<?> c : clazz.getConstructors() ) { if ( c.getParameterTypes().length == 0 ) { return clazz.newInstance(); } else if ( c.getParameterTypes().length == 1 && c.getParameterTypes()[0].isAssignableFrom( info.clazz ) ) { return c.newInstance( root ); } } } catch ( Throwable ex ) { throw new KettleException( "Can't create object " + clazz, ex ); } throw new KettleException( "Constructor not found for " + clazz ); } private Object extendArray( BeanLevelInfo s, Object obj, int newSize ) throws Exception { Object existArray = s.field.get( obj ); if ( existArray == null ) { existArray = Array.newInstance( s.leafClass, newSize ); s.field.set( obj, existArray ); } int existSize = Array.getLength( existArray ); if ( existSize < newSize ) { Object newSized = Array.newInstance( s.leafClass, newSize ); System.arraycopy( existArray, 0, newSized, 0, existSize ); existArray = newSized; s.field.set( obj, existArray ); } return existArray; } private List<Object> extendList( BeanLevelInfo s, Object obj, int newSize ) throws Exception { @SuppressWarnings( "unchecked" ) List<Object> existList = (List<Object>) s.field.get( obj ); if ( existList == null ) { existList = new ArrayList<>(); 
s.field.set( obj, existList ); } while ( existList.size() < newSize ) { existList.add( null ); } return existList; } }
package com.cloud.vm.dao;

import com.cloud.info.ConsoleProxyLoadInfo;
import com.cloud.utils.Pair;
import com.cloud.utils.db.Attribute;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.TransactionLegacy;
import com.cloud.utils.db.UpdateBuilder;
import com.cloud.vm.ConsoleProxyVO;
import com.cloud.vm.VirtualMachine.State;

import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

/**
 * DAO for {@link ConsoleProxyVO} entities. Besides the generic CRUD operations
 * inherited from {@link GenericDaoBase}, this implementation runs a number of raw
 * SQL aggregate queries used by the console-proxy load balancer (assignment counts,
 * per-datacenter VM/session/proxy load matrices, storage-pool host info).
 *
 * <p>All raw-SQL helpers swallow {@link SQLException} and return an empty/zero
 * result, logging the failure at WARN level.</p>
 */
@Component
public class ConsoleProxyDaoImpl extends GenericDaoBase<ConsoleProxyVO, Long> implements ConsoleProxyDao {
    private static final Logger s_logger = LoggerFactory.getLogger(ConsoleProxyDaoImpl.class);

    //
    // query SQL for returning console proxy assignment info as following
    //         proxy vm id, count of assignment
    //
    private static final String PROXY_ASSIGNMENT_MATRIX = "SELECT c.id, count(runningVm.id) AS count "
            + " FROM console_proxy AS c LEFT JOIN vm_instance AS i ON c.id=i.id LEFT JOIN"
            + " (SELECT v.id AS id, v.proxy_id AS proxy_id FROM vm_instance AS v WHERE "
            + " (v.state='Running' OR v.state='Creating' OR v.state='Starting' OR v.state='Migrating')) "
            + " AS runningVm ON c.id = runningVm.proxy_id WHERE i.state='Running' " + " GROUP BY c.id";

    //
    // query SQL for returning running VM count at data center basis
    //
    private static final String DATACENTER_VM_MATRIX = "SELECT d.id, d.name, count(v.id) AS count"
            + " FROM data_center AS d LEFT JOIN vm_instance AS v ON v.data_center_id=d.id "
            + " WHERE (v.state='Creating' OR v.state='Starting' OR v.state='Running' OR v.state='Migrating')" + " GROUP BY d.id, d.name";

    private static final String DATACENTER_ACTIVE_SESSION_MATRIX = "SELECT d.id, d.name, sum(c.active_session) AS count"
            + " FROM data_center AS d LEFT JOIN vm_instance AS v ON v.data_center_id=d.id " + " LEFT JOIN console_proxy AS c ON v.id=c.id "
            + " WHERE v.type='ConsoleProxy' AND (v.state='Creating' OR v.state='Starting' OR v.state='Running' OR v.state='Migrating')" + " GROUP BY d.id, d.name";

    //
    // query SQL for returning running console proxy count at data center basis
    //
    private static final String DATACENTER_PROXY_MATRIX = "SELECT d.id, d.name, count(dcid) as count" + " FROM data_center as d" + " LEFT JOIN ("
            + " SELECT v.data_center_id as dcid, c.active_session as active_session from vm_instance as v"
            + " INNER JOIN console_proxy as c ON v.id=c.id AND v.type='ConsoleProxy' AND (v.state='Creating' OR v.state='Starting' OR v.state='Running' OR v.state='Migrating')"
            + " ) as t ON d.id = t.dcid" + " GROUP BY d.id, d.name";

    private static final String GET_PROXY_LOAD = "SELECT count(*) AS count" + " FROM vm_instance AS v "
            + " WHERE v.proxy_id=? AND (v.state='Running' OR v.state='Starting' OR v.state='Creating' OR v.state='Migrating')";

    private static final String GET_PROXY_ACTIVE_LOAD = "SELECT active_session AS count" + " FROM console_proxy" + " WHERE id=?";

    private static final String STORAGE_POOL_HOST_INFO = "SELECT p.data_center_id,  count(ph.host_id) " + " FROM storage_pool p, storage_pool_host_ref ph "
            + " WHERE p.id = ph.pool_id AND p.data_center_id = ? " + " GROUP by p.data_center_id";

    private static final String SHARED_STORAGE_POOL_HOST_INFO = "SELECT p.data_center_id,  count(ph.host_id) "
            + " FROM storage_pool p, storage_pool_host_ref ph " + " WHERE p.pool_type <> 'LVM' AND p.id = ph.pool_id AND p.data_center_id = ? "
            + " GROUP by p.data_center_id";

    // Attribute handle for the entity's updateTime column, resolved in the constructor.
    protected final Attribute _updateTimeAttr;
    protected SearchBuilder<ConsoleProxyVO> DataCenterStatusSearch;
    protected SearchBuilder<ConsoleProxyVO> StateSearch;
    protected SearchBuilder<ConsoleProxyVO> HostSearch;
    protected SearchBuilder<ConsoleProxyVO> LastHostSearch;
    protected SearchBuilder<ConsoleProxyVO> HostUpSearch;
    protected SearchBuilder<ConsoleProxyVO> StateChangeSearch;

    public ConsoleProxyDaoImpl() {
        // proxies in a given data center, filtered by a set of VM states
        DataCenterStatusSearch = createSearchBuilder();
        DataCenterStatusSearch.and("dc", DataCenterStatusSearch.entity().getDataCenterId(), SearchCriteria.Op.EQ);
        DataCenterStatusSearch.and("states", DataCenterStatusSearch.entity().getState(), SearchCriteria.Op.IN);
        DataCenterStatusSearch.done();

        // proxies in any of a set of VM states
        StateSearch = createSearchBuilder();
        StateSearch.and("states", StateSearch.entity().getState(), SearchCriteria.Op.IN);
        StateSearch.done();

        // proxies currently placed on a given host
        HostSearch = createSearchBuilder();
        HostSearch.and("host", HostSearch.entity().getHostId(), SearchCriteria.Op.EQ);
        HostSearch.done();

        // stopped proxies whose last host was a given host (used with state=Stopped)
        LastHostSearch = createSearchBuilder();
        LastHostSearch.and("lastHost", LastHostSearch.entity().getLastHostId(), SearchCriteria.Op.EQ);
        LastHostSearch.and("state", LastHostSearch.entity().getState(), SearchCriteria.Op.EQ);
        LastHostSearch.done();

        // proxies on a host that are NOT in a set of terminal states
        HostUpSearch = createSearchBuilder();
        HostUpSearch.and("host", HostUpSearch.entity().getHostId(), SearchCriteria.Op.EQ);
        HostUpSearch.and("states", HostUpSearch.entity().getState(), SearchCriteria.Op.NIN);
        HostUpSearch.done();

        // compare-and-swap style state transition lookup (id + state + host + update counter)
        StateChangeSearch = createSearchBuilder();
        StateChangeSearch.and("id", StateChangeSearch.entity().getId(), SearchCriteria.Op.EQ);
        StateChangeSearch.and("states", StateChangeSearch.entity().getState(), SearchCriteria.Op.EQ);
        StateChangeSearch.and("host", StateChangeSearch.entity().getHostId(), SearchCriteria.Op.EQ);
        StateChangeSearch.and("update", StateChangeSearch.entity().getUpdated(), SearchCriteria.Op.EQ);
        StateChangeSearch.done();

        _updateTimeAttr = _allAttributes.get("updateTime");
        assert _updateTimeAttr != null : "Couldn't get this updateTime attribute";
    }

    /**
     * Updates the runtime session info of a proxy: active session count, last update
     * time and the opaque session-details blob.
     */
    @Override
    public void update(final long id, final int activeSession, final Date updateTime, final byte[] sessionDetails) {
        final ConsoleProxyVO ub = createForUpdate();
        ub.setActiveSession(activeSession);
        ub.setLastUpdateTime(updateTime);
        ub.setSessionDetails(sessionDetails);
        update(id, ub);
    }

    /** Lists proxies in the given data center that are in any of the given states. */
    @Override
    public List<ConsoleProxyVO> getProxyListInStates(final long dataCenterId, final State... states) {
        final SearchCriteria<ConsoleProxyVO> sc = DataCenterStatusSearch.create();
        sc.setParameters("states", (Object[]) states);
        sc.setParameters("dc", dataCenterId);
        return listBy(sc);
    }

    /** Lists proxies (any data center) that are in any of the given states. */
    @Override
    public List<ConsoleProxyVO> getProxyListInStates(final State... states) {
        final SearchCriteria<ConsoleProxyVO> sc = StateSearch.create();
        sc.setParameters("states", (Object[]) states);
        return listBy(sc);
    }

    /** Lists proxies currently assigned to the given host. */
    @Override
    public List<ConsoleProxyVO> listByHostId(final long hostId) {
        final SearchCriteria<ConsoleProxyVO> sc = HostSearch.create();
        sc.setParameters("host", hostId);
        return listBy(sc);
    }

    /** Lists stopped proxies whose last known host was the given host. */
    @Override
    public List<ConsoleProxyVO> listByLastHostId(final long hostId) {
        final SearchCriteria<ConsoleProxyVO> sc = LastHostSearch.create();
        sc.setParameters("lastHost", hostId);
        sc.setParameters("state", State.Stopped);
        return listBy(sc);
    }

    /** Lists proxies on the given host that are not destroyed, stopped or expunging. */
    @Override
    public List<ConsoleProxyVO> listUpByHostId(final long hostId) {
        final SearchCriteria<ConsoleProxyVO> sc = HostUpSearch.create();
        sc.setParameters("host", hostId);
        sc.setParameters("states", new Object[]{State.Destroyed, State.Stopped, State.Expunging});
        return listBy(sc);
    }

    @Override
    public List<ConsoleProxyLoadInfo> getDatacenterProxyLoadMatrix() {
        return getDatacenterLoadMatrix(DATACENTER_PROXY_MATRIX);
    }

    @Override
    public List<ConsoleProxyLoadInfo> getDatacenterVMLoadMatrix() {
        return getDatacenterLoadMatrix(DATACENTER_VM_MATRIX);
    }

    @Override
    public List<ConsoleProxyLoadInfo> getDatacenterSessionLoadMatrix() {
        return getDatacenterLoadMatrix(DATACENTER_ACTIVE_SESSION_MATRIX);
    }

    /**
     * Returns (data_center_id, host count) pairs for storage pools in the given
     * data center. When {@code countAllPoolTypes} is false, LVM pools are excluded.
     * Returns an empty list on SQL failure.
     */
    @Override
    public List<Pair<Long, Integer>> getDatacenterStoragePoolHostInfo(final long dcId, final boolean countAllPoolTypes) {
        final ArrayList<Pair<Long, Integer>> l = new ArrayList<>();

        final TransactionLegacy txn = TransactionLegacy.currentTxn();
        PreparedStatement pstmt = null;
        try {
            if (countAllPoolTypes) {
                pstmt = txn.prepareAutoCloseStatement(STORAGE_POOL_HOST_INFO);
            } else {
                pstmt = txn.prepareAutoCloseStatement(SHARED_STORAGE_POOL_HOST_INFO);
            }
            pstmt.setLong(1, dcId);

            final ResultSet rs = pstmt.executeQuery();
            while (rs.next()) {
                l.add(new Pair<>(rs.getLong(1), rs.getInt(2)));
            }
        } catch (final SQLException e) {
            // warn (not debug): a failure here silently yields an empty result to the caller
            s_logger.warn("Caught SQLException: ", e);
        }
        return l;
    }

    /**
     * Returns (proxy vm id, assigned running-VM count) pairs for all running proxies.
     * Returns an empty list on SQL failure.
     */
    @Override
    public List<Pair<Long, Integer>> getProxyLoadMatrix() {
        final ArrayList<Pair<Long, Integer>> l = new ArrayList<>();

        final TransactionLegacy txn = TransactionLegacy.currentTxn();
        PreparedStatement pstmt = null;
        try {
            pstmt = txn.prepareAutoCloseStatement(PROXY_ASSIGNMENT_MATRIX);
            final ResultSet rs = pstmt.executeQuery();
            while (rs.next()) {
                l.add(new Pair<>(rs.getLong(1), rs.getInt(2)));
            }
        } catch (final SQLException e) {
            // warn (not debug): a failure here silently yields an empty result to the caller
            s_logger.warn("Caught SQLException: ", e);
        }
        return l;
    }

    /**
     * Returns the number of active-ish VMs assigned to the given proxy, or 0 on
     * SQL failure (indistinguishable from a genuinely empty proxy).
     */
    @Override
    public int getProxyStaticLoad(final long proxyVmId) {
        final TransactionLegacy txn = TransactionLegacy.currentTxn();
        PreparedStatement pstmt = null;
        try {
            pstmt = txn.prepareAutoCloseStatement(GET_PROXY_LOAD);
            pstmt.setLong(1, proxyVmId);

            final ResultSet rs = pstmt.executeQuery();
            if (rs != null && rs.first()) {
                return rs.getInt(1);
            }
        } catch (final SQLException e) {
            s_logger.warn("Caught SQLException: ", e);
        }
        return 0;
    }

    /**
     * Returns the recorded active-session count of the given proxy, or 0 on SQL
     * failure or when the proxy row does not exist.
     */
    @Override
    public int getProxyActiveLoad(final long proxyVmId) {
        final TransactionLegacy txn = TransactionLegacy.currentTxn();
        PreparedStatement pstmt = null;
        try {
            pstmt = txn.prepareAutoCloseStatement(GET_PROXY_ACTIVE_LOAD);
            pstmt.setLong(1, proxyVmId);

            final ResultSet rs = pstmt.executeQuery();
            if (rs != null && rs.first()) {
                return rs.getInt(1);
            }
        } catch (final SQLException e) {
            s_logger.warn("Caught SQLException: ", e);
        }
        return 0;
    }

    /**
     * Returns ids of running proxies whose host is managed by the given management
     * server. Returns an empty list on SQL failure.
     */
    @Override
    public List<Long> getRunningProxyListByMsid(final long msid) {
        final List<Long> l = new ArrayList<>();
        final TransactionLegacy txn = TransactionLegacy.currentTxn();
        PreparedStatement pstmt = null;
        try {
            pstmt = txn.prepareAutoCloseStatement("SELECT c.id FROM console_proxy c, vm_instance v, host h "
                    + "WHERE c.id=v.id AND v.state='Running' AND v.host_id=h.id AND h.mgmt_server_id=?");

            pstmt.setLong(1, msid);
            final ResultSet rs = pstmt.executeQuery();
            while (rs.next()) {
                l.add(rs.getLong(1));
            }
        } catch (final SQLException e) {
            s_logger.warn("Caught SQLException: ", e);
        }
        return l;
    }

    /**
     * Runs one of the per-datacenter matrix queries (expects columns: id, name, count)
     * and maps each row to a {@link ConsoleProxyLoadInfo}. Returns an empty list on
     * SQL failure.
     */
    private List<ConsoleProxyLoadInfo> getDatacenterLoadMatrix(final String sql) {
        final ArrayList<ConsoleProxyLoadInfo> l = new ArrayList<>();

        final TransactionLegacy txn = TransactionLegacy.currentTxn();
        PreparedStatement pstmt = null;
        try {
            pstmt = txn.prepareAutoCloseStatement(sql);
            final ResultSet rs = pstmt.executeQuery();
            while (rs.next()) {
                final ConsoleProxyLoadInfo info = new ConsoleProxyLoadInfo();
                info.setId(rs.getLong(1));
                info.setName(rs.getString(2));
                info.setCount(rs.getInt(3));
                l.add(info);
            }
        } catch (final SQLException e) {
            s_logger.warn("Exception: ", e);
        }
        return l;
    }

    /**
     * "Removes" a proxy by clearing its IP addresses, marking it Destroyed and then
     * delegating to the generic remove, all in one transaction.
     */
    @Override
    public boolean remove(final Long id) {
        final TransactionLegacy txn = TransactionLegacy.currentTxn();
        txn.start();
        final ConsoleProxyVO proxy = createForUpdate();
        proxy.setPublicIpAddress(null);
        proxy.setPrivateIpAddress(null);

        final UpdateBuilder ub = getUpdateBuilder(proxy);
        ub.set(proxy, "state", State.Destroyed);
        ub.set(proxy, "privateIpAddress", null);
        update(id, ub, proxy);

        final boolean result = super.remove(id);
        txn.commit();
        return result;
    }
}
package cz.habarta.typescript.generator.parser; import cz.habarta.typescript.generator.ExcludingTypeProcessor; import cz.habarta.typescript.generator.OptionalProperties; import cz.habarta.typescript.generator.Settings; import cz.habarta.typescript.generator.TypeProcessor; import cz.habarta.typescript.generator.util.Pair; import cz.habarta.typescript.generator.util.PropertyMember; import cz.habarta.typescript.generator.util.Utils; import jakarta.json.bind.annotation.JsonbCreator; import jakarta.json.bind.annotation.JsonbProperty; import jakarta.json.bind.annotation.JsonbTransient; import jakarta.json.bind.annotation.JsonbVisibility; import jakarta.json.bind.config.PropertyNamingStrategy; import jakarta.json.bind.config.PropertyVisibilityStrategy; import java.beans.IntrospectionException; import java.beans.Introspector; import java.beans.PropertyDescriptor; import java.lang.annotation.Annotation; import java.lang.reflect.AnnotatedElement; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Member; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.lang.reflect.Parameter; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Proxy; import java.lang.reflect.Type; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.OptionalDouble; import java.util.OptionalInt; import java.util.OptionalLong; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.Stream; // simplified+dependency free version of apache johnzon JsonbAccessMode public class 
JsonbParser extends ModelParser { private final Class<? extends Annotation> johnzonAny; public static class Factory extends ModelParser.Factory { @Override public TypeProcessor getSpecificTypeProcessor() { return new ExcludingTypeProcessor(Collections.emptyList()); } @Override public JsonbParser create(Settings settings, TypeProcessor commonTypeProcessor, List<RestApplicationParser> restApplicationParsers) { return new JsonbParser(settings, commonTypeProcessor, restApplicationParsers); } } public JsonbParser(Settings settings, TypeProcessor commonTypeProcessor) { this(settings, commonTypeProcessor, Collections.emptyList()); } public JsonbParser(Settings settings, TypeProcessor commonTypeProcessor, List<RestApplicationParser> restApplicationParsers) { super(settings, commonTypeProcessor, restApplicationParsers); johnzonAny = loadJohnzonAnyClass(); } @SuppressWarnings("unchecked") private Class<? extends Annotation> loadJohnzonAnyClass() { try { return (Class<? extends Annotation>) settings.classLoader .loadClass("org.apache.johnzon.mapper.JohnzonAny"); } catch (ClassNotFoundException e) { return null; } } @Override protected DeclarationModel parseClass(final SourceType<Class<?>> sourceClass) { if (sourceClass.type.isEnum()) { return ModelParser.parseEnum(sourceClass); } else { return parseBean(sourceClass); } } // simplistic impl handling @JsonbProperty and @JsonbTransient on fields private BeanModel parseBean(final SourceType<Class<?>> sourceClass) { final JsonbPropertyExtractor extractor = createExtractor(); final List<PropertyModel> properties = extractor.visit(sourceClass.type); final Type superclass = sourceClass.type.getGenericSuperclass() == Object.class ? 
null : sourceClass.type.getGenericSuperclass(); if (superclass != null) { addBeanToQueue(new SourceType<>(superclass, sourceClass.type, "<superClass>")); } final List<Type> interfaces = Arrays.asList(sourceClass.type.getGenericInterfaces()); for (Type aInterface : interfaces) { addBeanToQueue(new SourceType<>(aInterface, sourceClass.type, "<interface>")); } return new BeanModel( sourceClass.type, superclass, null, null, null, interfaces, properties, null); } private JsonbPropertyExtractor createExtractor() { return new JsonbPropertyExtractor( johnzonAny, new PropertyNamingStrategyFactory(Optional.ofNullable(settings.jsonbConfiguration).map(c -> c.namingStrategy).orElse("IDENTITY")).create(), new DefaultPropertyVisibilityStrategy(settings.classLoader), new FieldAndMethodAccessMode(johnzonAny)); } private class JsonbPropertyExtractor { private final Class<? extends Annotation> johnzonAny; private final PropertyNamingStrategy naming; private final PropertyVisibilityStrategy visibility; private final BaseAccessMode delegate; private JsonbPropertyExtractor( final Class<? 
extends Annotation> johnzonAny, final PropertyNamingStrategy propertyNamingStrategy, final PropertyVisibilityStrategy visibilityStrategy, final BaseAccessMode delegate) { this.johnzonAny = johnzonAny; this.naming = propertyNamingStrategy; this.visibility = visibilityStrategy; this.delegate = delegate; } private List<PropertyModel> visit(final Class<?> clazz) { return Stream.of(clazz.getConstructors()) .filter(it -> getJsonbAnnotation(it, JsonbCreator.class) != null) .findFirst() .map(it -> new ArrayList<>(Stream.concat(visitConstructor(it), visitClass(clazz).stream()) .collect(Collectors.toMap(PropertyModel::getName, Function.identity(), (a, b) -> a)) // merge models .values())) .orElseGet(() -> new ArrayList<>(visitClass(clazz))); } private Stream<PropertyModel> visitConstructor(final Constructor<?> constructor) { // JSON-B 1.0 assumes all constructor params are required even if impls can diverge on that due // to user feedbacks so for our libraryDefinition let's assume it is true. // only exception is about optional wrappers which can be optional indeed final List<Type> parameterTypes = settings.getTypeParser().getConstructorParameterTypes(constructor); final List<Pair<Parameter, Type>> parameters = Utils.zip(Arrays.asList(constructor.getParameters()), parameterTypes); return parameters.stream() .map(it -> { final Type type = it.getValue2(); final Parameter parameter = it.getValue1(); final Optional<JsonbProperty> property = Optional.ofNullable( getJsonbAnnotation(parameter, JsonbProperty.class)); final PropertyMember propertyMember = new PropertyMember( parameter, it.getValue2(), parameter.getAnnotatedType(), parameter::getAnnotation); return JsonbParser.this.processTypeAndCreateProperty( property .map(JsonbProperty::value) .filter(p -> !p.isEmpty()) .orElseGet(parameter::getName), type, null, settings.optionalProperties != OptionalProperties.useLibraryDefinition ? 
isPropertyOptional(propertyMember) : (isOptional(type) || OptionalInt.class == type || OptionalLong.class == type || OptionalDouble.class == type || property.map(JsonbProperty::nillable).orElse(false)), null, constructor.getDeclaringClass(), new ParameterMember(parameter), null, null); }); } private List<PropertyModel> visitClass(final Class<?> clazz) { return delegate.find(clazz).entrySet().stream() .filter(e -> !isTransient(e.getValue(), visibility)) .filter(e -> johnzonAny == null || e.getValue().getAnnotation(johnzonAny) == null) .map(e -> { final DecoratedType decoratedType = e.getValue(); final Member member = findMember(decoratedType); final PropertyMember propertyMember = wrapMember( settings.getTypeParser(), member, /*creatorIndex*/ null, decoratedType::getAnnotation, member.getName(), member.getDeclaringClass()); if (propertyMember == null) { return null; } final JsonbProperty property = decoratedType.getAnnotation(JsonbProperty.class); final String key = property == null || property.value().isEmpty() ? naming.translateName(e.getKey()) : property.value(); return JsonbParser.this.processTypeAndCreateProperty( key, Field.class.isInstance(member) ? 
settings.getTypeParser().getFieldType(Field.class.cast(member)) : settings.getTypeParser().getMethodReturnType(Method.class.cast(member)), null, settings.optionalProperties == OptionalProperties.useLibraryDefinition || JsonbParser.this.isPropertyOptional(propertyMember), null, clazz, member, null, null); }) .filter(Objects::nonNull) .sorted(Comparator.comparing(PropertyModel::getName)) .collect(Collectors.toList()); } private Member findMember(final DecoratedType value) { if (FieldAndMethodAccessMode.CompositeDecoratedType.class.isInstance(value)) { // unwrap to use the right reader final FieldAndMethodAccessMode.CompositeDecoratedType<?> decoratedType = FieldAndMethodAccessMode.CompositeDecoratedType.class.cast(value); final DecoratedType type1 = decoratedType.getType1(); final DecoratedType type2 = decoratedType.getType2(); if (FieldAccessMode.FieldDecoratedType.class.isInstance(type1)) { return findMember(type1); } return findMember(type2); } else if (JsonbParser.FieldAccessMode.FieldDecoratedType.class.isInstance(value)){ return JsonbParser.FieldAccessMode.FieldDecoratedType.class.cast(value).getField(); } else if (MethodAccessMode.MethodDecoratedType.class.isInstance(value)){ return MethodAccessMode.MethodDecoratedType.class.cast(value).getMethod(); } throw new IllegalArgumentException("Unsupported reader: " + value); } private boolean isOptional(final Type type) { return ParameterizedType.class.isInstance(type) && Optional.class == ParameterizedType.class.cast(type).getRawType(); } private boolean isTransient(final JsonbParser.DecoratedType dt, final PropertyVisibilityStrategy visibility) { if (!FieldAndMethodAccessMode.CompositeDecoratedType.class.isInstance(dt)) { return isTransient(dt) || shouldSkip(visibility, dt); } final FieldAndMethodAccessMode.CompositeDecoratedType<?> cdt = FieldAndMethodAccessMode.CompositeDecoratedType.class.cast(dt); return isTransient(cdt.getType1()) || isTransient(cdt.getType2()) || (shouldSkip(visibility, cdt.getType1()) && 
shouldSkip(visibility, cdt.getType2())); } private boolean shouldSkip(final PropertyVisibilityStrategy visibility, final JsonbParser.DecoratedType t) { return isNotVisible(visibility, t); } private boolean isTransient(final JsonbParser.DecoratedType t) { if (t.getAnnotation(JsonbTransient.class) != null) { return true; } if (JsonbParser.FieldAccessMode.FieldDecoratedType.class.isInstance(t)) { final Field field = JsonbParser.FieldAccessMode.FieldDecoratedType.class.cast(t).getField(); return Modifier.isTransient(field.getModifiers()) || Modifier.isStatic(field.getModifiers()); } return false; } private boolean isNotVisible(final PropertyVisibilityStrategy visibility, final JsonbParser.DecoratedType t) { return !(JsonbParser.FieldAccessMode.FieldDecoratedType.class.isInstance(t) ? visibility.isVisible(JsonbParser.FieldAccessMode.FieldDecoratedType.class.cast(t).getField()) : (MethodAccessMode.MethodDecoratedType.class.isInstance(t) && visibility.isVisible(MethodAccessMode.MethodDecoratedType.class.cast(t).getMethod()))); } } private interface DecoratedType { Type getType(); <T extends Annotation> T getAnnotation(Class<T> clazz); <T extends Annotation> T getClassOrPackageAnnotation(Class<T> clazz); } private interface BaseAccessMode { Map<String, JsonbParser.DecoratedType> find(Class<?> clazz); } private static class FieldAccessMode implements BaseAccessMode { private final Class<? extends Annotation> johnzonAny; public FieldAccessMode(final Class<? 
extends Annotation> johnzonAny) { this.johnzonAny = johnzonAny; } @Override public Map<String, JsonbParser.DecoratedType> find(final Class<?> clazz) { final Map<String, JsonbParser.DecoratedType> readers = new HashMap<>(); for (final Map.Entry<String, Field> f : fields(clazz, true).entrySet()) { final String key = f.getKey(); if (isIgnored(key) || (johnzonAny != null && Meta.getAnnotation(f.getValue(), johnzonAny) != null)) { continue; } final Field field = f.getValue(); readers.put(key, new FieldDecoratedType(field, field.getGenericType())); } return readers; } protected boolean isIgnored(final String key) { return key.contains("$"); } protected Map<String, Field> fields(final Class<?> clazz, final boolean includeFinalFields) { final Map<String, Field> fields = new HashMap<>(); Class<?> current = clazz; while (current != null && current != Object.class) { for (final Field f : current.getDeclaredFields()) { final String name = f.getName(); final int modifiers = f.getModifiers(); if (fields.containsKey(name) || Modifier.isStatic(modifiers) || Modifier.isTransient(modifiers) || (!includeFinalFields && Modifier.isFinal(modifiers))) { continue; } fields.put(name, f); } current = current.getSuperclass(); } return fields; } private static class FieldDecoratedType implements JsonbParser.DecoratedType { protected final Field field; protected final Type type; public FieldDecoratedType(final Field field, final Type type) { this.field = field; this.field.setAccessible(true); this.type = type; } @Override public <T extends Annotation> T getClassOrPackageAnnotation(final Class<T> clazz) { return Meta.getClassOrPackageAnnotation(field, clazz); } public Field getField() { return field; } @Override public Type getType() { return type; } @Override public <T extends Annotation> T getAnnotation(final Class<T> clazz) { return Meta.getAnnotation(field, clazz); } @Override public String toString() { return "FieldDecoratedType{" + "field=" + field + '}'; } } } private static class 
MethodAccessMode implements BaseAccessMode { private final Class<? extends Annotation> johnzonAny; public MethodAccessMode(final Class<? extends Annotation> johnzonAny) { this.johnzonAny = johnzonAny; } @Override public Map<String, DecoratedType> find(final Class<?> clazz) { final Map<String, DecoratedType> readers = new HashMap<>(); if (Records.isRecord(clazz)) { readers.putAll(Stream.of(clazz.getMethods()) .filter(it -> it.getDeclaringClass() != Object.class && it.getParameterCount() == 0) .filter(it -> !"toString".equals(it.getName()) && !"hashCode".equals(it.getName())) .filter(it -> !isIgnored(it.getName()) && johnzonAny != null && Meta.getAnnotation(it, johnzonAny) == null) .collect(Collectors.toMap(Method::getName, it -> new MethodDecoratedType(it, it.getGenericReturnType()) { }))); } else { final PropertyDescriptor[] propertyDescriptors = getPropertyDescriptors(clazz); for (final PropertyDescriptor descriptor : propertyDescriptors) { final Method readMethod = descriptor.getReadMethod(); final String name = descriptor.getName(); if (readMethod != null && readMethod.getDeclaringClass() != Object.class) { if (isIgnored(name) || johnzonAny != null && Meta.getAnnotation(readMethod, johnzonAny) != null) { continue; } readers.put(name, new MethodDecoratedType(readMethod, readMethod.getGenericReturnType())); } else if (readMethod == null && descriptor.getWriteMethod() != null && // isXXX, not supported by javabeans (descriptor.getPropertyType() == Boolean.class || descriptor.getPropertyType() == boolean.class)) { try { final Method method = clazz.getMethod( "is" + Character.toUpperCase(name.charAt(0)) + (name.length() > 1 ? 
name.substring(1) : ""));
                readers.put(name, new MethodDecoratedType(method, method.getGenericReturnType()));
            } catch (final NoSuchMethodException e) {
                // no-op: the probed accessor does not exist on this class, skip it
            }
        }
    }
}
// NOTE(review): the lines above are the tail of an accessor-discovery loop whose
// beginning is outside this chunk — presumably it registers getter methods keyed
// by decapitalized property name; confirm against the full MethodAccessMode.find.
return readers;
}

/**
 * Returns true for synthetic/irrelevant property names that must never be
 * (de)serialized: Groovy's "metaClass" and compiler-generated names containing '$'.
 */
protected boolean isIgnored(final String name) {
    return name.equals("metaClass") || name.contains("$");
}

/**
 * Introspects {@code clazz} via java.beans and returns its property descriptors.
 * Wraps the checked {@link IntrospectionException} in an IllegalStateException
 * since a non-introspectable bean is a programming error here.
 */
private PropertyDescriptor[] getPropertyDescriptors(final Class<?> clazz) {
    final PropertyDescriptor[] propertyDescriptors;
    try {
        propertyDescriptors = Introspector.getBeanInfo(clazz).getPropertyDescriptors();
    } catch (final IntrospectionException e) {
        throw new IllegalStateException(e);
    }
    return propertyDescriptors;
}

/**
 * A {@link DecoratedType} backed by a single accessor {@link Method}.
 * The method is made accessible eagerly so later reflective calls don't fail
 * on non-public accessors.
 */
public static class MethodDecoratedType implements DecoratedType {
    protected final Method method;
    protected final Type type;

    public MethodDecoratedType(final Method method, final Type type) {
        this.method = method;
        // allow reflective invocation of non-public accessors
        method.setAccessible(true);
        this.type = type;
    }

    @Override
    public <T extends Annotation> T getClassOrPackageAnnotation(final Class<T> clazz) {
        // falls back from declaring class to its package (see Meta)
        return Meta.getClassOrPackageAnnotation(method, clazz);
    }

    public Method getMethod() {
        return method;
    }

    @Override
    public Type getType() {
        return type;
    }

    @Override
    public <T extends Annotation> T getAnnotation(final Class<T> clazz) {
        return Meta.getAnnotation(method, clazz);
    }

    @Override
    public String toString() {
        return "MethodDecoratedType{" + "method=" + method + '}';
    }
}
}

/**
 * Access mode combining field and method (getter) discovery: methods win for the
 * declared key, but when a matching backing field exists the two are merged into a
 * {@link CompositeDecoratedType} so field-level annotations/transient state remain
 * visible as a fallback.
 */
private static class FieldAndMethodAccessMode implements BaseAccessMode {
    private final FieldAccessMode fields;
    private final MethodAccessMode methods;

    private FieldAndMethodAccessMode(final Class<? extends Annotation> johnzonAny) {
        this.fields = new FieldAccessMode(johnzonAny);
        this.methods = new MethodAccessMode(johnzonAny);
    }

    @Override
    public Map<String, JsonbParser.DecoratedType> find(final Class<?> clazz) {
        final Map<String, JsonbParser.DecoratedType> methodReaders = this.methods.find(clazz);
        final boolean record = Records.isRecord(clazz);
        if (record) {
            // records expose state only through accessors; field merging is pointless
            return methodReaders;
        }
        final Map<String, JsonbParser.DecoratedType> fieldsReaders = this.fields.find(clazz);
        // start from the field readers, then overlay/merge the method readers
        final Map<String, JsonbParser.DecoratedType> readers = new HashMap<>(fieldsReaders);
        for (final Map.Entry<String, JsonbParser.DecoratedType> entry : methodReaders.entrySet()) {
            final Method mr = MethodAccessMode.MethodDecoratedType.class.cast(entry.getValue()).getMethod();
            // strip the "is"/"get|set" prefix to recover the backing field name
            // (record branch is dead here since records returned early above)
            final String fieldName = record ? mr.getName() : Introspector.decapitalize(
                    mr.getName().startsWith("is") ? mr.getName().substring(2) : mr.getName().substring(3));
            final Field f = getField(fieldName, clazz);
            final JsonbParser.DecoratedType existing = readers.get(entry.getKey());
            if (existing == null) {
                if (f != null) {
                    // useful to hold the Field and transient state for example, just as fallback
                    readers.put(entry.getKey(), new CompositeDecoratedType<>(
                            entry.getValue(), new FieldAccessMode.FieldDecoratedType(f, f.getType())));
                } else {
                    readers.put(entry.getKey(), entry.getValue());
                }
            } else {
                // method reader takes precedence (type1) over the field reader (type2)
                readers.put(entry.getKey(), new CompositeDecoratedType<>(entry.getValue(), existing));
            }
        }
        return readers;
    }

    /**
     * Walks {@code type} and its superclasses (stopping at Object) looking for a
     * declared field named {@code fieldName}; returns null when absent.
     */
    private Field getField(final String fieldName, final Class<?> type) {
        Class<?> t = type;
        while (t != Object.class && t != null) {
            try {
                return t.getDeclaredField(fieldName);
            } catch (final NoSuchFieldException e) {
                // no-op: keep climbing the hierarchy
            }
            t = t.getSuperclass();
        }
        return null;
    }

    /**
     * Pairs two {@link DecoratedType}s; annotation lookups consult {@code type1}
     * first and fall back to {@code type2}. {@link #getType()} always reflects
     * {@code type1} (the preferred, method-based view).
     */
    public static class CompositeDecoratedType<T extends DecoratedType> implements DecoratedType {
        protected final T type1;
        protected final T type2;

        private CompositeDecoratedType(final T type1, final T type2) {
            this.type1 = type1;
            this.type2 = type2;
        }

        @Override
        public <A extends Annotation> A getClassOrPackageAnnotation(final Class<A> clazz) {
            final A found = type1.getClassOrPackageAnnotation(clazz);
            return found == null ? type2.getClassOrPackageAnnotation(clazz) : found;
        }

        @Override
        public <A extends Annotation> A getAnnotation(final Class<A> clazz) {
            final A found = type1.getAnnotation(clazz);
            return found == null ? type2.getAnnotation(clazz) : found;
        }

        @Override
        public Type getType() {
            return type1.getType();
        }

        public DecoratedType getType1() {
            return type1;
        }

        public DecoratedType getType2() {
            return type2;
        }

        @Override
        public String toString() {
            return "CompositeDecoratedType{" + "type1=" + type1 + ", type2=" + type2 + '}';
        }
    }
}

/**
 * JSON-B default visibility: a member is visible when public, unless the class
 * (or an enclosing package, walked upwards) carries {@link JsonbVisibility},
 * in which case that strategy is instantiated and delegated to. Resolved
 * strategies are cached per class; {@code this} acts as the "no custom
 * strategy" cache sentinel.
 */
private static class DefaultPropertyVisibilityStrategy implements PropertyVisibilityStrategy {
    private final ClassLoader classLoader;
    // per-class cache; value == this means "use default public-member rule"
    private final ConcurrentMap<Class<?>, PropertyVisibilityStrategy> strategies = new ConcurrentHashMap<>();

    public DefaultPropertyVisibilityStrategy(ClassLoader classLoader) {
        this.classLoader = classLoader;
    }

    @Override
    public boolean isVisible(final Field field) {
        // an explicit @JsonbProperty forces visibility regardless of modifiers
        if (getJsonbAnnotation(field, JsonbProperty.class) != null) {
            return true;
        }
        final PropertyVisibilityStrategy strategy = strategies.computeIfAbsent(
                field.getDeclaringClass(), this::visibilityStrategy);
        return strategy == this ? Modifier.isPublic(field.getModifiers()) : strategy.isVisible(field);
    }

    @Override
    public boolean isVisible(final Method method) {
        final PropertyVisibilityStrategy strategy = strategies.computeIfAbsent(
                method.getDeclaringClass(), this::visibilityStrategy);
        return strategy == this ? Modifier.isPublic(method.getModifiers()) : strategy.isVisible(method);
    }

    /**
     * Resolves the strategy for {@code type}: first a @JsonbVisibility on the class,
     * then on its package and each parent package (loading {@code package-info} if
     * the package isn't initialized yet). Returns {@code this} when none is found.
     */
    private PropertyVisibilityStrategy visibilityStrategy(final Class<?> type) {
        JsonbVisibility visibility = getJsonbAnnotation(type, JsonbVisibility.class);
        if (visibility != null) {
            try {
                return visibility.value().getConstructor().newInstance();
            } catch (final ReflectiveOperationException e) {
                throw new IllegalArgumentException(e);
            }
        }
        Package p = type.getPackage();
        while (p != null) {
            visibility = getJsonbAnnotation(p, JsonbVisibility.class);
            if (visibility != null) {
                try {
                    return visibility.value().getConstructor().newInstance();
                } catch (final ReflectiveOperationException e) {
                    throw new IllegalArgumentException(e);
                }
            }
            final String name = p.getName();
            final int end = name.lastIndexOf('.');
            if (end < 0) {
                // reached the root package, stop climbing
                break;
            }
            final String parentPack = name.substring(0, end);
            p = Package.getPackage(parentPack);
            if (p == null) {
                // parent package not loaded yet: force it in via its package-info class
                try {
                    p = classLoader.loadClass(parentPack + ".package-info").getPackage();
                } catch (final ClassNotFoundException e) {
                    // no-op: no package-info, loop ends since p stays null
                }
            }
        }
        return this;
    }
}

/**
 * Maps a JSON-B config value (either a strategy-name String or an actual
 * {@link PropertyNamingStrategy} instance) to a concrete strategy.
 */
private static class PropertyNamingStrategyFactory {
    private final Object value;

    public PropertyNamingStrategyFactory(final Object value) {
        this.value = value;
    }

    public PropertyNamingStrategy create() {
        if (String.class.isInstance(value)) {
            final String val = value.toString();
            switch (val) {
                case PropertyNamingStrategy.IDENTITY:
                    return propertyName -> propertyName;
                case PropertyNamingStrategy.LOWER_CASE_WITH_DASHES:
                    return new ConfigurableNamingStrategy(Character::toLowerCase, '-');
                case PropertyNamingStrategy.LOWER_CASE_WITH_UNDERSCORES:
                    return new ConfigurableNamingStrategy(Character::toLowerCase, '_');
                case PropertyNamingStrategy.UPPER_CAMEL_CASE:
                    return camelCaseStrategy();
                case PropertyNamingStrategy.UPPER_CAMEL_CASE_WITH_SPACES:
                    // split words with spaces first, then upper-case the leading char
                    final PropertyNamingStrategy camelCase = camelCaseStrategy();
                    final PropertyNamingStrategy space = new ConfigurableNamingStrategy(Function.identity(), ' ');
                    return propertyName -> camelCase.translateName(space.translateName(propertyName));
                case PropertyNamingStrategy.CASE_INSENSITIVE:
                    // NOTE(review): identity mapping here — case-insensitivity is
                    // presumably enforced at lookup time elsewhere; confirm.
                    return propertyName -> propertyName;
                default:
                    throw new IllegalArgumentException(val + " unknown as PropertyNamingStrategy");
            }
        }
        if (PropertyNamingStrategy.class.isInstance(value)) {
            return PropertyNamingStrategy.class.cast(value);
        }
        throw new IllegalArgumentException(value + " not supported as PropertyNamingStrategy");
    }

    /** Upper-cases only the first character, e.g. "fooBar" -> "FooBar". */
    private PropertyNamingStrategy camelCaseStrategy() {
        return propertyName -> Character.toUpperCase(propertyName.charAt(0))
                + (propertyName.length() > 1 ? propertyName.substring(1) : "");
    }

    /**
     * Splits camelCase words at upper-case boundaries, applies {@code converter}
     * to each boundary character and joins words with {@code separator},
     * e.g. (toLowerCase, '_') turns "fooBar" into "foo_bar".
     */
    private static class ConfigurableNamingStrategy implements PropertyNamingStrategy {
        private final Function<Character, Character> converter;
        private final char separator;

        public ConfigurableNamingStrategy(final Function<Character, Character> wordConverter, final char sep) {
            this.converter = wordConverter;
            this.separator = sep;
        }

        @Override
        public String translateName(final String propertyName) {
            final StringBuilder global = new StringBuilder();  // joined result
            final StringBuilder current = new StringBuilder(); // word being built
            for (int i = 0; i < propertyName.length(); i++) {
                final char c = propertyName.charAt(i);
                if (Character.isUpperCase(c)) {
                    final char transformed = converter.apply(c);
                    if (current.length() > 0) {
                        // flush the finished word plus a separator
                        global.append(current).append(separator);
                        current.setLength(0);
                    }
                    current.append(transformed);
                } else {
                    current.append(c);
                }
            }
            if (current.length() > 0) {
                global.append(current);
            } else {
                // input ended right after a flushed separator: trim it
                // NOTE(review): an empty propertyName reaches setLength(-1) and would
                // throw — assumes callers never pass ""; confirm upstream validation.
                global.setLength(global.length() - 1); // remove last sep
            }
            return global.toString();
        }
    }
}

/**
 * Reflection shim for {@code Class#isRecord()} so the code also runs on JVMs
 * older than Java 16 (where the method doesn't exist and everything is treated
 * as a non-record).
 */
private static class Records {
    private static final Method IS_RECORD;

    static {
        Method isRecord = null;
        try {
            isRecord = Class.class.getMethod("isRecord");
        } catch (final NoSuchMethodException e) {
            // no-op: pre-16 JVM, IS_RECORD stays null
        }
        IS_RECORD = isRecord;
    }

    private Records() {
        // no-op: static utility holder
    }

    /** True when the JVM supports records and {@code clazz} is one; false otherwise. */
    public static boolean isRecord(final Class<?> clazz) {
        try {
            return IS_RECORD != null && Boolean.class.cast(IS_RECORD.invoke(clazz));
        } catch (final InvocationTargetException | IllegalAccessException e) {
            return false;
        }
    }
}

/**
 * Annotation lookup helpers: direct lookups, class/package fallbacks, and
 * meta-annotation resolution (an annotation annotated with the JSON-B one),
 * including a dynamic proxy that lets user meta-annotation attributes override
 * the library annotation's defaults.
 */
private static final class Meta {
    private Meta() {
        // no-op: static utility holder
    }

    private static <T extends Annotation> T getAnnotation(final AnnotatedElement holder, final Class<T> api) {
        return getDirectAnnotation(holder, api);
    }

    private static <T extends Annotation> T getClassOrPackageAnnotation(final Method holder, final Class<T> api) {
        return getIndirectAnnotation(api, holder::getDeclaringClass, () -> holder.getDeclaringClass().getPackage());
    }

    private static <T extends Annotation> T getClassOrPackageAnnotation(final Field holder, final Class<T> api) {
        return getIndirectAnnotation(api, holder::getDeclaringClass, () -> holder.getDeclaringClass().getPackage());
    }

    /** Direct annotation on the element, else a meta-annotation carried by one of its annotations. */
    private static <T extends Annotation> T getDirectAnnotation(final AnnotatedElement holder, final Class<T> api) {
        final T annotation = getJsonbAnnotation(holder, api);
        if (annotation != null) {
            return annotation;
        }
        return findMeta(holder.getAnnotations(), api);
    }

    /** Declaring class first, then its package; null when neither carries the annotation. */
    private static <T extends Annotation> T getIndirectAnnotation(final Class<T> api,
            final Supplier<Class<?>> ownerSupplier, final Supplier<Package> packageSupplier) {
        final T ownerAnnotation = getJsonbAnnotation(ownerSupplier.get(), api);
        if (ownerAnnotation != null) {
            return ownerAnnotation;
        }
        final Package pck = packageSupplier.get();
        if (pck != null) {
            return getJsonbAnnotation(pck, api);
        }
        return null;
    }

    /**
     * Scans {@code annotations} for one whose type is itself annotated with
     * {@code api}. If the user annotation redefines any attribute of {@code api},
     * a merging proxy is returned; otherwise the library annotation is returned
     * as-is.
     */
    public static <T extends Annotation> T findMeta(final Annotation[] annotations, final Class<T> api) {
        for (final Annotation a : annotations) {
            final Class<? extends Annotation> userType = a.annotationType();
            final T aa = getJsonbAnnotation(userType, api);
            if (aa != null) {
                boolean overriden = false;
                final Map<String, Method> mapping = new HashMap<String, Method>();
                // later puts (userType) replace earlier ones (api): user attributes win
                for (final Class<?> cm : Arrays.asList(api, userType)) {
                    for (final Method m : cm.getMethods()) {
                        overriden = mapping.put(m.getName(), m) != null || overriden;
                    }
                }
                if (!overriden) {
                    return aa;
                }
                return api.cast(newAnnotation(mapping, a, aa));
            }
        }
        return null;
    }

    /**
     * Builds a proxy implementing the library annotation type; each attribute call
     * is routed to the user annotation when it declares that attribute, else to
     * the library annotation. InvocationTargetExceptions are unwrapped.
     */
    @SuppressWarnings("unchecked")
    private static <T extends Annotation> T newAnnotation(final Map<String, Method> methodMapping,
            final Annotation user, final T johnzon) {
        return (T) Proxy.newProxyInstance(Thread.currentThread().getContextClassLoader(),
                new Class<?>[]{johnzon.annotationType()}, (proxy, method, args) -> {
                    final Method m = methodMapping.get(method.getName());
                    try {
                        if (m.getDeclaringClass() == user.annotationType()) {
                            return m.invoke(user, args);
                        }
                        return m.invoke(johnzon, args);
                    } catch (final InvocationTargetException ite) {
                        throw ite.getTargetException();
                    }
                });
    }
}

/**
 * Adapts a constructor/method {@link Parameter} to the {@link Member} and
 * {@link AnnotatedElement} contracts so parameters can flow through the same
 * annotation-lookup code paths as fields and methods.
 */
private static class ParameterMember implements Member, AnnotatedElement {
    private final Parameter parameter;

    public ParameterMember(final Parameter parameter) {
        this.parameter = parameter;
    }

    @Override
    public Class<?> getDeclaringClass() {
        return parameter.getDeclaringExecutable().getDeclaringClass();
    }

    @Override
    public String getName() {
        return parameter.getName();
    }

    @Override
    public int getModifiers() {
        return parameter.getModifiers();
    }

    @Override
    public boolean isSynthetic() {
        return parameter.isSynthetic();
    }

    @Override
    public <T extends Annotation> T getAnnotation(final Class<T> type) {
        return parameter.getAnnotation(type);
    }

    @Override
    public Annotation[] getAnnotations() {
        return parameter.getAnnotations();
    }

    @Override
    public Annotation[] getDeclaredAnnotations() {
        return parameter.getDeclaredAnnotations();
    }
}

/**
 * Looks up an annotation accepting either its {@code jakarta.*} form or the
 * legacy {@code javax.*} equivalent (javax/jakarta namespace migration bridge).
 */
static <A extends Annotation> A getJsonbAnnotation(AnnotatedElement annotatedElement, Class<A> jakartaAnnotationClass) {
    final Class<?> javaxAnnotationClass = javax(jakartaAnnotationClass);
    return Utils.getMigratedAnnotation(annotatedElement, jakartaAnnotationClass, javaxAnnotationClass);
}

/** Returns the javax.* twin of a jakarta.* JSON-B class, or null when unmapped. */
private static <T> Class<T> javax(Class<T> jakartaClass) {
    @SuppressWarnings("unchecked")
    final Class<T> cls = (Class<T>) javaxClasses.get().get(jakartaClass);
    return cls;
}

// Lazily-built, memoized jakarta -> javax class mapping used by javax(Class).
private static final Supplier<Map<Class<?>, Class<?>>> javaxClasses = Utils.memoize(() -> {
    final Map<Class<?>, Class<?>> map = new LinkedHashMap<>();
    map.put(jakarta.json.bind.annotation.JsonbCreator.class, javax.json.bind.annotation.JsonbCreator.class);
    map.put(jakarta.json.bind.annotation.JsonbProperty.class, javax.json.bind.annotation.JsonbProperty.class);
    map.put(jakarta.json.bind.annotation.JsonbTransient.class, javax.json.bind.annotation.JsonbTransient.class);
    map.put(jakarta.json.bind.annotation.JsonbVisibility.class, javax.json.bind.annotation.JsonbVisibility.class);
    map.put(jakarta.json.bind.config.PropertyNamingStrategy.class, javax.json.bind.config.PropertyNamingStrategy.class);
    map.put(jakarta.json.bind.config.PropertyVisibilityStrategy.class, javax.json.bind.config.PropertyVisibilityStrategy.class);
    return map;
});
}
// Copyright (C) 2008 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.sshd; import static com.google.gerrit.server.ssh.SshAddressesModule.IANA_SSH_PORT; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.SECONDS; import com.google.common.base.Strings; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.gerrit.common.Version; import com.google.gerrit.extensions.events.LifecycleListener; import com.google.gerrit.server.config.ConfigUtil; import com.google.gerrit.server.config.GerritServerConfig; import com.google.gerrit.server.ssh.SshAdvertisedAddresses; import com.google.gerrit.server.ssh.SshInfo; import com.google.gerrit.server.ssh.SshListenAddresses; import com.google.gerrit.server.util.IdGenerator; import com.google.gerrit.server.util.SocketUtil; import com.google.inject.Inject; import com.google.inject.Singleton; import com.jcraft.jsch.HostKey; import com.jcraft.jsch.JSchException; import org.apache.mina.transport.socket.SocketSessionConfig; import org.apache.sshd.SshServer; import org.apache.sshd.common.Channel; import org.apache.sshd.common.Cipher; import org.apache.sshd.common.Compression; import org.apache.sshd.common.ForwardingFilter; import org.apache.sshd.common.KeyExchange; import org.apache.sshd.common.KeyPairProvider; import org.apache.sshd.common.NamedFactory; import org.apache.sshd.common.Random; 
import org.apache.sshd.common.Session; import org.apache.sshd.common.Signature; import org.apache.sshd.common.SshdSocketAddress; import org.apache.sshd.common.cipher.AES128CBC; import org.apache.sshd.common.cipher.AES128CTR; import org.apache.sshd.common.cipher.AES192CBC; import org.apache.sshd.common.cipher.AES256CBC; import org.apache.sshd.common.cipher.AES256CTR; import org.apache.sshd.common.cipher.ARCFOUR128; import org.apache.sshd.common.cipher.ARCFOUR256; import org.apache.sshd.common.cipher.BlowfishCBC; import org.apache.sshd.common.cipher.CipherNone; import org.apache.sshd.common.cipher.TripleDESCBC; import org.apache.sshd.common.compression.CompressionNone; import org.apache.sshd.common.file.FileSystemFactory; import org.apache.sshd.common.file.FileSystemView; import org.apache.sshd.common.file.SshFile; import org.apache.sshd.common.forward.DefaultTcpipForwarderFactory; import org.apache.sshd.common.forward.TcpipServerChannel; import org.apache.sshd.common.future.CloseFuture; import org.apache.sshd.common.future.SshFutureListener; import org.apache.sshd.common.io.IoAcceptor; import org.apache.sshd.common.io.IoServiceFactory; import org.apache.sshd.common.io.IoSession; import org.apache.sshd.common.io.mina.MinaServiceFactory; import org.apache.sshd.common.io.mina.MinaSession; import org.apache.sshd.common.mac.HMACMD5; import org.apache.sshd.common.mac.HMACMD596; import org.apache.sshd.common.mac.HMACSHA1; import org.apache.sshd.common.mac.HMACSHA196; import org.apache.sshd.common.random.BouncyCastleRandom; import org.apache.sshd.common.random.JceRandom; import org.apache.sshd.common.random.SingletonRandomFactory; import org.apache.sshd.common.session.AbstractSession; import org.apache.sshd.common.signature.SignatureDSA; import org.apache.sshd.common.signature.SignatureRSA; import org.apache.sshd.common.util.Buffer; import org.apache.sshd.common.util.SecurityUtils; import org.apache.sshd.server.Command; import org.apache.sshd.server.CommandFactory; import 
org.apache.sshd.server.PublickeyAuthenticator; import org.apache.sshd.server.UserAuth; import org.apache.sshd.server.auth.UserAuthPublicKey; import org.apache.sshd.server.auth.gss.GSSAuthenticator; import org.apache.sshd.server.auth.gss.UserAuthGSS; import org.apache.sshd.server.channel.ChannelSession; import org.apache.sshd.server.kex.DHG1; import org.apache.sshd.server.kex.DHG14; import org.apache.sshd.server.session.SessionFactory; import org.bouncycastle.crypto.prng.RandomGenerator; import org.bouncycastle.crypto.prng.VMPCRandomGenerator; import org.eclipse.jgit.lib.Config; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.IOException; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.SocketAddress; import java.net.UnknownHostException; import java.security.InvalidKeyException; import java.security.KeyPair; import java.security.PublicKey; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.LinkedList; import java.util.List; /** * SSH daemon to communicate with Gerrit. * <p> * Use a Git URL such as {@code ssh://${email}@${host}:${port}/${path}}, * e.g. {@code ssh://sop@google.com@gerrit.com:8010/tools/gerrit.git} to * access the SSH daemon itself. 
 * <p>
 * Versions of Git before 1.5.3 may require setting the username and port
 * properties in the user's {@code ~/.ssh/config} file, and using a host
 * alias through a URL such as <code>gerrit-alias:/tools/gerrit.git</code>:
 * <pre>
 * Host gerrit-alias
 *  User sop@google.com
 *  Hostname gerrit.com
 *  Port 8010
 * </pre>
 */
@Singleton
public class SshDaemon extends SshServer implements SshInfo, LifecycleListener {
  private static final Logger log = LoggerFactory.getLogger(SshDaemon.class);

  /** I/O backend the daemon can run on. */
  public static enum SshSessionBackend {
    MINA, NIO2
  }

  private final List<SocketAddress> listen;       // addresses to bind
  private final List<String> advertised;          // addresses advertised in host keys
  private final boolean keepAlive;                // enable TCP keep-alive on sessions
  private final List<HostKey> hostKeys;           // precomputed jsch host keys
  private volatile IoAcceptor daemonAcceptor;     // non-null while the daemon is running
  private final Config cfg;

  /**
   * Wires up the whole SSHD: server identification, auth limits/timeouts,
   * crypto (KEX, random, ciphers, MACs, signatures), channels, forwarding
   * (disabled), filesystem (disabled), user auth (pubkey + optional Kerberos)
   * and the session factory. Configuration is read from the "sshd" section of
   * gerrit.config.
   */
  @Inject
  SshDaemon(final CommandFactory commandFactory, final NoShell noShell,
      final PublickeyAuthenticator userAuth,
      final GerritGSSAuthenticator kerberosAuth,
      final KeyPairProvider hostKeyProvider, final IdGenerator idGenerator,
      @GerritServerConfig final Config cfg, final SshLog sshLog,
      @SshListenAddresses final List<SocketAddress> listen,
      @SshAdvertisedAddresses final List<String> advertised) {
    setPort(IANA_SSH_PORT /* never used */);

    this.cfg = cfg;
    this.listen = listen;
    this.advertised = advertised;
    keepAlive = cfg.getBoolean("sshd", "tcpkeepalive", true);

    getProperties().put(SERVER_IDENTIFICATION,
        "GerritCodeReview_" + Version.getVersion() //
            + " (" + super.getVersion() + ")");

    getProperties().put(MAX_AUTH_REQUESTS,
        String.valueOf(cfg.getInt("sshd", "maxAuthTries", 6)));

    getProperties().put(
        AUTH_TIMEOUT,
        String.valueOf(MILLISECONDS.convert(ConfigUtil.getTimeUnit(cfg, "sshd",
            null, "loginGraceTime", 120, SECONDS), SECONDS)));

    long idleTimeoutSeconds = ConfigUtil.getTimeUnit(cfg, "sshd", null,
        "idleTimeout", 0, SECONDS);
    getProperties().put(
        IDLE_TIMEOUT,
        String.valueOf(SECONDS.toMillis(idleTimeoutSeconds)));

    final int maxConnectionsPerUser =
        cfg.getInt("sshd", "maxConnectionsPerUser", 64);
    if (0 < maxConnectionsPerUser) {
      getProperties().put(MAX_CONCURRENT_SESSIONS,
          String.valueOf(maxConnectionsPerUser));
    }

    final String kerberosKeytab = cfg.getString(
        "sshd", null, "kerberosKeytab");
    final String kerberosPrincipal = cfg.getString(
        "sshd", null, "kerberosPrincipal");

    // Force the MINA transport backend for sshd-core.
    System.setProperty(IoServiceFactory.class.getName(),
        MinaServiceFactory.class.getName());

    if (SecurityUtils.isBouncyCastleRegistered()) {
      initProviderBouncyCastle(cfg);
    } else {
      initProviderJce();
    }
    initCiphers(cfg);
    initMacs(cfg);
    initSignatures();
    initChannels();
    initForwarding();
    initFileSystemFactory();
    initSubsystems();
    initCompression();
    initUserAuth(userAuth, kerberosAuth, kerberosKeytab, kerberosPrincipal);
    setKeyPairProvider(hostKeyProvider);
    setCommandFactory(commandFactory);
    setShellFactory(noShell);

    setSessionFactory(new SessionFactory() {
      @Override
      protected AbstractSession createSession(final IoSession io)
          throws Exception {
        // Propagate the configured TCP keep-alive to the underlying MINA socket.
        if (io instanceof MinaSession) {
          if (((MinaSession) io).getSession()
              .getConfig() instanceof SocketSessionConfig) {
            ((SocketSessionConfig) ((MinaSession) io).getSession()
                .getConfig())
                .setKeepAlive(keepAlive);
          }
        }

        GerritServerSession s = (GerritServerSession)super.createSession(io);
        int id = idGenerator.next();
        SocketAddress peer = io.getRemoteAddress();
        final SshSession sd = new SshSession(id, peer);
        s.setAttribute(SshSession.KEY, sd);

        // Log a session close without authentication as a failure.
        //
        s.addCloseSessionListener(new SshFutureListener<CloseFuture>() {
          @Override
          public void operationComplete(CloseFuture future) {
            if (sd.isAuthenticationError()) {
              sshLog.onAuthFail(sd);
            }
          }
        });
        return s;
      }

      @Override
      protected AbstractSession doCreateSession(IoSession ioSession)
          throws Exception {
        return new GerritServerSession(server, ioSession);
      }
    });

    hostKeys = computeHostKeys();
  }

  @Override
  public List<HostKey> getHostKeys() {
    return hostKeys;
  }

  public IoAcceptor getIoAcceptor() {
    return daemonAcceptor;
  }

  /**
   * Binds the daemon to the configured listen addresses. When the configured
   * listenAddress ends in ":0" (ephemeral port) the actually bound address is
   * written back into the config so other components see the real port.
   * No-op when already started or when no listen addresses are configured.
   */
  @Override
  public synchronized void start() {
    if (daemonAcceptor == null && !listen.isEmpty()) {
      checkConfig();
      if (sessionFactory == null) {
        sessionFactory = createSessionFactory();
      }
      sessionFactory.setServer(this);
      daemonAcceptor = createAcceptor();

      try {
        String listenAddress = cfg.getString("sshd", null, "listenAddress");
        boolean rewrite = !Strings.isNullOrEmpty(listenAddress)
            && listenAddress.endsWith(":0");
        daemonAcceptor.bind(listen);
        if (rewrite) {
          SocketAddress bound =
              Iterables.getOnlyElement(daemonAcceptor.getBoundAddresses());
          cfg.setString("sshd", null, "listenAddress",
              format((InetSocketAddress)bound));
        }
      } catch (IOException e) {
        throw new IllegalStateException("Cannot bind to " + addressList(), e);
      }

      log.info(String.format("Started Gerrit %s on %s", version, addressList()));
    }
  }

  /** Formats a bound address as "host:port" using the literal IP. */
  private static String format(InetSocketAddress s) {
    return String.format("%s:%d", s.getAddress().getHostAddress(), s.getPort());
  }

  /** Disposes the acceptor and marks the daemon stopped; safe to call twice. */
  @Override
  public synchronized void stop() {
    if (daemonAcceptor != null) {
      try {
        daemonAcceptor.dispose();
        log.info("Stopped Gerrit SSHD");
      } finally {
        daemonAcceptor = null;
      }
    }
  }

  @Override
  protected void checkConfig() {
    super.checkConfig();
    if (myHostKeys().isEmpty()) {
      throw new IllegalStateException("No SSHD host key");
    }
  }

  /**
   * Builds one jsch {@link HostKey} per (public key, advertised address) pair.
   * Keys that jsch cannot format are logged and skipped rather than failing
   * startup.
   */
  private List<HostKey> computeHostKeys() {
    if (listen.isEmpty()) {
      return Collections.emptyList();
    }

    final List<PublicKey> keys = myHostKeys();
    final List<HostKey> r = new ArrayList<>();
    for (final PublicKey pub : keys) {
      final Buffer buf = new Buffer();
      buf.putRawPublicKey(pub);
      final byte[] keyBin = buf.getCompactData();

      for (final String addr : advertised) {
        try {
          r.add(new HostKey(addr, keyBin));
        } catch (JSchException e) {
          log.warn("Cannot format SSHD host key", e);
        }
      }
    }
    return Collections.unmodifiableList(r);
  }

  /** Loads the RSA and DSA host public keys available from the key provider. */
  private List<PublicKey> myHostKeys() {
    final KeyPairProvider p = getKeyPairProvider();
    final List<PublicKey> keys = new ArrayList<>(2);
    addPublicKey(keys, p, KeyPairProvider.SSH_RSA);
    addPublicKey(keys, p, KeyPairProvider.SSH_DSS);
    return keys;
  }

  private static void addPublicKey(final Collection<PublicKey> out,
      final KeyPairProvider p, final String type) {
    final KeyPair pair = p.loadKey(type);
    if (pair != null && pair.getPublic() != null) {
      out.add(pair.getPublic());
    }
  }

  /** Human-readable, comma-separated list of the configured listen addresses. */
  private String addressList() {
    final StringBuilder r = new StringBuilder();
    for (Iterator<SocketAddress> i = listen.iterator(); i.hasNext();) {
      r.append(SocketUtil.format(i.next(), IANA_SSH_PORT));
      if (i.hasNext()) {
        r.append(", ");
      }
    }
    return r.toString();
  }

  /**
   * BouncyCastle available: enable DH group14 + group1 key exchange and the BC
   * PRNG (or a deterministic insecure PRNG when sshd.testUseInsecureRandom is
   * set — tests only).
   */
  private void initProviderBouncyCastle(Config cfg) {
    setKeyExchangeFactories(Arrays.<NamedFactory<KeyExchange>> asList(
        new DHG14.Factory(), new DHG1.Factory()));
    NamedFactory<Random> factory;
    if (cfg.getBoolean("sshd", null, "testUseInsecureRandom", false)) {
      factory = new InsecureBouncyCastleRandom.Factory();
    } else {
      factory = new BouncyCastleRandom.Factory();
    }
    setRandomFactory(new SingletonRandomFactory(factory));
  }

  /**
   * Deterministic PRNG seeded with a constant — intentionally insecure, used
   * only for tests (see sshd.testUseInsecureRandom above). Never use in
   * production.
   */
  private static class InsecureBouncyCastleRandom implements Random {
    private static class Factory implements NamedFactory<Random> {
      @Override
      public String getName() {
        return "INSECURE_bouncycastle";
      }

      @Override
      public Random create() {
        return new InsecureBouncyCastleRandom();
      }
    }

    private final RandomGenerator random;

    private InsecureBouncyCastleRandom() {
      random = new VMPCRandomGenerator();
      random.addSeedMaterial(1234);
    }

    @Override
    public void fill(byte[] bytes, int start, int len) {
      random.nextBytes(bytes, start, len);
    }
  }

  /** JCE-only fallback: DH group1 key exchange and the JCE SecureRandom. */
  private void initProviderJce() {
    setKeyExchangeFactories(Arrays
        .<NamedFactory<KeyExchange>> asList(new DHG1.Factory()));
    setRandomFactory(new SingletonRandomFactory(new JceRandom.Factory()));
  }

  /**
   * Registers the cipher list, probing each cipher with a dummy key first and
   * dropping the ones this JVM cannot initialize (e.g. key sizes blocked
   * without the unlimited-strength policy files). A null sentinel is appended
   * before the "none" cipher so that filter() excludes "none" from the default
   * set — it is only usable when explicitly configured via sshd.cipher.
   */
  @SuppressWarnings("unchecked")
  private void initCiphers(final Config cfg) {
    final List<NamedFactory<Cipher>> a = new LinkedList<>();
    a.add(new AES128CBC.Factory());
    a.add(new TripleDESCBC.Factory());
    a.add(new BlowfishCBC.Factory());
    a.add(new AES192CBC.Factory());
    a.add(new AES256CBC.Factory());
    a.add(new AES128CTR.Factory());
    a.add(new AES256CTR.Factory());
    a.add(new ARCFOUR256.Factory());
    a.add(new ARCFOUR128.Factory());

    for (Iterator<NamedFactory<Cipher>> i = a.iterator(); i.hasNext();) {
      final NamedFactory<Cipher> f = i.next();
      try {
        final Cipher c = f.create();
        final byte[] key = new byte[c.getBlockSize()];
        final byte[] iv = new byte[c.getIVSize()];
        c.init(Cipher.Mode.Encrypt, key, iv);
      } catch (InvalidKeyException e) {
        log.warn("Disabling cipher " + f.getName() + ": " + e.getMessage()
            + "; try installing unlimited cryptography extension");
        i.remove();
      } catch (Exception e) {
        log.warn("Disabling cipher " + f.getName() + ": " + e.getMessage());
        i.remove();
      }
    }

    // null marks the end of the default set; "none" stays opt-in only.
    a.add(null);
    a.add(new CipherNone.Factory());
    setCipherFactories(filter(cfg, "cipher",
        (NamedFactory<Cipher>[])a.toArray(new NamedFactory[a.size()])));
  }

  private void initMacs(final Config cfg) {
    setMacFactories(filter(cfg, "mac", new HMACMD5.Factory(),
        new HMACSHA1.Factory(), new HMACMD596.Factory(),
        new HMACSHA196.Factory()));
  }

  /**
   * Applies the sshd.&lt;key&gt; config list to the available factories. The
   * default set is every entry before the first null sentinel. Config entries
   * support a small grammar: "-name" removes, "+name" adds to the defaults,
   * and a bare name (first occurrence) replaces the default set entirely.
   * Unknown names are logged with the supported list and ignored.
   */
  @SafeVarargs
  private static <T> List<NamedFactory<T>> filter(final Config cfg,
      final String key, final NamedFactory<T>... avail) {
    final ArrayList<NamedFactory<T>> def = new ArrayList<>();
    for (final NamedFactory<T> n : avail) {
      if (n == null) {
        break; // sentinel: the rest are opt-in only
      }
      def.add(n);
    }

    final String[] want = cfg.getStringList("sshd", null, key);
    if (want == null || want.length == 0) {
      return def;
    }

    boolean didClear = false;
    for (final String setting : want) {
      String name = setting.trim();
      boolean add = true;
      if (name.startsWith("-")) {
        add = false;
        name = name.substring(1).trim();
      } else if (name.startsWith("+")) {
        name = name.substring(1).trim();
      } else if (!didClear) {
        // first bare name replaces the defaults wholesale
        didClear = true;
        def.clear();
      }

      final NamedFactory<T> n = find(name, avail);
      if (n == null) {
        final StringBuilder msg = new StringBuilder();
        msg.append("sshd.").append(key).append(" = ").append(name)
            .append(" unsupported; only ");
        for (int i = 0; i < avail.length; i++) {
          if (avail[i] == null) {
            continue;
          }
          if (i > 0) {
            msg.append(", ");
          }
          msg.append(avail[i].getName());
        }
        msg.append(" is supported");
        log.error(msg.toString());
      } else if (add) {
        if (!def.contains(n)) {
          def.add(n);
        }
      } else {
        def.remove(n);
      }
    }
    return def;
  }

  /** Linear search of {@code avail} by factory name; null sentinels skipped. */
  @SafeVarargs
  private static <T> NamedFactory<T> find(final String name,
      final NamedFactory<T>... avail) {
    for (final NamedFactory<T> n : avail) {
      if (n != null && name.equals(n.getName())) {
        return n;
      }
    }
    return null;
  }

  private void initSignatures() {
    setSignatureFactories(Arrays.<NamedFactory<Signature>> asList(
        new SignatureDSA.Factory(), new SignatureRSA.Factory()));
  }

  private void initCompression() {
    // Always disable transparent compression. The majority of our data
    // transfer is highly compressed Git pack files. We cannot make them
    // any smaller than they already are.
    //
    setCompressionFactories(Arrays
        .<NamedFactory<Compression>> asList(new CompressionNone.Factory()));
  }

  private void initChannels() {
    setChannelFactories(Arrays.<NamedFactory<Channel>> asList(
        new ChannelSession.Factory(), //
        new TcpipServerChannel.DirectTcpipFactory() //
        ));
  }

  /** No SFTP or other subsystems are offered. */
  private void initSubsystems() {
    setSubsystemFactories(Collections.<NamedFactory<Command>> emptyList());
  }

  /**
   * Enables public-key authentication and, when sshd.kerberosKeytab is set,
   * GSSAPI (Kerberos) authentication as well. The service principal defaults
   * to "host/<canonical local hostname>" when not configured.
   */
  private void initUserAuth(final PublickeyAuthenticator pubkey,
      final GSSAuthenticator kerberosAuthenticator,
      String kerberosKeytab, String kerberosPrincipal) {
    List<NamedFactory<UserAuth>> authFactories = Lists.newArrayList();
    if (kerberosKeytab != null) {
      authFactories.add(new UserAuthGSS.Factory());
      log.info("Enabling kerberos with keytab " + kerberosKeytab);
      if (!new File(kerberosKeytab).canRead()) {
        log.error("Keytab " + kerberosKeytab
            + " does not exist or is not readable; further errors are possible");
      }
      kerberosAuthenticator.setKeytabFile(kerberosKeytab);
      if (kerberosPrincipal == null) {
        try {
          kerberosPrincipal = "host/"
              + InetAddress.getLocalHost().getCanonicalHostName();
        } catch(UnknownHostException e) {
          kerberosPrincipal = "host/localhost";
        }
      }
      log.info("Using kerberos principal " + kerberosPrincipal);
      if (!kerberosPrincipal.startsWith("host/")) {
        log.warn("Host principal does not start with host/ "
            + "which most SSH clients will supply automatically");
      }
      kerberosAuthenticator.setServicePrincipalName(kerberosPrincipal);
      setGSSAuthenticator(kerberosAuthenticator);
    }
    authFactories.add(new UserAuthPublicKey.Factory());
    setUserAuthFactories(authFactories);
    setPublickeyAuthenticator(pubkey);
  }

  /** All TCP/IP and agent/X11 forwarding is refused. */
  private void initForwarding() {
    setTcpipForwardingFilter(new ForwardingFilter() {
      @Override
      public boolean canForwardAgent(Session session) {
        return false;
      }

      @Override
      public boolean canForwardX11(Session session) {
        return false;
      }

      @Override
      public boolean canListen(SshdSocketAddress address, Session session) {
        return false;
      }

      @Override
      public boolean canConnect(SshdSocketAddress address, Session session) {
        return false;
      }
    });
    setTcpipForwarderFactory(new DefaultTcpipForwarderFactory());
  }

  /** Exposes an empty virtual filesystem: every file lookup resolves to null. */
  private void initFileSystemFactory() {
    setFileSystemFactory(new FileSystemFactory() {
      @Override
      public FileSystemView createFileSystemView(Session session)
          throws IOException {
        return new FileSystemView() {
          @Override
          public SshFile getFile(SshFile baseDir, String file) {
            return null;
          }

          @Override
          public SshFile getFile(String file) {
            return null;
          }};
      }
    });
  }
}
/** * Copyright 2005-2015 Red Hat, Inc. * * Red Hat licenses this file to you under the Apache License, version * 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package io.fabric8.forge.camel.commands.project.completer; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; import io.fabric8.forge.addon.utils.CamelProjectHelper; import io.fabric8.forge.camel.commands.project.dto.ComponentDto; import io.fabric8.forge.camel.commands.project.helper.PoorMansLogger; import org.apache.camel.catalog.CamelCatalog; import org.apache.camel.catalog.JSonSchemaHelper; import org.jboss.forge.addon.dependencies.Dependency; import org.jboss.forge.addon.projects.Project; import org.jboss.forge.addon.ui.context.UIContext; import org.jboss.forge.addon.ui.input.InputComponent; import org.jboss.forge.addon.ui.input.UICompleter; import org.jboss.forge.addon.ui.input.UIInput; import static io.fabric8.forge.addon.utils.CamelProjectHelper.findCamelArtifacts; import static io.fabric8.forge.addon.utils.CamelProjectHelper.findCustomCamelArtifacts; import static io.fabric8.forge.camel.commands.project.helper.CamelCatalogHelper.componentsFromArtifact; import static io.fabric8.forge.camel.commands.project.helper.CamelCatalogHelper.createComponentDto; public class CamelComponentsCompleter implements UICompleter<ComponentDto> { private static final PoorMansLogger LOG = new PoorMansLogger(false); private final Project project; private final CamelCatalog 
camelCatalog; private final UIInput<String> filter; private final boolean excludeComponentsOnClasspath; private final boolean includeCatalogComponents; private final boolean consumerOnly; private final boolean producerOnly; private final boolean mustHaveOptions; private final Dependency core; public CamelComponentsCompleter(Project project, CamelCatalog camelCatalog, UIInput<String> filter, boolean excludeComponentsOnClasspath, boolean includeCatalogComponents, boolean consumerOnly, boolean producerOnly, boolean mustHasOptions) { this.project = project; this.camelCatalog = camelCatalog; this.filter = filter; this.excludeComponentsOnClasspath = excludeComponentsOnClasspath; this.includeCatalogComponents = includeCatalogComponents; this.consumerOnly = consumerOnly; this.producerOnly = producerOnly; this.mustHaveOptions = mustHasOptions; // need to find camel-core so we known the camel version core = CamelProjectHelper.findCamelCoreDependency(project); } @Override public Iterable<ComponentDto> getCompletionProposals(UIContext context, InputComponent input, String value) { if (core == null) { return null; } List<String> names = getComponentNames(); // filter non matching names first List<String> filtered = new ArrayList<String>(); for (String name : names) { if (value == null || name.startsWith(value)) { filtered.add(name); } } if (consumerOnly) { filtered = filterByConsumerOnly(filtered); } if (producerOnly) { filtered = filterByProducerOnly(filtered); } if (mustHaveOptions) { filtered = filterByMustHaveOptions(filtered); } filtered = filterByName(filtered); filtered = filterByLabel(filtered, filter.getValue()); List<ComponentDto> answer = new ArrayList<>(); for (String filter : filtered) { ComponentDto dto = createComponentDto(camelCatalog, filter); answer.add(dto); } return answer; } public Iterable<ComponentDto> getValueChoices(String label) { // need to find camel-core so we known the camel version Dependency core = 
CamelProjectHelper.findCamelCoreDependency(project); if (core == null) { return null; } List<String> names = getComponentNames(); if (label != null && !"<all>".equals(label)) { names = filterByLabel(names, label); } if (consumerOnly) { names = filterByConsumerOnly(names); } if (producerOnly) { names = filterByProducerOnly(names); } if (mustHaveOptions) { names = filterByMustHaveOptions(names); } List<ComponentDto> answer = new ArrayList<>(); for (String filter : names) { ComponentDto dto = createComponentDto(camelCatalog, filter); answer.add(dto); } return answer; } public Iterable<String> getValueNames(String label) { // need to find camel-core so we known the camel version Dependency core = CamelProjectHelper.findCamelCoreDependency(project); if (core == null) { return null; } List<String> names = getComponentNames(); if (label != null && !"<all>".equals(label)) { names = filterByLabel(names, label); } if (consumerOnly) { names = filterByConsumerOnly(names); } if (producerOnly) { names = filterByProducerOnly(names); } if (mustHaveOptions) { names = filterByMustHaveOptions(names); } return names; } protected List<String> getComponentNames() { List<String> names; if (includeCatalogComponents) { // find all available component names names = camelCatalog.findComponentNames(); // filter out existing components we already have if (excludeComponentsOnClasspath) { Set<Dependency> artifacts = findCamelArtifacts(project); for (Dependency dep : artifacts) { Set<String> components = componentsFromArtifact(camelCatalog, dep.getCoordinate().getArtifactId()); names.removeAll(components); } } } else { SortedSet<String> set = new TreeSet<>(); Set<Dependency> artifacts = findCamelArtifacts(project); for (Dependency dep : artifacts) { Set<String> components = componentsFromArtifact(camelCatalog, dep.getCoordinate().getArtifactId()); set.addAll(components); } artifacts = findCustomCamelArtifacts(project); for (Dependency dep : artifacts) { Set<String> components = 
componentsFromArtifact(camelCatalog, dep.getCoordinate().getArtifactId()); set.addAll(components); } names = new ArrayList<>(set); } return names; } private List<String> filterByConsumerOnly(List<String> choices) { List<String> answer = new ArrayList<String>(); for (String name : choices) { String json = camelCatalog.componentJSonSchema(name); // yes its correct we grab the producer value String producerOnly = findProducerOnly(json); if (producerOnly != null && "true".equals(producerOnly)) { // its not able to consume so skip it continue; } answer.add(name); } return answer; } private List<String> filterByProducerOnly(List<String> choices) { List<String> answer = new ArrayList<String>(); for (String name : choices) { String json = camelCatalog.componentJSonSchema(name); // yes its correct we grab the consumer value String consumerOnly = findConsumerOnly(json); if (consumerOnly != null && "true".equals(consumerOnly)) { // its not able to produce so skip it continue; } answer.add(name); } return answer; } private List<String> filterByMustHaveOptions(List<String> choices) { List<String> answer = new ArrayList<String>(); for (String name : choices) { String json = camelCatalog.componentJSonSchema(name); // must have at least one component option List<Map<String, String>> data = JSonSchemaHelper.parseJsonSchema("componentProperties", json, true); if (!data.isEmpty()) { answer.add(name); } } return answer; } private List<String> filterByName(List<String> choices) { List<String> answer = new ArrayList<String>(); // filter names which are already on the classpath, or do not match the optional filter by label input for (String name : choices) { // skip if we already have the dependency boolean already = false; if (excludeComponentsOnClasspath) { String json = camelCatalog.componentJSonSchema(name); String artifactId = findArtifactId(json); if (artifactId != null) { already = CamelProjectHelper.hasDependency(project, "org.apache.camel", artifactId); } } if (!already) { 
answer.add(name); } } return answer; } private List<String> filterByLabel(List<String> choices, String label) { if (label == null || label.isEmpty()) { return choices; } List<String> answer = new ArrayList<String>(); // filter names for (String name : choices) { String json = camelCatalog.componentJSonSchema(name); String labels = findLabel(json); if (labels != null) { for (String target : labels.split(",")) { if (target.startsWith(label)) { answer.add(name); break; } } } else { // no label so they all match answer.addAll(choices); } } return answer; } private static String findArtifactId(String json) { List<Map<String, String>> data = JSonSchemaHelper.parseJsonSchema("component", json, false); for (Map<String, String> row : data) { if (row.get("artifactId") != null) { return row.get("artifactId"); } } return null; } private static String findConsumerOnly(String json) { List<Map<String, String>> data = JSonSchemaHelper.parseJsonSchema("component", json, false); for (Map<String, String> row : data) { if (row.get("consumerOnly") != null) { return row.get("consumerOnly"); } } return null; } private static String findProducerOnly(String json) { List<Map<String, String>> data = JSonSchemaHelper.parseJsonSchema("component", json, false); for (Map<String, String> row : data) { if (row.get("producerOnly") != null) { return row.get("producerOnly"); } } return null; } private static String findLabel(String json) { List<Map<String, String>> data = JSonSchemaHelper.parseJsonSchema("component", json, false); for (Map<String, String> row : data) { if (row.get("label") != null) { return row.get("label"); } } return null; } }
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hive.llap.daemon.impl;

import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReentrantLock;

import com.google.common.base.Preconditions;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.llap.LlapNodeId;
import org.apache.hadoop.hive.llap.daemon.FinishableStateUpdateHandler;
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec;
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.tez.common.security.JobTokenIdentifier;

/**
 * Per-query state held by an LLAP daemon: identifiers for the query/DAG,
 * the set of fragments currently registered on this node, per-source state,
 * query-scoped local directories, and "finishable" state change tracking so
 * fragment schedulers can be notified when a fragment becomes (un)finishable.
 */
public class QueryInfo {
  private final QueryIdentifier queryIdentifier;
  private final String appIdString;
  private final String dagIdString;
  private final String dagName;
  private final String hiveQueryIdString;
  private final int dagIdentifier;
  private final String user;
  private final String[] localDirsBase;
  private final FileSystem localFs;
  // lazily created under "synchronized" in createLocalDirs/getLocalDirs
  private String[] localDirs;
  private final LlapNodeId amNodeId;
  // Fragments currently registered for this query; concurrent set so
  // register/unregister can race with getRegisteredFragments.
  private final Set<QueryFragmentInfo> knownFragments =
      Collections.newSetFromMap(new ConcurrentHashMap<QueryFragmentInfo, Boolean>());
  // Map of states for different vertices.
  private final ConcurrentMap<String, SourceStateProto> sourceStateMap;
  private final FinishableStateTracker finishableStateTracker = new FinishableStateTracker();
  private final String tokenUserName, appId;
  // Shared UGI used for the umbilical (task->AM) connection; set once.
  private final AtomicReference<UserGroupInformation> umbilicalUgi;

  public QueryInfo(QueryIdentifier queryIdentifier, String appIdString, String dagIdString,
      String dagName, String hiveQueryIdString, int dagIdentifier, String user,
      ConcurrentMap<String, SourceStateProto> sourceStateMap, String[] localDirsBase,
      FileSystem localFs, String tokenUserName, String tokenAppId, final LlapNodeId amNodeId) {
    this.queryIdentifier = queryIdentifier;
    this.appIdString = appIdString;
    this.dagIdString = dagIdString;
    this.dagName = dagName;
    this.hiveQueryIdString = hiveQueryIdString;
    this.dagIdentifier = dagIdentifier;
    this.sourceStateMap = sourceStateMap;
    this.user = user;
    this.localDirsBase = localDirsBase;
    this.localFs = localFs;
    this.tokenUserName = tokenUserName;
    this.appId = tokenAppId;
    this.umbilicalUgi = new AtomicReference<>();
    this.amNodeId = amNodeId;
  }

  public QueryIdentifier getQueryIdentifier() {
    return queryIdentifier;
  }

  public String getAppIdString() {
    return appIdString;
  }

  public String getDagIdString() {
    return dagIdString;
  }

  public String getHiveQueryIdString() {
    return hiveQueryIdString;
  }

  public int getDagIdentifier() {
    return dagIdentifier;
  }

  public String getUser() {
    return user;
  }

  public ConcurrentMap<String, SourceStateProto> getSourceStateMap() {
    return sourceStateMap;
  }

  public LlapNodeId getAmNodeId() {
    return amNodeId;
  }

  /**
   * Creates and records a fragment for this query.
   *
   * @return the newly registered {@link QueryFragmentInfo}
   */
  public QueryFragmentInfo registerFragment(String vertexName, int fragmentNumber,
      int attemptNumber, SignableVertexSpec vertexSpec, String fragmentIdString) {
    QueryFragmentInfo fragmentInfo = new QueryFragmentInfo(
        this, vertexName, fragmentNumber, attemptNumber, vertexSpec, fragmentIdString);
    knownFragments.add(fragmentInfo);
    return fragmentInfo;
  }

  /** Removes a fragment previously added via {@link #registerFragment}. */
  public void unregisterFragment(QueryFragmentInfo fragmentInfo) {
    knownFragments.remove(fragmentInfo);
  }

  /** @return a snapshot copy of the currently registered fragments. */
  public List<QueryFragmentInfo> getRegisteredFragments() {
    return Lists.newArrayList(knownFragments);
  }

  // Creates one app/dag-specific directory under each configured base dir.
  private synchronized void createLocalDirs() throws IOException {
    if (localDirs == null) {
      localDirs = new String[localDirsBase.length];
      for (int i = 0; i < localDirsBase.length; i++) {
        localDirs[i] = createAppSpecificLocalDir(localDirsBase[i], appIdString, user, dagIdentifier);
        localFs.mkdirs(new Path(localDirs[i]));
      }
    }
  }

  /**
   * Get, and create if required, local-dirs for a query
   * @return the query-scoped local directories (created on first call)
   * @throws IOException if directory creation fails
   */
  public synchronized String[] getLocalDirs() throws IOException {
    if (localDirs == null) {
      createLocalDirs();
    }
    return localDirs;
  }

  /** @return the local dirs if already created, otherwise null (no side effects). */
  public synchronized String[] getLocalDirsNoCreate() {
    return this.localDirs;
  }

  // Layout: <base>/usercache/<user>/appcache/<appId>/<dagIdentifier>
  private static String createAppSpecificLocalDir(String baseDir, String applicationIdString,
      String user, int dagIdentifier) {
    // TODO This is broken for secure clusters. The app will not have permission to create these directories.
    // May work via Slider - since the directory would already exist. Otherwise may need a custom shuffle handler.
    // TODO This should be the process user - and not the user on behalf of whom the query is being submitted.
    return baseDir + File.separator + "usercache" + File.separator + user + File.separator +
        "appcache" + File.separator + applicationIdString + File.separator + dagIdentifier;
  }

  /**
   * Registers a handler to be notified when the finishable state of the given
   * fragment (w.r.t. the listed sources) changes.
   *
   * @param handler callback invoked on state changes
   * @param sources source (vertex) names the fragment depends on
   * @param fragmentInfo the fragment whose finishable state is tracked
   * @param lastFinishableState the state the caller last observed
   * @return true if the current state is the same as the lastFinishableState.
   *         false if the state has already changed.
   */
  boolean registerForFinishableStateUpdates(FinishableStateUpdateHandler handler,
      List<String> sources, QueryFragmentInfo fragmentInfo, boolean lastFinishableState) {
    return finishableStateTracker
        .registerForUpdates(handler, sources, fragmentInfo, lastFinishableState);
  }

  /** Unregisters a handler previously registered via registerForFinishableStateUpdates. */
  void unregisterFinishableStateUpdate(FinishableStateUpdateHandler handler) {
    finishableStateTracker.unregisterForUpdates(handler);
  }

  /** Called when a source's state changes; may fire finishable-state callbacks. */
  void sourceStateUpdated(String sourceName) {
    finishableStateTracker.sourceStateUpdated(sourceName);
  }

  /**
   * Tracks which handlers care about which sources and dispatches
   * finishable-state-change callbacks. All internal state is guarded by
   * {@code lock}; callbacks are deliberately invoked outside the lock.
   */
  private static class FinishableStateTracker {

    // One registration per handler; guarded by lock.
    private final Map<FinishableStateUpdateHandler, EntityInfo> trackedEntities = new HashMap<>();
    // Reverse index: source name -> interested registrations; guarded by lock.
    private final Multimap<String, EntityInfo> sourceToEntity = HashMultimap.create();

    private final ReentrantLock lock = new ReentrantLock();

    /**
     * Adds a registration and re-checks the fragment's current finishable
     * state under the lock.
     *
     * @return true if the state still equals lastFinishableState; false if it
     *         has already changed (the stored state is updated in that case).
     * @throws IllegalStateException if the handler is already registered
     */
    boolean registerForUpdates(FinishableStateUpdateHandler handler, List<String> sources,
        QueryFragmentInfo fragmentInfo, boolean lastFinishableState) {
      lock.lock();
      try {
        EntityInfo entityInfo = new EntityInfo(handler, sources, fragmentInfo, lastFinishableState);
        if (trackedEntities.put(handler, entityInfo) != null) {
          throw new IllegalStateException(
              "Only a single registration allowed per entity. Duplicate for " + handler.toString());
        }
        for (String source : sources) {
          sourceToEntity.put(source, entityInfo);
        }
        boolean canFinish = QueryFragmentInfo.canFinish(fragmentInfo);
        if (lastFinishableState == canFinish) {
          // State has not changed.
          return true;
        } else {
          // State already moved on since the caller last looked; record it.
          entityInfo.setLastFinishableState(canFinish);
          return false;
        }
      } finally {
        lock.unlock();
      }
    }

    /** Removes a registration; must have been previously registered. */
    void unregisterForUpdates(FinishableStateUpdateHandler handler) {
      lock.lock();
      try {
        EntityInfo info = trackedEntities.remove(handler);
        Preconditions.checkState(info != null,
            "Cannot invoke unregister on an entity which has not been registered");
        for (String source : info.getSources()) {
          sourceToEntity.remove(source, info);
        }
      } finally {
        lock.unlock();
      }
    }

    /**
     * Re-evaluates finishable state for every registration interested in the
     * given source, firing callbacks for those whose state changed.
     */
    void sourceStateUpdated(String sourceName) {
      List<EntityInfo> interestedEntityInfos = null;
      lock.lock();
      try {
        Collection<EntityInfo> entities = sourceToEntity.get(sourceName);
        if (entities != null) {
          // Create a copy since the underlying list can be changed elsewhere.
          interestedEntityInfos = new LinkedList<>(entities);
        }
      } finally {
        lock.unlock();
      }
      // Callbacks are invoked outside the lock to avoid calling alien code
      // while holding it.
      if (interestedEntityInfos != null) {
        for (EntityInfo entityInfo : interestedEntityInfos) {
          boolean newFinishState = QueryFragmentInfo.canFinish(entityInfo.getFragmentInfo());
          if (newFinishState != entityInfo.getLastFinishableState()) {
            // State changed. Callback
            entityInfo.setLastFinishableState(newFinishState);
            entityInfo.getHandler().finishableStateUpdated(newFinishState);
          }
        }
      }
    }
  }

  /** Immutable registration record plus the last observed finishable state. */
  private static class EntityInfo {
    final FinishableStateUpdateHandler handler;
    final List<String> sources;
    final QueryFragmentInfo fragmentInfo;
    // Mutable: updated as state changes are observed.
    boolean lastFinishableState;

    public EntityInfo(FinishableStateUpdateHandler handler, List<String> sources,
        QueryFragmentInfo fragmentInfo, boolean lastFinishableState) {
      this.handler = handler;
      this.sources = sources;
      this.fragmentInfo = fragmentInfo;
      this.lastFinishableState = lastFinishableState;
    }

    public FinishableStateUpdateHandler getHandler() {
      return handler;
    }

    public QueryFragmentInfo getFragmentInfo() {
      return fragmentInfo;
    }

    public boolean getLastFinishableState() {
      return lastFinishableState;
    }

    public List<String> getSources() {
      return sources;
    }

    public void setLastFinishableState(boolean lastFinishableState) {
      this.lastFinishableState = lastFinishableState;
    }
  }

  public String getTokenUserName() {
    return tokenUserName;
  }

  public String getTokenAppId() {
    return appId;
  }

  /**
   * Lazily initializes the UGI used to talk to the AM over the umbilical
   * protocol, attaching the job token addressed to the AM endpoint.
   * Idempotent: only the first call has any effect.
   */
  public void setupUmbilicalUgi(String umbilicalUser, Token<JobTokenIdentifier> appToken,
      String amHost, int amPort) {
    synchronized (umbilicalUgi) {
      if (umbilicalUgi.get() == null) {
        UserGroupInformation taskOwner = UserGroupInformation.createRemoteUser(umbilicalUser);
        final InetSocketAddress address = NetUtils.createSocketAddrForHost(amHost, amPort);
        SecurityUtil.setTokenService(appToken, address);
        taskOwner.addToken(appToken);
        umbilicalUgi.set(taskOwner);
      }
    }
  }

  /** @return the umbilical UGI, or null if setupUmbilicalUgi has not run yet. */
  public UserGroupInformation getUmbilicalUgi() {
    synchronized (umbilicalUgi) {
      return umbilicalUgi.get();
    }
  }
}
package edu.wheaton.simulator.statistics;

import java.awt.Color;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;

import edu.wheaton.simulator.datastructure.ElementAlreadyContainedException;
import edu.wheaton.simulator.datastructure.Grid;
import edu.wheaton.simulator.entity.Agent;
import edu.wheaton.simulator.entity.Prototype;
import edu.wheaton.simulator.entity.Trigger;
import edu.wheaton.simulator.expression.Expression;
import edu.wheaton.simulator.simulation.end.SimulationEnder;

/**
 * Parses a saved simulation from a line-oriented text file, reconstructing the
 * Grid, the Prototype set, the SimulationEnder and the simulation name.
 * Call {@link #loadSimulation(File)} first; the getters throw until a load
 * has completed successfully.
 */
public class Loader {

	/**
	 * The grid generated by parsing the save file
	 */
	private Grid grid;

	/**
	 * Map of all PrototypeSnapshots for the simulation
	 * Since PrototypeSnapshots are immutable, this collection is the same for each step
	 */
	private Set<Prototype> prototypes;

	/**
	 * The name of the simulation you are loading
	 */
	private String name;

	/**
	 * Handles ending the simulation
	 */
	private SimulationEnder simEnder;

	/**
	 * Indicates if a simulation has been successfully loaded
	 */
	private boolean simulationLoaded;

	/**
	 * Constructor
	 */
	public Loader(){
		simulationLoaded = false;
	}

	/**
	 * Get the loaded Grid
	 * @return Populated Grid
	 * @throws Exception If no Simulation has been loaded yet
	 */
	public Grid getGrid() throws Exception{
		if(simulationLoaded)
			return grid;
		throw new Exception("No simulation has been loaded");
	}

	/**
	 * Get the loaded Set of Prototypes
	 * @return Populated set of Prototypes
	 * @throws Exception If no Simulation has been loaded yet
	 */
	public Set<Prototype> getPrototypes() throws Exception{
		if(simulationLoaded)
			return prototypes;
		throw new Exception("No simulation has been loaded");
	}

	/**
	 * Get the name of the loaded simulation
	 * @return Simulation name
	 * @throws Exception If no Simulation has been loaded yet
	 */
	public String getName() throws Exception {
		if(simulationLoaded)
			return name;
		throw new Exception("No simulation has been loaded");
	}

	/**
	 * Get the loaded SimulationEnder
	 * @return A SimulationEnder object
	 * @throws Exception If no Simulation has been loaded yet
	 */
	public SimulationEnder getSimEnder() throws Exception {
		if(simulationLoaded)
			return simEnder;
		throw new Exception("No simulation has been loaded");
	}

	/**
	 * Load the contents of a file. After this is done call getGrid(), getPrototypes(), getSimEnder() and
	 * getName() to retrieve the loaded information
	 * Code based on http://stackoverflow.com/questions/15906640/
	 * @param f The save file to load (its filename becomes the simulation name)
	 * @throws Exception on missing/unreadable/corrupt files
	 */
	public void loadSimulation(File f) throws Exception {
		File file = f;
		BufferedReader reader = null;
		name = f.getName();
		this.prototypes = new HashSet<Prototype>();
		//System.out.println(f.getAbsolutePath()); // TODO DEBUG

		try {
			reader = new BufferedReader(new FileReader(file));

			//Instantiate the Grid: first two lines are width and height
			int width = Integer.parseInt(reader.readLine());
			int height = Integer.parseInt(reader.readLine());
			grid = new Grid(width, height);

			//Set the updater (third line); unknown values leave the default updater
			String updater = reader.readLine();
			if(updater.equals("Linear"))
				grid.setLinearUpdater();
			else if(updater.equals("Priority"))
				grid.setPriorityUpdater(0, 100);
			else if(updater.equals("Atomic"))
				grid.setAtomicUpdater();

			// Record-dispatch loop: each record starts with a marker line
			// (AgentSnapshot / GlobalVariables / PrototypeSnapshot / EndConditions).
			String readLine = reader.readLine();
			while (readLine != null && !readLine.equals("")) {
				if(readLine.equals("AgentSnapshot")){
					//Find the appropriate prototype
					String prototypeName = reader.readLine();
					Prototype parent = getPrototype(prototypeName);

					//Read in the color and design ("r~g~b" then the design bytes)
					String colorString = reader.readLine();
					String[] colorToks = colorString.split("~");
					Color color = new Color(Integer.parseInt(colorToks[0]), Integer.parseInt(colorToks[1]), Integer.parseInt(colorToks[2]));
					byte[] design = createByteArray(reader.readLine());

					//Create the Agent
					Agent agent = new Agent(grid, parent, color, design);
					//System.out.println(agent.getColor());

					//Get the Agent's position on the Grid
					int xpos = Integer.parseInt(reader.readLine());
					int ypos = Integer.parseInt(reader.readLine());

					//Add the agent's default fields
					// NOTE(review): unlike loadPrototype below, this loop does not
					// null-check readLine before substring(); a truncated file hits a
					// NullPointerException that is swallowed by the generic catch — confirm
					// whether truncated saves should be reported more precisely.
					readLine = reader.readLine();
					while(readLine.substring(0, 13).equals("FieldSnapshot")){
						String[] tokens = readLine.split("~");
						try {
							agent.addField(tokens[1], tokens[2]);
						} catch (ElementAlreadyContainedException e) {
							System.out.println("Agent Field already exists");
							System.out.println(tokens[1] + " " + tokens[2]);
							e.printStackTrace();
						}
						readLine = reader.readLine();
					}
					//System.out.println("Adding Agent");
					grid.addAgent(agent, xpos, ypos);
				}
				else if(readLine.equals("GlobalVariables")){
					readLine = reader.readLine();
					// NOTE(review): substring(0, 7) yields 7 characters ("GLOBAL~"),
					// which can never equal the 6-character "GLOBAL" — this loop body may
					// never run. Verify against the save-file writer whether this should
					// be startsWith("GLOBAL") or substring(0, 6).
					while(readLine.substring(0, 7).equals("GLOBAL")){
						String[] tokens = readLine.split("~");
						try {
							grid.addField(tokens[1], tokens[2]);
						} catch (ElementAlreadyContainedException e) {
							System.out.println("Grid Field already exists");
							e.printStackTrace();
						}
						readLine = reader.readLine();
					}
					//System.out.println("Adding Grid Global");
				}
				else if(readLine.equals("PrototypeSnapshot")){
					//Parse the required prototype data
					String name = reader.readLine();

					//Read in the color and design
					String colorString = reader.readLine();
					String[] colorToks = colorString.split("~");
					Color color = new Color(Integer.parseInt(colorToks[0]), Integer.parseInt(colorToks[1]), Integer.parseInt(colorToks[2]));
					byte[] design = createByteArray(reader.readLine());

					//Create the prototype
					Prototype proto = new Prototype(color, design, name);

					//Add the prototype's default fields
					readLine = reader.readLine();
					while(readLine.substring(0, 13).equals("FieldSnapshot")){
						String[] tokens = readLine.split("~");
						try {
							proto.addField(tokens[1], tokens[2]);
						} catch (ElementAlreadyContainedException e) {
							System.out.println("Prototype Field already exists");
							e.printStackTrace();
						}
						readLine = reader.readLine();
					}

					//Add the prototype's triggers (name~priority~condition~behavior)
					while(readLine.substring(0, 7).equals("Trigger")){
						String[] tokens = readLine.split("~");
						proto.addTrigger(new Trigger(tokens[1], Integer.parseInt(tokens[2]),
								new Expression(tokens[3]), new Expression(tokens[4])));
						readLine = reader.readLine();
					}
					//System.out.println("Adding Prototype");
					prototypes.add(proto);
				}
				else if(readLine.equals("EndConditions")){
					simEnder = new SimulationEnder();
					readLine = reader.readLine();
					int maxSteps = Integer.parseInt(readLine);
					simEnder.setStepLimit(maxSteps);
					readLine = reader.readLine();
					// NOTE(review): substring(0, 4) yields "POP~" which can never equal
					// "POP" — population limits may never be loaded; confirm against the
					// save format (startsWith("POP") was likely intended).
					while(readLine != null && readLine.substring(0, 4).equals("POP")){
						String[] tokens = readLine.split("~");
						simEnder.setPopLimit(tokens[1], Integer.parseInt(tokens[2]));
						readLine = reader.readLine();
					}
					//System.out.println("Added SimulationEnder");
				}
				else{
					// Unrecognized line: skip it
					readLine = reader.readLine();
				}
			}
		} catch (FileNotFoundException e) {
			throw new Exception("Could not find file: " + file.getAbsolutePath());
		} catch (IOException e) {
			throw new Exception("Could not read file: " + file.getAbsolutePath());
		} catch(Exception e){
			// Any parse failure (bad number, truncated file, ...) lands here.
			throw new Exception("Oh no! The load file was somehow corrupted! What oh what will we do?");
		} finally {
			try {
				// NOTE(review): if the FileReader constructor threw, reader is null and
				// this close() NPEs (the assert is disabled by default) — confirm intent.
				assert(reader!=null);
				reader.close();
			} catch (IOException e) {
				throw new Exception("Could not close stream");
			}
		}

		//Indicate that we are ready to use the getGrid(), getPrototypes() and getName() methods
		simulationLoaded = true;
		System.out.println("Load Complete");
	}

	/**
	 * Load a single Prototype from a file
	 * @param f The file with the saved Prototype
	 * @return The reconstructed Prototype (never null on success)
	 */
	public Prototype loadPrototype(File f){
		File file = f;
		BufferedReader reader = null;
		Prototype proto = null;

		try {
			reader = new BufferedReader(new FileReader(file));

			//Skip the "PrototypeSnapshot" header
			String readLine = reader.readLine();

			//Parse the required prototype data
			String name = reader.readLine();

			//Read in the color and design
			String colorString = reader.readLine();
			String[] colorToks = colorString.split("~");
			Color color = new Color(Integer.parseInt(colorToks[0]), Integer.parseInt(colorToks[1]), Integer.parseInt(colorToks[2]));
			byte[] design = createByteArray(reader.readLine());

			//Create the prototype
			proto = new Prototype(color, design, name);

			//Add the prototype's default fields (null-safe, unlike loadSimulation)
			readLine = reader.readLine();
			while(readLine != null && readLine.substring(0, 13).equals("FieldSnapshot")){
				String[] tokens = readLine.split("~");
				try {
					proto.addField(tokens[1], tokens[2]);
				} catch (ElementAlreadyContainedException e) {
					System.out.println("Prototype Field already exists");
					e.printStackTrace();
				}
				readLine = reader.readLine();
			}

			//Add the prototype's triggers
			while(readLine != null && readLine.substring(0, 7).equals("Trigger")){
				String[] tokens = readLine.split("~");
				proto.addTrigger(new Trigger(tokens[1], Integer.parseInt(tokens[2]),
						new Expression(tokens[3]), new Expression(tokens[4])));
				readLine = reader.readLine();
			}
		} catch (FileNotFoundException e) {
			throw new RuntimeException("Could not find file: " + file.getAbsolutePath(), e);
		}catch (IOException e) {
			throw new RuntimeException("Could not read file: " + file.getAbsolutePath(), e);
		}catch(Exception e){
			throw new RuntimeException("Oh no! The load file was somehow corrupted! What oh what will we do?", e);
		}finally {
			try {
				// NOTE(review): same null-reader hazard as loadSimulation's finally block.
				assert(reader!=null);
				reader.close();
			} catch (IOException e) {
				throw new RuntimeException("Could not close stream", e);
			}
		}
		return proto;
	}

	/**
	 * Create a byte array from a string
	 * @param s String representing a byte array in the form "127~127~127~127~127~127~127"
	 * @return The created byte array
	 */
	private static byte[] createByteArray(String s){
		String[] tokens = s.split("~");
		byte[] ret = new byte[tokens.length];
		for(int i = 0; i < tokens.length; i++)
			ret[i] = (byte) Integer.parseInt(tokens[i]);
		return ret;
	}

	/**
	 * Get the Prototype in this class's internal list with the supplied name
	 * @param name The name of the prototype to retrieve
	 * @return The prototype with the supplied name, or null if absent
	 */
	private Prototype getPrototype(String name){
		Prototype ret = null;
		for(Prototype p : prototypes)
			if(p.getName().equals(name))
				ret = p;
		if(ret == null)
			System.out.println("Parent Not Found");
		return ret;
	}
}
package com.zimbra.qa.selenium.projects.ajax.ui.addressbook; import java.awt.event.KeyEvent; import java.util.*; import org.apache.log4j.LogManager; import com.zimbra.qa.selenium.framework.items.*; import com.zimbra.qa.selenium.framework.ui.*; import com.zimbra.qa.selenium.framework.util.*; import com.zimbra.qa.selenium.framework.util.ZimbraSeleniumProperties.AppType; import com.zimbra.qa.selenium.projects.ajax.core.AjaxCommonTest; import com.zimbra.qa.selenium.projects.ajax.ui.*; import com.zimbra.qa.selenium.projects.ajax.ui.DialogTag; import com.zimbra.qa.selenium.projects.ajax.ui.mail.*; public class PageAddressbook extends AbsTab { public static class CONTEXT_MENU { public static final String LOCATOR = "id='zm__Contacts'"; //contact's context menu public static final ContextMenuItem CONTACT_SEARCH = new ContextMenuItem("POPUP_SEARCH_MENU","Find Emails...","div[class*='ImgSearch']"," div[class*='ImgCascade']"); public static final ContextMenuItem CONTACT_NEW_EMAIL = new ContextMenuItem("POPUP_NEW_MESSAGE","New Email","div[class*='ImgNewMessage']",":contains('nm')"); //TODO: contact group: "Edit Group" instead of "Edit Contact" public static final ContextMenuItem CONTACT_EDIT = new ContextMenuItem("POPUP_CONTACT","Edit Contact","div[class*='ImgEdit']",""); public static final ContextMenuItem CONTACT_FORWARD = new ContextMenuItem("POPUP_SEND_CONTACTS_IN_EMAIL","Forward Contact","div[class*='ImgMsgStatusSent']",""); //TODO: contact group: "Tag Group" instead of "Tag Contact" public static final ContextMenuItem CONTACT_TAG = new ContextMenuItem("POPUP_TAG_MENU","Tag Contact","div[class*='ImgTag']"," div[class='ImgCascade']"); public static final ContextMenuItem CONTACT_DELETE = new ContextMenuItem("POPUP_DELETE","Delete","div[class*='ImgDelete']",":contains('Del')"); public static final ContextMenuItem CONTACT_MOVE = new ContextMenuItem("POPUP_MOVE","Move","div[class*='ImgMoveToFolder']",""); public static final ContextMenuItem CONTACT_PRINT = new 
ContextMenuItem("POPUP_PRINT_CONTACT","Print","div[class*='ImgPrint']",":contains('p')"); public static final ContextMenuItem CONTACT_GROUP = new ContextMenuItem("POPUP_CONTACTGROUP_MENU","Contact Group","div[class*='ImgGroup']",""); public static final ContextMenuItem CONTACT_QUICK_COMMAND = new ContextMenuItem("POPUP_QUICK_COMMANDS","Quick Commands","div[class='ImgQuickCommand']",""); } public static class CONTEXT_SUB_MENU { public static final ContextMenuItem CONTACT_SUB_NEW_TAG = new ContextMenuItem("div#contacts_newtag","New Tag","div[class='ImgNewTag']",":contains('nt')"); public static final ContextMenuItem CONTACT_SUB_REMOVE_TAG = new ContextMenuItem("div[id*='contacts_removetag']","Remove Tag","div[class='ImgDeleteTag']",""); //public static final ContextMenuItem CONTACT_SUB_REMOVE_TAG = new ContextMenuItem("td#zmi__Contacts__TAG_MENU|MENU|REMOVETAG_title","Remove Tag","div[class='ImgDeleteTag']",""); public static final ContextMenuItem CONTACT_SUB_RECEIVED_FROM_CONTACT = new ContextMenuItem("tr[id^=SEARCH__DWT]","Received From Contact","div[class='ImgSearch']",""); public static final ContextMenuItem CONTACT_SUB_SENT_TO_CONTACT = new ContextMenuItem("tr[id^=SEARCH_TO__DWT]","Sent To Contact","div[class='ImgSearch']",""); public static final ContextMenuItem CONTACT_SUB_NEW_CONTACT_GROUP = new ContextMenuItem("div[id^='CONTACTGROUP_MENU__DWT'][id$='|GROUP_MENU|NEWGROUP']","New Contact Group","div[class='ImgNewGroup']",""); } /** * A mapping of letter characters (upper case) to addressbook buttons, e.g. 
'A' -> Button.B_AB_A */ public static final HashMap<Character, Button> buttons = new HashMap<Character, Button>() { private static final long serialVersionUID = -8341258587369022596L; { put(Character.valueOf('A'), Button.B_AB_A); put(Character.valueOf('B'), Button.B_AB_B); put(Character.valueOf('C'), Button.B_AB_C); put(Character.valueOf('D'), Button.B_AB_D); put(Character.valueOf('E'), Button.B_AB_E); put(Character.valueOf('F'), Button.B_AB_F); put(Character.valueOf('G'), Button.B_AB_G); put(Character.valueOf('H'), Button.B_AB_H); put(Character.valueOf('I'), Button.B_AB_I); put(Character.valueOf('J'), Button.B_AB_J); put(Character.valueOf('K'), Button.B_AB_K); put(Character.valueOf('L'), Button.B_AB_L); put(Character.valueOf('M'), Button.B_AB_M); put(Character.valueOf('N'), Button.B_AB_N); put(Character.valueOf('O'), Button.B_AB_O); put(Character.valueOf('P'), Button.B_AB_P); put(Character.valueOf('Q'), Button.B_AB_Q); put(Character.valueOf('R'), Button.B_AB_R); put(Character.valueOf('S'), Button.B_AB_S); put(Character.valueOf('T'), Button.B_AB_T); put(Character.valueOf('U'), Button.B_AB_U); put(Character.valueOf('V'), Button.B_AB_V); put(Character.valueOf('W'), Button.B_AB_W); put(Character.valueOf('X'), Button.B_AB_X); put(Character.valueOf('Y'), Button.B_AB_Y); put(Character.valueOf('Z'), Button.B_AB_Z); }}; public PageAddressbook(AbsApplication application) { super(application); logger.info("new " + PageAddressbook.class.getCanonicalName()); } /* (non-Javadoc) * @see projects.admin.ui.AbsPage#isActive() */ @Override public boolean zIsActive() throws HarnessException { // Make sure the main page is active if ( !((AppAjaxClient)MyApplication).zPageMain.zIsActive() ) { ((AppAjaxClient)MyApplication).zPageMain.zNavigateTo(); } boolean active=sIsElementPresent("css=div[id='zb__App__Contacts'][class*=ZSelected]"); String locator = null; // On Zimbra Desktop, there is no Address book folder, but there is only // account root folder 
if(ZimbraSeleniumProperties.getAppType() == AppType.DESKTOP) { locator = TreeMail.Locators.zTreeItems.replace(TreeMail.stringToReplace, AjaxCommonTest.defaultAccountName); } else { //make sure Addressbook folder is displayed locator = "css=div#ztih__main_Contacts__ADDRBOOK_div"; } active &= this.sIsElementPresent(locator); return (active); } /* (non-Javadoc) * @see projects.admin.ui.AbsPage#myPageName() */ @Override public String myPageName() { return (this.getClass().getName()); } /* (non-Javadoc) * @see projects.admin.ui.AbsPage#navigateTo() */ @Override public void zNavigateTo() throws HarnessException { // Check if this page is already active. if ( zIsActive() ) { return; } if ( !((AppAjaxClient)MyApplication).zPageMain.zIsActive() ) { ((AppAjaxClient)MyApplication).zPageMain.zNavigateTo(); } tracer.trace("Navigate to "+ this.myPageName()); if (!GeneralUtility.waitForElementPresent(this,PageMain.Locators.zAppbarContact)) { throw new HarnessException("Can't locate addressbook icon"); } // Click on Addressbook icon zClickAt(PageMain.Locators.zAppbarContact,"0,0"); zWaitForActive(); } /* (non-Javadoc) * @see com.zimbra.qa.selenium.framework.ui.AbsTab#zRefresh() */ public void zRefresh() throws HarnessException { // Click refresh on the main app ((AppAjaxClient)this.MyApplication).zPageMain.zToolbarPressButton(Button.B_REFRESH); } //get subFolders public List<FolderItem> zListGetFolders(ZimbraAccount account, FolderItem parentFolder) throws HarnessException { List <FolderItem> list = new ArrayList<FolderItem>(); String folderId = "zti" + ((parentFolder.getName().equals("USER_ROOT"))?"h":"") + "__main_Contacts__" + ((parentFolder.getName().equals("USER_ROOT"))?"ADDRBOOK":parentFolder.getId()) +"_div"; //ensure it is in Addressbook main page zNavigateTo(); String elements="window.document.getElementById('" + folderId + "').nextSibling.childNodes"; int length = Integer.parseInt(sGetEval(elements + ".length")); for (int i=0; i<length; i++) { String id= 
sGetEval(elements + "[" + i +"].id");
		// Only tree items belonging to the Contacts app are address book folders
		if (id.contains("Contacts")) {
			list.add(FolderItem.importFromSOAP(account, sGetText("css=td#" + id + "_textCell")));
		}
	}
	return list;
}

// Returns true when the given contact (or contact group) is visible in the
// current list view, matched by row icon type and the fileAs text.
public boolean zIsContactDisplayed(ContactItem contactItem) throws HarnessException {
	boolean isContactFound = false;

	//ensure it is in Addressbook main page
	zNavigateTo();

	//assume that this is a list view
	String listLocator = "div[id='zv__CNS-main']";
	String rowLocator = "div[id^='zli__CNS-main__']";
	String noResultLocator = "td.NoResults";
	String fileAsLocator = " td[id^=zlif__CNS-main__][id$=__fileas]";

	//actually this is a search view
	if (zIsInSearchView()) {
		listLocator= "div[id=zv__CNS-SR-Contacts-1]";
		rowLocator= "div[id^=zli__CNS-SR-Contacts-1__]";
		fileAsLocator=" td[id^=zlif__CNS-SR-Contacts-1__][id$=__fileas]";
	}

	// if there is no result
	if (sIsElementPresent("css=" + listLocator + " " + noResultLocator)) {
		return false;
	}

	if (!this.sIsElementPresent("css=" + listLocator + ">" + rowLocator)) {
		throw new HarnessException("css=" + listLocator + ">" + rowLocator + " not present");
	}

	//Get the number of contacts (String)
	int count = this.sGetCssCount("css=" + listLocator + ">" + rowLocator);
	logger.info(myPageName() + " zIsContactDisplayed: number of contacts: "+ count);

	// Get each contact's data from the table list
	for (int i = 1; i <= count && !isContactFound; i++) {
		String commonLocator = "css=" + listLocator + ">div:nth-child(" + i +")";
		String contactType = getContactType(commonLocator);
		String contactDisplayedLocator = commonLocator + fileAsLocator;
		String fileAs = sGetText(contactDisplayedLocator);
		logger.info("...found "+ contactType + " - " + fileAs );
		// A match requires both the icon type (group vs contact) and the fileAs text
		isContactFound = ((contactType.equals(ContactGroupItem.IMAGE_CLASS) && contactItem instanceof ContactGroupItem) ||
				(contactType.equals(ContactItem.IMAGE_CLASS) && contactItem instanceof ContactItem)) &&
				(contactItem.fileAs.equals(fileAs.trim()));
	}

	return isContactFound;
}

// only return the list with a certain contact type
// contactType should be one of ContactGroupItem.IMAGE_CLASS , ContactItem.IMAGE_CLASS
public List<ContactItem> zListGetContacts(String contactType) throws HarnessException {
	List <ContactItem> list= new ArrayList<ContactItem>();

	//ensure it is in Addressbook main page
	//zNavigateTo();

	//assume that this is a list view
	String listLocator = "div[id='zv__CNS-main']";
	String rowLocator = "div[id^='zli__CNS-main__']";
	String fileAsLocator = " td[id^=zlif__CNS-main__][id$=__fileas]";
	String noResultLocator = " td.NoResults";

	//actually this is a search view
	if (zIsInSearchView()) {
		listLocator= "div[id=zv__CNS-SR-Contacts-1]";
		rowLocator= "div[id^=zli__CNS-SR-Contacts-1__]";
		fileAsLocator=" td[id^=zlif__CNS-SR-Contacts-1__][id$=__fileas]";
	}

	// no result
	if (sIsElementPresent("css=" + listLocator + noResultLocator)) {
		return list;
	}

	if (!this.sIsElementPresent("css=" + listLocator + ">" + rowLocator)) {
		throw new HarnessException("css=" + listLocator + ">" + rowLocator + " not present");
	}

	int count = this.sGetCssCount("css=" + listLocator + ">" + rowLocator);
	logger.info(myPageName() + " zListGetContacts: number of contacts: "+ count);

	// Get each contact's data from the table list
	for (int i = 1; i <= count; i++) {
		String commonLocator = "css=" + listLocator + ">div:nth-child(" + i +")";
		// Only collect rows whose icon class matches the requested type
		if (sIsElementPresent(commonLocator + " div[class*=" + contactType + "]")) {
			ContactItem ci=null;
			String contactDisplayedLocator = commonLocator + fileAsLocator;
			String fileAs = sGetText(contactDisplayedLocator);
			logger.info(" found " + fileAs);
			//check contact type
			if ( contactType.equals(ContactGroupItem.IMAGE_CLASS)) {
				ci=new ContactGroupItem(fileAs);
			}
			else if ( contactType.equals(ContactItem.IMAGE_CLASS) || contactType.equals(ContactItem.GAL_IMAGE_CLASS)) {
				ci=new ContactItem(fileAs);
			}
			else {
				// NOTE(review): message wording is garbled ("not neither conntact") —
				// left byte-identical since it is runtime behavior
				throw new HarnessException("Image not neither conntact group nor contact.");
			}
			list.add(ci);
		}
	}
	return list;
}

// Returns every visible row as a ContactItem subtype (contact, group, GAL, dlist).
public List<ContactItem> zListGetContacts() throws HarnessException {
	List
<ContactItem> list= new ArrayList<ContactItem>();

	//ensure it is in Addressbook main page
	zNavigateTo();

	if ( !this.sIsElementPresent("id=zv__CNS-main") )
		//maybe return empty list?????
		throw new HarnessException("Contact List is not present "+ "id='zv__CNS-main'");

	//Get the number of contacts (String)
	int count = this.sGetCssCount("css=div[id='zv__CNS-main']>div[id^=zli__CNS-main__]");
	logger.info(myPageName() + " zListGetContacts: number of contacts: "+ count);

	// Get each contact's data from the table list
	for (int i = 1; i <= count; i++) {
		String commonLocator = "css=div[id='zv__CNS-main'] div:nth-child("+ i +")";
		String contactType = getContactType(commonLocator);
		ContactItem ci=null;
		String contactDisplayedLocator = commonLocator + " td[id^=zlif__CNS-main__][id$=__fileas]";
		String fileAs = sGetText(contactDisplayedLocator);
		logger.info(" found " + fileAs);

		//check if it is a contact. contactgroup, gal, or dlist item
		if ( contactType.equals(ContactGroupItem.IMAGE_CLASS)) {
			ci=new ContactGroupItem(fileAs);
		}
		else if ( contactType.equals(ContactItem.IMAGE_CLASS) ) {
			ci=new ContactItem(fileAs);
		}
		else if ( contactType.equals(GALItem.IMAGE_CLASS) ) {
			ci=new GALItem(fileAs);
		}
		else if ( contactType.equals(DistributionListItem.IMAGE_CLASS) ) {
			// fileAs is used for both display name and email here
			ci=new DistributionListItem(fileAs,fileAs); //TODO???
} else {
			throw new HarnessException("Image type not valid.");
		}
		list.add(ci);
	}
	return list;
}

@Override
public AbsPage zToolbarPressButton(Button button) throws HarnessException {
	logger.info(myPageName() + " zToolbarPressButton("+ button +")");

	tracer.trace("Press the "+ button +" button");

	if ( button == null )
		throw new HarnessException("Button cannot be null!");

	// Default behavior variables
	//
	String locator = null;	// If set, this will be clicked
	AbsPage page = null;	// If set, this page will be returned

	if ( button == Button.B_REFRESH ) {
		// Refresh is owned by the main page toolbar; delegate and return
		return (((AppAjaxClient)this.MyApplication).zPageMain.zToolbarPressButton(Button.B_REFRESH));
	} else if ( button == Button.B_NEW ) {
		// For "NEW" without a specified pulldown option, just return the default item
		// To use "NEW" with a pulldown option, see zToolbarPressPulldown(Button, Button)
		locator = "css=div#zb__NEW_MENU td[id$='_title']";
		page = new FormContactNew(this.MyApplication);
	} else if ( button == Button.B_DELETE ) {
		String id = "zb__CNS-main__DELETE";
		if (this.zIsElementDisabled("css=div#" + id)) {
			throw new HarnessException("Tried clicking on "+ button +" but it was disabled "+ id);
		}
		locator = "id="+ id;
	} else if ( button == Button.B_EDIT ) {
		String id = "zb__CNS-main__EDIT";
		if (zIsElementDisabled("css=div#" + id )) {
			throw new HarnessException("Tried clicking on "+ button +" but it was disabled "+ id);
		}
		locator = "id="+ id;
		// Edit form type depends on the current selection (contact vs group)
		page = newFormSelected();
	} else if ( button == Button.B_MOVE) {
		String id = "zb__CNS__MOVE_left_icon";
		if (sIsElementPresent("css=td#" + id + " div[class*=ZDisabledImage]")) {
			throw new HarnessException("Tried clicking on "+ button +" but it was disabled "+ id);
		}
		locator = "id="+ id;
		page = new DialogMove(MyApplication, this);
	} else if ( button == Button.B_FORWARD) {
		locator = "css=div[id^=zb__CN-][id$=__SEND_CONTACTS_IN_EMAIL]";
		if (zIsElementDisabled(locator)) {
			throw new HarnessException("Tried clicking on "+ button +" but it was disabled ");
		}
		page = new
FormMailNew(MyApplication);
	} else if ( button == Button.B_CANCEL) {
		//String id ="dizb__CN__CANCEL";
		locator = "css=div[id^=zb__CN][id$=__CANCEL]" ;
		if (zIsElementDisabled(locator)) {
			throw new HarnessException("Tried clicking on "+ locator +" but it was disabled ");
		}
		page = new DialogWarning(DialogWarning.DialogWarningID.CancelCreateContact, this.MyApplication, ((AppAjaxClient)this.MyApplication).zPageAddressbook);
	//click close without changing contact contents
	} else if ( button == Button.B_CLOSE){
		locator = "css=div[id^=zb__CN][id$=__CANCEL]" ;
		if (zIsElementDisabled(locator)) {
			throw new HarnessException("Tried clicking on "+ locator +" but it was disabled ");
		}
	} else if (isAlphabetButton(button)) {
		// Alphabet bar buttons are addressed by their letter name
		locator=DisplayContactGroup.ALPHABET_PREFIX + button.toString() + DisplayContactGroup.ALPHABET_POSTFIX;
		//TODO
		//page = ???
	}

	if ( locator == null )
		throw new HarnessException("locator was null for button "+ button);

	// Default behavior, process the locator by clicking on it
	//

	// Make sure the button exists
	if ( !sIsElementPresent(locator) )
		throw new HarnessException("Button is not present locator="+ locator +" button="+ button);

	// Click it
	zClickAt(locator,"0,0");

	if (isAlphabetButton(button)) {
		//for addressbook alphabet button only
		sClick(locator);
	}

	zWaitForBusyOverlay();

	if ( page != null ) {
		//sWaitForPageToLoad();
		page.zWaitForActive();
	}

	return (page);
}

// Executes a keyboard shortcut and returns the page/dialog it opens (if any).
public AbsPage zKeyboardShortcut(Shortcut shortcut) throws HarnessException {
	logger.info(myPageName() + " zKeyboardShortcut("+ shortcut.getKeys() +")");

	tracer.trace("Click the shortcut "+ shortcut.getKeys() );

	// Default behavior variables
	AbsPage page = null;	// If set, this page will be returned

	if ( shortcut == Shortcut.S_NEWTAG) {
		page = new DialogTag(MyApplication,((AppAjaxClient) MyApplication).zPageAddressbook);
	} else if (shortcut == Shortcut.S_MOVE) {
		page = new DialogMove(MyApplication, this);
	} else if ( shortcut == Shortcut.S_ASSISTANT ) {
		page = new DialogAssistant(MyApplication,
((AppAjaxClient) MyApplication).zPageAddressbook);
	} else if ( shortcut == Shortcut.S_MAIL_REMOVETAG ) {
		page = null;
	} else {
		throw new HarnessException("No logic for shortcut : "+ shortcut);
	}

	// Click it
	//zKeyboardTypeString(shortcut.getKeys());
	zKeyboard.zTypeCharacters(shortcut.getKeys());

	zWaitForBusyOverlay();

	if ( page != null ) {
		page.zWaitForActive();
	}

	return (page);
}

// Left-clicks the "Distribution Lists" folder in the contacts tree.
public void clickDistributionListsFolder(AppAjaxClient app) throws HarnessException {
	FolderItem contactFolder = FolderItem.importFromSOAP(app.zGetActiveAccount(), "Distribution Lists");
	app.zTreeContacts.zTreeItem(Action.A_LEFTCLICK, contactFolder);
}

@Override
public AbsPage zToolbarPressPulldown(Button pulldown, Button option) throws HarnessException {
	logger.info(myPageName() + " zToolbarPressButtonWithPulldown("+ pulldown +", "+ option +")");

	tracer.trace("Click pulldown "+ pulldown +" then "+ option);

	if ( pulldown == null )
		throw new HarnessException("Button cannot be null!");

	// Default behavior variables
	//
	String pulldownLocator = null;	// If set, this will be expanded
	String optionLocator = null;	// If set, this will be clicked
	AbsPage page = null;	// If set, this page will be returned

	if ( pulldown == Button.B_TAG ) {
		if ( option == Button.O_TAG_NEWTAG ) {
			pulldownLocator = "css=td#zb__CNS-main__TAG_MENU_dropdown div.ImgSelectPullDownArrow";
			optionLocator = "css=td#contacts_newtag_title";
			page = new DialogTag(this.MyApplication, this);
		} else if ( option == Button.O_TAG_REMOVETAG ) {
			pulldownLocator = "css=td#zb__CNS-main__TAG_MENU_dropdown div.ImgSelectPullDownArrow";
			optionLocator = "css=div[id='zb__CNS-main__TAG_MENU|MENU'] div[id^=contacts_removetag] td.ZWidgetTitle";
			page = null;
		}
	} else if ( pulldown == Button.B_NEW ) {
		pulldownLocator = "css=div#zb__NEW_MENU td#zb__NEW_MENU_dropdown";
		if ( option == Button.O_NEW_CONTACT ) {
			// TODO: Bug 58365 for Desktop
			if (ZimbraSeleniumProperties.getAppType() == AppType.DESKTOP) {
				optionLocator="css=div[class='ActionMenu ZHasIcon'] div[class*='ZMenuItem ZWidget ZHasLeftIcon ZHasText'] table[class*='ZWidgetTable ZMenuItemTable']:contains('Contact')";
			} else {
				optionLocator="css=div#zb__NEW_MENU_NEW_CONTACT";
			}
			page = new FormContactNew(this.MyApplication);
		} else if ( option == Button.O_NEW_CONTACTGROUP) {
			// TODO: Bug 58365 for Desktop
			if (ZimbraSeleniumProperties.getAppType() == AppType.DESKTOP) {
				optionLocator="css=div[class='ActionMenu ZHasIcon'] div[class*='ZMenuItem ZWidget ZHasLeftIcon ZHasText'] table[class*='ZWidgetTable ZMenuItemTable']:contains('Contact Group')";
			} else {
				optionLocator="css=div#zb__NEW_MENU_NEW_GROUP";
			}
			page = new FormContactGroupNew(this.MyApplication);
		} else if ( option == Button.O_NEW_TAG ) {
			optionLocator = "css=div#zb__NEW_MENU_NEW_TAG td#zb__NEW_MENU_NEW_TAG_title";
			page = new DialogTag(this.MyApplication, this);
		} else if ( option == Button.O_NEW_ADDRESSBOOK ) {
			optionLocator = "css=div#zb__NEW_MENU_NEW_ADDRBOOK td#zb__NEW_MENU_NEW_ADDRBOOK_title";
			page = new DialogCreateFolder(MyApplication, ((AppAjaxClient)MyApplication).zPageAddressbook);
		} else {
			//option not suppored
			pulldownLocator=null;
		}
	}

	// Default behavior
	if ( pulldownLocator != null ) {

		// Make sure the locator exists
		if ( !sIsElementPresent(pulldownLocator) ) {
			throw new HarnessException("Button "+ pulldown +" option "+ option +" pulldownLocator "+ pulldownLocator +" not present!");
		}

		//central coordinate "x,y"
		//String center= sGetElementWidth(pulldownLocator)/2 + "," + sGetElementHeight(pulldownLocator)/2;

		if ( this.zIsBrowserMatch(BrowserMasks.BrowserMaskIE)){
			// TODO check if the following code make the test case CreateContactGroup.GroupOfNewEmail() pass in wdc
			/*
			sGetEval("return var evObj = document.createEventObject();" +
				"var x = selenium.browserbot.findElementOrNull('" + pulldownLocator + "');" +
				"x.focus();x.blur();x.fireEvent('onclick');");
			*/
			//the following code failed in wdc, but pass in my machine :
			sClickAt(pulldownLocator,"");
		} else {
			//others
			zClickAt(pulldownLocator,"");
		}
zWaitForBusyOverlay();

		if ( optionLocator != null ) {
			// Make sure the locator exists and visible
			zWaitForElementPresent(optionLocator);

			if (!zIsElementDisabled(optionLocator)) {
				zClick(optionLocator);
				zWaitForBusyOverlay();
			}
		}

		// If we click on pulldown/option and the page is specified, then
		// wait for the page to go active
		if ( page != null ) {
			//sWaitForPageToLoad();
			page.zWaitForActive();
		}
	}

	return page;
}

// Expands a toolbar pulldown and clicks the entry matching the given item
// (e.g. Move -> destination folder, Tag -> existing tag).
public AbsPage zToolbarPressPulldown(Button pulldown, IItem item) throws HarnessException {
	logger.info(myPageName() + " zToolbarPressButtonWithPulldown("+ pulldown +", "+ item +")");

	tracer.trace("Click pulldown "+ pulldown +" then "+ item);

	if ( pulldown == null )
		throw new HarnessException("Button cannot be null!");

	String pulldownLocator = null;	// If set, this will be expanded
	String optionLocator = null;	// If set, this will be clicked
	AbsPage page = null;	// If set, this page will be returned

	if ( pulldown == Button.B_MOVE ) {
		if ( item instanceof FolderItem) {
			FolderItem folder = (FolderItem) item;
			pulldownLocator = "css=td#zb__CNS-main__MOVE_MENU_dropdown.ZDropDown";
			optionLocator = "css=td#zti__ZmFolderChooser_ContactsCNS-main__" + folder.getId() + "_textCell.DwtTreeItem-Text";
			//TODO page=?
}
	} else if ( pulldown == Button.B_TAG ) {
		if ( item instanceof TagItem) {
			pulldownLocator = "css=td#zb__CNS-main__TAG_MENU_dropdown div.ImgSelectPullDownArrow";
			//Selenium cannot find the following optionLocator
			//optionLocator = "css=div#zb__CNS-main__TAG_MENU|MENU div:contains('" +((TagItem)item).getName() + "'";
			page = null;
		}
	}

	if ( pulldownLocator != null ) {

		// Make sure the locator exists
		if ( !sIsElementPresent(pulldownLocator) ) {
			throw new HarnessException("Button "+ pulldown +" folder "+ item +" pulldownLocator "+ pulldownLocator +" not present!");
		}

		//central coordinate "x,y"
		String center= sGetElementWidth(pulldownLocator)/2 + "," + sGetElementHeight(pulldownLocator)/2;

		zClickAt(pulldownLocator,center);

		zWaitForBusyOverlay();

		// find optionLocator
		if ( pulldown == Button.B_TAG ) {
			String tagName = ((TagItem)item).getName();

			//get number of menu's options
			int countOption= Integer.parseInt(sGetEval("window.document.getElementById('zb__CNS-main__TAG_MENU|MENU').children[0].children[0].children.length"));

			String id= null;
			//find option id contains the tag name
			for (int i=0; i <countOption; i++) {
				id= sGetEval("window.document.getElementById('zb__CNS-main__TAG_MENU|MENU').children[0].children[0].children[" + i + "].children[0].children[0].id");
				if (sGetText("css=div#" + id).contains(tagName)) {
					optionLocator = "css=div#" + id ;
					break;
				}
			}
		}

		if ( optionLocator != null ) {
			// Make sure the locator exists and visible
			zWaitForElementPresent(optionLocator);

			if (zIsVisiblePerPosition(optionLocator,0,0)) {
				zClick(optionLocator);
				zWaitForBusyOverlay();
			}
		}

		// If we click on pulldown/option and the page is specified, then
		// wait for the page to go active
		//if ( page != null ) {
		//	page.zWaitForActive();
		//}
	}

	return page;
}

// Expands a toolbar pulldown, hovers the given option, then clicks the
// sub-menu entry matching the given item (e.g. Tag -> Remove Tag -> tag name).
public AbsPage zToolbarPressPulldown(Button pulldown, Button option, Object item) throws HarnessException {
	logger.info(myPageName() + " zToolbarPressButtonWithPulldown("+ pulldown +", "+ option + " , " + item +")");

	tracer.trace("Click pulldown "+ pulldown +" then "+ option + " and " + item);

	if ( pulldown == null )
		throw new HarnessException("Button cannot be null!");

	String pulldownLocator = null;	// If set, this will be expanded
	String optionLocator = null;	// If set, this will be clicked
	String subOptionLocator = null;	// If set, this will be clicked
	AbsPage page = null;	// If set, this page will be returned

	if ( pulldown == Button.B_TAG ) {
		pulldownLocator = "css=td#zb__CNS-main__TAG_MENU_dropdown div.ImgSelectPullDownArrow";
		if (option == Button.O_TAG_REMOVETAG) {
			optionLocator = "css=div[id='zb__CNS-main__TAG_MENU|MENU'] div[id^='contacts_removetag'] td[id^='contacts_removetag'][id$=_title]";
		}
		page = null;
	}

	if ( pulldownLocator != null ) {

		// Make sure the locator exists
		if ( !sIsElementPresent(pulldownLocator) ) {
			throw new HarnessException("Button "+ pulldown +" folder "+ item +" pulldownLocator "+ pulldownLocator +" not present!");
		}

		//central coordinate "x,y"
		String center= sGetElementWidth(pulldownLocator)/2 + "," + sGetElementHeight(pulldownLocator)/2;

		zClickAt(pulldownLocator,center);

		zWaitForBusyOverlay();

		// find optionLocator
		if ( optionLocator != null ) {
			// Make sure the locator exists and visible
			zWaitForElementPresent(optionLocator);

			if (zIsVisiblePerPosition(optionLocator,0,0)) {
				sMouseOver(optionLocator);
				zWaitForBusyOverlay();

				if (item instanceof TagItem) {
					if (item == TagItem.Remove_All_Tags) {
						subOptionLocator = "css=div[id=REMOVE_TAG_MENU_TAG_MENU|MENU] div[id=REMOVE_ALL_TAGS]";
					} else {
						subOptionLocator = "css=div[id=REMOVE_TAG_MENU_TAG_MENU|MENU] td[id=^Remove_tag_][id$=_title]:contains('" + ((TagItem) item).getName() + "')";
					}

					// find active menu id
					/*
					//get number of z_shell's children
					int countOption= Integer.parseInt(sGetEval("window.document.getElementById('z_shell').children.length"));

					String parentMenuid= null;
					//find id of the active menu
					for (int i=countOption-1; i>0; i--) {
						parentMenuid= sGetEval("window.document.getElementById('z_shell').children[" + i +
"].id");
						if (sGetEval("window.document.getElementById('" + parentMenuid + "').getAttribute('class')").contains("ActionMenu ZHasIcon")
							&& sIsVisible(parentMenuid)){
							subOptionLocator = "css=div#" + parentMenuid + " td[id$=title]:contains(" + tagName + ")";
							break;
						}
					}
					*/
				}

				if (subOptionLocator != null) {
					// Make sure the locator exists and visible
					zWaitForElementPresent(subOptionLocator);

					//if (zIsVisiblePerPosition(subOptionLocator,0,0)) {
						zClick(subOptionLocator);
						zWaitForBusyOverlay();
					//}
				}
			}
		}
	}

	//if ( page != null ) {
	//	page.zWaitForActive();
	//}

	return page;
}

// return the type of a contact
// Returns the list-row icon class that identifies the row as a contact,
// contact group, or distribution list; null when none of the icons match.
private String getContactType(String locator) throws HarnessException {
	String imageLocator = locator +" div[class*=";
	if (sIsElementPresent(imageLocator + ContactGroupItem.IMAGE_CLASS + "]")) {
		return ContactGroupItem.IMAGE_CLASS;
	}
	else if (sIsElementPresent(imageLocator + ContactItem.IMAGE_CLASS + "]")) {
		return ContactItem.IMAGE_CLASS;
	}
	else if (sIsElementPresent(imageLocator + DistributionListItem.IMAGE_CLASS + "]")) {
		return DistributionListItem.IMAGE_CLASS;
	}
	logger.info(sGetAttribute(locator+ " div@class") + " not contain neither " + ContactGroupItem.IMAGE_CLASS + " nor " + ContactItem.IMAGE_CLASS );
	return null;
}

// return the xpath locator of a contact
// Scans the visible contact list for a row whose text contains the given
// name (case-insensitive) and returns that row's css locator.
private String getContactLocator(String contact) throws HarnessException {
	//assume that this is a list view
	String listLocator = "div[id='zv__CNS-main']";
	String rowLocator = "div[id^='zli__CNS-main__']";
	String contactLocator = null;

	//actually this is a search view
	if (zIsInSearchView()) {
		listLocator= "div[id=zv__CNS-SR-Contacts-1]";
		rowLocator= "div[id^=zli__CNS-SR-Contacts-1__]";
	}

	if (!this.sIsElementPresent("css=" + listLocator + ">" + rowLocator)) {
		throw new HarnessException("css=" + listLocator + ">" + rowLocator + " not present");
	}

	//Get the number of contacts (String)
	int count = this.sGetCssCount("css=" + listLocator + ">" + rowLocator);
	logger.debug(myPageName() + " zListItem: number of contacts: "+ count);

	if ( count == 0 )
		throw new HarnessException("List count was zero");

	// Get each contact's data from the table list
	for (int i = 1; i<=count; i++) {
		String itemLocator = "css=" + listLocator + ">div:nth-child(" + i +")";

		if ( !this.sIsElementPresent(itemLocator) ) {
			throw new HarnessException("unable to locate item " + itemLocator);
		}

		String displayAs = sGetText(itemLocator);

		// Log this item to the debug output
		LogManager.getLogger("projects").info("zListItem: found contact "+ displayAs);

		if ( displayAs != null ) {
			if ( displayAs.toLowerCase().contains(contact.toLowerCase()) ) {
				// Found the item!
				contactLocator = itemLocator;
				break;
			}
		}
	}

	if (contactLocator == null) {
		throw new HarnessException("Never found the contact "+ contact);
	}

	return contactLocator;
}

//get selected contacts locators
// Returns locators of the currently selected rows in the main list view.
private ArrayList<String> getSelectedContactLocator() throws HarnessException {
	String listLocator = "div#zv__CNS-main";
	String rowLocator = "div[id^='zli__CNS-main__']";
	ArrayList<String> arrayList = new ArrayList<String>();

	if ( !sIsElementPresent("css=" + listLocator) )
		throw new HarnessException("List View Rows is not present "+ listLocator);

	if ( !sIsElementPresent("css=" + rowLocator) )
		return arrayList; //an empty arraylist

	//Get the number of contacts (String)
	int count = sGetCssCount("css=" + listLocator + ">" + rowLocator);
	logger.debug(myPageName() + " getSelectedContactLocator: number of contacts: "+ count);

	if ( count == 0 )
		throw new HarnessException("List count was zero");

	// Get each contact's data from the table list
	for (int i = 1; i<=count; i++) {
		String itemLocator = "css=" + listLocator + " div:nth-child(" + i +")";

		if ( !sIsElementPresent(itemLocator) ) {
			logger.info("reach the end of list - unable to locate item " + itemLocator);
			break;
		}

		// Selected rows carry the Row-selected class
		if (sIsElementPresent(itemLocator+ "[class*=Row-selected]")) {
			arrayList.add(itemLocator);
		}

		// Log this item to the debug output
		LogManager.getLogger("projects").info("getSelectedContactLocator: found selected contact "+ itemLocator);
	}

	return arrayList;
}

// Right-click a contact row, hover the given context-menu option, then click
// the given sub-menu option (e.g. Tag -> New Tag, Search -> Sent To Contact).
public AbsPage zListItem(Action action, Button option ,Button subOption, String contact) throws HarnessException {
	String locator = null;	// If set, this will be clicked
	AbsPage page = null;	// If set, this page will be returned
	String parentLocator = null;
	String extraLocator="";

	tracer.trace(action +" then "+ option +" then "+ subOption +" on contact = "+ contact);

	if ( action == Action.A_RIGHTCLICK ) {
		ContextMenuItem cmi=null;
		ContextMenuItem sub_cmi = null;

		zRightClickAt(getContactLocator(contact),"0,0");

		if (option == Button.B_TAG) {
			cmi=CONTEXT_MENU.CONTACT_TAG;
			if (subOption == Button.O_TAG_NEWTAG) {
				sub_cmi = CONTEXT_SUB_MENU.CONTACT_SUB_NEW_TAG;
				page = new DialogTag(this.MyApplication, this);
			} else if (subOption == Button.O_TAG_REMOVETAG) {
				sub_cmi = CONTEXT_SUB_MENU.CONTACT_SUB_REMOVE_TAG;
				//parentLocator= "div[id^=TAG_MENU__DWT][id$=|MENU]";
				parentLocator= "div[id='TAG_MENU|MENU']";
				page = null;
			}
		} else if (option == Button.B_CONTACTGROUP) {
			if (subOption == Button.O_NEW_CONTACTGROUP) {
				cmi= CONTEXT_MENU.CONTACT_GROUP;
				sub_cmi= CONTEXT_SUB_MENU.CONTACT_SUB_NEW_CONTACT_GROUP;
				page = new DialogNewContactGroup(MyApplication, this);
			}
		} else if (option == Button.B_SEARCH) {
			cmi=CONTEXT_MENU.CONTACT_SEARCH;
			if (subOption == Button.O_SEARCH_MAIL_SENT_TO_CONTACT) {
				sub_cmi = CONTEXT_SUB_MENU.CONTACT_SUB_SENT_TO_CONTACT;
				page = ((AppAjaxClient)MyApplication).zPageSearch;
			} else if (subOption == Button.O_SEARCH_MAIL_RECEIVED_FROM_CONTACT) {
				sub_cmi = CONTEXT_SUB_MENU.CONTACT_SUB_RECEIVED_FROM_CONTACT;
				page = ((AppAjaxClient)MyApplication).zPageSearch;
			}
		} else {
			throw new HarnessException("option " + option + " not supported.");
		}

		if ((cmi == null) || (sub_cmi == null)) {
			throw new HarnessException("option " + option + " not supported.");
		}

		// Search view uses a differently-suffixed context menu id
		if (zIsInSearchView()) {
			locator = "css=div[id^=zm__Contacts__DWT]";
		} else {
			locator = "css=div#zm__Contacts";
		}

		if ( ((option == Button.B_CONTACTGROUP) && (subOption
== Button.O_NEW_CONTACTGROUP)) || (option == Button.B_SEARCH) ) {
			locator = locator + " tr[id^="+ cmi.locator + "]";
		} else {
			locator = locator + " tr#"+ cmi.locator;
		}
		//locator = "id="+ id;

		// Make sure the context menu exists
		zWaitForElementPresent(locator) ;

		// TODO: Check if the item is enabled
		//if (zIsElementDisabled("div#" + id )) {
		//	throw new HarnessException("Tried clicking on "+ cmi.text +" but it was disabled ");
		//}

		//For Safari
		// as an alternative for sMouseOver(locator)
		if (zIsBrowserMatch(BrowserMasks.BrowserMaskSafari)) {
			// Walk the context menu with arrow keys instead of hovering
			zKeyboard.zTypeKeyEvent(KeyEvent.VK_DOWN);
			zKeyboard.zTypeKeyEvent(KeyEvent.VK_DOWN);
			zKeyboard.zTypeKeyEvent(KeyEvent.VK_DOWN);
			zKeyboard.zTypeKeyEvent(KeyEvent.VK_DOWN);

			ArrayList<String> selectedContactArrayList=getSelectedContactLocator();
			String contactType = getContactType(selectedContactArrayList.get(0));

			//check if it is a contact
			if ( contactType.equals(ContactItem.IMAGE_CLASS) ) {
				// Plain contacts have two extra menu entries before the target
				zKeyboard.zTypeKeyEvent(KeyEvent.VK_DOWN);
				zKeyboard.zTypeKeyEvent(KeyEvent.VK_DOWN);
			}

			zKeyboard.zTypeKeyEvent(KeyEvent.VK_RIGHT);
		} else {
			// Mouse over the option
			sFocus(locator);
			sMouseOver(locator);
		}

		zWaitForBusyOverlay();

		if (option == Button.B_SEARCH) {
			//find parent locators
			try {
				int total= Integer.parseInt(sGetEval("window.document.getElementById('z_shell').childNodes.length")) -1;
				for (int i=total; i>=0 ; i--, parentLocator=null) {
					parentLocator = sGetEval("window.document.getElementById('z_shell').childNodes[" + i + "].id" );
					if ( parentLocator.startsWith("POPUP_DWT") && zIsVisiblePerPosition(parentLocator, 0, 0)) {
						logger.info("parent = " + parentLocator);
						parentLocator = "div#" + parentLocator;
						break;
					}
				}
			} catch (Exception e) {
				// best-effort: fall back to an unscoped sub-menu locator below
				parentLocator=null;
				logger.info("cannot find parent id for " + sub_cmi.locator + " " + e.getMessage());
			}
		}

		if (parentLocator != null) {
			locator = "css=" + parentLocator + " " + sub_cmi.locator + extraLocator;
		} else {
			locator = "css=" + sub_cmi.locator + extraLocator;
		}

		// Make sure the sub context menu exists
		zWaitForElementPresent(locator) ;

		// make sure the sub context menu enabled
		//zWaitForElementEnabled(locator);
	}

	//ExecuteHarnessMain.ResultListener.captureScreen();
	// SleepUtil.sleep(987654321);
	//else {
	if (option == Button.B_SEARCH) {
		if (subOption == Button.O_SEARCH_MAIL_SENT_TO_CONTACT) {
			locator="css=td[id^=SEARCH_TO__DWT][id$=_title]:contains('Sent To Contact')";
		} else if (subOption == Button.O_SEARCH_MAIL_RECEIVED_FROM_CONTACT) {
			locator="css=td[id^=SEARCH__DWT][id$=_title]:contains('Received From Contact')";
		}
	}

	//if (subOption == Button.O_TAG_REMOVETAG) {
	//	ExecuteHarnessMain.ResultListener.captureScreen();
	//}

	sFocus(locator);
	sMouseOver(locator);
	SleepUtil.sleepSmall();
	//jClick(locator);
	//zClickAt(locator, "0,0");
	sClickAt(locator, "0,0");
	//}

	zWaitForBusyOverlay();

	if ( page != null ) {
		//sWaitForPageToLoad();
		page.zWaitForActive();
	}

	return (page);
}

/**
 * Action -> Option -> suboption -> object on contact.  For example,
 * Right click -> Tag -> Remove Tag -> tagname on ContactA
 * @param action e.g. A_RIGHTCLICK
 * @param option e.g B_TAG
 * @param subOption e.g O_TAG_REMOVETAG
 * @param choice e.g. String tagname
 * @param contact The contact to take the action on
 * @return the page opened by the action, or null
 * @throws HarnessException
 */
public AbsPage zListItem(Action action, Button option, Button subOption, Object choice, String contact) throws HarnessException {
	AbsPage page = null;	// If set, this page will be returned
	String contactLocator = getContactLocator(contact);
	String locator = null;

	tracer.trace(action +" then "+ option +" then "+ subOption + " and choose " + choice + " on contact = "+ contact);

	if ( action == Action.A_RIGHTCLICK ) {
		if (option == Button.B_TAG) {
			if (subOption == Button.O_TAG_REMOVETAG) {
				if ( !(choice instanceof String) ) {
					throw new HarnessException("choice must be a string of the tag name! "+ choice);
				}

				String tagName = (String)choice;
				String tagContactLocator = "css=div[id^='zm__Contacts'] div[id^='TAG_MENU'] td[id$='_title']";
				String removeTagLocator = "css=div[id^='TAG_MENU|MENU'] div[id^='contacts_removetag'] td[id$='_title']";
				locator = "css=div[id='REMOVE_TAG_MENU_TAG_MENU|MENU'] td[id=^Remove_tag_][id$=_title]:contains('" + tagName + "')";

				// Right click on contact
				zRightClickAt(contactLocator,"0,0");
				zWaitForBusyOverlay();

				// Left Click "Tag"
				this.sMouseOver(tagContactLocator);
				SleepUtil.sleepMedium();
				zClickAt(tagContactLocator, "");
				SleepUtil.sleepMedium();
				zWaitForBusyOverlay();

				/*
				The context menu has two different looks, depending on how many tags are on the item.
				If 0 tags, then the "remove tag" option is disabled.
				If 1 tag, then the "remove tag" option appears without a sub menu.
				If 1+ tags, then the "remove tag" option appears with a sub menu, where a specific tag may be chosen.
				*/

				if ( this.sIsElementPresent("css=div[id^='TAG_MENU|MENU'] div[id^='contacts_removetag'].ZHasDropDown") ) {
					// Has sub menu
					// Mouse over "remove tag", then Left Click "<tag name>"
					this.sMouseOver(removeTagLocator);
					zWaitForBusyOverlay();

					// Left Click "<tag name>"
					zClickAt(locator, "");
					zWaitForBusyOverlay();
				} else {
					// No sub menu, just Left Click "Remove Tag"
					zClickAt(removeTagLocator, "");
					zWaitForBusyOverlay();
				}

				return (page);
			}
		}
	}

	return (page);
}

// Right-click a contact row, hover the given context-menu option, then click
// the sub-menu entry matching the given item (existing tag or contact group).
public AbsPage zListItem(Action action, Button option, IItem item, String contact) throws HarnessException {
	AbsPage page = null;
	String contactLocator = getContactLocator(contact);
	String optionLocator = null;
	String itemLocator = null;

	tracer.trace(action +" then "+ option +" then "+ item +" on contact = "+ contact);

	if ( action == Action.A_RIGHTCLICK ) {
		if (option == Button.B_TAG) {
			// Hover over the context menu "tags" item
			optionLocator = "css=div#zm__Contacts div#TAG_MENU td[id$='_title']";
			if (item instanceof TagItem) {
				// Left click the existing tag
				itemLocator = "css=div[id^='TAG_MENU|MENU'] td[id$='_title']:contains('" + item.getName() + "')";
			}
		} else if (option == Button.B_CONTACTGROUP) {
			optionLocator = "css=div#zm__Contacts div[id^='CONTACTGROUP_MENU'] td[id$='_title']";
			if ( item instanceof ContactGroupItem) {
				itemLocator = "css=div[id^='CONTACTGROUP_MENU'] td[id$='_title']:contains('"+ item.getName() +"')";
			}
		}

		if ( !this.sIsElementPresent(contactLocator) ) {
			throw new HarnessException("Unable to right click on contact");
		}

		// Right click on contact
		zRightClickAt(contactLocator,"0,0");
		this.zWaitForBusyOverlay();

		if ( !this.sIsElementPresent(optionLocator) ) {
			throw new HarnessException("Unable to hover over context menu");
		}

		// Mouse over the option
		sMouseOver(optionLocator);
		this.zWaitForBusyOverlay();

		// It seems to take a while to draw the context menu
		// Sleep a bit to let it draw.
		SleepUtil.sleepLong();

		if ( !this.sIsElementPresent(itemLocator) ) {
			throw new HarnessException("Unable to click on sub-menu");
		}

		// Left click the sub-option
		this.zClickAt(itemLocator, "");
		this.zWaitForBusyOverlay();
	}

	if ( page != null ) {
		page.zWaitForActive();
	}

	return (page);
}

@Override
public AbsPage zListItem(Action action, Button option, String contact) throws HarnessException {
	String locator = null;	// If set, this will be clicked
	AbsPage page = null;	// If set, this page will be returned
	String contactLocator = getContactLocator(contact);

	tracer.trace(action +" then "+ option +" on contact = "+ contact);

	if ( action == Action.A_RIGHTCLICK ) {
		ContextMenuItem cmi=null;
		if (option == Button.B_DELETE){
			cmi=CONTEXT_MENU.CONTACT_DELETE;
		} else if (option == Button.B_MOVE) {
			cmi=CONTEXT_MENU.CONTACT_MOVE;
			page = new DialogMove(MyApplication, this);
		} else if (option == Button.B_EDIT) {
			cmi=CONTEXT_MENU.CONTACT_EDIT;
			page = newFormSelected();
			// select the item only
			zClickAt(contactLocator,"0,0");
		} else if (option == Button.B_NEW) {
			cmi=CONTEXT_MENU.CONTACT_NEW_EMAIL;
			page = new FormMailNew(MyApplication);
		} else if (option == Button.B_PRINT) {
            cmi=CONTEXT_MENU.CONTACT_PRINT;
            page = new PagePrint(MyApplication);
        } else if (option == Button.B_FORWARD) {
            cmi=CONTEXT_MENU.CONTACT_FORWARD;
            page = new FormMailNew(MyApplication);
        } else {
            throw new HarnessException("option " + option + " not supported");
        }

        zRightClickAt(contactLocator,"0,0");

        locator = "css=div#zm__Contacts tr#"+ cmi.locator;
        if (option == Button.B_NEW) {
            locator = "css=div#zm__Contacts tr[id^="+ cmi.locator +"]";
        }
        //locator = "id="+ id;

        // Make sure the context menu exists
        zWaitForElementPresent(locator) ;

        // Check if the item is enabled
        if (sIsElementPresent(locator + "[class*=ZDisabled]")) {
            throw new HarnessException("Tried clicking on "+cmi.text +" but it was disabled ");
        }
    }

    // NOTE(review): this click runs for every action, but 'locator' is only assigned inside
    // the A_RIGHTCLICK branch above — any other action reaches here with locator == null.
    // Confirm whether non-rightclick actions were meant to throw instead.
    zClickAt(locator,"0,0");
    zWaitForBusyOverlay();

    if ( page != null ) {
        page.zWaitForActive();
    }

    return (page);
}

/** Returns a fresh DisplayDList wrapper for the distribution-list display pane. */
public DisplayDList getDisplayDList() {
    return new DisplayDList(MyApplication);
}

/* (non-Javadoc)
 * @see com.zimbra.qa.selenium.framework.ui.AbsTab#zListItem(com.zimbra.qa.selenium.framework.ui.Action, java.lang.String)
 */
@Override
public AbsPage zListItem(Action action, String contact) throws HarnessException {
    logger.info(myPageName() + " zListItem("+ action +", "+ contact +")");

    String contactLocator=getContactLocator(contact);
    AbsPage page = null;

    tracer.trace(action +" on contact = "+ contact);

    if ( action == Action.A_LEFTCLICK ) {

        //click
        zClick(contactLocator);
        zWaitForBusyOverlay();

        ArrayList<String> selectedContactArrayList=getSelectedContactLocator();
        String contactType = getContactType(selectedContactArrayList.get(0));

        //check if it is a contact or a contact group item
        if ( contactType.equals(ContactGroupItem.IMAGE_CLASS)) {
            page = new DisplayContactGroup(MyApplication);
        }
        else if ( contactType.equals(ContactItem.IMAGE_CLASS) ) {
            page = new DisplayContact(MyApplication);
        }
        else {
            throw new HarnessException(" Error: not support the contact type");
        }

    } else if ( action == Action.A_CHECKBOX) {

        //enable user preference for checkbox
        //get the checkbox locator
        contactLocator=contactLocator + " div.ImgCheckboxUnchecked";

        //check the box
        zClick(contactLocator);
        //zWaitForBusyOverlay();

        ArrayList<String> selectedContactArrayList=getSelectedContactLocator();
        String contactType = getContactType(selectedContactArrayList.get(0));

        //check if it is a contact or a contact group item
        if ( contactType.equals(ContactGroupItem.IMAGE_CLASS)) {
            page = new DisplayContactGroup(MyApplication);
        }
        else if ( contactType.equals(ContactItem.IMAGE_CLASS) ) {
            page = new DisplayContact(MyApplication);
        }
        else {
            throw new HarnessException(" Error: not support the contact type");
        }

    } else if (action == Action.A_RIGHTCLICK ) {

        zRightClickAt(contactLocator,"0,0");
        //zWaitForBusyOverlay();
        return (new ContextMenu(MyApplication));

    } else if (action == Action.A_DOUBLECLICK) {

        sDoubleClick(contactLocator) ;
        // Double-click opens the edit form matching the selected item type.
        page = newFormSelected();

    } else {
        throw new HarnessException("Action " + action + " not supported");
    }

    if (page != null) {
        page.zWaitForActive();
    }

    return page;
}

/**
 * Builds the edit form matching the currently selected item: a contact-group form for a
 * group row, a contact form for a contact row.
 *
 * @return the form page, or null if the selected row's image class matches neither type
 * @throws HarnessException if nothing is selected
 */
private AbsPage newFormSelected() throws HarnessException {
    AbsPage page = null;

    ArrayList<String> selectedContactArrayList=getSelectedContactLocator();

    if (selectedContactArrayList.size() == 0) {
        throw new HarnessException("No selected contact/contact group ");
    }

    /*if (selectedContactArrayList.size() > 1) {
        for (int i=0; i<selectedContactArrayList.size(); i++) {
            logger.info(selectedContactArrayList.get(i));
        }
        throw new HarnessException("Cannot edit more than one contact/contact group ");
    }*/

    String contactType = getContactType(selectedContactArrayList.get(0));

    //check if it is a contact or a contact group item
    if ( contactType.equals(ContactGroupItem.IMAGE_CLASS)) {
        page = new FormContactGroupNew(MyApplication);
    }
    else if ( contactType.equals(ContactItem.IMAGE_CLASS) ) {
        page = new FormContactNew(MyApplication);
    }

    return page;
}

/** True if the given button is one of the alphabet-bar filter buttons (All, 123, A-Z). */
private boolean isAlphabetButton(Button button) {
    return (button == Button.B_AB_ALL) ||
        (button == Button.B_AB_123) ||
        (button == Button.B_AB_A) ||
(button == Button.B_AB_B) || (button == Button.B_AB_C) || (button == Button.B_AB_D) || (button == Button.B_AB_E) || (button == Button.B_AB_F) || (button == Button.B_AB_G) || (button == Button.B_AB_H) || (button == Button.B_AB_I) || (button == Button.B_AB_J) || (button == Button.B_AB_K) || (button == Button.B_AB_L) || (button == Button.B_AB_M) || (button == Button.B_AB_N) || (button == Button.B_AB_O) || (button == Button.B_AB_P) || (button == Button.B_AB_Q) || (button == Button.B_AB_R) || (button == Button.B_AB_S) || (button == Button.B_AB_T) || (button == Button.B_AB_U) || (button == Button.B_AB_V) || (button == Button.B_AB_W) || (button == Button.B_AB_X) || (button == Button.B_AB_Y) || (button == Button.B_AB_Z); } private boolean zIsInSearchView() throws HarnessException { return zIsVisiblePerPosition("css=div#z_filterPanel__SR-Contacts-1",0,0); } }
/* * Copyright 2014 BitPOS Pty Ltd. * Copyright 2014 Andreas Schildbach * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.xpydev.paycoinj.store; import io.xpydev.paycoinj.core.*; import io.xpydev.paycoinj.script.Script; import com.google.common.collect.Lists; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.Nullable; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.math.BigInteger; import java.sql.*; import java.util.LinkedList; import java.util.List; import java.util.Properties; /** * <p>A full pruned block store using the Postgres database engine. 
As an added bonus an address index is calculated,
 * so you can use {@link #calculateBalanceForAddress(io.xpydev.paycoinj.core.Address)} to quickly look up
 * the quantity of peercoins controlled by that address.</p>
 */
public class PostgresFullPrunedBlockStore implements FullPrunedBlockStore {
    private static final Logger log = LoggerFactory.getLogger(PostgresFullPrunedBlockStore.class);

    // SQLSTATE Postgres raises on a unique-constraint violation; used below to treat
    // duplicate-key inserts as benign (idempotent re-insert) rather than fatal.
    private static final String POSTGRES_DUPLICATE_KEY_ERROR_CODE = "23505";

    // In-memory caches of the two chain-head pointers, kept in sync with the settings table.
    private Sha256Hash chainHeadHash;
    private StoredBlock chainHeadBlock;
    private Sha256Hash verifiedChainHeadHash;
    private StoredBlock verifiedChainHeadBlock;
    private NetworkParameters params;
    // One JDBC connection per thread; every connection ever opened is also tracked in
    // allConnections so close() can shut them all down.
    private ThreadLocal<Connection> conn;
    private List<Connection> allConnections;
    private String connectionURL;
    private int fullStoreDepth;
    private String username;
    private String password;
    private String schemaName;

    private static final String driver = "org.postgresql.Driver";

    // DDL for the key/value settings table (chain head pointers, schema version).
    private static final String CREATE_SETTINGS_TABLE = "CREATE TABLE settings (\n" +
            " name character varying(32) NOT NULL,\n" +
            " value bytea\n" +
            ");";

    private static final String CHAIN_HEAD_SETTING = "chainhead";
    private static final String VERIFIED_CHAIN_HEAD_SETTING = "verifiedchainhead";
    private static final String VERSION_SETTING = "version";

    // DDL for block headers (hash is the truncated 28-byte form used throughout this store).
    private static final String CREATE_HEADERS_TABLE = "CREATE TABLE headers (" +
            " hash bytea NOT NULL," +
            " chainwork bytea NOT NULL," +
            " height integer NOT NULL," +
            " header bytea NOT NULL," +
            " wasundoable boolean NOT NULL" +
            ");";

    // DDL for undo data: either serialized txOutChanges or raw transactions, never both.
    private static final String CREATE_UNDOABLE_TABLE = "CREATE TABLE undoableblocks (" +
            " hash bytea NOT NULL," +
            " height integer NOT NULL," +
            " txoutchanges bytea," +
            " transactions bytea" +
            ");";

    // DDL for the UTXO set, including the address-index columns (toaddress/addresstargetable).
    private static final String CREATE_OPEN_OUTPUT_TABLE = "CREATE TABLE openoutputs (" +
            " hash bytea NOT NULL," +
            " index integer NOT NULL," +
            " height integer NOT NULL," +
            " value bytea NOT NULL," +
            " scriptbytes bytea NOT NULL," +
            " toaddress character varying(35)," +
            " addresstargetable integer" +
            ");";

    private static final String CREATE_UNDOABLE_TABLE_INDEX = "CREATE INDEX heightIndex ON undoableBlocks (height)";

    // Some indexes to speed up inserts
    private static final String CREATE_HEADERS_HASH_INDEX = "CREATE INDEX headershashindex ON headers USING btree (hash);";
    private static final String CREATE_OUTPUTS_ADDRESS_INDEX = "CREATE INDEX idx_address ON openoutputs USING btree (hash, index, height, toaddress);";
    private static final String CREATE_OUTPUT_ADDRESS_TYPE_INDEX = "CREATE INDEX idx_addresstargetable ON openoutputs USING btree (addresstargetable);";
    private static final String CREATE_OUTPUTS_HASH_INDEX = "CREATE INDEX openoutputshash ON openoutputs USING btree (hash);";
    private static final String CREATE_OUTPUTS_HASH_INDEX_INDEX = "CREATE INDEX openoutputshashindex ON openoutputs USING btree (hash, index);";
    private static final String CREATE_UNDOABLE_HASH_INDEX = "CREATE INDEX undoableblockshashindex ON undoableblocks USING btree (hash);";

    /**
     * Creates a new PostgresFullPrunedBlockStore.
     *
     * @param params A copy of the NetworkParameters used
     * @param fullStoreDepth The number of blocks of history stored in full (something like 1000 is pretty safe)
     * @param hostname The hostname of the database to connect to
     * @param dbName The database to connect to
     * @param username The database username
     * @param password The password to the database
     * @throws BlockStoreException if the database fails to open for any reason
     */
    public PostgresFullPrunedBlockStore(NetworkParameters params, int fullStoreDepth, String hostname,
                                        String dbName, String username, String password) throws BlockStoreException {
        // No schema requested: delegate with a null schemaName.
        this(params, "jdbc:postgresql://" + hostname + "/" + dbName, fullStoreDepth, username, password, null);
    }

    /**
     * <p>Create a new PostgresFullPrunedBlockStore, storing the tables in the schema specified.  You may want to
     * specify a schema to avoid name collisions, or just to keep the database better organized.
The schema is not * required, and if one is not provided than the default schema for the username will be used. See * <a href="http://www.postgres.org/docs/9.3/static/ddl-schemas.html">the postgres schema docs</a> for more on * schemas.</p> * * @param params A copy of the NetworkParameters used. * @param fullStoreDepth The number of blocks of history stored in full (something like 1000 is pretty safe). * @param hostname The hostname of the database to connect to. * @param dbName The database to connect to. * @param username The database username. * @param password The password to the database. * @param schemaName The name of the schema to put the tables in. May be null if no schema is being used. * @throws BlockStoreException If the database fails to open for any reason. */ public PostgresFullPrunedBlockStore(NetworkParameters params, int fullStoreDepth, String hostname, String dbName, String username, String password, @Nullable String schemaName) throws BlockStoreException { this(params, "jdbc:postgresql://" + hostname + "/" + dbName, fullStoreDepth, username, password, schemaName); } /** * <p>Create a new PostgresFullPrunedBlockStore, using the full connection URL instead of a hostname and password, * and optionally allowing a schema to be specified.</p> * * <p>The connection URL will be passed to the database driver, and should look like * "jdbc:postrgresql://host[:port]/databasename". You can use this to change the port, or specify additional * parameters. See <a href="http://jdbc.postgresql.org/documentation/head/connect.html#connection-parameters"> * the PostgreSQL JDBC documentation</a> for more on the connection URL.</p> * * <p>This constructor also accepts a schema name to use, which can be used to avoid name collisions, or to keep the * database organized. If no schema is provided the default schema for the username will be used. 
See
 * <a href="http://www.postgres.org/docs/9.3/static/ddl-schemas.html">the postgres schema docs</a> for more on
 * schemas.</p>
 *
 *
 * @param params A copy of the NetworkParameters used.
 * @param connectionURL The jdbc url to connect to the database.
 * @param fullStoreDepth The number of blocks of history stored in full (something like 1000 is pretty safe).
 * @param username The database username.
 * @param password The password to the database.
 * @param schemaName The name of the schema to put the tables in.  May be null if no schema is being used.
 * @throws BlockStoreException If the database fails to open for any reason.
 */
public PostgresFullPrunedBlockStore(NetworkParameters params, String connectionURL, int fullStoreDepth,
                                    String username, String password,
                                    @Nullable String schemaName) throws BlockStoreException {
    this.params = params;
    this.fullStoreDepth = fullStoreDepth;
    this.connectionURL = connectionURL;
    this.schemaName = schemaName;
    this.username = username;
    this.password = password;
    conn = new ThreadLocal<Connection>();
    allConnections = new LinkedList<Connection>();

    try {
        // Load the Postgres JDBC driver class; failure is logged and surfaces later
        // from maybeConnect() as a BlockStoreException.
        Class.forName(driver);
        log.info(driver + " loaded. ");
    } catch (java.lang.ClassNotFoundException e) {
        log.error("check CLASSPATH for Postgres jar ", e);
    }

    maybeConnect();

    try {
        // Create tables if needed
        if (!tableExists("settings"))
            createTables();
        initFromDatabase();
    } catch (SQLException e) {
        throw new BlockStoreException(e);
    }
}

/**
 * Ensures the calling thread has an open connection, creating one (and applying the
 * optional schema) if necessary.  New connections are registered in allConnections.
 */
private synchronized void maybeConnect() throws BlockStoreException {
    try {
        if (conn.get() != null && !conn.get().isClosed())
            return;

        Properties props = new Properties();
        props.setProperty("user", this.username);
        props.setProperty("password", this.password);

        conn.set(DriverManager.getConnection(connectionURL, props));
        Connection connection = conn.get();

        // set the schema if one is needed
        if(schemaName != null) {
            // NOTE(review): this Statement is never closed — a minor resource leak,
            // once per new connection.
            Statement s = connection.createStatement();
            s.execute("CREATE SCHEMA IF NOT EXISTS " + schemaName + ";");
            s.execute("set search_path to '" + schemaName +"';");
        }

        allConnections.add(conn.get());
        log.info("Made a new connection to database " + connectionURL);
    } catch (SQLException ex) {
        throw new BlockStoreException(ex);
    }
}

/**
 * Rolls back and closes every connection ever opened by this store.
 * NOTE(review): a SQLException on one connection aborts the loop, leaving later
 * connections open — confirm whether best-effort close was intended.
 */
@Override
public synchronized void close() {
    for (Connection conn : allConnections) {
        try {
            if(!conn.getAutoCommit()) {
                conn.rollback();
            }
            conn.close();
            if(conn == this.conn.get()) {
                this.conn.set(null);
            }
        } catch (SQLException ex) {
            throw new RuntimeException(ex);
        }
    }
    allConnections.clear();
}

/** Drops all four tables and recreates the store from scratch (genesis only). */
public void resetStore() throws BlockStoreException {
    maybeConnect();
    try {
        Statement s = conn.get().createStatement();
        s.execute("DROP TABLE settings");
        s.execute("DROP TABLE headers");
        s.execute("DROP TABLE undoableBlocks");
        s.execute("DROP TABLE openOutputs");
        s.close();
        createTables();
        initFromDatabase();
    } catch (SQLException ex) {
        throw new RuntimeException(ex);
    }
}

/** Creates all tables, indexes and initial settings rows, then seeds the genesis block. */
private void createTables() throws SQLException, BlockStoreException {
    Statement s = conn.get().createStatement();
    if (log.isDebugEnabled())
        log.debug("PostgresFullPrunedBlockStore : CREATE headers table");
    s.executeUpdate(CREATE_HEADERS_TABLE);

    if (log.isDebugEnabled())
log.debug("PostgresFullPrunedBlockStore : CREATE settings table"); s.executeUpdate(CREATE_SETTINGS_TABLE); if (log.isDebugEnabled()) log.debug("PostgresFullPrunedBlockStore : CREATE undoable block table"); s.executeUpdate(CREATE_UNDOABLE_TABLE); if (log.isDebugEnabled()) log.debug("PostgresFullPrunedBlockStore : CREATE undoable block index"); s.executeUpdate(CREATE_UNDOABLE_TABLE_INDEX); if (log.isDebugEnabled()) log.debug("PostgresFullPrunedBlockStore : CREATE open output table"); s.executeUpdate(CREATE_OPEN_OUTPUT_TABLE); // Create indexes.. s.executeUpdate(CREATE_HEADERS_HASH_INDEX); s.executeUpdate(CREATE_OUTPUT_ADDRESS_TYPE_INDEX); s.executeUpdate(CREATE_OUTPUTS_ADDRESS_INDEX); s.executeUpdate(CREATE_OUTPUTS_HASH_INDEX); s.executeUpdate(CREATE_OUTPUTS_HASH_INDEX_INDEX); s.executeUpdate(CREATE_UNDOABLE_HASH_INDEX); s.executeUpdate("INSERT INTO settings(name, value) VALUES('" + CHAIN_HEAD_SETTING + "', NULL)"); s.executeUpdate("INSERT INTO settings(name, value) VALUES('" + VERIFIED_CHAIN_HEAD_SETTING + "', NULL)"); s.executeUpdate("INSERT INTO settings(name, value) VALUES('" + VERSION_SETTING + "', '03')"); s.close(); createNewStore(params); } private void initFromDatabase() throws SQLException, BlockStoreException { Statement s = conn.get().createStatement(); ResultSet rs; rs = s.executeQuery("SELECT value FROM settings WHERE name = '" + CHAIN_HEAD_SETTING + "'"); if (!rs.next()) { throw new BlockStoreException("corrupt Postgres block store - no chain head pointer"); } Sha256Hash hash = new Sha256Hash(rs.getBytes(1)); rs.close(); this.chainHeadBlock = get(hash); this.chainHeadHash = hash; if (this.chainHeadBlock == null) { throw new BlockStoreException("corrupt Postgres block store - head block not found"); } rs = s.executeQuery("SELECT value FROM settings WHERE name = '" + VERIFIED_CHAIN_HEAD_SETTING + "'"); if (!rs.next()) { throw new BlockStoreException("corrupt Postgres block store - no verified chain head pointer"); } hash = new Sha256Hash(rs.getBytes(1)); 
rs.close(); s.close(); this.verifiedChainHeadBlock = get(hash); this.verifiedChainHeadHash = hash; if (this.verifiedChainHeadBlock == null) { throw new BlockStoreException("corrupt Postgres block store - verified head block not found"); } } private void createNewStore(NetworkParameters params) throws BlockStoreException { try { // Set up the genesis block. When we start out fresh, it is by // definition the top of the chain. StoredBlock storedGenesisHeader = new StoredBlock(params.getGenesisBlock().cloneAsHeader(), params.getGenesisBlock().getWork(), 0); // The coinbase in the genesis block is not spendable. This is because of how the reference client inits // its database - the genesis transaction isn't actually in the db so its spent flags can never be updated. List<Transaction> genesisTransactions = Lists.newLinkedList(); StoredUndoableBlock storedGenesis = new StoredUndoableBlock(params.getGenesisBlock().getHash(), genesisTransactions); put(storedGenesisHeader, storedGenesis); setChainHead(storedGenesisHeader); setVerifiedChainHead(storedGenesisHeader); } catch (VerificationException e) { throw new RuntimeException(e); // Cannot happen. 
} } private boolean tableExists(String table) throws SQLException { Statement s = conn.get().createStatement(); try { ResultSet results = s.executeQuery("SELECT * FROM " + table + " WHERE 1 = 2"); results.close(); return true; } catch (SQLException ex) { return false; } finally { s.close(); } } /** * Dumps information about the size of actual data in the database to standard output * The only truly useless data counted is printed in the form "N in id indexes" * This does not take database indexes into account */ public void dumpSizes() throws SQLException, BlockStoreException { maybeConnect(); Statement s = conn.get().createStatement(); long size = 0; long totalSize = 0; int count = 0; ResultSet rs = s.executeQuery("SELECT name, value FROM settings"); while (rs.next()) { size += rs.getString(1).length(); size += rs.getBytes(2).length; count++; } rs.close(); System.out.printf("Settings size: %d, count: %d, average size: %f%n", size, count, (double)size/count); totalSize += size; size = 0; count = 0; rs = s.executeQuery("SELECT chainWork, header FROM headers"); while (rs.next()) { size += 28; // hash size += rs.getBytes(1).length; size += 4; // height size += rs.getBytes(2).length; count++; } rs.close(); System.out.printf("Headers size: %d, count: %d, average size: %f%n", size, count, (double)size/count); totalSize += size; size = 0; count = 0; rs = s.executeQuery("SELECT txOutChanges, transactions FROM undoableBlocks"); while (rs.next()) { size += 28; // hash size += 4; // height byte[] txOutChanges = rs.getBytes(1); byte[] transactions = rs.getBytes(2); if (txOutChanges == null) size += transactions.length; else size += txOutChanges.length; // size += the space to represent NULL count++; } rs.close(); System.out.printf("Undoable Blocks size: %d, count: %d, average size: %f%n", size, count, (double)size/count); totalSize += size; size = 0; count = 0; long scriptSize = 0; rs = s.executeQuery("SELECT value, scriptBytes FROM openOutputs"); while (rs.next()) { size += 
            32; // hash (openOutputs store the full 32-byte hash)
        size += 4; // index
        size += 4; // height
        size += rs.getBytes(1).length;
        size += rs.getBytes(2).length;
        scriptSize += rs.getBytes(2).length;
        count++;
    }
    rs.close();
    System.out.printf("Open Outputs size: %d, count: %d, average size: %f, average script size: %f (%d in id indexes)%n",
            size, count, (double)size/count, (double)scriptSize/count, count * 8);
    totalSize += size;
    log.debug("Total Size: {}", totalSize);
    s.close();
}

/**
 * Inserts a header row, or — on a duplicate-key error when wasUndoable is true —
 * updates the existing row to mark it undoable.
 */
private void putUpdateStoredBlock(StoredBlock storedBlock, boolean wasUndoable) throws SQLException {
    try {
        PreparedStatement s =
                conn.get().prepareStatement("INSERT INTO headers(hash, chainWork, height, header, wasUndoable)"
                        + " VALUES(?, ?, ?, ?, ?)");
        // We skip the first 4 bytes because (on prodnet) the minimum target has 4 0-bytes
        // NOTE(review): the comment says 4 but the copy starts at offset 3. The offset is 3
        // consistently throughout this store, so changing it would break existing databases;
        // confirm which was intended.
        byte[] hashBytes = new byte[28];
        System.arraycopy(storedBlock.getHeader().getHash().getBytes(), 3, hashBytes, 0, 28);
        s.setBytes(1, hashBytes);
        s.setBytes(2, storedBlock.getChainWork().toByteArray());
        s.setInt(3, storedBlock.getHeight());
        s.setBytes(4, storedBlock.getHeader().unsafePaycoinSerialize());
        s.setBoolean(5, wasUndoable);
        s.executeUpdate();
        s.close();
    } catch (SQLException e) {
        // It is possible we try to add a duplicate StoredBlock if we upgraded
        // In that case, we just update the entry to mark it wasUndoable
        if (!(e.getSQLState().equals(POSTGRES_DUPLICATE_KEY_ERROR_CODE)) || !wasUndoable)
            throw e;

        PreparedStatement s = conn.get().prepareStatement("UPDATE headers SET wasUndoable=? WHERE hash=?");
        s.setBoolean(1, true);
        // We skip the first 4 bytes because (on prodnet) the minimum target has 4 0-bytes
        byte[] hashBytes = new byte[28];
        System.arraycopy(storedBlock.getHeader().getHash().getBytes(), 3, hashBytes, 0, 28);
        s.setBytes(2, hashBytes);
        s.executeUpdate();
        s.close();
    }
}

/** Stores a header-only block (not undoable). */
@Override
public void put(StoredBlock storedBlock) throws BlockStoreException {
    maybeConnect();
    try {
        putUpdateStoredBlock(storedBlock, false);
    } catch (SQLException e) {
        throw new BlockStoreException(e);
    }
}

/**
 * Stores a block together with its undo data (either serialized txOutChanges or the raw
 * transactions), inserting or updating the undoableBlocks row as needed, then records
 * the header as undoable.
 */
@Override
public void put(StoredBlock storedBlock, StoredUndoableBlock undoableBlock) throws BlockStoreException {
    maybeConnect();
    // We skip the first 4 bytes because (on prodnet) the minimum target has 4 0-bytes
    byte[] hashBytes = new byte[28];
    System.arraycopy(storedBlock.getHeader().getHash().getBytes(), 3, hashBytes, 0, 28);
    int height = storedBlock.getHeight();
    byte[] transactions = null;
    byte[] txOutChanges = null;
    try {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        if (undoableBlock.getTxOutChanges() != null) {
            undoableBlock.getTxOutChanges().serializeToStream(bos);
            txOutChanges = bos.toByteArray();
        } else {
            // No txOutChanges: store the raw transactions instead, prefixed with a
            // little-endian 4-byte count.
            int numTxn = undoableBlock.getTransactions().size();
            bos.write((int) (0xFF & (numTxn >> 0)));
            bos.write((int) (0xFF & (numTxn >> 8)));
            bos.write((int) (0xFF & (numTxn >> 16)));
            bos.write((int) (0xFF & (numTxn >> 24)));
            for (Transaction tx : undoableBlock.getTransactions())
                tx.paycoinSerialize(bos);
            transactions = bos.toByteArray();
        }
        bos.close();
    } catch (IOException e) {
        throw new BlockStoreException(e);
    }

    try {
        if (log.isDebugEnabled())
            log.debug("Looking for undoable block with hash: " + Utils.HEX.encode(hashBytes));

        PreparedStatement findS = conn.get().prepareStatement("select 1 from undoableBlocks where hash = ?");
        findS.setBytes(1, hashBytes);

        ResultSet rs = findS.executeQuery();
        if (rs.next()) {
            // We already have this output, update it.
            findS.close();

            // Postgres insert-or-updates are very complex (and finnicky). This level of transaction isolation
            // seems to work for peercoinj
            PreparedStatement s =
                    conn.get().prepareStatement("UPDATE undoableBlocks SET txOutChanges=?, transactions=?"
                            + " WHERE hash = ?");
            s.setBytes(3, hashBytes);

            if (log.isDebugEnabled())
                log.debug("Updating undoable block with hash: " + Utils.HEX.encode(hashBytes));

            if (transactions == null) {
                s.setBytes(1, txOutChanges);
                s.setNull(2, Types.BINARY);
            } else {
                s.setNull(1, Types.BINARY);
                s.setBytes(2, transactions);
            }
            s.executeUpdate();
            s.close();
            return;
        }

        PreparedStatement s =
                conn.get().prepareStatement("INSERT INTO undoableBlocks(hash, height, txOutChanges, transactions)"
                        + " VALUES(?, ?, ?, ?)");
        s.setBytes(1, hashBytes);
        s.setInt(2, height);

        if (log.isDebugEnabled())
            log.debug("Inserting undoable block with hash: " + Utils.HEX.encode(hashBytes) + " at height " + height);

        if (transactions == null) {
            s.setBytes(3, txOutChanges);
            s.setNull(4, Types.BINARY);
        } else {
            s.setNull(3, Types.BINARY);
            s.setBytes(4, transactions);
        }
        s.executeUpdate();
        s.close();
        try {
            putUpdateStoredBlock(storedBlock, true);
        } catch (SQLException e) {
            throw new BlockStoreException(e);
        }
    } catch (SQLException e) {
        // Duplicate-key means another writer already stored this block: benign.
        if (!e.getSQLState().equals(POSTGRES_DUPLICATE_KEY_ERROR_CODE))
            throw new BlockStoreException(e);
    }
}

/**
 * Fetches a stored block by hash, optionally restricted to blocks stored with undo data.
 * The two cached chain heads are served without touching the database.
 *
 * @return the block, or null if not found (or not undoable when wasUndoableOnly is set)
 */
public StoredBlock get(Sha256Hash hash, boolean wasUndoableOnly) throws BlockStoreException {
    // Optimize for chain head
    if (chainHeadHash != null && chainHeadHash.equals(hash))
        return chainHeadBlock;
    if (verifiedChainHeadHash != null && verifiedChainHeadHash.equals(hash))
        return verifiedChainHeadBlock;
    maybeConnect();
    PreparedStatement s = null;
    try {
        s = conn.get()
                .prepareStatement("SELECT chainWork, height, header, wasUndoable FROM headers WHERE hash = ?");
        // We skip the first 4 bytes because (on prodnet) the minimum target has 4 0-bytes
        byte[] hashBytes = new byte[28];
        System.arraycopy(hash.getBytes(), 3, hashBytes, 0, 28);
        s.setBytes(1, hashBytes);
        ResultSet results = s.executeQuery();
        if (!results.next()) {
return null; } // Parse it. if (wasUndoableOnly && !results.getBoolean(4)) return null; BigInteger chainWork = new BigInteger(results.getBytes(1)); int height = results.getInt(2); Block b = new Block(params, results.getBytes(3)); b.verifyHeader(); StoredBlock stored = new StoredBlock(b, chainWork, height); return stored; } catch (SQLException ex) { throw new BlockStoreException(ex); } catch (ProtocolException e) { // Corrupted database. throw new BlockStoreException(e); } catch (VerificationException e) { // Should not be able to happen unless the database contains bad // blocks. throw new BlockStoreException(e); } finally { if (s != null) try { s.close(); } catch (SQLException e) { throw new BlockStoreException("Failed to close PreparedStatement"); } } } @Override public StoredBlock get(Sha256Hash hash) throws BlockStoreException { return get(hash, false); } @Override public StoredBlock getOnceUndoableStoredBlock(Sha256Hash hash) throws BlockStoreException { return get(hash, true); } @Override public StoredUndoableBlock getUndoBlock(Sha256Hash hash) throws BlockStoreException { maybeConnect(); PreparedStatement s = null; try { s = conn.get() .prepareStatement("SELECT txOutChanges, transactions FROM undoableBlocks WHERE hash = ?"); // We skip the first 4 bytes because (on prodnet) the minimum target has 4 0-bytes byte[] hashBytes = new byte[28]; System.arraycopy(hash.getBytes(), 3, hashBytes, 0, 28); s.setBytes(1, hashBytes); ResultSet results = s.executeQuery(); if (!results.next()) { return null; } // Parse it. 
        byte[] txOutChanges = results.getBytes(1);
        byte[] transactions = results.getBytes(2);
        StoredUndoableBlock block;
        if (txOutChanges == null) {
            // Raw-transactions form: 4-byte little-endian count, then serialized txs back-to-back.
            int offset = 0;
            int numTxn = ((transactions[offset++] & 0xFF) << 0) |
                    ((transactions[offset++] & 0xFF) << 8) |
                    ((transactions[offset++] & 0xFF) << 16) |
                    ((transactions[offset++] & 0xFF) << 24);
            List<Transaction> transactionList = new LinkedList<Transaction>();
            for (int i = 0; i < numTxn; i++) {
                Transaction tx = new Transaction(params, transactions, offset);
                transactionList.add(tx);
                offset += tx.getMessageSize();
            }
            block = new StoredUndoableBlock(hash, transactionList);
        } else {
            TransactionOutputChanges outChangesObject =
                    new TransactionOutputChanges(new ByteArrayInputStream(txOutChanges));
            block = new StoredUndoableBlock(hash, outChangesObject);
        }
        return block;
    } catch (SQLException ex) {
        throw new BlockStoreException(ex);
    } catch (NullPointerException e) {
        // Corrupted database.
        throw new BlockStoreException(e);
    } catch (ClassCastException e) {
        // Corrupted database.
        throw new BlockStoreException(e);
    } catch (ProtocolException e) {
        // Corrupted database.
        throw new BlockStoreException(e);
    } catch (IOException e) {
        // Corrupted database.
        throw new BlockStoreException(e);
    } finally {
        if (s != null)
            try {
                s.close();
            } catch (SQLException e) {
                throw new BlockStoreException("Failed to close PreparedStatement");
            }
    }
}

/** Returns the cached (unverified) chain head. */
@Override
public StoredBlock getChainHead() throws BlockStoreException {
    return chainHeadBlock;
}

/** Updates both the in-memory cache and the settings row for the chain head. */
@Override
public void setChainHead(StoredBlock chainHead) throws BlockStoreException {
    Sha256Hash hash = chainHead.getHeader().getHash();
    this.chainHeadHash = hash;
    this.chainHeadBlock = chainHead;
    maybeConnect();
    try {
        PreparedStatement s = conn.get()
                .prepareStatement("UPDATE settings SET value = ? WHERE name = ?");
        s.setString(2, CHAIN_HEAD_SETTING);
        s.setBytes(1, hash.getBytes());
        s.executeUpdate();
        s.close();
    } catch (SQLException ex) {
        throw new BlockStoreException(ex);
    }
}

/** Returns the cached fully-verified chain head. */
@Override
public StoredBlock getVerifiedChainHead() throws BlockStoreException {
    return verifiedChainHeadBlock;
}

/**
 * Updates the verified chain head (cache + settings row), advances the plain chain head
 * if it lags behind, and prunes undo data older than fullStoreDepth blocks.
 */
@Override
public void setVerifiedChainHead(StoredBlock chainHead) throws BlockStoreException {
    Sha256Hash hash = chainHead.getHeader().getHash();
    this.verifiedChainHeadHash = hash;
    this.verifiedChainHeadBlock = chainHead;
    maybeConnect();
    try {
        PreparedStatement s = conn.get()
                .prepareStatement("UPDATE settings SET value = ? WHERE name = ?");
        s.setString(2, VERIFIED_CHAIN_HEAD_SETTING);
        s.setBytes(1, hash.getBytes());
        s.executeUpdate();
        s.close();
    } catch (SQLException ex) {
        throw new BlockStoreException(ex);
    }
    if (this.chainHeadBlock.getHeight() < chainHead.getHeight())
        setChainHead(chainHead);
    removeUndoableBlocksWhereHeightIsLessThan(chainHead.getHeight() - fullStoreDepth);
}

/** Prunes undo rows at or below the given height (callers connect first via maybeConnect). */
private void removeUndoableBlocksWhereHeightIsLessThan(int height) throws BlockStoreException {
    try {
        PreparedStatement s = conn.get()
                .prepareStatement("DELETE FROM undoableBlocks WHERE height <= ?");
        s.setInt(1, height);
        if (log.isDebugEnabled())
            log.debug("Deleting undoable undoable block with height <= " + height);
        s.executeUpdate();
        s.close();
    } catch (SQLException ex) {
        throw new BlockStoreException(ex);
    }
}

/**
 * Looks up an unspent output by (txhash, index).  Unlike headers, openOutputs keys on the
 * full 32-byte hash.
 *
 * @return the output, or null if it is not in the UTXO set
 */
@Override
public StoredTransactionOutput getTransactionOutput(Sha256Hash hash, long index) throws BlockStoreException {
    maybeConnect();
    PreparedStatement s = null;
    try {
        s = conn.get()
                .prepareStatement("SELECT height, value, scriptBytes FROM openOutputs " +
                        "WHERE hash = ? AND index = ?");
        s.setBytes(1, hash.getBytes());
        // index is actually an unsigned int
        s.setInt(2, (int)index);
        ResultSet results = s.executeQuery();
        if (!results.next()) {
            return null;
        }
        // Parse it.
int height = results.getInt(1); Coin value = Coin.valueOf(new BigInteger(results.getBytes(2)).longValue()); // Tell the StoredTransactionOutput that we are a coinbase, as that is encoded in height StoredTransactionOutput txout = new StoredTransactionOutput(hash, index, value, height, true, results.getBytes(3)); return txout; } catch (SQLException ex) { throw new BlockStoreException(ex); } finally { if (s != null) try { s.close(); } catch (SQLException e) { throw new BlockStoreException("Failed to close PreparedStatement"); } } } @Override public void addUnspentTransactionOutput(StoredTransactionOutput out) throws BlockStoreException { maybeConnect(); PreparedStatement s = null; // Calculate the toAddress (if any) String dbAddress = ""; int type = 0; Script outputScript = null; try { outputScript = new Script(out.getScriptBytes()); } catch (ScriptException e) { // Unparseable, but this isn't an error - it's an output not containing an address log.info("Could not parse script for output: " + out.getHash().toString()); } if (outputScript != null && (outputScript.isSentToAddress() || outputScript.isSentToRawPubKey() || outputScript.isPayToScriptHash())) { if (outputScript.isSentToAddress()) { Address targetAddr = new Address(params, outputScript.getPubKeyHash()); dbAddress = targetAddr.toString(); type = 1; } else if (outputScript.isSentToRawPubKey()) { /* * Note we use the deprecated getFromAddress here. Coinbase outputs seem to have the target address * in the pubkey of the script - perhaps we can rename this function? 
*/ dbAddress = outputScript.getFromAddress(params).toString(); type = 2; } else { dbAddress = Address.fromP2SHHash(params, outputScript.getPubKeyHash()).toString(); type = 3; } } try { s = conn.get().prepareStatement("INSERT INTO openOutputs (hash, index, height, value, scriptBytes, toAddress, addressTargetable) " + "VALUES (?, ?, ?, ?, ?, ?, ?)"); s.setBytes(1, out.getHash().getBytes()); // index is actually an unsigned int s.setInt(2, (int)out.getIndex()); s.setInt(3, out.getHeight()); s.setBytes(4, BigInteger.valueOf(out.getValue().value).toByteArray()); s.setBytes(5, out.getScriptBytes()); s.setString(6, dbAddress); s.setInt(7, type); s.executeUpdate(); s.close(); } catch (SQLException e) { if (!(e.getSQLState().equals(POSTGRES_DUPLICATE_KEY_ERROR_CODE))) throw new BlockStoreException(e); } finally { if (s != null) try { s.close(); } catch (SQLException e) { throw new BlockStoreException(e); } } } @Override public void removeUnspentTransactionOutput(StoredTransactionOutput out) throws BlockStoreException { maybeConnect(); // TODO: This should only need one query (maybe a stored procedure) if (getTransactionOutput(out.getHash(), out.getIndex()) == null) throw new BlockStoreException("Tried to remove a StoredTransactionOutput from PostgresFullPrunedBlockStore that it didn't have!"); try { PreparedStatement s = conn.get() .prepareStatement("DELETE FROM openOutputs WHERE hash = ? 
AND index = ?"); s.setBytes(1, out.getHash().getBytes()); // index is actually an unsigned int s.setInt(2, (int)out.getIndex()); s.executeUpdate(); s.close(); } catch (SQLException e) { throw new BlockStoreException(e); } } @Override public void beginDatabaseBatchWrite() throws BlockStoreException { maybeConnect(); if (log.isDebugEnabled()) log.debug("Starting database batch write with connection: " + conn.get().toString()); try { conn.get().setAutoCommit(false); } catch (SQLException e) { throw new BlockStoreException(e); } } @Override public void commitDatabaseBatchWrite() throws BlockStoreException { maybeConnect(); if (log.isDebugEnabled()) log.debug("Committing database batch write with connection: " + conn.get().toString()); try { conn.get().commit(); conn.get().setAutoCommit(true); } catch (SQLException e) { throw new BlockStoreException(e); } } @Override public void abortDatabaseBatchWrite() throws BlockStoreException { maybeConnect(); if (log.isDebugEnabled()) log.debug("Rollback database batch write with connection: " + conn.get().toString()); try { if (!conn.get().getAutoCommit()) { conn.get().rollback(); conn.get().setAutoCommit(true); } else { log.warn("Warning: Rollback attempt without transaction"); } } catch (SQLException e) { throw new BlockStoreException(e); } } @Override public boolean hasUnspentOutputs(Sha256Hash hash, int numOutputs) throws BlockStoreException { maybeConnect(); PreparedStatement s = null; try { s = conn.get() .prepareStatement("SELECT COUNT(*) FROM openOutputs WHERE hash = ?"); s.setBytes(1, hash.getBytes()); ResultSet results = s.executeQuery(); if (!results.next()) { throw new BlockStoreException("Got no results from a COUNT(*) query"); } int count = results.getInt(1); return count != 0; } catch (SQLException ex) { throw new BlockStoreException(ex); } finally { if (s != null) try { s.close(); } catch (SQLException e) { throw new BlockStoreException("Failed to close PreparedStatement"); } } } /** * Calculate the balance for a 
coinbase, to-address, or p2sh address. * @param address The address to calculate the balance of * @return The balance of the address supplied. If the address has not been seen, or there are no outputs open for this * address, the return value is 0 * @throws BlockStoreException */ public BigInteger calculateBalanceForAddress(Address address) throws BlockStoreException { maybeConnect(); PreparedStatement s = null; try { s = conn.get().prepareStatement("select sum(('x'||lpad(substr(value::text, 3, 50),16,'0'))::bit(64)::bigint) " + "from openoutputs where toaddress = ?"); s.setString(1, address.toString()); ResultSet rs = s.executeQuery(); if (rs.next()) { return BigInteger.valueOf(rs.getLong(1)); } else { throw new BlockStoreException("Failed to execute balance lookup"); } } catch (SQLException ex) { throw new BlockStoreException(ex); } finally { if (s != null) try { s.close(); } catch (SQLException e) { throw new BlockStoreException("Could not close statement"); } } } }
/* * Copyright 2016 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.storage; import static java.net.HttpURLConnection.HTTP_NOT_FOUND; import com.google.api.client.googleapis.json.GoogleJsonError; import com.google.api.services.storage.model.StorageObject; import com.google.cloud.storage.Storage.BlobGetOption; import com.google.cloud.storage.Storage.BlobSourceOption; import com.google.cloud.storage.Storage.BlobTargetOption; import com.google.cloud.storage.spi.v1.RpcBatch; import com.google.cloud.storage.spi.v1.StorageRpc; import com.google.common.annotations.VisibleForTesting; import java.util.Map; /** * A batch of operations to be submitted to Google Cloud Storage using a single RPC request. 
* * <p>Example of using a batch request to delete, update and get a blob: * * <pre>{@code * StorageBatch batch = storage.batch(); * BlobId firstBlob = BlobId.of("bucket", "blob1")); * BlobId secondBlob = BlobId.of("bucket", "blob2")); * batch.delete(firstBlob).notify(new BatchResult.Callback<Boolean, StorageException>() { * public void success(Boolean result) { * // deleted successfully * } * * public void error(StorageException exception) { * // delete failed * } * }); * batch.update(BlobInfo.builder(secondBlob).contentType("text/plain").build()); * StorageBatchResult<Blob> result = batch.get(secondBlob); * batch.submit(); * Blob blob = result.get(); // returns get result or throws StorageException * }</pre> */ public class StorageBatch { private final RpcBatch batch; private final StorageRpc storageRpc; private final StorageOptions options; StorageBatch(StorageOptions options) { this.options = options; this.storageRpc = options.getStorageRpcV1(); this.batch = storageRpc.createBatch(); } @VisibleForTesting Object getBatch() { return batch; } @VisibleForTesting StorageRpc getStorageRpc() { return storageRpc; } @VisibleForTesting StorageOptions getOptions() { return options; } /** * Adds a request representing the "delete blob" operation to this batch. Calling {@link * StorageBatchResult#get()} on the return value yields {@code true} upon successful deletion, * {@code false} if the blob was not found, or throws a {@link StorageException} if the operation * failed. */ public StorageBatchResult<Boolean> delete( String bucket, String blob, BlobSourceOption... options) { return delete(BlobId.of(bucket, blob), options); } /** * Adds a request representing the "delete blob" operation to this batch. Calling {@link * StorageBatchResult#get()} on the return value yields {@code true} upon successful deletion, * {@code false} if the blob was not found, or throws a {@link StorageException} if the operation * failed. 
*/ public StorageBatchResult<Boolean> delete(BlobId blob, BlobSourceOption... options) { StorageBatchResult<Boolean> result = new StorageBatchResult<>(); RpcBatch.Callback<Void> callback = createDeleteCallback(result); Map<StorageRpc.Option, ?> optionMap = StorageImpl.optionMap(blob, options); batch.addDelete(blob.toPb(), callback, optionMap); return result; } /** * Adds a request representing the "update blob" operation to this batch. The {@code options} can * be used in the same way as for {@link Storage#update(BlobInfo, BlobTargetOption...)}. Calling * {@link StorageBatchResult#get()} on the return value yields the updated {@link Blob} if * successful, or throws a {@link StorageException} if the operation failed. */ public StorageBatchResult<Blob> update(BlobInfo blobInfo, BlobTargetOption... options) { StorageBatchResult<Blob> result = new StorageBatchResult<>(); RpcBatch.Callback<StorageObject> callback = createUpdateCallback(this.options, result); Map<StorageRpc.Option, ?> optionMap = StorageImpl.optionMap(blobInfo, options); batch.addPatch(blobInfo.toPb(), callback, optionMap); return result; } /** * Adds a request representing the "get blob" operation to this batch. The {@code options} can be * used in the same way as for {@link Storage#get(BlobId, BlobGetOption...)}. Calling {@link * StorageBatchResult#get()} on the return value yields the requested {@link Blob} if successful, * {@code null} if no such blob exists, or throws a {@link StorageException} if the operation * failed. */ public StorageBatchResult<Blob> get(String bucket, String blob, BlobGetOption... options) { return get(BlobId.of(bucket, blob), options); } /** * Adds a request representing the "get blob" operation to this batch. The {@code options} can be * used in the same way as for {@link Storage#get(BlobId, BlobGetOption...)}. 
Calling {@link * StorageBatchResult#get()} on the return value yields the requested {@link Blob} if successful, * {@code null} if no such blob exists, or throws a {@link StorageException} if the operation * failed. */ public StorageBatchResult<Blob> get(BlobId blob, BlobGetOption... options) { StorageBatchResult<Blob> result = new StorageBatchResult<>(); RpcBatch.Callback<StorageObject> callback = createGetCallback(this.options, result); Map<StorageRpc.Option, ?> optionMap = StorageImpl.optionMap(blob, options); batch.addGet(blob.toPb(), callback, optionMap); return result; } /** Submits this batch for processing using a single RPC request. */ public void submit() { batch.submit(); } private RpcBatch.Callback<Void> createDeleteCallback(final StorageBatchResult<Boolean> result) { return new RpcBatch.Callback<Void>() { @Override public void onSuccess(Void response) { result.success(true); } @Override public void onFailure(GoogleJsonError googleJsonError) { StorageException serviceException = new StorageException(googleJsonError); if (serviceException.getCode() == HTTP_NOT_FOUND) { result.success(false); } else { result.error(serviceException); } } }; } private RpcBatch.Callback<StorageObject> createGetCallback( final StorageOptions serviceOptions, final StorageBatchResult<Blob> result) { return new RpcBatch.Callback<StorageObject>() { @Override public void onSuccess(StorageObject response) { result.success( response == null ? 
null : Blob.fromPb(serviceOptions.getService(), response)); } @Override public void onFailure(GoogleJsonError googleJsonError) { StorageException serviceException = new StorageException(googleJsonError); if (serviceException.getCode() == HTTP_NOT_FOUND) { result.success(null); } else { result.error(serviceException); } } }; } private RpcBatch.Callback<StorageObject> createUpdateCallback( final StorageOptions serviceOptions, final StorageBatchResult<Blob> result) { return new RpcBatch.Callback<StorageObject>() { @Override public void onSuccess(StorageObject response) { result.success( response == null ? null : Blob.fromPb(serviceOptions.getService(), response)); } @Override public void onFailure(GoogleJsonError googleJsonError) { result.error(new StorageException(googleJsonError)); } }; } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.snapshots;

import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotIndexShardStatus;
import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStatus;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.test.ESBackcompatTestCase;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ExecutionException;

import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.lessThan;

/**
 * Backwards-compatibility integration tests: snapshots taken on a mixed-version
 * cluster must still restore correctly after the cluster is rolled node-by-node
 * to the newer version.
 */
public class SnapshotBackwardsCompatibilityIT extends ESBackcompatTestCase {

    /**
     * Snapshots a subset of indices, rolls the cluster to the new version one node
     * at a time, then verifies both full and per-index restores (including restore
     * after an index has been deleted).
     */
    public void testSnapshotAndRestore() throws ExecutionException, InterruptedException, IOException {
        logger.info("--> creating repository");
        assertAcked(client().admin().cluster().preparePutRepository("test-repo")
                .setType("fs").setSettings(Settings.settingsBuilder()
                        .put("location", randomRepoPath().toAbsolutePath())
                        .put("compress", randomBoolean())
                        .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES)));
        // Two groups of indices: snapshotted before the upgrade vs. created alongside.
        String[] indicesBefore = new String[randomIntBetween(2,5)];
        String[] indicesAfter = new String[randomIntBetween(2,5)];
        for (int i = 0; i < indicesBefore.length; i++) {
            indicesBefore[i] = "index_before_" + i;
            createIndex(indicesBefore[i]);
        }
        for (int i = 0; i < indicesAfter.length; i++) {
            indicesAfter[i] = "index_after_" + i;
            createIndex(indicesAfter[i]);
        }
        String[] indices = new String[indicesBefore.length + indicesAfter.length];
        System.arraycopy(indicesBefore, 0, indices, 0, indicesBefore.length);
        System.arraycopy(indicesAfter, 0, indices, indicesBefore.length, indicesAfter.length);
        ensureYellow();
        logger.info("--> indexing some data");
        IndexRequestBuilder[] buildersBefore = new IndexRequestBuilder[randomIntBetween(10, 200)];
        for (int i = 0; i < buildersBefore.length; i++) {
            buildersBefore[i] = client().prepareIndex(RandomPicks.randomFrom(getRandom(), indicesBefore), "foo",
                    Integer.toString(i)).setSource("{ \"foo\" : \"bar\" } ");
        }
        // NOTE(review): this loop also targets indicesBefore (not indicesAfter) — the
        // "index_after_*" indices therefore stay empty. Looks intentional for doc-count
        // bookkeeping, but confirm against the original intent.
        IndexRequestBuilder[] buildersAfter = new IndexRequestBuilder[randomIntBetween(10, 200)];
        for (int i = 0; i < buildersAfter.length; i++) {
            buildersAfter[i] = client().prepareIndex(RandomPicks.randomFrom(getRandom(), indicesBefore), "bar",
                    Integer.toString(i)).setSource("{ \"foo\" : \"bar\" } ");
        }
        indexRandom(true, buildersBefore);
        indexRandom(true, buildersAfter);
        assertThat(client().prepareSearch(indices).setSize(0).get().getHits().totalHits(),
                equalTo((long) (buildersBefore.length + buildersAfter.length)));
        // Remember per-index doc counts so restores can be verified exactly.
        long[] counts = new long[indices.length];
        for (int i = 0; i < indices.length; i++) {
            counts[i] = client().prepareSearch(indices[i]).setSize(0).get().getHits().totalHits();
        }
        // NOTE(review): "upgrage" is a typo in the log message; left as-is because this
        // edit only adds comments.
        logger.info("--> snapshot subset of indices before upgrage");
        CreateSnapshotResponse createSnapshotResponse = client().admin().cluster().prepareCreateSnapshot("test-repo", "test-snap-1").setWaitForCompletion(true).setIndices("index_before_*").get();
        assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0));
        assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()));
        assertThat(client().admin().cluster().prepareGetSnapshots("test-repo").setSnapshots("test-snap-1").get().getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS));
        // Mutate the live data after the snapshot so a restore has something to undo.
        // NOTE(review): randomFrom may pick the same builder twice, so fewer than
        // howMany docs can actually be deleted — the later assertions only require
        // "fewer than before", which still holds.
        logger.info("--> delete some data from indices that were already snapshotted");
        int howMany = randomIntBetween(1, buildersBefore.length);
        for (int i = 0; i < howMany; i++) {
            IndexRequestBuilder indexRequestBuilder = RandomPicks.randomFrom(getRandom(), buildersBefore);
            IndexRequest request = indexRequestBuilder.request();
            client().prepareDelete(request.index(), request.type(), request.id()).get();
        }
        refresh();
        final long numDocs = client().prepareSearch(indices).setSize(0).get().getHits().totalHits();
        assertThat(client().prepareSearch(indices).setSize(0).get().getHits().totalHits(),
                lessThan((long) (buildersBefore.length + buildersAfter.length)));
        // Roll the cluster: upgrade one node at a time, checking doc counts each step.
        disableAllocation(indices);
        backwardsCluster().allowOnAllNodes(indices);
        logClusterState();
        boolean upgraded;
        do {
            logClusterState();
            SearchResponse countResponse = client().prepareSearch().setSize(0).get();
            assertHitCount(countResponse, numDocs);
            upgraded = backwardsCluster().upgradeOneNode();
            ensureYellow();
            countResponse = client().prepareSearch().setSize(0).get();
            assertHitCount(countResponse, numDocs);
        } while (upgraded);
        enableAllocation(indices);
        // Closed indices are required before restoring over them.
        logger.info("--> close indices");
        client().admin().indices().prepareClose("index_before_*").get();
        logger.info("--> verify repository");
        client().admin().cluster().prepareVerifyRepository("test-repo").get();
        logger.info("--> restore all indices from the snapshot");
        RestoreSnapshotResponse restoreSnapshotResponse = client().admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap-1").setWaitForCompletion(true).execute().actionGet();
        assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));
        ensureYellow();
        // Restoring the pre-upgrade snapshot must bring back the deleted docs.
        assertThat(client().prepareSearch(indices).setSize(0).get().getHits().totalHits(),
                equalTo((long) (buildersBefore.length + buildersAfter.length)));
        for (int i = 0; i < indices.length; i++) {
            assertThat(counts[i], equalTo(client().prepareSearch(indices[i]).setSize(0).get().getHits().totalHits()));
        }
        // A snapshot taken on the upgraded cluster must also work.
        logger.info("--> snapshot subset of indices after upgrade");
        createSnapshotResponse = client().admin().cluster().prepareCreateSnapshot("test-repo", "test-snap-2").setWaitForCompletion(true).setIndices("index_*").get();
        assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0));
        assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()));
        // Test restore after index deletion
        logger.info("--> delete indices");
        String index = RandomPicks.randomFrom(getRandom(), indices);
        cluster().wipeIndices(index);
        logger.info("--> restore one index after deletion");
        restoreSnapshotResponse = client().admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap-2").setWaitForCompletion(true).setIndices(index).execute().actionGet();
        assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));
        ensureYellow();
        assertThat(client().prepareSearch(indices).setSize(0).get().getHits().totalHits(),
                equalTo((long) (buildersBefore.length + buildersAfter.length)));
        for (int i = 0; i < indices.length; i++) {
            assertThat(counts[i], equalTo(client().prepareSearch(indices[i]).setSize(0).get().getHits().totalHits()));
        }
    }

    /**
     * Verifies incremental snapshotting across an upgrade: a second snapshot of an
     * unchanged single-shard index must only re-process the segments_N file, and a
     * third snapshot after one deletion must process exactly one extra (.del) file.
     */
    public void testSnapshotMoreThanOnce() throws ExecutionException, InterruptedException, IOException {
        Client client = client();
        final Path tempDir = randomRepoPath().toAbsolutePath();
        logger.info("--> creating repository");
        assertAcked(client.admin().cluster().preparePutRepository("test-repo")
                .setType("fs").setSettings(Settings.settingsBuilder()
                        .put("location", tempDir)
                        .put("compress", randomBoolean())
                        .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES)));
        // only one shard
        assertAcked(prepareCreate("test").setSettings(Settings.builder()
                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
        ));
        ensureYellow();
        logger.info("--> indexing");
        final int numDocs = randomIntBetween(10, 100);
        IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs];
        for (int i = 0; i < builders.length; i++) {
            builders[i] = client().prepareIndex("test", "doc", Integer.toString(i)).setSource("foo", "bar" + i);
        }
        indexRandom(true, builders);
        flushAndRefresh();
        // Force-merge to a single segment so the file-count assertions below are stable.
        assertNoFailures(client().admin().indices().prepareForceMerge("test").setFlush(true).setMaxNumSegments(1).get());
        CreateSnapshotResponse createSnapshotResponseFirst = client.admin().cluster().prepareCreateSnapshot("test-repo", "test").setWaitForCompletion(true).setIndices("test").get();
        assertThat(createSnapshotResponseFirst.getSnapshotInfo().successfulShards(), greaterThan(0));
        assertThat(createSnapshotResponseFirst.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponseFirst.getSnapshotInfo().totalShards()));
        assertThat(client.admin().cluster().prepareGetSnapshots("test-repo").setSnapshots("test").get().getSnapshots().get(0).state(),
                equalTo(SnapshotState.SUCCESS));
        {
            // First snapshot copies the real segment files, so more than one file processed.
            SnapshotStatus snapshotStatus = client.admin().cluster().prepareSnapshotStatus("test-repo").setSnapshots("test").get().getSnapshots().get(0);
            List<SnapshotIndexShardStatus> shards = snapshotStatus.getShards();
            for (SnapshotIndexShardStatus status : shards) {
                assertThat(status.getStats().getProcessedFiles(), greaterThan(1));
            }
        }
        // Usually (but randomly) roll the cluster before the second snapshot.
        if (frequently()) {
            logger.info("--> upgrade");
            disableAllocation("test");
            backwardsCluster().allowOnAllNodes("test");
            logClusterState();
            boolean upgraded;
            do {
                logClusterState();
                SearchResponse countResponse = client().prepareSearch().setSize(0).get();
                assertHitCount(countResponse, numDocs);
                upgraded = backwardsCluster().upgradeOneNode();
                ensureYellow();
                countResponse = client().prepareSearch().setSize(0).get();
                assertHitCount(countResponse, numDocs);
            } while (upgraded);
            enableAllocation("test");
        }
        if (cluster().numDataNodes() > 1 && randomBoolean()) { // only bump the replicas if we have enough nodes
            logger.info("--> move from 0 to 1 replica");
            client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)).get();
        }
        logger.debug("---> repo exists: " + Files.exists(tempDir.resolve("indices/test/0")) + " files: " + Arrays.toString(FileSystemUtils.files(tempDir.resolve("indices/test/0")))); // it's only one shard!
        CreateSnapshotResponse createSnapshotResponseSecond = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-1").setWaitForCompletion(true).setIndices("test").get();
        assertThat(createSnapshotResponseSecond.getSnapshotInfo().successfulShards(), greaterThan(0));
        assertThat(createSnapshotResponseSecond.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponseSecond.getSnapshotInfo().totalShards()));
        assertThat(client.admin().cluster().prepareGetSnapshots("test-repo").setSnapshots("test-1").get().getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS));
        {
            SnapshotStatus snapshotStatus = client.admin().cluster().prepareSnapshotStatus("test-repo").setSnapshots("test-1").get().getSnapshots().get(0);
            List<SnapshotIndexShardStatus> shards = snapshotStatus.getShards();
            for (SnapshotIndexShardStatus status : shards) {
                assertThat(status.getStats().getProcessedFiles(), equalTo(1)); // we flush before the snapshot such that we have to process the segments_N files
            }
        }
        client().prepareDelete("test", "doc", "1").get();
        CreateSnapshotResponse createSnapshotResponseThird = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-2").setWaitForCompletion(true).setIndices("test").get();
        assertThat(createSnapshotResponseThird.getSnapshotInfo().successfulShards(), greaterThan(0));
        assertThat(createSnapshotResponseThird.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponseThird.getSnapshotInfo().totalShards()));
        assertThat(client.admin().cluster().prepareGetSnapshots("test-repo").setSnapshots("test-2").get().getSnapshots().get(0).state(), equalTo(SnapshotState.SUCCESS));
        {
            SnapshotStatus snapshotStatus = client.admin().cluster().prepareSnapshotStatus("test-repo").setSnapshots("test-2").get().getSnapshots().get(0);
            List<SnapshotIndexShardStatus> shards = snapshotStatus.getShards();
            for (SnapshotIndexShardStatus status : shards) {
                assertThat(status.getStats().getProcessedFiles(), equalTo(2)); // we flush before the snapshot such that we have to process the segments_N files plus the .del file
            }
        }
    }
}
/*

   Derby - Class org.apache.derbyTesting.unitTests.store.T_DaemonService

   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */

package org.apache.derbyTesting.unitTests.store;

import org.apache.derbyTesting.unitTests.harness.T_Fail;

import com.pivotal.gemfirexd.internal.iapi.store.raw.*;

import com.pivotal.gemfirexd.internal.iapi.services.sanity.SanityManager;
import com.pivotal.gemfirexd.internal.iapi.services.io.FormatIdUtil;
import com.pivotal.gemfirexd.internal.iapi.services.io.Formatable;

import com.pivotal.gemfirexd.internal.iapi.error.StandardException;

import com.pivotal.gemfirexd.internal.iapi.store.raw.log.LogInstant;
import com.pivotal.gemfirexd.internal.iapi.util.ByteArray;
import com.pivotal.gemfirexd.internal.iapi.services.io.DynamicByteArrayOutputStream;

// NOTE(review): the two imports below are duplicated further down; harmless in
// Java, kept byte-identical for this review pass.
import java.io.IOException;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.io.StreamCorruptedException;
import java.util.Vector;
import java.io.OutputStream;
import java.io.InputStream;
import com.pivotal.gemfirexd.internal.iapi.services.io.LimitObjectInput;
import com.pivotal.gemfirexd.internal.shared.common.StoredFormatIds;

//
// Tracing can be done like so (commented out)
//import org.apache.derbyTesting.unitTests.util.MsgTrace;

/**
 * A test {@code Undoable} log operation used by the raw-store unit tests.
 * <p>
 * An instance records a synthetic transaction/record sequence plus flags that
 * drive the test harness (what to remove during undo, whether this is a
 * rollback record, etc.). The "optional data" payload is a deterministic byte
 * pattern (0, 1, 2, ...) of length {@code optionalDataLen}, pre-serialized
 * into {@link #preparedLog} at construction time.
 * <p>
 * The class is {@code Formatable}: {@link #writeExternal} and
 * {@link #readExternal} must stay exact mirrors of each other, since the
 * fields are replayed from the log during recovery.
 */
public class T_Undoable implements Undoable {
    // Values for removeWhat: controls which records the test removes on undo.
    static final int REMOVE_NONE = 0;
    static final int REMOVE_ME = 1;
    static final int REMOVE_TO_ME = 2;

    // Pre-serialized optional data, built once in writeOptionalDataToBuffer().
    protected ByteArray preparedLog;
    // Scratch buffer backing preparedLog; lazily allocated, reused across calls.
    protected DynamicByteArrayOutputStream logBuffer;

    GlobalTransactionId tid = null;  // owning transaction id (null until read/set)
    int tranSeq = -1;                // transaction sequence number (-1 on rollback records)
    int recordSeq = -1;              // record sequence number (-1 on rollback records)
    int removeWhat = REMOVE_NONE;    // one of the REMOVE_* constants above
    int LWMTranSeq = 0;              // low-water-mark transaction sequence
    boolean rollBack = true;         // true iff this record represents a rollback
    int lastRecord = -1;             // sequence of the last record, or -1
    boolean lastTransaction = false; // true iff this is the final transaction
    int optionalDataLen;             // length of the synthetic optional-data payload
    boolean verbose;                 // when true, doMe() prints a trace line

    // no-arg constructor, required by Formatable (instances are re-created
    // reflectively during log replay, then populated via readExternal()).
    public T_Undoable() {
        super();
    }

    /**
     * Builds a fully-populated test log operation and eagerly serializes its
     * optional data into {@link #preparedLog}.
     *
     * @throws T_Fail if the argument invariants fail (removeWhat out of range,
     *                or rollBack inconsistent with negative tranSeq/recordSeq),
     *                or if buffering the optional data fails.
     */
    T_Undoable(GlobalTransactionId tid, int tranSeq, int recordSeq, int removeWhat,
               int LWMTranSeq, boolean rollBack, int lastRecord, boolean lastTransaction,
               int optionalDataLen,boolean verbose)
        throws T_Fail {
        //MsgTrace.traceString("{{{tu.new");
        // Invariants: rollback records carry negative sequence numbers, and only they do.
        T_Fail.T_ASSERT((removeWhat >= REMOVE_NONE) && (removeWhat <= REMOVE_TO_ME));
        T_Fail.T_ASSERT(rollBack == (recordSeq < 0));
        T_Fail.T_ASSERT(rollBack == (tranSeq < 0));

        this.tid = tid;
        this.tranSeq = tranSeq;
        this.recordSeq = recordSeq;
        this.removeWhat = removeWhat;
        this.LWMTranSeq = LWMTranSeq;
        this.rollBack = rollBack;
        this.lastRecord = lastRecord;
        this.lastTransaction = lastTransaction;
        this.optionalDataLen = optionalDataLen;
        this.verbose = verbose;

        try {
            writeOptionalDataToBuffer();
        } catch (IOException ioe) {
            throw T_Fail.exceptionFail(ioe);
        } catch (StandardException se) {
            throw T_Fail.exceptionFail(se);
        }
        //MsgTrace.traceString("}}}tu.new");
    }

    /**
     * Fills {@link #logBuffer} with {@code optionalDataLen} bytes of the
     * pattern 0,1,2,... and points {@link #preparedLog} at that region.
     * The buffer position is rewound to the start afterwards so the data can
     * be consumed from offset 0.
     */
    private void writeOptionalDataToBuffer()
        throws StandardException, IOException {
        if (logBuffer == null) {
            // YYZ: need to revisit this. Do we really want to allocate this much for a buffer every time?
            logBuffer = new DynamicByteArrayOutputStream(1024); // init size 1K
        } else {
            logBuffer.reset();
        }

        int optionalDataStart = logBuffer.getPosition();

        if (SanityManager.DEBUG) {
            SanityManager.ASSERT(optionalDataStart == 0,
                "Buffer for writing the optional data should start at position 0");
        }

        //MsgTrace.traceString("{{{tu.writeOpetionalData");
        if (optionalDataLen > 0) {
            // Deterministic payload: byte i == (byte) i.
            byte[] buf = new byte[optionalDataLen];
            for (int ix=0;ix <optionalDataLen;ix++)
                buf[ix] = (byte)ix;
            logBuffer.write(buf);
        }
        //MsgTrace.traceString("}}}tu.writeOpetionalData");

        int optionalDataLength = logBuffer.getPosition() - optionalDataStart;

        if (SanityManager.DEBUG) {
            if (optionalDataLength != logBuffer.getUsed())
                SanityManager.THROWASSERT(
                    "wrong optional data length, optionalDataLength = "
                    + optionalDataLength + ", logBuffer.getUsed() = " + logBuffer.getUsed());
        }

        // set the position to the beginning of the buffer
        logBuffer.setPosition(optionalDataStart);

        this.preparedLog = new ByteArray (logBuffer.getByteArray(), optionalDataStart,
            optionalDataLength);
    }

    /*
     * Loggable methods
     */

    /** Applies the operation. A no-op for this test class (optionally traced). */
    public void doMe(Transaction xact, LogInstant instant, LimitObjectInput in) {
        if (verbose)
            System.out.println("Loggable.doMe("+toString()+")");
        return;
    }

    /*
     * methods to support prepared log
     * the following two methods should not be called during recover
     */

    /** Returns the optional data pre-serialized by the constructor. */
    public ByteArray getPreparedLog() {
        return this.preparedLog;
    }

    /** Test operations never need redo. */
    public boolean needsRedo(Transaction xact) {return false;}

    /** Nothing to release: no external resources are held. */
    public void releaseResource(Transaction xact) {return;}

    public int group () { return Loggable.RAWSTORE ; };

    /*
     * Undoable methods.
     */

    /** Produces the compensation (undo) operation for this record. */
    public Compensation generateUndo(Transaction xact, LimitObjectInput in)
        throws StandardException, IOException {
        //MsgTrace.traceString("+++tu.generateUndo");
        return new T_Compensation();
    }

    /*
     * Formatable methods
     */

    /**
     * Serializes all fields in a fixed order; must stay in sync with
     * {@link #readExternal}.
     *
     * @exception IOException thrown on error
     */
    public void writeExternal(ObjectOutput out) throws IOException {
        //MsgTrace.traceString("{{{tu.writeExternal");
        if (SanityManager.DEBUG) {
            // Same invariants as the constructor; guard against writing a corrupt record.
            SanityManager.ASSERT((removeWhat >= REMOVE_NONE) && (removeWhat <= REMOVE_TO_ME));
            SanityManager.ASSERT(rollBack == (recordSeq < 0));
            SanityManager.ASSERT(rollBack == (tranSeq < 0));
        }
        out.writeObject(tid);
        out.writeInt(tranSeq);
        out.writeInt(recordSeq);
        out.writeInt(removeWhat);
        out.writeInt(LWMTranSeq);
        out.writeBoolean(rollBack);
        out.writeInt(lastRecord);
        out.writeBoolean(lastTransaction);
        out.writeInt(optionalDataLen);
        //MsgTrace.traceString("}}}tu.writeExternal");
    }

    /**
     * Reads fields in the exact order written by {@link #writeExternal}.
     * A ClassCastException on the tid is translated to
     * StreamCorruptedException.
     * NOTE(review): the original exception is dropped as the cause; consider
     * chaining it if this path ever needs debugging.
     */
    public void readExternal(ObjectInput in)
        throws IOException,ClassNotFoundException {
        //MsgTrace.traceString("{{{tu.readExternal");
        try {
            tid = (GlobalTransactionId)in.readObject();
            tranSeq = in.readInt();
            recordSeq = in.readInt();
            removeWhat = in.readInt();
            LWMTranSeq = in.readInt();
            rollBack = in.readBoolean();
            lastRecord = in.readInt();
            lastTransaction = in.readBoolean();
            optionalDataLen = in.readInt();
        } catch ( ClassCastException exception ) {
            //MsgTrace.traceString("{{{tu.readExternal---exception");
            throw new StreamCorruptedException();
        }
        //MsgTrace.traceString("}}}tu.readExternal");
    }

    /** Format id under which this operation is logged. */
    public int getTypeFormatId() {
        return StoredFormatIds.SERIALIZABLE_FORMAT_ID;
    }

    /*
     * Object methods.
     */

    /** Human-readable dump of every field, used by doMe() tracing. */
    public String toString() {
        String traceTid = "tid: null";
        if (tid !=null)
            traceTid = "tid: "+tid;

        String traceRemoveWhat;
        switch (removeWhat) {
        case REMOVE_NONE:
            traceRemoveWhat = "REMOVE_NONE";
            break;
        case REMOVE_ME:
            traceRemoveWhat = "REMOVE_ME";
            break;
        case REMOVE_TO_ME:
            traceRemoveWhat = "REMOVE_TO_ME";
            break;
        default:
            traceRemoveWhat = "removeWhat: invalidValue";
            break;
        }

        return traceTid+" "+
            "tranSeq: "+tranSeq+" "+
            "recordSeq: "+recordSeq+" "+
            traceRemoveWhat+" "+
            "LWMTranSeq: "+LWMTranSeq+" "+
            "rollback: "+rollBack+" "+
            "lastRecord: "+lastRecord+" "+
            "optionalDataLen: "+optionalDataLen+" "+
            "lastTransaction: "+lastTransaction;
    }
}
/*

   Copyright 1999-2003,2005  The Apache Software Foundation

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */
package org.apache.batik.gvt;

import java.awt.Shape;
import java.awt.Rectangle;
import java.awt.geom.AffineTransform;
import java.awt.geom.Rectangle2D;
import java.lang.ref.WeakReference;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.batik.gvt.event.GraphicsNodeChangeAdapter;
import org.apache.batik.gvt.event.GraphicsNodeChangeEvent;
import org.apache.batik.ext.awt.image.renderable.Filter;

/**
 * This class tracks the changes on a GVT tree.
 * <p>
 * It listens for change-start events, remembering for each dirty node the
 * transform it had when the change began ({@link #dirtyNodes}) and the bounds
 * it covered before the change ({@link #fromBounds}). Both maps are keyed by
 * the node's WeakReference so that nodes removed from the tree can be
 * garbage-collected.
 *
 * @author <a href="mailto:Thomas.DeWeeese@Kodak.com">Thomas DeWeese</a>
 * @version $Id$
 */
public class UpdateTracker extends GraphicsNodeChangeAdapter {

    // WeakReference(GraphicsNode) -> AffineTransform at change-start time.
    // null means "no changes recorded since the last getDirtyAreas()".
    Map dirtyNodes = null;
    // WeakReference(GraphicsNode) -> Rectangle2D bounds before the change
    // (NULL_RECT is a sentinel for "no renderable bounds").
    Map fromBounds = new HashMap();

    // Sentinel: distinguishes "bounds were empty" from "no entry".
    protected static Rectangle2D NULL_RECT = new Rectangle();

    public UpdateTracker(){
    }

    /**
     * Tells whether the GVT tree has changed.
     */
    public boolean hasChanged() {
        return (dirtyNodes != null);
    }

    /**
     * Returns the list of dirty areas on GVT.
     * <p>
     * For each dirty node, both the old region (transformed by the old
     * accumulated transform) and the new region (transformed by the current
     * accumulated transform) are added, each expressed in the root's
     * coordinate system. Both tracking maps are cleared before returning.
     */
    public List getDirtyAreas() {
        if (dirtyNodes == null)
            return null;

        List ret = new LinkedList();
        Set keys = dirtyNodes.keySet();
        Iterator i = keys.iterator();
        while (i.hasNext()) {
            WeakReference gnWRef = (WeakReference)i.next();
            GraphicsNode gn = (GraphicsNode)gnWRef.get();
            // GraphicsNode srcGN = gn;

            // if the weak ref has been cleared then this node is no
            // longer part of the GVT tree (and the change should be
            // reflected in some ancestor that should also be in the
            // dirty list).
            if (gn == null) continue;

            // oat: the node's transform when the change started (old),
            // copied so preConcatenate below does not mutate the map entry.
            AffineTransform oat;
            oat = (AffineTransform)dirtyNodes.get(gnWRef);
            if (oat != null){
                oat = new AffineTransform(oat);
            }

            Rectangle2D srcORgn = (Rectangle2D)fromBounds.remove(gnWRef);

            Rectangle2D srcNRgn = null;
            AffineTransform nat = null;

            if (!(srcORgn instanceof ChngSrcRect)) {
                // For change srcs don't use the new bounds of parent node.
                srcNRgn = gn.getBounds();
                nat = gn.getTransform();
                if (nat != null)
                    nat = new AffineTransform(nat);
            }

            // System.out.println("Rgns: " + srcORgn + " - " + srcNRgn);
            // System.out.println("ATs: " + oat + " - " + nat);

            // Walk up to the root, accumulating into oat/nat the ancestor
            // transforms (old ones come from dirtyNodes when an ancestor is
            // itself dirty). A filtered ancestor replaces the new region with
            // the filter's bounds in that ancestor's space.
            do {
                // f.invalidateCache(oRng);
                // f.invalidateCache(nRng);

                // f = gn.getEnableBackgroundGraphicsNodeRable(false);
                // (need to push rgn through filter chain if any...)
                // f.invalidateCache(oRng);
                // f.invalidateCache(nRng);

                gn = gn.getParent();
                if (gn == null)
                    break; // We reached the top of the tree

                Filter f= gn.getFilter();
                if ( f != null) {
                    srcNRgn = f.getBounds2D();
                    nat = null; // filter bounds are already in this node's space
                }

                // Get the parent's current Affine
                AffineTransform at = gn.getTransform();
                // Get the parent's Affine last time we rendered.
                gnWRef = gn.getWeakReference();
                AffineTransform poat = (AffineTransform)dirtyNodes.get(gnWRef);
                if (poat == null) poat = at;
                if (poat != null) {
                    if (oat != null)
                        oat.preConcatenate(poat);
                    else
                        oat = new AffineTransform(poat);
                }

                if (at != null){
                    if (nat != null)
                        nat.preConcatenate(at);
                    else
                        nat = new AffineTransform(at);
                }
            } while (true);

            // NOTE(review): the loop above only exits with gn == null, so this
            // branch is always taken; the guard is kept for safety/symmetry.
            if (gn == null) {
                // We made it to the root graphics node so add them.
                // System.out.println
                //  ("Adding: " + oat + " - " + nat + "\n" +
                //   srcORgn + "\n" + srcNRgn + "\n");
                // <!>
                Shape oRgn = srcORgn;
                if ((oRgn != null) && (oRgn != NULL_RECT)) {
                    if (oat != null)
                        oRgn = oat.createTransformedShape(srcORgn);
                    // System.err.println("GN: " + srcGN);
                    // System.err.println("Src: " + oRgn.getBounds2D());
                    ret.add(oRgn);
                }

                if (srcNRgn != null) {
                    Shape nRgn = srcNRgn;
                    if (nat != null)
                        nRgn = nat.createTransformedShape(srcNRgn);
                    if (nRgn != null)
                        ret.add(nRgn);
                }
            }
        }
        fromBounds.clear();
        dirtyNodes.clear();
        return ret;
    }

    /**
     * This returns the dirty region for gn in the coordinate system
     * given by <code>at</code>.
     * @param gn Node tree to return dirty region for.
     * @param at Affine transform to coordinate space to accumulate
     *           dirty regions in.
     */
    public Rectangle2D getNodeDirtyRegion(GraphicsNode gn,
                                          AffineTransform at) {
        // Prefer the transform recorded at change-start; fall back to the
        // node's current transform.
        WeakReference gnWRef = gn.getWeakReference();
        AffineTransform nat = (AffineTransform)dirtyNodes.get(gnWRef);
        if (nat == null) nat = gn.getTransform();
        if (nat != null) {
            at = new AffineTransform(at); // copy: do not mutate caller's transform
            at.concatenate(nat);
        }

        Filter f= gn.getFilter();
        Rectangle2D ret = null;
        if (gn instanceof CompositeGraphicsNode) {
            // Union the children's dirty regions; a filtered composite with
            // any dirty child dirties the whole filter extent instead.
            CompositeGraphicsNode cgn = (CompositeGraphicsNode)gn;

            Iterator iter = cgn.iterator();
            while (iter.hasNext()) {
                GraphicsNode childGN = (GraphicsNode)iter.next();
                Rectangle2D r2d = getNodeDirtyRegion(childGN, at);
                if (r2d != null) {
                    if (f != null) {
                        // If we have a filter and a change region
                        // Update our full filter extents.
                        Shape s = at.createTransformedShape(f.getBounds2D());
                        ret = s.getBounds2D();
                        break;
                    }
                    if ((ret == null) || (ret == NULL_RECT)) ret = r2d;
                    else ret = ret.createUnion(r2d);
                }
            }
        } else {
            // Leaf: use (and consume) the recorded "from" bounds, falling back
            // to filter bounds or current bounds; NULL_RECT means no region.
            ret = (Rectangle2D)fromBounds.remove(gnWRef);
            if (ret == null) {
                if (f != null) ret = f.getBounds2D();
                else           ret = gn.getBounds();
            } else if (ret == NULL_RECT)
                ret = null;
            if (ret != null)
                ret = at.createTransformedShape(ret).getBounds2D();
        }
        return ret;
    }

    /** Dirty region for gn expressed in its own (identity) coordinate space. */
    public Rectangle2D getNodeDirtyRegion(GraphicsNode gn) {
        return getNodeDirtyRegion(gn, new AffineTransform());
    }

    /**
     * Receives notification of a change to a GraphicsNode.
     * Records the node's pre-change transform (first change only) and merges
     * its pre-change bounds into {@link #fromBounds}.
     * @param gnce The event object describing the GraphicsNode change.
     */
    public void changeStarted(GraphicsNodeChangeEvent gnce) {
        // System.out.println("A node has changed for: " + this);
        GraphicsNode gn = gnce.getGraphicsNode();
        WeakReference gnWRef = gn.getWeakReference();

        // Only the FIRST change since the last flush records the transform:
        // later changes keep the original "old" transform.
        boolean doPut = false;
        if (dirtyNodes == null) {
            dirtyNodes = new HashMap();
            doPut = true;
        } else if (!dirtyNodes.containsKey(gnWRef))
            doPut = true;

        if (doPut) {
            AffineTransform at = gn.getTransform();
            if (at != null) at = (AffineTransform)at.clone();
            else            at = new AffineTransform();
            dirtyNodes.put(gnWRef, at);
        }

        GraphicsNode chngSrc = gnce.getChangeSrc();
        Rectangle2D rgn = null;
        if (chngSrc != null) {
            // A child node is moving in the tree so assign it's dirty
            // regions to this node before it moves.
            Rectangle2D drgn = getNodeDirtyRegion(chngSrc);
            if (drgn != null)
                rgn = new ChngSrcRect(drgn);
        } else {
            // Otherwise just use gn's current region.
            rgn = gn.getBounds();
        }
        // Add this dirty region to any existing dirty region.
        Rectangle2D r2d = (Rectangle2D)fromBounds.remove(gnWRef);
        if (rgn != null) {
            if ((r2d != null) && (r2d != NULL_RECT)) {
                // System.err.println("GN: " + gn);
                // System.err.println("R2d: " + r2d);
                // System.err.println("Rgn: " + rgn);
                r2d = r2d.createUnion(rgn);
                // System.err.println("Union: " + r2d);
            } else r2d = rgn;
        }

        // if ((gn instanceof CompositeGraphicsNode) &&
        //     (r2d.getWidth() > 200)) {
        //     new Exception("Adding Large: " + gn).printStackTrace();
        // }

        // Store the bounds for the future.
        if (r2d == null) r2d = NULL_RECT;
        fromBounds.put(gnWRef, r2d);
    }

    /**
     * Marker subclass: tags bounds that came from a change-source child so
     * getDirtyAreas() knows not to recompute new bounds from the parent.
     */
    class ChngSrcRect extends Rectangle2D.Float {
        ChngSrcRect(Rectangle2D r2d) {
            super((float)r2d.getX(), (float)r2d.getY(),
                  (float)r2d.getWidth(), (float)r2d.getHeight());
        }
    }

    /**
     * Clears the tracker.
     */
    public void clear() {
        dirtyNodes = null;
    }
}
/*
 * Copyright (c) 2013 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */

package com.google.api.client.auth.openidconnect;

import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.webtoken.JsonWebSignature;
import com.google.api.client.json.webtoken.JsonWebToken;
import com.google.api.client.util.Beta;
import com.google.api.client.util.Key;

import java.io.IOException;
import java.util.Collection;
import java.util.List;

/**
 * {@link Beta} <br/>
 * ID token as described in <a
 * href="http://openid.net/specs/openid-connect-basic-1_0-27.html#id_token">ID Token</a>.
 *
 * <p>
 * Use {@link #parse(JsonFactory, String)} to parse an ID token from a string. Then, use the
 * {@code verify} methods to verify the ID token as required by the specification.
 * </p>
 *
 * <p>
 * Implementation is not thread-safe.
 * </p>
 *
 * @since 1.14
 * @author Yaniv Inbar
 */
@Beta
public class IdToken extends JsonWebSignature {

  /**
   * @param header header
   * @param payload payload
   * @param signatureBytes bytes of the signature
   * @param signedContentBytes bytes of the signature content
   */
  public IdToken(Header header, Payload payload, byte[] signatureBytes,
      byte[] signedContentBytes) {
    super(header, payload, signatureBytes, signedContentBytes);
  }

  // Covariant override: narrows the JWS payload to the ID-token payload type.
  @Override
  public Payload getPayload() {
    return (Payload) super.getPayload();
  }

  /**
   * Returns whether the issuer in the payload matches the given expected issuer as specified in
   * step 1 of <a
   * href="http://openid.net/specs/openid-connect-basic-1_0-27.html#id.token.validation">ID Token
   * Validation</a>.
   *
   * @param expectedIssuer expected issuer
   */
  public final boolean verifyIssuer(String expectedIssuer) {
    return expectedIssuer.equals(getPayload().getIssuer());
  }

  /**
   * Returns whether the audience in the payload contains only client IDs that are trusted as
   * specified in step 2 of <a
   * href="http://openid.net/specs/openid-connect-basic-1_0-27.html#id.token.validation">ID Token
   * Validation</a>.
   *
   * @param trustedClientIds list of trusted client IDs
   */
  public final boolean verifyAudience(Collection<String> trustedClientIds) {
    return trustedClientIds.containsAll(getPayload().getAudienceAsList());
  }

  /**
   * Returns whether the {@link Payload#getExpirationTimeSeconds} and
   * {@link Payload#getIssuedAtTimeSeconds} are valid relative to the current time, allowing for a
   * clock skew as specified in steps 5 and 6 of <a
   * href="http://openid.net/specs/openid-connect-basic-1_0-27.html#id.token.validation">ID Token
   * Validation</a>.
   *
   * @param currentTimeMillis current time in milliseconds (typically
   *        {@link System#currentTimeMillis()})
   * @param acceptableTimeSkewSeconds seconds of acceptable clock skew
   */
  public final boolean verifyTime(long currentTimeMillis, long acceptableTimeSkewSeconds) {
    return verifyExpirationTime(currentTimeMillis, acceptableTimeSkewSeconds)
        && verifyIssuedAtTime(currentTimeMillis, acceptableTimeSkewSeconds);
  }

  /**
   * Returns whether the {@link Payload#getExpirationTimeSeconds} is valid relative to the current
   * time, allowing for a clock skew as specified in step 5 of <a
   * href="http://openid.net/specs/openid-connect-basic-1_0-27.html#id.token.validation">ID Token
   * Validation</a>.
   *
   * <p>
   * NOTE(review): this assumes {@code getExpirationTimeSeconds()} is non-null; a token without an
   * {@code exp} claim would throw a NullPointerException here — confirm upstream validation.
   * </p>
   *
   * @param currentTimeMillis current time in milliseconds (typically
   *        {@link System#currentTimeMillis()})
   * @param acceptableTimeSkewSeconds seconds of acceptable clock skew
   */
  public final boolean verifyExpirationTime(
      long currentTimeMillis, long acceptableTimeSkewSeconds) {
    // exp is in seconds; extend it by the skew, then convert to millis for comparison.
    return currentTimeMillis
        <= (getPayload().getExpirationTimeSeconds() + acceptableTimeSkewSeconds) * 1000;
  }

  /**
   * Returns whether the {@link Payload#getIssuedAtTimeSeconds} is valid relative to the current
   * time, allowing for a clock skew as specified in step 6 of <a
   * href="http://openid.net/specs/openid-connect-basic-1_0-27.html#id.token.validation">ID Token
   * Validation</a>.
   *
   * @param currentTimeMillis current time in milliseconds (typically
   *        {@link System#currentTimeMillis()})
   * @param acceptableTimeSkewSeconds seconds of acceptable clock skew
   */
  public final boolean verifyIssuedAtTime(long currentTimeMillis, long acceptableTimeSkewSeconds) {
    // iat is in seconds; allow it to be up to `skew` seconds in the future.
    return currentTimeMillis
        >= (getPayload().getIssuedAtTimeSeconds() - acceptableTimeSkewSeconds) * 1000;
  }

  /**
   * Parses the given ID token string and returns the parsed ID token.
   *
   * @param jsonFactory JSON factory
   * @param idTokenString ID token string
   * @return parsed ID token
   */
  public static IdToken parse(JsonFactory jsonFactory, String idTokenString) throws IOException {
    JsonWebSignature jws =
        JsonWebSignature.parser(jsonFactory).setPayloadClass(Payload.class).parse(idTokenString);
    return new IdToken(jws.getHeader(), (Payload) jws.getPayload(), jws.getSignatureBytes(),
        jws.getSignedContentBytes());
  }

  /**
   * {@link Beta} <br/>
   * ID token payload.
   */
  @Beta
  public static class Payload extends JsonWebToken.Payload {

    /** Time (in seconds) of end-user authorization or {@code null} for none. */
    @Key("auth_time")
    private Long authorizationTimeSeconds;

    /** Authorized party or {@code null} for none. */
    @Key("azp")
    private String authorizedParty;

    /** Value used to associate a client session with an ID token or {@code null} for none. */
    @Key
    private String nonce;

    /** Access token hash value or {@code null} for none. */
    @Key("at_hash")
    private String accessTokenHash;

    /** Authentication context class reference or {@code null} for none. */
    @Key("acr")
    private String classReference;

    /** Authentication methods references or {@code null} for none. */
    @Key("amr")
    private List<String> methodsReferences;

    /** Returns the time (in seconds) of end-user authorization or {@code null} for none. */
    public final Long getAuthorizationTimeSeconds() {
      return authorizationTimeSeconds;
    }

    /**
     * Sets the time (in seconds) of end-user authorization or {@code null} for none.
     *
     * <p>
     * Overriding is only supported for the purpose of calling the super implementation and changing
     * the return type, but nothing else.
     * </p>
     */
    public Payload setAuthorizationTimeSeconds(Long authorizationTimeSeconds) {
      this.authorizationTimeSeconds = authorizationTimeSeconds;
      return this;
    }

    /**
     * Returns the authorized party or {@code null} for none.
     *
     * <p>
     * Upgrade warning: in prior version 1.15 this method returned an {@link Object}, but starting
     * with version 1.16 it returns a {@link String}.
     * </p>
     */
    public final String getAuthorizedParty() {
      return authorizedParty;
    }

    /**
     * Sets the authorized party or {@code null} for none.
     *
     * <p>
     * Overriding is only supported for the purpose of calling the super implementation and changing
     * the return type, but nothing else.
     * </p>
     *
     * <p>
     * Upgrade warning: in prior version 1.15 the parameter was an {@link Object}, but starting with
     * version 1.16 the parameter is a {@link String}.
     * </p>
     */
    public Payload setAuthorizedParty(String authorizedParty) {
      this.authorizedParty = authorizedParty;
      return this;
    }

    /**
     * Returns the value used to associate a client session with an ID token or {@code null} for
     * none.
     *
     * @since 1.16
     */
    public final String getNonce() {
      return nonce;
    }

    /**
     * Sets the value used to associate a client session with an ID token or {@code null} for none.
     *
     * <p>
     * Overriding is only supported for the purpose of calling the super implementation and changing
     * the return type, but nothing else.
     * </p>
     *
     * @since 1.16
     */
    public Payload setNonce(String nonce) {
      this.nonce = nonce;
      return this;
    }

    /**
     * Returns the access token hash value or {@code null} for none.
     *
     * @since 1.16
     */
    public final String getAccessTokenHash() {
      return accessTokenHash;
    }

    /**
     * Sets the access token hash value or {@code null} for none.
     *
     * <p>
     * Overriding is only supported for the purpose of calling the super implementation and changing
     * the return type, but nothing else.
     * </p>
     *
     * @since 1.16
     */
    public Payload setAccessTokenHash(String accessTokenHash) {
      this.accessTokenHash = accessTokenHash;
      return this;
    }

    /**
     * Returns the authentication context class reference or {@code null} for none.
     *
     * @since 1.16
     */
    public final String getClassReference() {
      return classReference;
    }

    /**
     * Sets the authentication context class reference or {@code null} for none.
     *
     * <p>
     * Overriding is only supported for the purpose of calling the super implementation and changing
     * the return type, but nothing else.
     * </p>
     *
     * @since 1.16
     */
    public Payload setClassReference(String classReference) {
      this.classReference = classReference;
      return this;
    }

    /**
     * Returns the authentication methods references or {@code null} for none.
     *
     * @since 1.16
     */
    public final List<String> getMethodsReferences() {
      return methodsReferences;
    }

    /**
     * Sets the authentication methods references or {@code null} for none.
     *
     * <p>
     * Overriding is only supported for the purpose of calling the super implementation and changing
     * the return type, but nothing else.
     * </p>
     *
     * @since 1.16
     */
    public Payload setMethodsReferences(List<String> methodsReferences) {
      this.methodsReferences = methodsReferences;
      return this;
    }

    // Covariant overrides of the JsonWebToken.Payload fluent setters: each one
    // only delegates to super and narrows the return type to Payload.

    @Override
    public Payload setExpirationTimeSeconds(Long expirationTimeSeconds) {
      return (Payload) super.setExpirationTimeSeconds(expirationTimeSeconds);
    }

    @Override
    public Payload setNotBeforeTimeSeconds(Long notBeforeTimeSeconds) {
      return (Payload) super.setNotBeforeTimeSeconds(notBeforeTimeSeconds);
    }

    @Override
    public Payload setIssuedAtTimeSeconds(Long issuedAtTimeSeconds) {
      return (Payload) super.setIssuedAtTimeSeconds(issuedAtTimeSeconds);
    }

    @Override
    public Payload setIssuer(String issuer) {
      return (Payload) super.setIssuer(issuer);
    }

    @Override
    public Payload setAudience(Object audience) {
      return (Payload) super.setAudience(audience);
    }

    @Override
    public Payload setJwtId(String jwtId) {
      return (Payload) super.setJwtId(jwtId);
    }

    @Override
    public Payload setType(String type) {
      return (Payload) super.setType(type);
    }

    @Override
    public Payload setSubject(String subject) {
      return (Payload) super.setSubject(subject);
    }

    @Override
    public Payload set(String fieldName, Object value) {
      return (Payload) super.set(fieldName, value);
    }

    @Override
    public Payload clone() {
      return (Payload) super.clone();
    }
  }
}
package tr.xip.wanikani.models;

import java.io.Serializable;

/**
 * Created by Hikari on 9/2/14.
 *
 * Base model for a WaniKani item (radical, kanji or vocabulary word). When the
 * item has been unlocked, {@link #user_specific} carries the user's review
 * statistics; while locked it is {@code null} and all user-specific getters
 * return neutral defaults (0 / null / false).
 */
public class BaseItem implements Serializable {
    public static final String TYPE_RADICAL = "radical";
    public static final String TYPE_KANJI = "kanji";
    public static final String TYPE_VOCABULARY = "vocabulary";

    // NOTE(review): field names presumably mirror the WaniKani API JSON keys
    // (snake_case) for reflective deserialization — do not rename without
    // checking the (de)serializer.
    int id;
    String character;
    String kana;
    String meaning;
    String image;
    String onyomi;
    String kunyomi;
    String important_reading;
    int level;
    UserSpecific user_specific;
    ItemType typeEnum;

    public BaseItem() {
    }

    /**
     * Builds a fully-populated item. An {@code unlockDate} of 0 means the item
     * is still locked, in which case no {@link UserSpecific} record is created.
     *
     * @param type one of {@link #TYPE_RADICAL}, {@link #TYPE_KANJI},
     *             {@link #TYPE_VOCABULARY}
     */
    public BaseItem(int id, String character, String kana, String meaning, String image,
                    String onyomi, String kunyomi, String importantReading, int level, String type,
                    String srs, long unlockDate, long availableDate, boolean burned,
                    long burnedDate, int meaningCorrect, int meaningIncorrect,
                    int meaningMaxStreak, int meaningCurrentStreak, int readingCorrect,
                    int readingIncorrect, int readingMaxStreak, int readingCurrentStreak,
                    String meaningNote, String[] userSynonyms, String readingNote) {
        this.id = id;
        this.character = character;
        this.kana = kana;
        this.meaning = meaning;
        this.image = image;
        this.onyomi = onyomi;
        this.kunyomi = kunyomi;
        this.important_reading = importantReading;
        this.level = level;
        this.typeEnum = ItemType.fromString(type);

        // Zero unlock date == still locked: leave user_specific null.
        if (unlockDate != 0)
            this.user_specific = new UserSpecific(
                    srs,
                    unlockDate,
                    availableDate,
                    burned,
                    burnedDate,
                    meaningCorrect,
                    meaningIncorrect,
                    meaningMaxStreak,
                    meaningCurrentStreak,
                    readingCorrect,
                    readingIncorrect,
                    readingMaxStreak,
                    readingCurrentStreak,
                    meaningNote,
                    userSynonyms,
                    readingNote
            );
    }

    public int getId() {
        return id;
    }

    public ItemType getType() {
        return typeEnum;
    }

    public void setType(ItemType type) {
        this.typeEnum = type;
    }

    public String getCharacter() {
        return character;
    }

    /** Kana reading; only meaningful for vocabulary items, {@code null} otherwise. */
    public String getKana() {
        return getType() == ItemType.VOCABULARY ? kana : null;
    }

    public String getMeaning() {
        return meaning;
    }

    /** On'yomi reading; only meaningful for kanji items, {@code null} otherwise. */
    public String getOnyomi() {
        return getType() == ItemType.KANJI ? onyomi : null;
    }

    /** Kun'yomi reading; only meaningful for kanji items, {@code null} otherwise. */
    public String getKunyomi() {
        return getType() == ItemType.KANJI ? kunyomi : null;
    }

    /** Which reading ("onyomi"/"kunyomi") matters for review; kanji only. */
    public String getImportantReading() {
        return getType() == ItemType.KANJI ? important_reading : null;
    }

    public String getImage() {
        return image;
    }

    public int getLevel() {
        return level;
    }

    /** An item is unlocked exactly when user-specific data exists. */
    public boolean isUnlocked() {
        return user_specific != null;
    }

    public String getSrsLevel() {
        return isUnlocked() ? user_specific.srs : null;
    }

    // Date getters: stored values are epoch seconds; the plain getters
    // convert to epoch milliseconds, the *InSeconds variants do not.

    public long getUnlockDate() {
        return isUnlocked() ? user_specific.unlocked_date * 1000 : 0;
    }

    public long getUnlockDateInSeconds() {
        return isUnlocked() ? user_specific.unlocked_date : 0;
    }

    public long getAvailableDate() {
        return isUnlocked() ? user_specific.available_date * 1000 : 0;
    }

    public long getAvailableDateInSeconds() {
        return isUnlocked() ? user_specific.available_date : 0;
    }

    public boolean isBurned() {
        return isUnlocked() && user_specific.burned;
    }

    public long getBurnedDate() {
        return isUnlocked() ? user_specific.burned_date * 1000 : 0;
    }

    public long getBurnedDateInSeconds() {
        return isUnlocked() ? user_specific.burned_date : 0;
    }

    public int getMeaningCorrect() {
        return isUnlocked() ? user_specific.meaning_correct : 0;
    }

    public int getMeaningIncorrect() {
        return isUnlocked() ? user_specific.meaning_incorrect : 0;
    }

    public int getMeaningMaxStreak() {
        return isUnlocked() ? user_specific.meaning_max_streak : 0;
    }

    public int getMeaningCurrentStreak() {
        return isUnlocked() ? user_specific.meaning_current_streak : 0;
    }

    /** Total meaning answers given (correct + incorrect); 0 while locked. */
    public int getMeaningAnswersCount() {
        return isUnlocked()
                ? user_specific.meaning_correct + user_specific.meaning_incorrect
                : 0;
    }

    /** Percentage of correct meaning answers; 0 when locked or unanswered. */
    public int getMeaningCorrectPercentage() {
        int count = getMeaningAnswersCount();
        // Explicit guard: the original relied on (int) (0.0 / 0) == 0.
        if (count == 0) return 0;
        return (int) ((double) user_specific.meaning_correct / count * 100);
    }

    /** Percentage of incorrect meaning answers; 0 when locked or unanswered. */
    public int getMeaningIncorrectPercentage() {
        int count = getMeaningAnswersCount();
        if (count == 0) return 0;
        return (int) ((double) user_specific.meaning_incorrect / count * 100);
    }

    // Reading statistics only exist for kanji items; everything else reports 0.

    public int getReadingCorrect() {
        return isUnlocked() && getType() == ItemType.KANJI ? user_specific.reading_correct : 0;
    }

    public int getReadingIncorrect() {
        return isUnlocked() && getType() == ItemType.KANJI ? user_specific.reading_incorrect : 0;
    }

    public int getReadingMaxStreak() {
        return isUnlocked() && getType() == ItemType.KANJI ? user_specific.reading_max_streak : 0;
    }

    public int getReadingCurrentStreak() {
        return isUnlocked() && getType() == ItemType.KANJI
                ? user_specific.reading_current_streak
                : 0;
    }

    /** Total reading answers (kanji only); 0 otherwise. */
    public int getReadingAnswersCount() {
        return isUnlocked() && getType() == ItemType.KANJI
                ? user_specific.reading_correct + user_specific.reading_incorrect
                : 0;
    }

    /** Percentage of correct reading answers; 0 when not a kanji, locked, or unanswered. */
    public int getReadingCorrectPercentage() {
        int count = getReadingAnswersCount();
        // count > 0 implies unlocked kanji, so user_specific access is safe.
        if (count == 0) return 0;
        return (int) ((double) user_specific.reading_correct / count * 100);
    }

    /** Percentage of incorrect reading answers; 0 when not a kanji, locked, or unanswered. */
    public int getReadingIncorrectPercentage() {
        int count = getReadingAnswersCount();
        if (count == 0) return 0;
        return (int) ((double) user_specific.reading_incorrect / count * 100);
    }

    public String getMeaningNote() {
        return isUnlocked() ? user_specific.meaning_note : null;
    }

    public String[] getUserSynonyms() {
        return isUnlocked() ? user_specific.user_synonyms : null;
    }

    /**
     * Comma-separates the user synonyms ("a, b, c"); {@code null} when locked
     * or no synonyms exist, empty string for an empty array.
     */
    public String getUserSynonymsAsString() {
        String[] synonyms = getUserSynonyms();
        if (synonyms == null) return null;

        // StringBuilder instead of repeated String concatenation (was O(n^2)).
        StringBuilder joined = new StringBuilder();
        for (int i = 0; i < synonyms.length; i++) {
            if (i > 0) joined.append(", ");
            joined.append(synonyms[i]);
        }
        return joined.toString();
    }

    public String getReadingNote() {
        return isUnlocked() && getType() == ItemType.KANJI ? user_specific.reading_note : null;
    }

    /** Item kind; round-trips with the API's string type via fromString/toString. */
    public enum ItemType implements Serializable {
        RADICAL,
        KANJI,
        VOCABULARY;

        /**
         * Maps an API type string to the enum, or {@code null} for unknown
         * values. Throws NullPointerException for a null argument (matching
         * the previous equals()-chain behavior).
         */
        public static ItemType fromString(String type) {
            switch (type) {
                case TYPE_RADICAL:
                    return RADICAL;
                case TYPE_KANJI:
                    return KANJI;
                case TYPE_VOCABULARY:
                    return VOCABULARY;
                default:
                    return null;
            }
        }

        @Override
        public String toString() {
            switch (this) {
                case RADICAL:
                    return TYPE_RADICAL;
                case KANJI:
                    return TYPE_KANJI;
                case VOCABULARY:
                    return TYPE_VOCABULARY;
                default:
                    return null;
            }
        }
    }

    /**
     * User-specific review statistics, present only for unlocked items.
     * Field names match the API's snake_case keys.
     *
     * NOTE(review): this is a NON-static inner class, so each instance holds a
     * hidden reference to its BaseItem; left as-is because changing it would
     * alter the Serializable form — confirm before making it static.
     */
    class UserSpecific implements Serializable {
        private String srs;
        private long unlocked_date;
        private long available_date;
        private boolean burned;
        private long burned_date;
        private int meaning_correct;
        private int meaning_incorrect;
        private int meaning_max_streak;
        private int meaning_current_streak;
        private int reading_correct;
        private int reading_incorrect;
        private int reading_max_streak;
        private int reading_current_streak;
        private String meaning_note;
        private String[] user_synonyms;
        private String reading_note;

        public UserSpecific(String srs, long unlockDate, long availableDate, boolean burned,
                            long burnedDate, int meaningCorrect, int meaningIncorrect,
                            int meaningMaxStreak, int meaningCurrentStreak, int readingCorrect,
                            int readingIncorrect, int readingMaxStreak, int readingCurrentStreak,
                            String meaningNote, String[] userSynonyms, String readingNote) {
            this.srs = srs;
            this.unlocked_date = unlockDate;
            this.available_date = availableDate;
            this.burned = burned;
            this.burned_date = burnedDate;
            this.meaning_correct = meaningCorrect;
            this.meaning_incorrect = meaningIncorrect;
            this.meaning_max_streak = meaningMaxStreak;
            this.meaning_current_streak = meaningCurrentStreak;
            this.reading_correct = readingCorrect;
            this.reading_incorrect = readingIncorrect;
            this.reading_max_streak = readingMaxStreak;
            this.reading_current_streak = readingCurrentStreak;
            this.meaning_note = meaningNote;
            this.user_synonyms = userSynonyms;
            this.reading_note = readingNote;
        }
    }
}
/*
 * Copyright 2008 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.javascript.jscomp;

import com.google.javascript.jscomp.CompilerOptions.LanguageMode;

/**
 * Test that warnings are generated in appropriate cases and appropriate
 * cases only by VariableReferenceCheck.
 *
 * <p>The JS snippet passed to each assertion is the entire program under test;
 * the assert* helpers at the bottom map to the specific diagnostic expected.
 */
public final class VariableReferenceCheckTest extends Es6CompilerTestCase {

  // A snippet of well-formed var declarations, reused both at top level and
  // inside function bodies to verify that correct code produces no warnings.
  private static final String VARIABLE_RUN = "var a = 1; var b = 2; var c = a + b, d = c;";

  // When set to true by a test, getOptions() turns on the lint-checks group so
  // UNUSED_LOCAL_ASSIGNMENT diagnostics are reported for that test only.
  private boolean enableUnusedLocalAssignmentCheck = false;

  @Override
  public CompilerOptions getOptions() {
    CompilerOptions options = super.getOptions();
    if (enableUnusedLocalAssignmentCheck) {
      options.setWarningLevel(DiagnosticGroups.LINT_CHECKS, CheckLevel.WARNING);
    }
    return options;
  }

  @Override
  public CompilerPass getProcessor(Compiler compiler) {
    // Treats bad reads as errors, and reports bad write warnings.
    return new VariableReferenceCheck(compiler);
  }

  @Override
  public void setUp() throws Exception {
    super.setUp();
  }

  // Well-formed declarations and uses must stay warning-free.
  public void testCorrectCode() {
    assertNoWarning("function foo(d) { (function() { d.foo(); }); d.bar(); } ");
    assertNoWarning("function foo() { bar(); } function bar() { foo(); } ");
    assertNoWarning("function f(d) { d = 3; }");
    assertNoWarning(VARIABLE_RUN);
    assertNoWarning("if (a) { var x; }");
    assertNoWarning("function f() { " + VARIABLE_RUN + "}");
  }

  // Shadowing a global var inside a function is legal.
  public void testCorrectShadowing() {
    assertNoWarning(VARIABLE_RUN + "function f() { " + VARIABLE_RUN + "}");
  }

  // Re-declaring the same var in mutually exclusive branches is legal.
  public void testCorrectRedeclare() {
    assertNoWarning(
        "function f() { if (1) { var a = 2; } else { var a = 3; } }");
  }

  public void testCorrectRecursion() {
    assertNoWarning("function f() { var x = function() { x(); }; }");
  }

  public void testCorrectCatch() {
    assertNoWarning("function f() { try { var x = 2; } catch (x) {} }");
    assertNoWarning("function f(e) { e = 3; try {} catch (e) {} }");
  }

  public void testRedeclare() {
    // Only test local scope since global scope is covered elsewhere
    assertRedeclare("function f() { var a = 2; var a = 3; }");
    assertRedeclare("function f(a) { var a = 2; }");
    assertRedeclare("function f(a) { if (!a) var a = 6; }");
  }

  // A write before the var declaration in the same function is an early reference.
  public void testEarlyReference() {
    assertUndeclared("function f() { a = 2; var a = 3; }");
  }

  public void testCorrectEarlyReference() {
    assertNoWarning("var goog = goog || {}");
    assertNoWarning("function f() { a = 2; } var a = 2;");
  }

  // A named function expression's name "bleeds" only into its own body;
  // leaving it unreferenced is fine.
  public void testUnreferencedBleedingFunction() {
    assertNoWarning("var x = function y() {}");
    assertNoWarning("var x = function y() {}; var y = 1;");
  }

  public void testReferencedBleedingFunction() {
    assertNoWarning("var x = function y() { return y(); }");
  }

  public void testDoubleDeclaration() {
    assertRedeclare("function x(y) { if (true) { var y; } }");
  }

  public void testDoubleDeclaration2() {
    assertRedeclare("function x() { var y; if (true) { var y; } }");
  }

  // Function declarations hoist, so calling before the declaration is fine...
  public void testHoistedFunction1() {
    assertNoWarning("f(); function f() {}");
  }

  public void testHoistedFunction2() {
    assertNoWarning("function g() { f(); function f() {} }");
  }

  // ...but function declarations inside blocks do not reliably hoist (pre-ES6).
  public void testNonHoistedFunction() {
    assertUndeclared("if (true) { f(); function f() {} }");
  }

  public void testNonHoistedFunction2() {
    assertNoWarning("if (false) { function f() {} f(); }");
  }

  public void testNonHoistedFunction3() {
    assertNoWarning("function g() { if (false) { function f() {} f(); }}");
  }

  public void testNonHoistedFunction4() {
    assertAmbiguous("if (false) { function f() {} } f();");
  }

  public void testNonHoistedFunction5() {
    assertAmbiguous("function g() { if (false) { function f() {} } f(); }");
  }

  public void testNonHoistedFunction6() {
    assertUndeclared("if (false) { f(); function f() {} }");
  }

  public void testNonHoistedFunction7() {
    assertUndeclared("function g() { if (false) { f(); function f() {} }}");
  }

  public void testNonHoistedRecursiveFunction1() {
    assertNoWarning("if (false) { function f() { f(); }}");
  }

  public void testNonHoistedRecursiveFunction2() {
    assertNoWarning("function g() { if (false) { function f() { f(); }}}");
  }

  public void testNonHoistedRecursiveFunction3() {
    assertNoWarning("function g() { if (false) { function f() { f(); g(); }}}");
  }

  public void testDestructuringInFor() {
    testSameEs6("for (let [key, val] of X){}");
    testSameEs6("for (let [key, [nestKey, nestVal], val] of X){}");
    testSameEs6("var {x: a, y: b} = {x: 1, y: 2}; a++; b++;");
    testWarningEs6("a++; var {x: a} = {x: 1};", VariableReferenceCheck.EARLY_REFERENCE);
  }

  public void testNoWarnInExterns1() {
    // Verify duplicate suppressions are properly recognized.
    String externs = "var google; /** @suppress {duplicate} */ var google";
    String code = "";
    testSame(externs, code, null);
  }

  public void testNoWarnInExterns2() {
    // Verify we don't complain about early references in externs
    String externs = "window; var window;";
    String code = "";
    testSame(externs, code, null);
  }

  public void testUnusedLocalVar() {
    enableUnusedLocalAssignmentCheck = true;
    assertUnused("function f() { var a; }");
    assertUnused("function f() { var a = 2; }");
    assertUnused("function f() { var a; a = 2; }");
  }

  /**
   * Inside a goog.scope, don't warn because the alias might be used in a type annotation.
   */
  public void testUnusedLocalVarInGoogScope() {
    enableUnusedLocalAssignmentCheck = true;
    testSame("goog.scope(function f() { var a; });");
    testSame("goog.scope(function f() { /** @typedef {some.long.name} */ var a; });");
    testSame("goog.scope(function f() { var a = some.long.name; });");
  }

  public void testUnusedLocalLet() {
    enableUnusedLocalAssignmentCheck = true;
    assertUnusedEs6("function f() { let a; }");
    assertUnusedEs6("function f() { let a = 2; }");
    assertUnusedEs6("function f() { let a; a = 2; }");
  }

  public void testUnusedLocalConst() {
    enableUnusedLocalAssignmentCheck = true;
    assertUnusedEs6("function f() { const a = 2; }");
  }

  // The unused-local check intentionally ignores parameters and globals.
  public void testUnusedLocalArgNoWarning() {
    enableUnusedLocalAssignmentCheck = true;
    assertNoWarning("function f(a) {}");
  }

  public void testUnusedGlobalNoWarning() {
    enableUnusedLocalAssignmentCheck = true;
    assertNoWarning("var a = 2;");
  }

  public void testUnusedAssignedInInnerFunction() {
    enableUnusedLocalAssignmentCheck = true;
    assertUnused("function f() { var x = 1; function g() { x = 2; } }");
  }

  // x-- / ++x produce a value that the loop condition reads, so x counts as used.
  public void testIncrementDecrementResultUsed() {
    enableUnusedLocalAssignmentCheck = true;
    assertNoWarning("function f() { var x = 5; while (x-- > 0) {} }");
    assertNoWarning("function f() { var x = -5; while (x++ < 0) {} }");
    assertNoWarning("function f() { var x = 5; while (--x > 0) {} }");
    assertNoWarning("function f() { var x = -5; while (++x < 0) {} }");
  }

  public void testUsedInInnerFunction() {
    enableUnusedLocalAssignmentCheck = true;
    assertNoWarning("function f() { var x = 1; function g() { use(x); } }");
  }

  public void testUsedInShorthandObjLit() {
    enableUnusedLocalAssignmentCheck = true;
    testSameEs6("function f() { var x = 1; return {x}; }");
  }

  public void testUnusedCatch() {
    enableUnusedLocalAssignmentCheck = true;
    assertNoWarning("function f() { try {} catch (x) {} }");
  }

  public void testIncrementCountsAsUse() {
    enableUnusedLocalAssignmentCheck = true;
    assertNoWarning("var a = 2; var b = []; b[a++] = 1;");
  }

  public void testForIn() {
    enableUnusedLocalAssignmentCheck = true;
    assertNoWarning("for (var prop in obj) {}");
  }

  /**
   * Expects the JS to generate one bad-read error.
   */
  private void assertRedeclare(String js) {
    testWarning(js, VariableReferenceCheck.REDECLARED_VARIABLE);
  }

  /**
   * Expects the JS to generate one bad-write warning.
   */
  private void assertUndeclared(String js) {
    testWarning(js, VariableReferenceCheck.EARLY_REFERENCE);
  }

  /**
   * Expects the JS to generate one ambiguous-function-declaration error under
   * ES5, and no diagnostic under ES6 where block-scoped functions are legal.
   */
  private void assertAmbiguous(String js) {
    testError(js, VariableReferenceCheck.AMBIGUOUS_FUNCTION_DECL, LanguageMode.ECMASCRIPT5);
    testSameEs6(js); // In ES6, these are block scoped functions, so no ambiguity.
  }

  /**
   * Expects the JS to generate one unused local error.
   */
  private void assertUnused(String js) {
    testWarning(js, VariableReferenceCheck.UNUSED_LOCAL_ASSIGNMENT);
  }

  /**
   * Expects the JS to generate one unused local error.
   */
  private void assertUnusedEs6(String js) {
    testWarningEs6(js, VariableReferenceCheck.UNUSED_LOCAL_ASSIGNMENT);
  }

  /**
   * Expects the JS to generate no errors or warnings.
   */
  private void assertNoWarning(String js) {
    testSame(js);
  }
}
package move;

import game.Game;
import gamevalue.GameValue;
import gamevalue.GameValueFactory;
import hash.BoardSet;
import hash.BoardSetFactory;

import java.util.Arrays;

import movecomparator.HeuristicComparator;
import board.Board;
import board.Piece;

/**
 * Move generator that produces, deduplicates and orders candidate moves for a
 * board position in four staged passes (generate, dedupe, dedupe-by-symmetry,
 * sort). Each stage exposes a hook so tests can observe intermediate results.
 *
 * <p>NOTE(review): this class is stateful and not thread-safe — the working
 * arrays are instance fields rebuilt on every {@link #generateMoves} call.
 *
 * @author David Wu
 */
public class MoveGeneratorMetric extends AbstractMoveGenerator {

    // Moves surviving the most recently completed stage.
    private Move[] uniqueMovesSoFar;
    private int numUniqueMovesSoFar;
    // Scratch array the current stage writes into; swapped into
    // uniqueMovesSoFar by saveMoveFiltering().
    private Move[] tempMoves;
    private int numTempMoves;
    // Hash set of boards seen so far, used to filter duplicate positions.
    BoardSet boardSet;
    // Ordering applied in stage 4; injected so callers choose the heuristic.
    private final HeuristicComparator heuristicComparator;

    public MoveGeneratorMetric(HeuristicComparator heuristicComparator) {
        this.heuristicComparator = heuristicComparator;
    }

    // Allocates a fresh result array sized to the maximum possible move count.
    public void createNewArrayForUniqueMoves(int maxNumChildren) {
        uniqueMovesSoFar = new Move[maxNumChildren];
    }

    /* All the steps of generateMoves is located in the same method so we can
     * immediately return upon seeing a winning move. This makes it easier for
     * optimizations.
     * (non-Javadoc)
     * @see move.AbstractMoveGenerator#generateMoves()
     */
    @Override
    public MoveGeneratorResults generateMoves(Game game) {
        Move currMove;
        int index, currSubBoard;
        GameValue currGameState;
        final Board gameBoard = game.getBoard();
        // Stage -1: Create a new array each time so this can be used with alpha beta
        createNewArrayForUniqueMoves(game.getMaxNumChildren());
        tempMoves = new Move[game.getMaxNumChildren()];
        boardSet = BoardSetFactory.createBoardHashSetBasic();
        //Stage 0: Make sure this isn't already a terminal state
        currGameState = game.getGameState();
        if (currGameState.isTerminalValue()) {
            throw new RuntimeException("Board is already in a terminal state: " + gameBoard.toString());
        }
        //Stage 1: Generate all possible moves
        for (index = 0, numTempMoves = 0; index < gameBoard.getNumberOfSpotsOnBoard(); index++) {
            // Only blank spots can receive a piece.
            if (gameBoard.getPieceAt(index) != Piece.BLANK) {
                continue;
            }
            // Sub-board 0 plus sub-boards 1..4 — presumably "no rotation" plus
            // the four rotatable quadrants; TODO confirm against the Move/Board docs.
            for (currSubBoard = 0; currSubBoard <= 4; currSubBoard++) {
                if (currSubBoard == 0) {
                    //This ordering is weird so we don't have to create multiple objects
                    tempMoves[numTempMoves] = MoveFactory.createMove(index, 0, false,
                            GameValueFactory.getUndetermined());
                    currMove = tempMoves[numTempMoves];
                    final Game gameAfterDoMove = game.doMove(currMove);
                    currGameState = gameAfterDoMove.getGameState().oppositeGameValue();
                    //just in case we run into the same hashed value twice. This can only happen for a WIN
                    // if (GameValue.isTerminalValue(currGameState)) {
                    //   boardSet.add(gameBoard);
                    // }
                    // game.undoMove(currMove);
                    tempMoves[numTempMoves].setResultingGameValue(currGameState);
                    // NOTE(review): when the sub-board-0 move is NOT a win,
                    // numTempMoves is deliberately not incremented, so the next
                    // iteration overwrites this slot — only winning sub-board-0
                    // moves are kept. Looks intentional (see comment above) but
                    // worth confirming with the author.
                    if (currGameState.isWin()) {
                        numTempMoves += 1;
                        break;
                    }
                } else {
                    tempMoves[numTempMoves] = MoveFactory.createMove(index, currSubBoard, true,
                            GameValueFactory.getUndetermined());
                    numTempMoves += 1;
                    /* OPTIMIZATION: moveCpy instead of moveCreate */
                    tempMoves[numTempMoves] = MoveFactory.createMove(index, currSubBoard, false,
                            GameValueFactory.getUndetermined());
                    numTempMoves += 1;
                }
            }
        }
        saveMoveFiltering(game.getMaxNumChildren());
        hookAfterStage1(new MoveGeneratorMetricInfo(uniqueMovesSoFar, numUniqueMovesSoFar, boardSet));
        //Stage 2: Remove duplicate moves w/o symmetries
        removeDuplicates(game, false);
        saveMoveFiltering(game.getMaxNumChildren());
        hookAfterStage2(new MoveGeneratorMetricInfo(uniqueMovesSoFar, numUniqueMovesSoFar, boardSet));
        //Stage 3: Remove symmetries
        boardSet = BoardSetFactory.createBoardHashSetBasic(); // clear out the old entries
        removeDuplicates(game, true);
        saveMoveFiltering(game.getMaxNumChildren());
        hookAfterStage3(new MoveGeneratorMetricInfo(uniqueMovesSoFar, numUniqueMovesSoFar, boardSet));
        //Stage 4: sort by ascending or descending game state values to help in the pruning process.
        Arrays.sort(uniqueMovesSoFar, 0, numUniqueMovesSoFar, heuristicComparator);
        hookAfterStage4(new MoveGeneratorMetricInfo(uniqueMovesSoFar, numUniqueMovesSoFar, boardSet));
        return createResults();
    }

    /**
     * Copies moves from uniqueMovesSoFar into tempMoves, dropping any move whose
     * resulting board is already in {@link #boardSet}.
     *
     * @param useSymmetries when true, boards equal under symmetry also count as duplicates
     */
    public void removeDuplicates(Game game, boolean useSymmetries) {
        Move currMove;
        int index;
        for (index = 0, numTempMoves = 0; index < numUniqueMovesSoFar; index++) {
            currMove = uniqueMovesSoFar[index];
            final Game gameAfterDoMove = game.doMove(currMove);
            final Board board = gameAfterDoMove.getBoard();
            if (currMove.getSubBoard() == 0) {
                // Sub-board-0 moves keep the game value already computed in stage 1.
                if (!boardSet.contains(board, useSymmetries)) {
                    boardSet.add(board);
                    tempMoves[numTempMoves] = MoveFactory.createMove(currMove.getIndex(),
                            currMove.getSubBoard(), currMove.getIsClockwise(),
                            currMove.getResultingGameValue());
                    numTempMoves += 1;
                }
            } else {
                if (!boardSet.contains(board, useSymmetries)) {
                    GameValue currGameValue;
                    //if in tt {
                    //  currGameValue = TT value
                    //} else {
                    currGameValue = gameAfterDoMove.getGameState().oppositeGameValue();
                    boardSet.add(board);
                    tempMoves[numTempMoves] = MoveFactory.createMove(currMove.getIndex(),
                            currMove.getSubBoard(), currMove.getIsClockwise(), currGameValue);
                    numTempMoves += 1;
                }
            }
            // game.undoMove(currMove);
        }
    }

    /**
     * Promotes tempMoves to be the surviving move set and resets the scratch
     * array for the next stage.
     */
    public void saveMoveFiltering(int maxNumChildren) {
        uniqueMovesSoFar = tempMoves;
        numUniqueMovesSoFar = numTempMoves;
        tempMoves = new Move[maxNumChildren];
        numTempMoves = 0;
    }

    // NOTE(review): hands out the internal array without a defensive copy;
    // callers can mutate generator state through it.
    public MoveGeneratorResults createResults() {
        return new MoveGeneratorResults(uniqueMovesSoFar, numUniqueMovesSoFar);
    }

    /* Override these in tests to find out more information about the stages
     * Be sure to call "super" in the subclasses and comment these out when running
     * the final version.
     */
    public void hookAfterStage1(MoveGeneratorMetricInfo info) {
        // System.out.println("hookAfterStage1:\n");
    }

    public void hookAfterStage2(MoveGeneratorMetricInfo info) {
        // System.out.println("hookAfterStage2:\n");
    }

    public void hookAfterStage3(MoveGeneratorMetricInfo info) {
        // System.out.println("hookAfterStage3:\n");
    }

    public void hookAfterStage4(MoveGeneratorMetricInfo info) {
        // System.out.println("hookAfterStage4:\n");
    }

    /* Used to provide information for the hooks */
    // NOTE(review): this never touches the enclosing instance and could be
    // declared static to avoid carrying a hidden outer-class reference
    // (Effective Java, Item 24).
    public class MoveGeneratorMetricInfo {
        private final Move[] children;
        private final int numChildren;
        private final BoardSet boardSet;

        public MoveGeneratorMetricInfo(Move[] children, int numChildren, BoardSet boardSet) {
            this.children = children;
            this.numChildren = numChildren;
            this.boardSet = boardSet;
        }

        public Move[] getChildren() {
            return children;
        }

        public int getNumChildren() {
            return numChildren;
        }

        public BoardSet getBoardSet() {
            return boardSet;
        }
    }
}
package com.intellij.vcs.log.ui.frame;

import com.google.common.primitives.Ints;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.progress.EmptyProgressIndicator;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.util.ProgressWindow;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Splitter;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.vcs.VcsDataKeys;
import com.intellij.openapi.vcs.changes.Change;
import com.intellij.openapi.vcs.changes.TextRevisionNumber;
import com.intellij.openapi.vcs.changes.committed.CommittedChangesTreeBrowser;
import com.intellij.openapi.vcs.changes.committed.RepositoryChangesBrowser;
import com.intellij.openapi.vcs.changes.ui.ChangesBrowser;
import com.intellij.openapi.vcs.history.VcsRevisionNumber;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.*;
import com.intellij.ui.components.JBLoadingPanel;
import com.intellij.ui.components.panels.Wrapper;
import com.intellij.util.ArrayUtil;
import com.intellij.util.Consumer;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.table.ComponentsListFocusTraversalPolicy;
import com.intellij.vcs.CommittedChangeListForRevision;
import com.intellij.vcs.log.*;
import com.intellij.vcs.log.data.VcsLogDataManager;
import com.intellij.vcs.log.data.VcsLogUiProperties;
import com.intellij.vcs.log.data.VisiblePack;
import com.intellij.vcs.log.impl.VcsLogUtil;
import com.intellij.vcs.log.ui.VcsLogActionPlaces;
import com.intellij.vcs.log.ui.VcsLogUiImpl;
import com.intellij.vcs.log.ui.actions.IntelliSortChooserPopupAction;
import com.intellij.vcs.log.ui.filter.VcsLogClassicFilterUi;
import com.intellij.vcs.log.util.BekUtil;
import com.intellij.vcs.log.util.VcsUserUtil;
import net.miginfocom.swing.MigLayout;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import java.awt.*;
import java.util.*;
import java.util.List;

import static com.intellij.util.ObjectUtils.assertNotNull;
import static com.intellij.util.containers.ContainerUtil.getFirstItem;

/**
 * Main panel of the VCS log: commit graph table with toolbar and branches panel
 * above it, an optional details panel below it, and a changes browser to the side.
 * Implements {@link DataProvider} so log-related actions can query the current
 * selection, and {@link Disposable} to detach detail-loading listeners on close.
 */
public class MainFrame extends JPanel implements DataProvider, Disposable {

  @NotNull private final VcsLogDataManager myLogDataManager;
  @NotNull private final VcsLogUiImpl myUi;
  @NotNull private final VcsLog myLog;
  @NotNull private final VcsLogClassicFilterUi myFilterUi;
  // Wraps the changes browser to show a loading overlay while details load.
  @NotNull private final JBLoadingPanel myChangesLoadingPane;
  @NotNull private final VcsLogGraphTable myGraphTable;
  @NotNull private final BranchesPanel myBranchesPanel;
  @NotNull private final DetailsPanel myDetailsPanel;
  // Vertical splitter: graph table on top, details panel (when shown) below.
  @NotNull private final Splitter myDetailsSplitter;
  @NotNull private final JComponent myToolbar;
  @NotNull private final RepositoryChangesBrowser myChangesBrowser;
  // Horizontal splitter: toolbars+table on the left, changes browser on the right.
  @NotNull private final Splitter myChangesBrowserSplitter;
  @NotNull private final SearchTextField myTextFilter;
  // Listeners registered with the data manager in updateWhenDetailsAreLoaded()
  // and removed again in dispose(); kept as fields so removal is possible.
  @NotNull private Runnable myTaskCompletedListener;
  @NotNull private Runnable myFullDetailsLoadedListener;
  @NotNull private Runnable myMiniDetailsLoadedListener;

  public MainFrame(@NotNull VcsLogDataManager logDataManager,
                   @NotNull VcsLogUiImpl ui,
                   @NotNull Project project,
                   @NotNull VcsLogUiProperties uiProperties,
                   @NotNull VcsLog log,
                   @NotNull VisiblePack initialDataPack) {
    // collect info
    myLogDataManager = logDataManager;
    myUi = ui;
    myLog = log;
    myFilterUi = new VcsLogClassicFilterUi(myUi, logDataManager, uiProperties, initialDataPack);

    // initialize components
    myGraphTable = new VcsLogGraphTable(ui, logDataManager, initialDataPack);
    myBranchesPanel = new BranchesPanel(logDataManager, ui, initialDataPack.getRefs());
    JComponent branchScrollPane = myBranchesPanel.createScrollPane();
    branchScrollPane.setVisible(uiProperties.isShowBranchesPanel());
    myDetailsPanel = new DetailsPanel(logDataManager, myGraphTable, ui.getColorManager(), initialDataPack);

    myChangesBrowser = new RepositoryChangesBrowser(project, null, Collections.<Change>emptyList(), null);
    myChangesBrowser.getViewer().setScrollPaneBorder(IdeBorderFactory.createBorder(SideBorder.TOP));
    // Register the browser's shortcuts on the graph table so diff / edit-source
    // work while the table has focus.
    myChangesBrowser.getDiffAction().registerCustomShortcutSet(myChangesBrowser.getDiffAction().getShortcutSet(), getGraphTable());
    myChangesBrowser.getEditSourceAction().registerCustomShortcutSet(CommonShortcuts.getEditSource(), getGraphTable());
    setDefaultEmptyText(myChangesBrowser);
    myChangesLoadingPane = new JBLoadingPanel(new BorderLayout(), project,
                                              ProgressWindow.DEFAULT_PROGRESS_DIALOG_POSTPONE_TIME_MILLIS);
    myChangesLoadingPane.add(myChangesBrowser);

    final CommitSelectionListener selectionChangeListener = new CommitSelectionListener(myChangesBrowser);
    myGraphTable.getSelectionModel().addListSelectionListener(selectionChangeListener);
    myGraphTable.getSelectionModel().addListSelectionListener(myDetailsPanel);

    updateWhenDetailsAreLoaded();

    // layout
    myTextFilter = myFilterUi.createTextFilter();
    myToolbar = createActionsToolbar();

    myDetailsSplitter = new OnePixelSplitter(true, 0.7f);
    myDetailsSplitter.setFirstComponent(setupScrolledGraph());
    setupDetailsSplitter(uiProperties.isShowDetails());

    JComponent toolbars = new JPanel(new BorderLayout());
    toolbars.add(myToolbar, BorderLayout.NORTH);
    toolbars.add(branchScrollPane, BorderLayout.CENTER);
    JComponent toolbarsAndTable = new JPanel(new BorderLayout());
    toolbarsAndTable.add(toolbars, BorderLayout.NORTH);
    toolbarsAndTable.add(myDetailsSplitter, BorderLayout.CENTER);

    myChangesBrowserSplitter = new OnePixelSplitter(false, 0.7f);
    myChangesBrowserSplitter.setFirstComponent(toolbarsAndTable);
    myChangesBrowserSplitter.setSecondComponent(myChangesLoadingPane);

    setLayout(new BorderLayout());
    add(myChangesBrowserSplitter);

    Disposer.register(logDataManager, this);
    myGraphTable.resetDefaultFocusTraversalKeys();
    setFocusTraversalPolicyProvider(true);
    setFocusTraversalPolicy(new MyFocusPolicy());
  }

  /**
   * Informs components that the actual DataPack has been updated (e.g. due to a log refresh). <br/>
   * Components may want to update their fields and/or rebuild.
   *
   * @param dataPack         new data pack.
   * @param permGraphChanged true if permanent graph itself was changed.
   */
  public void updateDataPack(@NotNull VisiblePack dataPack, boolean permGraphChanged) {
    myFilterUi.updateDataPack(dataPack);
    myDetailsPanel.updateDataPack(dataPack);
    myGraphTable.updateDataPack(dataPack, permGraphChanged);
    myBranchesPanel.updateDataPack(dataPack, permGraphChanged);
  }

  // Subscribes to the three detail-loading events so the table and details
  // panel repaint as commit data arrives asynchronously.
  private void updateWhenDetailsAreLoaded() {
    myMiniDetailsLoadedListener = new Runnable() {
      @Override
      public void run() {
        myGraphTable.initColumnSize();
        myGraphTable.repaint();
      }
    };
    myFullDetailsLoadedListener = new Runnable() {
      @Override
      public void run() {
        myDetailsPanel.valueChanged(null);
      }
    };
    myTaskCompletedListener = new Runnable() {
      @Override
      public void run() {
        myDetailsPanel.valueChanged(null);
        myGraphTable.repaint(); // we may need to repaint highlighters
      }
    };
    myLogDataManager.getMiniDetailsGetter().addDetailsLoadedListener(myMiniDetailsLoadedListener);
    myLogDataManager.getCommitDetailsGetter().addDetailsLoadedListener(myFullDetailsLoadedListener);
    myLogDataManager.getContainingBranchesGetter().addTaskCompletedListener(myTaskCompletedListener);
  }

  /** Shows ({@code state == true}) or hides the commit details panel. */
  public void setupDetailsSplitter(boolean state) {
    myDetailsSplitter.setSecondComponent(state ? myDetailsPanel : null);
  }

  private JScrollPane setupScrolledGraph() {
    JScrollPane scrollPane = ScrollPaneFactory.createScrollPane(myGraphTable, SideBorder.TOP);
    myGraphTable.viewportSet(scrollPane.getViewport());
    return scrollPane;
  }

  private static void setDefaultEmptyText(ChangesBrowser changesBrowser) {
    changesBrowser.getViewer().setEmptyText("");
  }

  @NotNull
  public VcsLogGraphTable getGraphTable() {
    return myGraphTable;
  }

  @NotNull
  public VcsLogFilterUi getFilterUi() {
    return myFilterUi;
  }

  // Builds the top toolbar row: text filter | main action toolbar | settings.
  private JComponent createActionsToolbar() {
    DefaultActionGroup toolbarGroup = new DefaultActionGroup();
    toolbarGroup.add(ActionManager.getInstance().getAction(VcsLogActionPlaces.TOOLBAR_ACTION_GROUP));

    DefaultActionGroup mainGroup = new DefaultActionGroup();
    mainGroup.add(myFilterUi.createActionGroup());
    mainGroup.addSeparator();
    if (BekUtil.isBekEnabled()) {
      if (BekUtil.isLinearBekEnabled()) {
        mainGroup.add(new IntelliSortChooserPopupAction());
        // can not register both of the actions in xml file, choosing to register an action for the "outer world"
        // I can of course if linear bek is enabled replace the action on start but why bother
      }
      else {
        mainGroup.add(ActionManager.getInstance().getAction(VcsLogActionPlaces.VCS_LOG_INTELLI_SORT_ACTION));
      }
    }
    mainGroup.add(toolbarGroup);
    ActionToolbar toolbar = createActionsToolbar(mainGroup);

    // The text filter borrows the toolbar's height so the row lines up.
    Wrapper textFilter = new Wrapper(myTextFilter);
    textFilter.setVerticalSizeReferent(toolbar.getComponent());
    textFilter.setBorder(JBUI.Borders.emptyLeft(5));

    ActionToolbar settings =
      createActionsToolbar(new DefaultActionGroup(ActionManager.getInstance().getAction(VcsLogActionPlaces.VCS_LOG_QUICK_SETTINGS_ACTION)));
    settings.setReservePlaceAutoPopupIcon(false);
    settings.setLayoutPolicy(ActionToolbar.NOWRAP_LAYOUT_POLICY);

    JPanel panel = new JPanel(new MigLayout("ins 0, fill", "[left]0[left, fill]push[right]"));
    panel.add(textFilter);
    panel.add(toolbar.getComponent());
    panel.add(settings.getComponent());
    return panel;
  }

  @NotNull
  private ActionToolbar createActionsToolbar(@NotNull DefaultActionGroup mainGroup) {
    ActionToolbar toolbar = ActionManager.getInstance().createActionToolbar(ActionPlaces.CHANGES_VIEW_TOOLBAR, mainGroup, true);
    toolbar.setTargetComponent(this);
    return toolbar;
  }

  public JComponent getMainComponent() {
    return this;
  }

  public void setBranchesPanelVisible(boolean visible) {
    // The panel may be wrapped in a scroll pane (see constructor); toggle the
    // outermost component so the space is reclaimed.
    JScrollPane scrollPane = UIUtil.getParentOfType(JScrollPane.class, myBranchesPanel);
    if (scrollPane != null) {
      scrollPane.setVisible(visible);
    }
    else {
      myBranchesPanel.setVisible(visible);
    }
  }

  /**
   * Exposes log data for actions: the log itself, its UI, the selected changes,
   * change lists, revision numbers, and (for single-root selections) the VCS.
   * Selection-dependent keys return null when the selection exceeds
   * {@code VcsLogUtil.MAX_SELECTED_COMMITS}.
   */
  @Nullable
  @Override
  public Object getData(@NonNls String dataId) {
    if (VcsLogDataKeys.VCS_LOG.is(dataId)) {
      return myLog;
    }
    else if (VcsLogDataKeys.VCS_LOG_UI.is(dataId)) {
      return myUi;
    }
    else if (VcsLogDataKeys.VCS_LOG_DATA_PROVIDER.is(dataId)) {
      return myLogDataManager;
    }
    else if (VcsDataKeys.CHANGES.is(dataId) || VcsDataKeys.SELECTED_CHANGES.is(dataId)) {
      return ArrayUtil.toObjectArray(myChangesBrowser.getCurrentDisplayedChanges(), Change.class);
    }
    else if (VcsDataKeys.CHANGE_LISTS.is(dataId)) {
      List<VcsFullCommitDetails> details = myLog.getSelectedDetails();
      if (details.size() > VcsLogUtil.MAX_SELECTED_COMMITS) return null;
      return ContainerUtil
        .map2Array(details, CommittedChangeListForRevision.class,
                   new Function<VcsFullCommitDetails, CommittedChangeListForRevision>() {
                     @Override
                     public CommittedChangeListForRevision fun(@NotNull VcsFullCommitDetails details) {
                       return new CommittedChangeListForRevision(details.getSubject(), details.getFullMessage(),
                                                                 VcsUserUtil.getShortPresentation(details.getCommitter()),
                                                                 new Date(details.getCommitTime()), details.getChanges(),
                                                                 convertToRevisionNumber(details.getId()));
                     }
                   });
    }
    else if (VcsDataKeys.VCS_REVISION_NUMBERS.is(dataId)) {
      List<CommitId> hashes = myLog.getSelectedCommits();
      if (hashes.size() > VcsLogUtil.MAX_SELECTED_COMMITS) return null;
      return ArrayUtil.toObjectArray(ContainerUtil.map(hashes, new Function<CommitId, VcsRevisionNumber>() {
        @Override
        public VcsRevisionNumber fun(CommitId commitId) {
          return convertToRevisionNumber(commitId.getHash());
        }
      }), VcsRevisionNumber.class);
    }
    else if (VcsDataKeys.VCS.is(dataId)) {
      int[] selectedRows = myGraphTable.getSelectedRows();
      if (selectedRows.length == 0 || selectedRows.length > VcsLogUtil.MAX_SELECTED_COMMITS) return null;
      Set<VirtualFile> roots = ContainerUtil.map2Set(Ints.asList(selectedRows), new Function<Integer, VirtualFile>() {
        @Override
        public VirtualFile fun(@NotNull Integer row) {
          return myGraphTable.getModel().getRoot(row);
        }
      });
      // Only meaningful when every selected commit comes from the same root.
      if (roots.size() == 1) {
        return myLogDataManager.getLogProvider(assertNotNull(getFirstItem(roots))).getSupportedVcs();
      }
    }
    return null;
  }

  @NotNull
  public JComponent getToolbar() {
    return myToolbar;
  }

  @NotNull
  public SearchTextField getTextFilter() {
    return myTextFilter;
  }

  public boolean areGraphActionsEnabled() {
    return myGraphTable.getRowCount() > 0;
  }

  public void onFiltersChange(@NotNull VcsLogFilterCollection filters) {
    myBranchesPanel.onFiltersChange(filters);
  }

  @NotNull
  private static TextRevisionNumber convertToRevisionNumber(@NotNull Hash hash) {
    return new TextRevisionNumber(hash.asString(), hash.toShortString());
  }

  /** Shows or hides the commit details panel. */
  public void showDetails(boolean state) {
    // Fix: this body previously duplicated setupDetailsSplitter() verbatim;
    // delegate instead so there is a single place that toggles the splitter.
    setupDetailsSplitter(state);
  }

  @Override
  public void dispose() {
    // Unsubscribe the listeners registered in updateWhenDetailsAreLoaded().
    myLogDataManager.getMiniDetailsGetter().removeDetailsLoadedListener(myMiniDetailsLoadedListener);
    myLogDataManager.getCommitDetailsGetter().removeDetailsLoadedListener(myFullDetailsLoadedListener);
    myLogDataManager.getContainingBranchesGetter().removeTaskCompletedListener(myTaskCompletedListener);
    myDetailsSplitter.dispose();
    myChangesBrowserSplitter.dispose();
  }

  /**
   * Loads and shows the changes of the selected commits in the changes browser.
   * Keeps only the latest request: a newer selection cancels the in-flight one.
   */
  private class CommitSelectionListener implements ListSelectionListener {
    private final ChangesBrowser myChangesBrowser;
    // Indicator of the most recent details request; used to drop stale results.
    private ProgressIndicator myLastRequest;

    public CommitSelectionListener(ChangesBrowser changesBrowser) {
      myChangesBrowser = changesBrowser;
    }

    @Override
    public void valueChanged(@Nullable ListSelectionEvent event) {
      if (event != null && event.getValueIsAdjusting()) return;

      if (myLastRequest != null) myLastRequest.cancel();
      myLastRequest = null;

      int rows = getGraphTable().getSelectedRowCount();
      if (rows < 1) {
        myChangesLoadingPane.stopLoading();
        myChangesBrowser.getViewer().setEmptyText("No commits selected");
        myChangesBrowser.setChangesToDisplay(Collections.<Change>emptyList());
      }
      else {
        myChangesBrowser.setChangesToDisplay(Collections.<Change>emptyList());
        setDefaultEmptyText(myChangesBrowser);
        myChangesLoadingPane.startLoading();

        final EmptyProgressIndicator indicator = new EmptyProgressIndicator();
        myLastRequest = indicator;
        myLog.requestSelectedDetails(new Consumer<List<VcsFullCommitDetails>>() {
          @Override
          public void consume(List<VcsFullCommitDetails> detailsList) {
            // Ignore the callback if a newer request superseded this one.
            if (myLastRequest == indicator && !(indicator.isCanceled())) {
              myLastRequest = null;
              List<Change> changes = ContainerUtil.newArrayList();
              // Reverse so changes are merged oldest-to-newest before zipping.
              List<VcsFullCommitDetails> detailsListReversed = ContainerUtil.reverse(detailsList);
              for (VcsFullCommitDetails details : detailsListReversed) {
                changes.addAll(details.getChanges());
              }
              changes = CommittedChangesTreeBrowser.zipChanges(changes);
              myChangesLoadingPane.stopLoading();
              myChangesBrowser.setChangesToDisplay(changes);
            }
          }
        }, indicator);
      }
    }
  }

  /** Tab order: graph table → changes browser → text filter. */
  private class MyFocusPolicy extends ComponentsListFocusTraversalPolicy {
    @NotNull
    @Override
    protected List<Component> getOrderedComponents() {
      return Arrays.<Component>asList(myGraphTable,
                                      myChangesBrowser.getPreferredFocusedComponent(),
                                      myTextFilter.getTextEditor());
    }
  }
}
/*
 * Copyright 2013-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.cloudfoundry.client.v2;

import org.cloudfoundry.AbstractIntegrationTest;
import org.cloudfoundry.client.CloudFoundryClient;
import org.cloudfoundry.client.v2.applications.CreateApplicationRequest;
import org.cloudfoundry.client.v2.applications.CreateApplicationResponse;
import org.cloudfoundry.client.v2.privatedomains.CreatePrivateDomainRequest;
import org.cloudfoundry.client.v2.privatedomains.CreatePrivateDomainResponse;
import org.cloudfoundry.client.v2.routemappings.CreateRouteMappingRequest;
import org.cloudfoundry.client.v2.routemappings.CreateRouteMappingResponse;
import org.cloudfoundry.client.v2.routes.AssociateRouteApplicationRequest;
import org.cloudfoundry.client.v2.routes.AssociateRouteApplicationResponse;
import org.cloudfoundry.client.v2.routes.CreateRouteRequest;
import org.cloudfoundry.client.v2.routes.CreateRouteResponse;
import org.cloudfoundry.client.v2.routes.DeleteRouteRequest;
import org.cloudfoundry.client.v2.routes.GetRouteRequest;
import org.cloudfoundry.client.v2.routes.GetRouteResponse;
import org.cloudfoundry.client.v2.routes.ListRouteApplicationsRequest;
import org.cloudfoundry.client.v2.routes.ListRouteMappingsRequest;
import org.cloudfoundry.client.v2.routes.ListRoutesRequest;
import org.cloudfoundry.client.v2.routes.RemoveRouteApplicationRequest;
import org.cloudfoundry.client.v2.routes.RouteEntity;
import org.cloudfoundry.client.v2.routes.RouteExistsRequest;
import org.cloudfoundry.client.v2.routes.UpdateRouteRequest;
import org.cloudfoundry.util.JobUtils;
import org.cloudfoundry.util.PaginationUtils;
import org.cloudfoundry.util.ResourceUtils;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
import reactor.util.function.Tuple3;

import java.time.Duration;
import java.util.concurrent.TimeoutException;
import java.util.function.Consumer;

import static org.assertj.core.api.Assertions.assertThat;
import static org.cloudfoundry.util.tuple.TupleUtils.consumer;
import static org.cloudfoundry.util.tuple.TupleUtils.function;

/**
 * Integration tests for the V2 Routes client API.
 *
 * <p>Each test provisions its own private domain (and, where needed, an application and route)
 * inside the injected organization/space, then exercises one routes operation and verifies the
 * result with a {@link StepVerifier}. All pipelines are bounded by a five-minute verify timeout.
 */
public final class RoutesTest extends AbstractIntegrationTest {

    @Autowired
    private CloudFoundryClient cloudFoundryClient;

    // NOTE: these Monos are injected by the integration-test Spring context and resolve lazily
    // to the IDs of the org/space/stack the suite runs against.
    @Autowired
    private Mono<String> organizationId;

    @Autowired
    private Mono<String> spaceId;

    @Autowired
    private Mono<String> stackId;

    @Test
    public void associateApplication() {
        String applicationName = this.nameFactory.getApplicationName();
        String domainName = this.nameFactory.getDomainName();

        Mono
            .zip(
                this.organizationId
                    .flatMap(organizationId -> createPrivateDomainId(this.cloudFoundryClient, domainName, organizationId)),
                this.spaceId
            )
            .flatMap(function((domainId, spaceId) -> Mono.zip(
                createApplicationId(this.cloudFoundryClient, spaceId, applicationName, null),
                createRouteId(this.cloudFoundryClient, domainId, spaceId)
            )))
            .delayUntil(function((applicationId, routeId) -> associateApplicationWithRoute(this.cloudFoundryClient, applicationId, routeId)))
            .flatMapMany(function((applicationId, routeId) -> PaginationUtils
                .requestClientV2Resources(page -> this.cloudFoundryClient.routes()
                    .listApplications(ListRouteApplicationsRequest.builder()
                        .page(page)
                        .routeId(routeId)
                        .build()))))
            .as(StepVerifier::create)
            .expectNextCount(1)
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void create() {
        String domainName = this.nameFactory.getDomainName();

        Mono
            .zip(
                this.organizationId
                    .flatMap(organizationId -> createPrivateDomainId(this.cloudFoundryClient, domainName, organizationId)),
                this.spaceId
            )
            .flatMap(function((domainId, spaceId) -> Mono.zip(
                Mono.just(domainId),
                Mono.just(spaceId),
                this.cloudFoundryClient.routes()
                    .create(CreateRouteRequest.builder()
                        .domainId(domainId)
                        .spaceId(spaceId)
                        .build())
                    .map(ResourceUtils::getEntity))
            ))
            .as(StepVerifier::create)
            .consumeNextWith(domainIdSpaceIdEquality())
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void delete() {
        String domainName = this.nameFactory.getDomainName();

        Mono
            .zip(
                this.organizationId
                    .flatMap(organizationId -> createPrivateDomainId(this.cloudFoundryClient, domainName, organizationId)),
                this.spaceId
            )
            .flatMap(function((domainId, spaceId) -> createRouteId(this.cloudFoundryClient, domainId, spaceId)))
            // async delete returns a job; wait for it to finish before asserting the route is gone
            .delayUntil(routeId -> this.cloudFoundryClient.routes()
                .delete(DeleteRouteRequest.builder()
                    .async(true)
                    .routeId(routeId)
                    .build())
                .flatMap(job -> JobUtils.waitForCompletion(this.cloudFoundryClient, Duration.ofMinutes(5), job)))
            .flatMap(routeId -> requestGetRoute(this.cloudFoundryClient, routeId))
            .as(StepVerifier::create)
            .consumeErrorWith(t -> assertThat(t).isInstanceOf(ClientV2Exception.class).hasMessageMatching("CF-RouteNotFound\\([0-9]+\\): The route could not be found: .*"))
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void deleteAsyncFalse() {
        String domainName = this.nameFactory.getDomainName();

        Mono
            .zip(
                this.organizationId
                    .flatMap(organizationId -> createPrivateDomainId(this.cloudFoundryClient, domainName, organizationId)),
                this.spaceId
            )
            .flatMap(function((domainId, spaceId) -> createRouteId(this.cloudFoundryClient, domainId, spaceId)))
            .delayUntil(routeId -> this.cloudFoundryClient.routes()
                .delete(DeleteRouteRequest.builder()
                    .async(false)
                    .routeId(routeId)
                    .build()))
            .flatMap(routeId -> requestGetRoute(this.cloudFoundryClient, routeId))
            .as(StepVerifier::create)
            .consumeErrorWith(t -> assertThat(t).isInstanceOf(ClientV2Exception.class).hasMessageMatching("CF-RouteNotFound\\([0-9]+\\): The route could not be found: .*"))
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void exists() {
        String domainName = this.nameFactory.getDomainName();
        String hostName = this.nameFactory.getHostName();

        Mono
            .zip(
                this.organizationId
                    .flatMap(organizationId -> createPrivateDomainId(this.cloudFoundryClient, domainName, organizationId)),
                this.spaceId
            )
            .delayUntil(function((domainId, spaceId) -> requestCreateRoute(this.cloudFoundryClient, domainId, hostName, spaceId)))
            .flatMap(function((domainId, spaceId) -> this.cloudFoundryClient.routes()
                .exists(RouteExistsRequest.builder()
                    .domainId(domainId)
                    .host(hostName)
                    .build())))
            .as(StepVerifier::create)
            .expectNext(true)
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void existsDoesNotExist() {
        String domainName = this.nameFactory.getDomainName();
        String hostName1 = this.nameFactory.getHostName();
        String hostName2 = this.nameFactory.getHostName();

        Mono
            .zip(
                this.organizationId
                    .flatMap(organizationId -> createPrivateDomainId(this.cloudFoundryClient, domainName, organizationId)),
                this.spaceId
            )
            // create a route on hostName1, then query for the never-created hostName2
            .delayUntil(function((domainId, spaceId) -> this.cloudFoundryClient.routes()
                .create(CreateRouteRequest.builder()
                    .domainId(domainId)
                    .host(hostName1)
                    .spaceId(spaceId)
                    .build())))
            .flatMap(function((domainId, spaceId) -> this.cloudFoundryClient.routes()
                .exists(RouteExistsRequest.builder()
                    .domainId(domainId)
                    .host(hostName2)
                    .build())))
            .as(StepVerifier::create)
            .expectNext(false)
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void get() {
        String domainName = this.nameFactory.getDomainName();

        Mono
            .zip(
                this.organizationId
                    .flatMap(organizationId -> createPrivateDomainId(this.cloudFoundryClient, domainName, organizationId)),
                this.spaceId
            )
            .flatMap(function((domainId, spaceId) -> Mono.zip(
                Mono.just(domainId),
                Mono.just(spaceId),
                createRouteId(this.cloudFoundryClient, domainId, spaceId))
            ))
            .flatMap(function((domainId, spaceId, routeId) -> Mono.zip(
                Mono.just(domainId),
                Mono.just(spaceId),
                this.cloudFoundryClient.routes()
                    .get(GetRouteRequest.builder()
                        .routeId(routeId)
                        .build())
                    .map(ResourceUtils::getEntity))
            ))
            .as(StepVerifier::create)
            .consumeNextWith(domainIdSpaceIdEquality())
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void listApplications() {
        String applicationName = this.nameFactory.getApplicationName();
        String domainName = this.nameFactory.getDomainName();

        Mono
            .zip(
                this.organizationId
                    .flatMap(organizationId -> createPrivateDomainId(this.cloudFoundryClient, domainName, organizationId)),
                this.spaceId
            )
            .flatMap(function((domainId, spaceId) -> Mono.zip(
                createApplicationId(this.cloudFoundryClient, spaceId, applicationName, null),
                createRouteId(this.cloudFoundryClient, domainId, spaceId)
            )))
            .delayUntil(function((applicationId, routeId) -> associateApplicationWithRoute(this.cloudFoundryClient, applicationId, routeId)))
            .flatMapMany(function((applicationId, routeId) -> PaginationUtils
                .requestClientV2Resources(page -> this.cloudFoundryClient.routes()
                    .listApplications(ListRouteApplicationsRequest.builder()
                        .page(page)
                        .routeId(routeId)
                        .build()))))
            .as(StepVerifier::create)
            .expectNextCount(1)
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void listApplicationsFilterByDiego() {
        String applicationName = this.nameFactory.getApplicationName();
        String domainName = this.nameFactory.getDomainName();

        Mono
            .zip(
                this.organizationId
                    .flatMap(organizationId -> createPrivateDomainId(this.cloudFoundryClient, domainName, organizationId)),
                this.spaceId
            )
            .flatMap(function((domainId, spaceId) -> Mono.zip(
                createApplicationId(this.cloudFoundryClient, spaceId, applicationName, null),
                createRouteId(this.cloudFoundryClient, domainId, spaceId)
            )))
            .delayUntil(function((applicationId, routeId) -> associateApplicationWithRoute(this.cloudFoundryClient, applicationId, routeId)))
            .flatMapMany(function((applicationId, routeId) -> PaginationUtils
                .requestClientV2Resources(page -> this.cloudFoundryClient.routes()
                    .listApplications(ListRouteApplicationsRequest.builder()
                        .diego(true)
                        .page(page)
                        .routeId(routeId)
                        .build()))))
            .as(StepVerifier::create)
            .expectNextCount(1)
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void listApplicationsFilterByName() {
        String applicationName = this.nameFactory.getApplicationName();
        String domainName = this.nameFactory.getDomainName();

        Mono
            .zip(
                this.organizationId
                    .flatMap(organizationId -> createPrivateDomainId(this.cloudFoundryClient, domainName, organizationId)),
                this.spaceId
            )
            .flatMap(function((domainId, spaceId) -> Mono.zip(
                createApplicationId(this.cloudFoundryClient, spaceId, applicationName, null),
                createRouteId(this.cloudFoundryClient, domainId, spaceId)
            )))
            .delayUntil(function((applicationId, routeId) -> associateApplicationWithRoute(this.cloudFoundryClient, applicationId, routeId)))
            .flatMapMany(function((applicationId, routeId) -> PaginationUtils
                .requestClientV2Resources(page -> this.cloudFoundryClient.routes()
                    .listApplications(ListRouteApplicationsRequest.builder()
                        .name(applicationName)
                        .page(page)
                        .routeId(routeId)
                        .build()))))
            .as(StepVerifier::create)
            .expectNextCount(1)
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void listApplicationsFilterByOrganizationId() {
        String applicationName = this.nameFactory.getApplicationName();
        String domainName = this.nameFactory.getDomainName();

        this.organizationId
            .flatMap(organizationId -> Mono.zip(
                createPrivateDomainId(this.cloudFoundryClient, domainName, organizationId),
                this.spaceId,
                Mono.just(organizationId)
            ))
            .flatMap(function((domainId, spaceId, organizationId) -> Mono.zip(
                createApplicationId(this.cloudFoundryClient, spaceId, applicationName, null),
                createRouteId(this.cloudFoundryClient, domainId, spaceId),
                Mono.just(organizationId)
            )))
            .delayUntil(function((applicationId, routeId, organizationId) -> associateApplicationWithRoute(this.cloudFoundryClient, applicationId, routeId)))
            .flatMapMany(function((applicationId, routeId, organizationId) -> PaginationUtils
                .requestClientV2Resources(page -> this.cloudFoundryClient.routes()
                    .listApplications(ListRouteApplicationsRequest.builder()
                        .organizationId(organizationId)
                        .page(page)
                        .routeId(routeId)
                        .build()))))
            .as(StepVerifier::create)
            .expectNextCount(1)
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void listApplicationsFilterBySpaceId() {
        String applicationName = this.nameFactory.getApplicationName();
        String domainName = this.nameFactory.getDomainName();

        Mono
            .zip(
                this.organizationId
                    .flatMap(organizationId -> createPrivateDomainId(this.cloudFoundryClient, domainName, organizationId)),
                this.spaceId
            )
            .flatMap(function((domainId, spaceId) -> Mono.zip(
                createApplicationId(this.cloudFoundryClient, spaceId, applicationName, null),
                createRouteId(this.cloudFoundryClient, domainId, spaceId),
                Mono.just(spaceId)
            )))
            .delayUntil(function((applicationId, routeId, spaceId) -> associateApplicationWithRoute(this.cloudFoundryClient, applicationId, routeId)))
            .flatMapMany(function((applicationId, routeId, spaceId) -> PaginationUtils
                .requestClientV2Resources(page -> this.cloudFoundryClient.routes()
                    .listApplications(ListRouteApplicationsRequest.builder()
                        .page(page)
                        .routeId(routeId)
                        .spaceId(spaceId)
                        .build()))))
            .as(StepVerifier::create)
            .expectNextCount(1)
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void listApplicationsFilterByStackId() {
        String applicationName = this.nameFactory.getApplicationName();
        String domainName = this.nameFactory.getDomainName();

        Mono
            .zip(
                this.organizationId
                    .flatMap(organizationId -> createPrivateDomainId(this.cloudFoundryClient, domainName, organizationId)),
                this.spaceId,
                this.stackId
            )
            .flatMap(function((domainId, spaceId, stackId) -> Mono.zip(
                createApplicationId(this.cloudFoundryClient, spaceId, applicationName, stackId),
                createRouteId(this.cloudFoundryClient, domainId, spaceId),
                Mono.just(stackId)
            )))
            .delayUntil(function((applicationId, routeId, stackId) -> associateApplicationWithRoute(this.cloudFoundryClient, applicationId, routeId)))
            // fixed: lambda parameter was misspelled "aplicationId"
            .flatMapMany(function((applicationId, routeId, stackId) -> PaginationUtils
                .requestClientV2Resources(page -> this.cloudFoundryClient.routes()
                    .listApplications(ListRouteApplicationsRequest.builder()
                        .page(page)
                        .routeId(routeId)
                        .stackId(stackId)
                        .build()))))
            .as(StepVerifier::create)
            .expectNextCount(1)
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void listFilterByDomainId() {
        String domainName = this.nameFactory.getDomainName();

        Mono
            .zip(
                this.organizationId
                    .flatMap(organizationId -> createPrivateDomainId(this.cloudFoundryClient, domainName, organizationId)),
                this.spaceId
            )
            .delayUntil(function((domainId, spaceId) -> requestCreateRoute(this.cloudFoundryClient, domainId, spaceId)))
            .flatMapMany(function((domainId, spaceId) -> PaginationUtils
                .requestClientV2Resources(page -> this.cloudFoundryClient.routes()
                    .list(ListRoutesRequest.builder()
                        .domainId(domainId)
                        .page(page)
                        .build()))))
            .as(StepVerifier::create)
            .expectNextCount(1)
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void listFilterByHost() {
        String domainName = this.nameFactory.getDomainName();
        String host = this.nameFactory.getHostName();

        Mono
            .zip(
                this.organizationId
                    .flatMap(organizationId -> createPrivateDomainId(this.cloudFoundryClient, domainName, organizationId)),
                this.spaceId
            )
            .flatMap(function((domainId, spaceId) -> requestCreateRoute(this.cloudFoundryClient, domainId, host, spaceId)))
            .flatMapMany(response -> PaginationUtils
                .requestClientV2Resources(page -> this.cloudFoundryClient.routes()
                    .list(ListRoutesRequest.builder()
                        .host(host)
                        .page(page)
                        .build())))
            .as(StepVerifier::create)
            .expectNextCount(1)
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void listFilterByOrganizationId() {
        String domainName = this.nameFactory.getDomainName();

        this.organizationId
            .flatMap(organizationId -> Mono.zip(
                createPrivateDomainId(this.cloudFoundryClient, domainName, organizationId),
                this.spaceId,
                Mono.just(organizationId)
            ))
            .delayUntil(function((domainId, spaceId, organizationId) -> requestCreateRoute(this.cloudFoundryClient, domainId, spaceId)))
            .flatMapMany(function((domainId, spaceId, organizationId) -> PaginationUtils
                .requestClientV2Resources(page -> this.cloudFoundryClient.routes()
                    .list(ListRoutesRequest.builder()
                        .organizationId(organizationId)
                        .page(page)
                        .build()))))
            // other suites may have created routes in this org, so only assert non-empty
            .count()
            .as(StepVerifier::create)
            .consumeNextWith(count -> assertThat(count).isGreaterThan(0))
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void listFilterByPath() {
        String domainName = this.nameFactory.getDomainName();
        String path = this.nameFactory.getPath();

        Mono
            .zip(
                this.organizationId
                    .flatMap(organizationId -> createPrivateDomainId(this.cloudFoundryClient, domainName, organizationId)),
                this.spaceId
            )
            .flatMap(function((domainId, spaceId) -> this.cloudFoundryClient.routes()
                .create(CreateRouteRequest.builder()
                    .domainId(domainId)
                    .path(path)
                    .spaceId(spaceId)
                    .build())))
            .flatMapMany(response -> PaginationUtils
                .requestClientV2Resources(page -> this.cloudFoundryClient.routes()
                    .list(ListRoutesRequest.builder()
                        .page(page)
                        .path(path)
                        .build())))
            .as(StepVerifier::create)
            .expectNextCount(1)
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void listMappings() {
        String applicationName = this.nameFactory.getApplicationName();
        String domainName = this.nameFactory.getDomainName();

        Mono
            .zip(
                this.organizationId
                    .flatMap(organizationId -> createPrivateDomainId(this.cloudFoundryClient, domainName, organizationId)),
                this.spaceId
            )
            .flatMap(function((domainId, spaceId) -> Mono.zip(
                createApplicationId(this.cloudFoundryClient, spaceId, applicationName, null),
                createRouteId(this.cloudFoundryClient, domainId, spaceId)
            )))
            .delayUntil(function((applicationId, routeId) -> requestCreateRouteMapping(this.cloudFoundryClient, applicationId, routeId)))
            .flatMapMany(function((applicationId, routeId) -> Mono.zip(
                Mono.just(applicationId),
                PaginationUtils
                    .requestClientV2Resources(page -> this.cloudFoundryClient.routes()
                        .listMappings(ListRouteMappingsRequest.builder()
                            .page(page)
                            .routeId(routeId)
                            .build()))
                    .single()
                    .map(response -> ResourceUtils.getEntity(response).getApplicationId()))))
            .as(StepVerifier::create)
            .consumeNextWith(tupleEquality())
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void listMappingsFilterByApplicationId() {
        String applicationName = this.nameFactory.getApplicationName();
        String domainName = this.nameFactory.getDomainName();

        Mono
            .zip(
                this.organizationId
                    .flatMap(organizationId -> createPrivateDomainId(this.cloudFoundryClient, domainName, organizationId)),
                this.spaceId
            )
            .flatMap(function((domainId, spaceId) -> Mono.zip(
                createApplicationId(this.cloudFoundryClient, spaceId, applicationName, null),
                createRouteId(this.cloudFoundryClient, domainId, spaceId)
            )))
            .delayUntil(function((applicationId, routeId) -> requestCreateRouteMapping(this.cloudFoundryClient, applicationId, routeId)))
            .flatMapMany(function((applicationId, routeId) -> Mono.zip(
                Mono.just(applicationId),
                PaginationUtils
                    .requestClientV2Resources(page -> this.cloudFoundryClient.routes()
                        .listMappings(ListRouteMappingsRequest.builder()
                            .applicationId(applicationId)
                            .page(page)
                            .routeId(routeId)
                            .build()))
                    .single()
                    .map(response -> ResourceUtils.getEntity(response).getApplicationId()))))
            .as(StepVerifier::create)
            .consumeNextWith(tupleEquality())
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void removeApplication() {
        String applicationName = this.nameFactory.getApplicationName();
        String domainName = this.nameFactory.getDomainName();

        Mono
            .zip(
                this.organizationId
                    .flatMap(organizationId -> createPrivateDomainId(this.cloudFoundryClient, domainName, organizationId)),
                this.spaceId
            )
            .flatMap(function((domainId, spaceId) -> Mono.zip(
                createApplicationId(this.cloudFoundryClient, spaceId, applicationName, null),
                createRouteId(this.cloudFoundryClient, domainId, spaceId)
            )))
            .delayUntil(function((applicationId, routeId) -> associateApplicationWithRoute(this.cloudFoundryClient, applicationId, routeId)))
            .delayUntil(function((applicationId, routeId) -> this.cloudFoundryClient.routes()
                .removeApplication(RemoveRouteApplicationRequest.builder()
                    .applicationId(applicationId)
                    .routeId(routeId)
                    .build())))
            .flatMapMany(function((applicationId, routeId) -> PaginationUtils
                .requestClientV2Resources(page -> this.cloudFoundryClient.routes()
                    .listApplications(ListRouteApplicationsRequest.builder()
                        .page(page)
                        .routeId(routeId)
                        .build()))))
            .as(StepVerifier::create)
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    @Test
    public void update() {
        String domainName = this.nameFactory.getDomainName();

        Mono
            .zip(
                this.organizationId
                    .flatMap(organizationId -> createPrivateDomainId(this.cloudFoundryClient, domainName, organizationId)),
                this.spaceId
            )
            .flatMap(function((domainId, spaceId) -> createRouteId(this.cloudFoundryClient, domainId, spaceId)))
            .flatMap(routeId -> this.cloudFoundryClient.routes()
                .update(UpdateRouteRequest.builder()
                    .host("test-host")
                    .routeId(routeId)
                    .build())
                .map(ResourceUtils::getEntity)
                .map(RouteEntity::getHost))
            .as(StepVerifier::create)
            .expectNext("test-host")
            .expectComplete()
            .verify(Duration.ofMinutes(5));
    }

    /** Associates an application with a route via the V2 routes API. */
    private static Mono<AssociateRouteApplicationResponse> associateApplicationWithRoute(CloudFoundryClient cloudFoundryClient, String applicationId, String routeId) {
        return cloudFoundryClient.routes()
            .associateApplication(AssociateRouteApplicationRequest.builder()
                .applicationId(applicationId)
                .routeId(routeId)
                .build());
    }

    /** Creates an application in the given space and returns its ID. {@code stackId} may be null. */
    private static Mono<String> createApplicationId(CloudFoundryClient cloudFoundryClient, String spaceId, String applicationName, String stackId) {
        return requestCreateApplication(cloudFoundryClient, spaceId, applicationName, stackId)
            .map(ResourceUtils::getId);
    }

    /** Creates a private domain owned by the given organization and returns its ID. */
    private static Mono<String> createPrivateDomainId(CloudFoundryClient cloudFoundryClient, String name, String organizationId) {
        return requestCreatePrivateDomain(cloudFoundryClient, name, organizationId)
            .map(ResourceUtils::getId);
    }

    /** Creates a host-less route on the given domain/space and returns its ID. */
    private static Mono<String> createRouteId(CloudFoundryClient cloudFoundryClient, String domainId, String spaceId) {
        return requestCreateRoute(cloudFoundryClient, domainId, spaceId)
            .map(ResourceUtils::getId);
    }

    /** Asserts that a route entity carries the expected domain and space IDs. */
    private static Consumer<Tuple3<String, String, RouteEntity>> domainIdSpaceIdEquality() {
        return consumer((domainId, spaceId, entity) -> {
            assertThat(entity.getDomainId()).isEqualTo(domainId);
            assertThat(entity.getSpaceId()).isEqualTo(spaceId);
        });
    }

    private static Mono<CreateApplicationResponse> requestCreateApplication(CloudFoundryClient cloudFoundryClient, String spaceId, String applicationName, String stackId) {
        return cloudFoundryClient.applicationsV2()
            .create(CreateApplicationRequest.builder()
                .diego(true)
                .name(applicationName)
                .spaceId(spaceId)
                .stackId(stackId)
                .build());
    }

    private static Mono<CreatePrivateDomainResponse> requestCreatePrivateDomain(CloudFoundryClient cloudFoundryClient, String name, String organizationId) {
        return cloudFoundryClient.privateDomains()
            .create(CreatePrivateDomainRequest.builder()
                .name(name)
                .owningOrganizationId(organizationId)
                .build());
    }

    private static Mono<CreateRouteResponse> requestCreateRoute(CloudFoundryClient cloudFoundryClient, String domainId, String spaceId) {
        return cloudFoundryClient.routes()
            .create(CreateRouteRequest.builder()
                .domainId(domainId)
                .spaceId(spaceId)
                .build());
    }

    private static Mono<CreateRouteResponse> requestCreateRoute(CloudFoundryClient cloudFoundryClient, String domainId, String host, String spaceId) {
        return cloudFoundryClient.routes()
            .create(CreateRouteRequest.builder()
                .domainId(domainId)
                .host(host)
                .spaceId(spaceId)
                .build());
    }

    private static Mono<CreateRouteMappingResponse> requestCreateRouteMapping(CloudFoundryClient cloudFoundryClient, String applicationId, String routeId) {
        return cloudFoundryClient.routeMappings()
            .create(CreateRouteMappingRequest.builder()
                .applicationId(applicationId)
                .routeId(routeId)
                .build());
    }

    private static Mono<GetRouteResponse> requestGetRoute(CloudFoundryClient cloudFoundryClient, String routeId) {
        return cloudFoundryClient.routes()
            .get(GetRouteRequest.builder()
                .routeId(routeId)
                .build());
    }

}
// File generated from our OpenAPI spec

package com.stripe.param;

import com.google.gson.annotations.SerializedName;
import com.stripe.net.ApiRequestParams;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import lombok.Getter;

/**
 * Request parameters for creating a Stripe Transfer. Build instances via {@link #builder()};
 * all fields are optional at the type level and serialized by their {@code @SerializedName}.
 */
@Getter
public class TransferCreateParams extends ApiRequestParams {
  /**
   * A positive integer in %s representing how much to transfer.
   * NOTE(review): the "%s" placeholder is a code-generation artifact; the unit is the currency's
   * smallest denomination (e.g. cents) — confirm against the Stripe API reference.
   */
  @SerializedName("amount")
  Long amount;

  /** 3-letter <a href="https://stripe.com/docs/payouts">ISO code for currency</a>. */
  @SerializedName("currency")
  String currency;

  /** An arbitrary string attached to the object. Often useful for displaying to users. */
  @SerializedName("description")
  String description;

  /**
   * The ID of a connected Stripe account. <a href="/docs/connect/charges-transfers">See the
   * Connect documentation</a> for details.
   */
  @SerializedName("destination")
  String destination;

  /** Specifies which fields in the response should be expanded. */
  @SerializedName("expand")
  List<String> expand;

  /**
   * Map of extra parameters for custom features not available in this client library. The content
   * in this map is not serialized under this field's {@code @SerializedName} value. Instead, each
   * key/value pair is serialized as if the key is a root-level field (serialized) name in this
   * param object. Effectively, this map is flattened to its parent instance.
   */
  @SerializedName(ApiRequestParams.EXTRA_PARAMS_KEY)
  Map<String, Object> extraParams;

  /**
   * Set of <a href="https://stripe.com/docs/api/metadata">key-value pairs</a> that you can attach
   * to an object. This can be useful for storing additional information about the object in a
   * structured format. Individual keys can be unset by posting an empty value to them. All keys can
   * be unset by posting an empty value to {@code metadata}.
   */
  @SerializedName("metadata")
  Map<String, String> metadata;

  /**
   * You can use this parameter to transfer funds from a charge before they are added to your
   * available balance. A pending balance will transfer immediately but the funds will not become
   * available until the original charge becomes available. <a
   * href="https://stripe.com/docs/connect/charges-transfers#transfer-availability">See the Connect
   * documentation</a> for details.
   */
  @SerializedName("source_transaction")
  String sourceTransaction;

  /**
   * The source balance to use for this transfer. One of {@code bank_account}, {@code card}, or
   * {@code fpx}. For most users, this will default to {@code card}.
   */
  @SerializedName("source_type")
  SourceType sourceType;

  /**
   * A string that identifies this transaction as part of a group. See the <a
   * href="https://stripe.com/docs/connect/charges-transfers#transfer-options">Connect
   * documentation</a> for details.
   */
  @SerializedName("transfer_group")
  String transferGroup;

  // Private: instances are created only through the Builder.
  private TransferCreateParams(
      Long amount,
      String currency,
      String description,
      String destination,
      List<String> expand,
      Map<String, Object> extraParams,
      Map<String, String> metadata,
      String sourceTransaction,
      SourceType sourceType,
      String transferGroup) {
    this.amount = amount;
    this.currency = currency;
    this.description = description;
    this.destination = destination;
    this.expand = expand;
    this.extraParams = extraParams;
    this.metadata = metadata;
    this.sourceTransaction = sourceTransaction;
    this.sourceType = sourceType;
    this.transferGroup = transferGroup;
  }

  /** Returns a new, empty builder for {@link TransferCreateParams}. */
  public static Builder builder() {
    return new Builder();
  }

  /** Mutable builder; collection fields are lazily initialized on first add/put. */
  public static class Builder {
    private Long amount;

    private String currency;

    private String description;

    private String destination;

    private List<String> expand;

    private Map<String, Object> extraParams;

    private Map<String, String> metadata;

    private String sourceTransaction;

    private SourceType sourceType;

    private String transferGroup;

    /** Finalize and obtain parameter instance from this builder. */
    public TransferCreateParams build() {
      return new TransferCreateParams(
          this.amount,
          this.currency,
          this.description,
          this.destination,
          this.expand,
          this.extraParams,
          this.metadata,
          this.sourceTransaction,
          this.sourceType,
          this.transferGroup);
    }

    /** A positive integer in %s representing how much to transfer. */
    public Builder setAmount(Long amount) {
      this.amount = amount;
      return this;
    }

    /** 3-letter <a href="https://stripe.com/docs/payouts">ISO code for currency</a>. */
    public Builder setCurrency(String currency) {
      this.currency = currency;
      return this;
    }

    /** An arbitrary string attached to the object. Often useful for displaying to users. */
    public Builder setDescription(String description) {
      this.description = description;
      return this;
    }

    /**
     * The ID of a connected Stripe account. <a href="/docs/connect/charges-transfers">See the
     * Connect documentation</a> for details.
     */
    public Builder setDestination(String destination) {
      this.destination = destination;
      return this;
    }

    /**
     * Add an element to `expand` list. A list is initialized for the first `add/addAll` call, and
     * subsequent calls add additional elements to the original list. See {@link
     * TransferCreateParams#expand} for the field documentation.
     */
    public Builder addExpand(String element) {
      if (this.expand == null) {
        this.expand = new ArrayList<>();
      }
      this.expand.add(element);
      return this;
    }

    /**
     * Add all elements to `expand` list. A list is initialized for the first `add/addAll` call,
     * and subsequent calls add additional elements to the original list. See {@link
     * TransferCreateParams#expand} for the field documentation.
     */
    public Builder addAllExpand(List<String> elements) {
      if (this.expand == null) {
        this.expand = new ArrayList<>();
      }
      this.expand.addAll(elements);
      return this;
    }

    /**
     * Add a key/value pair to `extraParams` map. A map is initialized for the first `put/putAll`
     * call, and subsequent calls add additional key/value pairs to the original map. See {@link
     * TransferCreateParams#extraParams} for the field documentation.
     */
    public Builder putExtraParam(String key, Object value) {
      if (this.extraParams == null) {
        this.extraParams = new HashMap<>();
      }
      this.extraParams.put(key, value);
      return this;
    }

    /**
     * Add all map key/value pairs to `extraParams` map. A map is initialized for the first
     * `put/putAll` call, and subsequent calls add additional key/value pairs to the original map.
     * See {@link TransferCreateParams#extraParams} for the field documentation.
     */
    public Builder putAllExtraParam(Map<String, Object> map) {
      if (this.extraParams == null) {
        this.extraParams = new HashMap<>();
      }
      this.extraParams.putAll(map);
      return this;
    }

    /**
     * Add a key/value pair to `metadata` map. A map is initialized for the first `put/putAll`
     * call, and subsequent calls add additional key/value pairs to the original map. See {@link
     * TransferCreateParams#metadata} for the field documentation.
     */
    public Builder putMetadata(String key, String value) {
      if (this.metadata == null) {
        this.metadata = new HashMap<>();
      }
      this.metadata.put(key, value);
      return this;
    }

    /**
     * Add all map key/value pairs to `metadata` map. A map is initialized for the first
     * `put/putAll` call, and subsequent calls add additional key/value pairs to the original map.
     * See {@link TransferCreateParams#metadata} for the field documentation.
     */
    public Builder putAllMetadata(Map<String, String> map) {
      if (this.metadata == null) {
        this.metadata = new HashMap<>();
      }
      this.metadata.putAll(map);
      return this;
    }

    /**
     * You can use this parameter to transfer funds from a charge before they are added to your
     * available balance. A pending balance will transfer immediately but the funds will not become
     * available until the original charge becomes available. <a
     * href="https://stripe.com/docs/connect/charges-transfers#transfer-availability">See the
     * Connect documentation</a> for details.
     */
    public Builder setSourceTransaction(String sourceTransaction) {
      this.sourceTransaction = sourceTransaction;
      return this;
    }

    /**
     * The source balance to use for this transfer. One of {@code bank_account}, {@code card}, or
     * {@code fpx}. For most users, this will default to {@code card}.
     */
    public Builder setSourceType(SourceType sourceType) {
      this.sourceType = sourceType;
      return this;
    }

    /**
     * A string that identifies this transaction as part of a group. See the <a
     * href="https://stripe.com/docs/connect/charges-transfers#transfer-options">Connect
     * documentation</a> for details.
     */
    public Builder setTransferGroup(String transferGroup) {
      this.transferGroup = transferGroup;
      return this;
    }
  }

  /** Wire values for the {@code source_type} parameter. */
  public enum SourceType implements ApiRequestParams.EnumParam {
    @SerializedName("bank_account")
    BANK_ACCOUNT("bank_account"),

    @SerializedName("card")
    CARD("card"),

    @SerializedName("fpx")
    FPX("fpx");

    @Getter(onMethod_ = {@Override})
    private final String value;

    SourceType(String value) {
      this.value = value;
    }
  }
}
package org.wikipedia.server.mwapi;

import org.wikipedia.page.Page;
import org.wikipedia.page.PageProperties;
import org.wikipedia.page.PageTitle;
import org.wikipedia.page.Section;
import org.wikipedia.server.PageLead;
import org.wikipedia.server.PageLeadProperties;
import org.wikipedia.util.log.L;

import com.google.gson.JsonArray;
import com.google.gson.JsonDeserializationContext;
import com.google.gson.JsonDeserializer;
import com.google.gson.JsonElement;
import com.google.gson.JsonParseException;
import com.google.gson.annotations.Expose;

import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.VisibleForTesting;

import java.lang.reflect.Type;
import java.util.List;

import static org.wikipedia.util.StringUtil.capitalizeFirstChar;

/**
 * Gson POJO for loading the first stage of page content.
 */
public class MwPageLead implements PageLead {
    @Expose private MwServiceError error;
    @Expose private Mobileview mobileview;

    /**
     * Returns true when the response is unusable: either the service reported an
     * explicit error, or it returned no "mobileview" payload at all.
     */
    @Override
    public boolean hasError() {
        // if mobileview is not set something went terribly wrong
        return error != null || mobileview == null;
    }

    @Nullable
    public MwServiceError getError() {
        return error;
    }

    /** Logs the given message as an error, appending the service error details when present. */
    public void logError(String message) {
        if (error != null) {
            message += ": " + error.toString();
        }
        L.e(message);
    }

    /**
     * Converts this response into a {@link Page}.
     * Note: before using this check that #getMobileview != null
     *
     * @param title the title the caller requested; it is adjusted for redirects
     *              and title normalization before being attached to the Page
     */
    @Override
    public Page toPage(@NonNull PageTitle title) {
        return new Page(adjustPageTitle(title),
                mobileview.getSections(),
                mobileview.toPageProperties());
    }

    /**
     * Replaces the requested title with the redirect target or the normalized title
     * reported by the service, and attaches the Wikidata description.
     */
    private PageTitle adjustPageTitle(@NonNull PageTitle title) {
        if (mobileview.getRedirected() != null) {
            // Handle redirects properly.
            title = new PageTitle(mobileview.getRedirected(), title.getSite(),
                    title.getThumbUrl());
        } else if (mobileview.getNormalizedTitle() != null) {
            // We care about the normalized title only if we were not redirected
            title = new PageTitle(mobileview.getNormalizedTitle(), title.getSite(),
                    title.getThumbUrl());
        }
        title.setDescription(mobileview.getDescription());
        return title;
    }

    /**
     * Returns the HTML content of the lead (first) section, or the empty string when
     * the response carried no mobileview payload, no section list, or an empty one.
     */
    public String getLeadSectionContent() {
        if (mobileview != null) {
            // Guard against a null or empty section list (sections is @Nullable);
            // previously this threw NPE/IndexOutOfBoundsException on malformed responses.
            List<Section> sections = mobileview.getSections();
            if (sections != null && !sections.isEmpty()) {
                return sections.get(0).getContent();
            }
        }
        return "";
    }

    @VisibleForTesting
    public Mobileview getMobileview() {
        return mobileview;
    }

    /**
     * Almost everything is in this inner class.
     * Mirrors the fields of the mediawiki "mobileview" API response.
     */
    public static class Mobileview implements PageLeadProperties {
        @Expose private int id;
        @Expose private long revision;
        @Expose @Nullable private String lastmodified;
        @Expose @Nullable private String displaytitle;
        @Expose @Nullable private String redirected;
        @Expose @Nullable private String normalizedtitle;
        @Expose private int languagecount;
        @Expose private boolean editable;
        @Expose private boolean mainpage;
        @Expose private boolean disambiguation;
        @Expose @Nullable private String description;
        @Expose @Nullable private Image image;
        @Expose @Nullable private Thumb thumb;
        @Expose @Nullable private Protection protection;
        @Expose @Nullable private List<Section> sections;

        /** Converter */
        public PageProperties toPageProperties() {
            return new PageProperties(this);
        }

        public int getId() {
            return id;
        }

        public long getRevision() {
            return revision;
        }

        @Nullable
        public String getLastModified() {
            return lastmodified;
        }

        public int getLanguageCount() {
            return languagecount;
        }

        @Nullable
        public String getDisplayTitle() {
            return displaytitle;
        }

        @Nullable
        public String getRedirected() {
            return redirected;
        }

        @Nullable
        public String getNormalizedTitle() {
            return normalizedtitle;
        }

        /** @return the description with its first character capitalized, or null if absent. */
        @Nullable
        public String getDescription() {
            return description != null ? capitalizeFirstChar(description) : null;
        }

        @Nullable
        public String getLeadImageUrl() {
            return thumb != null ? thumb.getUrl() : null;
        }

        @Nullable
        public String getLeadImageName() {
            return image != null ? image.getFile() : null;
        }

        @Nullable
        public String getFirstAllowedEditorRole() {
            return protection != null ? protection.getFirstAllowedEditorRole() : null;
        }

        public boolean isEditable() {
            return editable;
        }

        public boolean isMainPage() {
            return mainpage;
        }

        public boolean isDisambiguation() {
            return disambiguation;
        }

        @Nullable
        public List<Section> getSections() {
            return sections;
        }
    }

    /**
     * For the lead image File: page name
     */
    public static class Image {
        @Expose private String file;

        public String getFile() {
            return file;
        }
    }

    /**
     * For the lead image URL
     */
    public static class Thumb {
        @Expose private String url;

        public String getUrl() {
            return url;
        }
    }

    /**
     * Protection settings for this page
     */
    public static class Protection {
        @Expose private String[] edit;

        public Protection() {
            this.edit = new String[]{};
        }

        public Protection(String[] edit) {
            this.edit = edit;
        }

        // TODO should send them all, but callers need to be updated, too, (future patch)
        @Nullable
        public String getFirstAllowedEditorRole() {
            if (edit.length > 0) {
                return edit[0];
            }
            return null;
        }

        /**
         * Need a custom Deserializer since the mediawiki API provides an inconsistent API.
         * Sometimes it returns an object, and other times when it's empty it returns an empty
         * array. See https://phabricator.wikimedia.org/T69054
         */
        public static class Deserializer implements JsonDeserializer<Protection> {
            /**
             * Gson invokes this call-back method during deserialization when it encounters a field
             * of the specified type.
             * <p>In the implementation of this call-back method, you should consider invoking
             * {@link JsonDeserializationContext#deserialize(JsonElement, Type)} method to create
             * objects for any non-trivial field of the returned object. However, you should never
             * invoke it on the the same type passing {@code json} since that will cause an infinite
             * loop (Gson will call your call-back method again).
             *
             * @param jsonEl The Json data being deserialized
             * @param typeOfT The type of the Object to deserialize to
             * @param jdc The deserialization context
             * @return a deserialized object of the specified type typeOfT which is
             * a subclass of {@code T}
             * @throws JsonParseException if json is not in the expected format of {@code typeofT}
             */
            @Override
            public Protection deserialize(JsonElement jsonEl, Type typeOfT,
                                          JsonDeserializationContext jdc) throws JsonParseException {
                if (jsonEl.isJsonArray()) {
                    // Empty-protection responses arrive as an array; anything non-empty
                    // in array form is unexpected, so log it and fall through.
                    JsonArray array = jsonEl.getAsJsonArray();
                    if (array.size() != 0) {
                        L.w("Unexpected array size " + array.toString());
                    }
                } else {
                    // Object form: copy the "edit" editor roles, when present.
                    JsonElement editEl = jsonEl.getAsJsonObject().get("edit");
                    if (editEl != null) {
                        JsonArray editorRolesJsonArray = editEl.getAsJsonArray();
                        String[] editorRoles = new String[editorRolesJsonArray.size()];
                        for (int i = 0; i < editorRolesJsonArray.size(); i++) {
                            editorRoles[i] = editorRolesJsonArray.get(i).getAsString();
                        }
                        return new Protection(editorRoles);
                    }
                }
                // Default: no protection restrictions.
                return new Protection();
            }
        }
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.trino.sql;

import io.airlift.slice.Slice;
import io.airlift.slice.Slices;
import io.trino.operator.scalar.AbstractTestFunctions;
import io.trino.spi.TrinoException;
import io.trino.type.JoniRegexp;
import io.trino.type.LikeFunctions;
import org.testng.annotations.Test;

import java.util.Optional;

import static io.airlift.slice.Slices.utf8Slice;
import static io.trino.spi.type.BooleanType.BOOLEAN;
import static io.trino.type.LikeFunctions.isLikePattern;
import static io.trino.type.LikeFunctions.likeChar;
import static io.trino.type.LikeFunctions.likePattern;
import static io.trino.type.LikeFunctions.likeVarchar;
import static io.trino.type.LikeFunctions.patternConstantPrefixBytes;
import static io.trino.type.LikeFunctions.unescapeLiteralLikePattern;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;

/**
 * Tests for the SQL LIKE helpers in {@link LikeFunctions}: pattern compilation,
 * varchar/char matching, escape-character handling, and the literal-pattern /
 * constant-prefix optimizations.
 */
public class TestLikeFunctions
        extends AbstractTestFunctions
{
    /**
     * Builds a slice whose backing buffer starts at a non-zero offset, to verify that
     * matching does not assume the value begins at index 0 of the underlying array.
     */
    private static Slice offsetHeapSlice(String value)
    {
        Slice source = Slices.utf8Slice(value);
        Slice result = Slices.allocate(source.length() + 5);
        result.setBytes(2, source);
        return result.slice(2, source.length());
    }

    // Basic '%' (any run) and '_' (single char) semantics, for both zero-offset
    // and offset-backed slices.
    @Test
    public void testLikeBasic()
    {
        JoniRegexp regex = LikeFunctions.compileLikePattern(utf8Slice("f%b__"));
        assertTrue(likeVarchar(utf8Slice("foobar"), regex));
        assertTrue(likeVarchar(offsetHeapSlice("foobar"), regex));

        assertFunction("'foob' LIKE 'f%b__'", BOOLEAN, false);
        assertFunction("'foob' LIKE 'f%b'", BOOLEAN, true);
    }

    // CHAR(n) semantics: the value is conceptually space-padded to length n before matching.
    @Test
    public void testLikeChar()
    {
        JoniRegexp regex = LikeFunctions.compileLikePattern(utf8Slice("f%b__"));
        assertTrue(likeChar(6L, utf8Slice("foobar"), regex));
        assertTrue(likeChar(6L, offsetHeapSlice("foobar"), regex));
        assertTrue(likeChar(6L, utf8Slice("foob"), regex));
        assertTrue(likeChar(6L, offsetHeapSlice("foob"), regex));
        assertFalse(likeChar(7L, utf8Slice("foob"), regex));
        assertFalse(likeChar(7L, offsetHeapSlice("foob"), regex));

        assertFunction("cast('foob' as char(6)) LIKE 'f%b__'", BOOLEAN, true);
        assertFunction("cast('foob' as char(7)) LIKE 'f%b__'", BOOLEAN, false);
    }

    // Trailing spaces in the pattern are significant; likePattern(n, ...) pads the
    // pattern out to the given char length.
    @Test
    public void testLikeSpacesInPattern()
    {
        JoniRegexp regex = LikeFunctions.compileLikePattern(utf8Slice("ala  "));
        assertTrue(likeVarchar(utf8Slice("ala  "), regex));
        assertFalse(likeVarchar(utf8Slice("ala"), regex));

        regex = LikeFunctions.likePattern(5L, utf8Slice("ala"));
        assertTrue(likeVarchar(utf8Slice("ala  "), regex));
        assertFalse(likeVarchar(utf8Slice("ala"), regex));
    }

    // Newlines are ordinary characters in LIKE: matched literally in the pattern...
    @Test
    public void testLikeNewlineInPattern()
    {
        JoniRegexp regex = LikeFunctions.compileLikePattern(utf8Slice("%o\nbar"));
        assertTrue(likeVarchar(utf8Slice("foo\nbar"), regex));
    }

    // ...crossed by '%' before the match...
    @Test
    public void testLikeNewlineBeforeMatch()
    {
        JoniRegexp regex = LikeFunctions.compileLikePattern(utf8Slice("%b%"));
        assertTrue(likeVarchar(utf8Slice("foo\nbar"), regex));
    }

    // ...and crossed by '%' inside the match.
    @Test
    public void testLikeNewlineInMatch()
    {
        JoniRegexp regex = LikeFunctions.compileLikePattern(utf8Slice("f%b%"));
        assertTrue(likeVarchar(utf8Slice("foo\nbar"), regex));
    }

    // Multi-byte UTF-8 pattern against a non-matching value must terminate quickly
    // (timeOut guards against pathological regex behavior).
    @Test(timeOut = 1000)
    public void testLikeUtf8Pattern()
    {
        JoniRegexp regex = likePattern(utf8Slice("%\u540d\u8a89%"), utf8Slice("\\"));
        assertFalse(likeVarchar(utf8Slice("foo"), regex));
    }

    // An invalid UTF-8 byte (0xFF) in the value must not break or hang matching.
    @SuppressWarnings("NumericCastThatLosesPrecision")
    @Test(timeOut = 1000)
    public void testLikeInvalidUtf8Value()
    {
        Slice value = Slices.wrappedBuffer(new byte[] {'a', 'b', 'c', (byte) 0xFF, 'x', 'y'});
        JoniRegexp regex = likePattern(utf8Slice("%b%"), utf8Slice("\\"));
        assertTrue(likeVarchar(value, regex));
    }

    // Without an ESCAPE clause, backslashes are plain literal characters.
    @Test
    public void testBackslashesNoSpecialTreatment()
    {
        JoniRegexp regex = LikeFunctions.compileLikePattern(utf8Slice("\\abc\\/\\\\"));
        assertTrue(likeVarchar(utf8Slice("\\abc\\/\\\\"), regex));
    }

    // The escape character may escape itself and the wildcards.
    @Test
    public void testSelfEscaping()
    {
        JoniRegexp regex = likePattern(utf8Slice("\\\\abc\\%"), utf8Slice("\\"));
        assertTrue(likeVarchar(utf8Slice("\\abc%"), regex));
    }

    // Any character can serve as the escape character, not just backslash.
    @Test
    public void testAlternateEscapedCharacters()
    {
        JoniRegexp regex = likePattern(utf8Slice("xxx%x_abcxx"), utf8Slice("x"));
        assertTrue(likeVarchar(utf8Slice("x%_abcx"), regex));
    }

    // An escape character must be followed by '%', '_' or itself — lone or dangling
    // escapes are rejected at compile time.
    @Test
    public void testInvalidLikePattern()
    {
        assertThatThrownBy(() -> likePattern(utf8Slice("#"), utf8Slice("#")))
                .isInstanceOf(TrinoException.class)
                .hasMessage("Escape character must be followed by '%', '_' or the escape character itself");
        assertThatThrownBy(() -> likePattern(utf8Slice("abc#abc"), utf8Slice("#")))
                .isInstanceOf(TrinoException.class)
                .hasMessage("Escape character must be followed by '%', '_' or the escape character itself");
        assertThatThrownBy(() -> likePattern(utf8Slice("abc#"), utf8Slice("#")))
                .isInstanceOf(TrinoException.class)
                .hasMessage("Escape character must be followed by '%', '_' or the escape character itself");
    }

    // isLikePattern is true only when an UNESCAPED wildcard remains; fully escaped
    // wildcards make the pattern a plain literal. Invalid escapes still throw.
    @Test
    public void testIsLikePattern()
    {
        assertFalse(isLikePattern(utf8Slice("abc"), Optional.empty()));
        assertFalse(isLikePattern(utf8Slice("abc#_def"), Optional.of(utf8Slice("#"))));
        assertFalse(isLikePattern(utf8Slice("abc##def"), Optional.of(utf8Slice("#"))));
        assertFalse(isLikePattern(utf8Slice("abc#%def"), Optional.of(utf8Slice("#"))));
        assertTrue(isLikePattern(utf8Slice("abc%def"), Optional.empty()));
        assertTrue(isLikePattern(utf8Slice("abcdef_"), Optional.empty()));
        assertTrue(isLikePattern(utf8Slice("abcdef##_"), Optional.of(utf8Slice("#"))));
        assertTrue(isLikePattern(utf8Slice("%abcdef#_"), Optional.of(utf8Slice("#"))));
        assertThatThrownBy(() -> isLikePattern(utf8Slice("#"), Optional.of(utf8Slice("#"))))
                .isInstanceOf(TrinoException.class)
                .hasMessage("Escape character must be followed by '%', '_' or the escape character itself");
        assertThatThrownBy(() -> isLikePattern(utf8Slice("abc#abc"), Optional.of(utf8Slice("#"))))
                .isInstanceOf(TrinoException.class)
                .hasMessage("Escape character must be followed by '%', '_' or the escape character itself");
        assertThatThrownBy(() -> isLikePattern(utf8Slice("abc#"), Optional.of(utf8Slice("#"))))
                .isInstanceOf(TrinoException.class)
                .hasMessage("Escape character must be followed by '%', '_' or the escape character itself");
    }

    // Length in BYTES of the leading literal prefix (before the first unescaped
    // wildcard); escape characters count toward the byte length.
    @Test
    public void testPatternConstantPrefixBytes()
    {
        assertEquals(patternConstantPrefixBytes(utf8Slice("abc"), Optional.empty()), 3);
        assertEquals(patternConstantPrefixBytes(utf8Slice("abc#_def"), Optional.of(utf8Slice("#"))), 8);
        assertEquals(patternConstantPrefixBytes(utf8Slice("abc##def"), Optional.of(utf8Slice("#"))), 8);
        assertEquals(patternConstantPrefixBytes(utf8Slice("abc#%def"), Optional.of(utf8Slice("#"))), 8);
        assertEquals(patternConstantPrefixBytes(utf8Slice("abc%def"), Optional.empty()), 3);
        assertEquals(patternConstantPrefixBytes(utf8Slice("abcdef_"), Optional.empty()), 6);
        assertEquals(patternConstantPrefixBytes(utf8Slice("abcdef##_"), Optional.of(utf8Slice("#"))), 8);
        assertEquals(patternConstantPrefixBytes(utf8Slice("%abcdef#_"), Optional.of(utf8Slice("#"))), 0);
        assertThatThrownBy(() -> patternConstantPrefixBytes(utf8Slice("#"), Optional.of(utf8Slice("#"))))
                .isInstanceOf(TrinoException.class)
                .hasMessage("Escape character must be followed by '%', '_' or the escape character itself");
        assertThatThrownBy(() -> patternConstantPrefixBytes(utf8Slice("abc#abc"), Optional.of(utf8Slice("#"))))
                .isInstanceOf(TrinoException.class)
                .hasMessage("Escape character must be followed by '%', '_' or the escape character itself");
        assertThatThrownBy(() -> patternConstantPrefixBytes(utf8Slice("abc#"), Optional.of(utf8Slice("#"))))
                .isInstanceOf(TrinoException.class)
                .hasMessage("Escape character must be followed by '%', '_' or the escape character itself");
    }

    // Unescaping a purely literal pattern removes the escape characters themselves.
    @Test
    public void testUnescapeValidLikePattern()
    {
        assertEquals(unescapeLiteralLikePattern(utf8Slice("abc"), Optional.empty()), utf8Slice("abc"));
        assertEquals(unescapeLiteralLikePattern(utf8Slice("abc#_"), Optional.of(utf8Slice("#"))), utf8Slice("abc_"));
        assertEquals(unescapeLiteralLikePattern(utf8Slice("a##bc#_"), Optional.of(utf8Slice("#"))), utf8Slice("a#bc_"));
        assertEquals(unescapeLiteralLikePattern(utf8Slice("a###_bc"), Optional.of(utf8Slice("#"))), utf8Slice("a#_bc"));
    }
}
/*
 * Copyright 2008-2012 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.sptools.api.endpoint.catalog;

import org.broadleafcommerce.core.web.api.wrapper.CategoriesWrapper;
import org.broadleafcommerce.core.web.api.wrapper.CategoryAttributeWrapper;
import org.broadleafcommerce.core.web.api.wrapper.CategoryWrapper;
import org.broadleafcommerce.core.web.api.wrapper.InventoryWrapper;
import org.broadleafcommerce.core.web.api.wrapper.MediaWrapper;
import org.broadleafcommerce.core.web.api.wrapper.ProductAttributeWrapper;
import org.broadleafcommerce.core.web.api.wrapper.ProductWrapper;
import org.broadleafcommerce.core.web.api.wrapper.RelatedProductWrapper;
import org.broadleafcommerce.core.web.api.wrapper.SearchResultsWrapper;
import org.broadleafcommerce.core.web.api.wrapper.SkuAttributeWrapper;
import org.broadleafcommerce.core.web.api.wrapper.SkuWrapper;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import java.util.List;

import javax.servlet.http.HttpServletRequest;

/**
 * This is a reference REST API endpoint for catalog. This can be modified, used as is, or removed.
 * The purpose is to provide an out of the box RESTful catalog service implementation, but also
 * to allow the implementor to have fine control over the actual API, URIs, and general JAX-RS annotations.
 *
 * <p>Every method is a pure delegate to the Broadleaf base endpoint; only the Spring MVC
 * routing annotations are defined here. All endpoints are GET and are rooted at "/catalog/".
 *
 * @author Kelly Tisdell
 *
 */
@RestController
@RequestMapping(value = "/catalog/",
        produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE},
        consumes = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE})
public class CatalogEndpoint extends org.broadleafcommerce.core.web.api.endpoint.catalog.CatalogEndpoint {

    /** GET /catalog/product/{id} — looks up a single product by id. */
    @Override
    @RequestMapping(value = "product/{id}", method = RequestMethod.GET)
    public ProductWrapper findProductById(HttpServletRequest request, @PathVariable("id") Long id) {
        return super.findProductById(request, id);
    }

    /** GET /catalog/search — full-catalog keyword search with paging. */
    @Override
    @RequestMapping(value = "search", method = RequestMethod.GET)
    public SearchResultsWrapper findSearchResultsByQuery(HttpServletRequest request,
            @RequestParam("q") String q,
            @RequestParam(value = "pageSize", defaultValue = "15") Integer pageSize,
            @RequestParam(value = "page", defaultValue = "1") Integer page) {
        return super.findSearchResultsByQuery(request, q, pageSize, page);
    }

    /** GET /catalog/search/category/{categoryId} — keyword search restricted to one category. */
    @Override
    @RequestMapping(value = "search/category/{categoryId}", method = RequestMethod.GET)
    public SearchResultsWrapper findSearchResultsByCategoryAndQuery(HttpServletRequest request,
            @PathVariable("categoryId") Long categoryId,
            @RequestParam("q") String q,
            @RequestParam(value = "pageSize", defaultValue = "15") Integer pageSize,
            @RequestParam(value = "page", defaultValue = "1") Integer page) {
        return super.findSearchResultsByCategoryAndQuery(request, categoryId, q, pageSize, page);
    }

    /** GET /catalog/product/{id}/skus — all SKUs belonging to a product. */
    @Override
    @RequestMapping(value = "product/{id}/skus", method = RequestMethod.GET)
    public List<SkuWrapper> findSkusByProductById(HttpServletRequest request, @PathVariable("id") Long id) {
        return super.findSkusByProductById(request, id);
    }

    /** GET /catalog/product/{id}/defaultSku — the product's default SKU. */
    @Override
    @RequestMapping(value = "product/{id}/defaultSku", method = RequestMethod.GET)
    public SkuWrapper findDefaultSkuByProductId(HttpServletRequest request, @PathVariable("id") Long id) {
        return super.findDefaultSkuByProductId(request, id);
    }

    /**
     * GET /catalog/categories — categories matching a name, with limit/offset paging.
     * NOTE(review): "name" has no defaultValue and no required=false, so it is a
     * mandatory request parameter here — confirm whether it should be optional.
     */
    @Override
    @RequestMapping(value = "categories", method = RequestMethod.GET)
    public CategoriesWrapper findAllCategories(HttpServletRequest request,
            @RequestParam("name") String name,
            @RequestParam(value = "limit", defaultValue = "20") int limit,
            @RequestParam(value = "offset", defaultValue = "0") int offset) {
        return super.findAllCategories(request, name, limit, offset);
    }

    /** GET /catalog/category/{id}/categories — subcategories, optionally filtered to active ones. */
    @Override
    @RequestMapping(value = "category/{id}/categories", method = RequestMethod.GET)
    public CategoriesWrapper findSubCategories(HttpServletRequest request,
            @PathVariable("id") Long id,
            @RequestParam(value = "limit", defaultValue = "20") int limit,
            @RequestParam(value = "offset", defaultValue = "0") int offset,
            @RequestParam(value = "active", defaultValue = "true") boolean active) {
        return super.findSubCategories(request, id, limit, offset, active);
    }

    /** GET /catalog/category/{id}/activeSubcategories — active subcategories only. */
    @Override
    @RequestMapping(value = "category/{id}/activeSubcategories", method = RequestMethod.GET)
    public CategoriesWrapper findActiveSubCategories(HttpServletRequest request,
            @PathVariable("id") Long id,
            @RequestParam(value = "limit", defaultValue = "20") int limit,
            @RequestParam(value = "offset", defaultValue = "0") int offset) {
        return super.findActiveSubCategories(request, id, limit, offset);
    }

    /** GET /catalog/category/{id} — a category with independently paged products and subcategories. */
    @Override
    @RequestMapping(value = "category/{id}", method = RequestMethod.GET)
    public CategoryWrapper findCategoryById(HttpServletRequest request,
            @PathVariable("id") Long id,
            @RequestParam(value = "productLimit", defaultValue = "20") int productLimit,
            @RequestParam(value = "productOffset", defaultValue = "0") int productOffset,
            @RequestParam(value = "subcategoryLimit", defaultValue = "20") int subcategoryLimit,
            @RequestParam(value = "subcategoryOffset", defaultValue = "0") int subcategoryOffset) {
        return super.findCategoryById(request, id, productLimit, productOffset, subcategoryLimit, subcategoryOffset);
    }

    /** GET /catalog/category — category lookup by a single search parameter (id or name). */
    @Override
    @RequestMapping(value = "category", method = RequestMethod.GET)
    public CategoryWrapper findCategoryByIdOrName(HttpServletRequest request,
            @RequestParam("searchParameter") String searchParameter,
            @RequestParam(value = "productLimit", defaultValue = "20") int productLimit,
            @RequestParam(value = "productOffset", defaultValue = "0") int productOffset,
            @RequestParam(value = "subcategoryLimit", defaultValue = "20") int subcategoryLimit,
            @RequestParam(value = "subcategoryOffset", defaultValue = "0") int subcategoryOffset) {
        return super.findCategoryByIdOrName(request, searchParameter, productLimit, productOffset, subcategoryLimit, subcategoryOffset);
    }

    /** GET /catalog/category/{id}/category-attributes — attributes of a category. */
    @Override
    @RequestMapping(value = "category/{id}/category-attributes", method = RequestMethod.GET)
    public List<CategoryAttributeWrapper> findCategoryAttributesForCategory(HttpServletRequest request,
            @PathVariable("id") Long id) {
        return super.findCategoryAttributesForCategory(request, id);
    }

    /** GET /catalog/product/{id}/related-products/upsale — up-sale recommendations. */
    @Override
    @RequestMapping(value = "product/{id}/related-products/upsale", method = RequestMethod.GET)
    public List<RelatedProductWrapper> findUpSaleProductsByProduct(HttpServletRequest request,
            @PathVariable("id") Long id,
            @RequestParam(value = "limit", defaultValue = "20") int limit,
            @RequestParam(value = "offset", defaultValue = "0") int offset) {
        return super.findUpSaleProductsByProduct(request, id, limit, offset);
    }

    /** GET /catalog/product/{id}/related-products/crosssale — cross-sale recommendations. */
    @Override
    @RequestMapping(value = "product/{id}/related-products/crosssale", method = RequestMethod.GET)
    public List<RelatedProductWrapper> findCrossSaleProductsByProduct(HttpServletRequest request,
            @PathVariable("id") Long id,
            @RequestParam(value = "limit", defaultValue = "20") int limit,
            @RequestParam(value = "offset", defaultValue = "0") int offset) {
        return super.findCrossSaleProductsByProduct(request, id, limit, offset);
    }

    /** GET /catalog/product/{id}/product-attributes — attributes of a product. */
    @Override
    @RequestMapping(value = "product/{id}/product-attributes", method = RequestMethod.GET)
    public List<ProductAttributeWrapper> findProductAttributesForProduct(HttpServletRequest request,
            @PathVariable("id") Long id) {
        return super.findProductAttributesForProduct(request, id);
    }

    /** GET /catalog/sku/{id}/sku-attributes — attributes of a SKU. */
    @Override
    @RequestMapping(value = "sku/{id}/sku-attributes", method = RequestMethod.GET)
    public List<SkuAttributeWrapper> findSkuAttributesForSku(HttpServletRequest request,
            @PathVariable("id") Long id) {
        return super.findSkuAttributesForSku(request, id);
    }

    /** GET /catalog/sku/{id}/media — media items attached to a SKU. */
    @Override
    @RequestMapping(value = "sku/{id}/media", method = RequestMethod.GET)
    public List<MediaWrapper> findMediaForSku(HttpServletRequest request,
            @PathVariable("id") Long id) {
        return super.findMediaForSku(request, id);
    }

    /** GET /catalog/sku/{id} — looks up a single SKU by id. */
    @Override
    @RequestMapping(value = "sku/{id}", method = RequestMethod.GET)
    public SkuWrapper findSkuById(HttpServletRequest request, @PathVariable("id") Long id) {
        return super.findSkuById(request, id);
    }

    /** GET /catalog/sku/inventory — inventory levels for a batch of SKU ids ("id" repeated). */
    @Override
    @RequestMapping(value = "sku/inventory", method = RequestMethod.GET)
    public List<InventoryWrapper> findInventoryForSkus(HttpServletRequest request,
            @RequestParam("id") List<Long> ids) {
        return super.findInventoryForSkus(request, ids);
    }

    /** GET /catalog/product/{id}/media — media items attached to a product. */
    @Override
    @RequestMapping(value = "product/{id}/media", method = RequestMethod.GET)
    public List<MediaWrapper> findMediaForProduct(HttpServletRequest request,
            @PathVariable("id") Long id) {
        return super.findMediaForProduct(request, id);
    }

    /** GET /catalog/category/{id}/media — media items attached to a category. */
    @Override
    @RequestMapping(value = "category/{id}/media", method = RequestMethod.GET)
    public List<MediaWrapper> findMediaForCategory(HttpServletRequest request,
            @PathVariable("id") Long id) {
        return super.findMediaForCategory(request, id);
    }

    /** GET /catalog/product/{id}/categories — parent categories containing a product. */
    @Override
    @RequestMapping(value = "product/{id}/categories", method = RequestMethod.GET)
    public CategoriesWrapper findParentCategoriesForProduct(HttpServletRequest request,
            @PathVariable("id") Long id) {
        return super.findParentCategoriesForProduct(request, id);
    }
}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.ui;

import com.intellij.icons.AllIcons;
import com.intellij.ui.ColorUtil;
import com.intellij.ui.JBColor;
import com.intellij.ui.ScreenUtil;
import com.intellij.util.ui.GraphicsUtil;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;
import javax.swing.event.PopupMenuEvent;
import javax.swing.event.PopupMenuListener;
import javax.swing.plaf.basic.DefaultMenuLayout;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseWheelEvent;

/**
 * A {@link JPopupMenu} whose content can be scrolled when it is taller than the screen:
 * the custom {@link MyLayout} clamps the preferred height to the screen height, shifts
 * items vertically by a scroll offset (mouse wheel or hovering near the top/bottom edge),
 * and paints fade-out gradients with up/down arrow icons at the clipped edges.
 *
 * @author ignatov
 */
public class JBPopupMenu extends JPopupMenu {
  private MyLayout myLayout;

  public JBPopupMenu() {
    this(null);
  }

  public JBPopupMenu(String label) {
    super(label);
    // Receive mouse and mouse-wheel events directly so wheel scrolling works.
    enableEvents(AWTEvent.MOUSE_EVENT_MASK | AWTEvent.MOUSE_WHEEL_EVENT_MASK);
    myLayout = new MyLayout(this);
    setLayout(myLayout);
    // Heavyweight popup: lightweight popups cannot extend beyond the owner window.
    setLightWeightPopupEnabled(false);
  }

  /** Translates wheel rotation over the visible menu into a scroll-offset change. */
  @Override
  public void processMouseWheelEvent(MouseWheelEvent e) {
    if (!isShowing()) return;
    int rotation = e.getWheelRotation();
    if (rotation == 0) return;
    if (e.getComponent() != this) {
      // Event originated on a child item: re-target coordinates to this menu.
      e = (MouseWheelEvent)SwingUtilities.convertMouseEvent(e.getComponent(), e, this);
    }
    Point p = e.getPoint();
    SwingUtilities.convertPointToScreen(p, this);
    Point tPoint = getLocationOnScreen();
    // Only scroll when the pointer is actually inside the menu's screen bounds.
    if (p.x >= tPoint.x && p.x <= tPoint.x + getWidth()
        && p.y >= tPoint.y && p.y <= tPoint.y + getHeight()) {
      myLayout.updateShift(rotation * 10);
    }
  }

  /** Rejects any layout other than the scrolling MyLayout installed in the constructor. */
  @Override
  public void setLayout(LayoutManager mgr) {
    if (!(mgr instanceof MyLayout)) return;
    super.setLayout(mgr);
  }

  @Override
  public void paint(Graphics g) {
    GraphicsUtil.setupAntialiasing(g);
    super.paint(g);
    // Overlay the edge fades/arrows on top of the normally painted items.
    LayoutManager layout = getLayout();
    if (layout instanceof MyLayout) {
      ((MyLayout)layout).paintIfNeed(g);
    }
  }

  /**
   * Vertical menu layout with a scroll offset (myShift). A 40 ms Swing timer runs while
   * the popup is visible and auto-scrolls when the mouse hovers near the clipped edges.
   */
  private static class MyLayout extends DefaultMenuLayout implements ActionListener {
    private JPopupMenu myTarget;
    int myShift = 0;                // current scroll offset in pixels (0 = top)
    int myScrollDirection = 0;      // -1 = up, 0 = idle, 1 = down
    Timer myTimer;                  // drives hover auto-scrolling while visible

    public MyLayout(final JPopupMenu target) {
      super(target, BoxLayout.PAGE_AXIS);
      myTarget = target;
      myTimer = UIUtil.createNamedTimer("PopupTimer", 40, this);
      // Start/stop the auto-scroll timer in lockstep with popup visibility.
      myTarget.addPopupMenuListener(new PopupMenuListener() {
        @Override
        public void popupMenuWillBecomeVisible(PopupMenuEvent e) {
          switchTimer(true);
        }

        @Override
        public void popupMenuWillBecomeInvisible(PopupMenuEvent e) {
          switchTimer(false);
          JRootPane rootPane = SwingUtilities.getRootPane(target);
          if (rootPane != null) {
            // NOTE(review): presumably clears a macOS window-fade hook set elsewhere —
            // confirm against the code that installs "apple.awt._windowFadeDelegate".
            rootPane.putClientProperty("apple.awt._windowFadeDelegate", null);
          }
        }

        @Override
        public void popupMenuCanceled(PopupMenuEvent e) {
          switchTimer(false);
        }
      });
      if (myTarget.isVisible()) {
        switchTimer(true);
      }
    }

    /** Starts or stops the hover-scroll timer, avoiding redundant start/stop calls. */
    private void switchTimer(boolean on) {
      if (on && !myTimer.isRunning()) {
        myTimer.start();
      }
      if (!on && myTimer.isRunning()) {
        myTimer.stop();
      }
    }

    /** Timer tick: auto-scroll when the pointer hovers within 10 px of a clipped edge. */
    @Override
    public void actionPerformed(ActionEvent e) {
      if (!myTarget.isShowing()) return;
      PointerInfo info = MouseInfo.getPointerInfo();
      if (info == null) return;
      Point mouseLocation = info.getLocation();
      Point targetLocation = myTarget.getLocationOnScreen();
      // Ignore the pointer when it is horizontally outside the menu.
      if (mouseLocation.x < targetLocation.x || mouseLocation.x > targetLocation.x + myTarget.getWidth()) {
        return;
      }
      if (Math.abs(mouseLocation.y - targetLocation.y - getMaxHeight()) < 10) {
        myScrollDirection = 1;   // near the bottom edge: scroll down
      }
      else if (Math.abs(mouseLocation.y - targetLocation.y) < 10) {
        myScrollDirection = -1;  // near the top edge: scroll up
      }
      else {
        myScrollDirection = 0;
      }
      if (myScrollDirection == 0) {
        myTarget.revalidate();
        myTarget.repaint();
        return;
      }
      // Re-deliver the pointer position so item highlighting tracks the scroll.
      SwingUtilities.convertPointFromScreen(mouseLocation, myTarget);
      myTarget.dispatchEvent(
        new MouseEvent(myTarget, MouseEvent.MOUSE_ENTERED, System.currentTimeMillis(), 0,
                       mouseLocation.x, mouseLocation.y, 0, false));
      updateShift(5 * myScrollDirection);
    }

    /** Applies a scroll delta, clamped to [0, contentHeight - visibleHeight]. */
    private void updateShift(int increment) {
      int maxHeight = super.preferredLayoutSize(myTarget).height - getMaxHeight();
      int newShift = Math.max(0, Math.min(maxHeight, myShift + increment));
      if (newShift != myShift) {
        myShift = newShift;
        myTarget.revalidate();
        myTarget.repaint();
        // Dispose child windows (e.g. open submenus) so they don't float detached
        // from their now-scrolled parent item.
        Window w = UIUtil.getWindow(myTarget.getComponent());
        if (w != null) {
          for (Window window : w.getOwnedWindows()) {
            window.dispose();
          }
        }
      }
    }

    // Ten background-colored lines with decreasing alpha: the fade-out gradient
    // painted over a clipped edge.
    private Color[] dim = new Color[]{
      JBColor.background(),
      ColorUtil.withAlpha(JBColor.background(), .9),
      ColorUtil.withAlpha(JBColor.background(), .8),
      ColorUtil.withAlpha(JBColor.background(), .7),
      ColorUtil.withAlpha(JBColor.background(), .6),
      ColorUtil.withAlpha(JBColor.background(), .5),
      ColorUtil.withAlpha(JBColor.background(), .4),
      ColorUtil.withAlpha(JBColor.background(), .3),
      ColorUtil.withAlpha(JBColor.background(), .2),
      ColorUtil.withAlpha(JBColor.background(), .1),
    };

    /** Paints fade gradients and up/down arrows at whichever edges have clipped content. */
    public void paintIfNeed(Graphics g) {
      if (myShift > 0) {
        // Content is scrolled past the top: fade + up arrow.
        for (int i = 0; i < dim.length; i++) {
          g.setColor(dim[i]);
          g.drawLine(0, i, myTarget.getWidth(), i);
        }
        AllIcons.General.SplitUp.paintIcon(myTarget, g,
          myTarget.getWidth() / 2 - AllIcons.General.SplitUp.getIconWidth() / 2, 0);
      }
      if (super.preferredLayoutSize(myTarget).height - getMaxHeight() - myShift > 0) {
        // More content remains below the visible area: fade + down arrow.
        for (int i = 0; i < dim.length; i++) {
          g.setColor(dim[i]);
          g.drawLine(0, myTarget.getHeight() - i, myTarget.getWidth(), myTarget.getHeight() - i);
        }
        AllIcons.General.SplitDown.paintIcon(myTarget, g,
          myTarget.getWidth() / 2 - AllIcons.General.SplitDown.getIconWidth() / 2,
          myTarget.getHeight() - AllIcons.General.SplitDown.getIconHeight());
      }
    }

    /** Stacks children vertically at full width, starting at -myShift to realize scrolling. */
    @Override
    public void layoutContainer(Container target) {
      Insets insets = target.getInsets();
      int width = target.getWidth() - insets.left - insets.right;
      Component[] components = target.getComponents();
      int y = -myShift + insets.top;
      for (Component component : components) {
        int height = component.getPreferredSize().height;
        component.setBounds(insets.left, y, width, height);
        y += height;
      }
    }

    /** Height cap for the popup: the height of the screen it is (or its invoker is) on. */
    private int getMaxHeight() {
      GraphicsConfiguration configuration = myTarget.getGraphicsConfiguration();
      if (configuration == null && myTarget.getInvoker() != null) {
        configuration = myTarget.getInvoker().getGraphicsConfiguration();
      }
      if (configuration == null) return Short.MAX_VALUE; // no screen info: effectively uncapped
      Rectangle screenRectangle = ScreenUtil.getScreenRectangle(configuration);
      return screenRectangle.height;
    }

    /** Preferred size of the stacked items, with height clamped to the screen height. */
    @NotNull
    @Override
    public Dimension preferredLayoutSize(Container target) {
      Dimension dimension = super.preferredLayoutSize(target);
      dimension.height = Math.min(getMaxHeight(), dimension.height);
      return dimension;
    }
  }
}
/*
 * Extremely Compiler Collection
 * Copyright (c) 2015-2020, Jianping Zeng.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package backend.analysis;

import backend.codegen.MachineBasicBlock;
import backend.codegen.MachineFunction;
import gnu.trove.map.hash.TObjectIntHashMap;
import tools.Util;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;

import static backend.support.DepthFirstOrder.dfTraversal;

/**
 * Dominator-tree information for a {@link MachineFunction}, computed with the
 * Cooper/Harvey/Kennedy iterative algorithm ("A Simple, Fast Dominance
 * Algorithm").
 * <p>
 * Numbering convention: the block at reverse-post-order position {@code i}
 * is assigned the number {@code e - 1 - i} (so the entry block gets the
 * largest number, {@code e - 1}). The {@link #doms} array is indexed by
 * these numbers, and a node's idom always carries a larger number than the
 * node itself — the invariant {@link #intersect} relies on.
 *
 * @author Jianping Zeng
 * @version 0.4
 */
public class MachineDomTreeInfoCooper implements IMachineDomTreeInfo {
  /**
   * Idom table indexed by RPO number: doms[n] is the number of n's
   * immediate dominator, or UNDEF for unreachable/unprocessed nodes.
   */
  private int[] doms;
  private ArrayList<MachineBasicBlock> reversePostOrder;
  private ArrayList<MachineBasicBlock> roots;
  private DomTreeNodeBase<MachineBasicBlock> rootNodes;
  private HashMap<MachineBasicBlock, DomTreeNodeBase<MachineBasicBlock>> bb2DomTreeNode;
  private MachineFunction fn;
  /** Maps each block to its RPO number (e - 1 - position). */
  private TObjectIntHashMap<MachineBasicBlock> bb2Number;
  private static final int UNDEF = -1;

  /** Maps an RPO number back to its block (inverse of {@link #bb2Number}). */
  private MachineBasicBlock blockForNumber(int num) {
    return reversePostOrder.get(reversePostOrder.size() - 1 - num);
  }

  /**
   * (Re)builds the dominator information for the given function.
   *
   * @param f the function to analyze; a null argument is ignored
   */
  @Override
  public void recalculate(MachineFunction f) {
    if (f == null) return;
    fn = f;
    MachineBasicBlock entryBB = f.getEntryBlock();
    reversePostOrder = dfTraversal(entryBB);
    doms = new int[reversePostOrder.size()];
    bb2Number = new TObjectIntHashMap<>();
    bb2DomTreeNode = new HashMap<>();
    int e = reversePostOrder.size();
    for (int i = 0; i < e; i++) {
      MachineBasicBlock bb = reversePostOrder.get(i);
      bb2Number.put(bb, e - 1 - i);
      // The idom link is filled in later by createDomTree().
      bb2DomTreeNode.put(bb, new DomTreeNodeBase<>(bb, null));
    }
    roots = new ArrayList<>();
    roots.add(entryBB);

    // Step#1: all idoms start out undefined except the entry, which is its
    // own idom.
    Arrays.fill(doms, UNDEF);
    doms[e - 1] = e - 1;

    // Step#2: iterate to a fixed point over the blocks in reverse post
    // order (skipping the entry at position 0).
    boolean changed = true;
    while (changed) {
      changed = false;
      for (int i = 1; i < e; i++) {
        MachineBasicBlock bb = reversePostOrder.get(i);
        int number = e - 1 - i;
        int numPreds = bb.getNumPredecessors();
        if (numPreds <= 0) continue;

        // Seed with the first *processed* predecessor (doms entry already
        // defined), then intersect the remaining processed ones in.
        // Seeding with an unprocessed predecessor (as the original code
        // did with predAt(0)) would let intersect() walk through UNDEF
        // entries.
        int newIdom = UNDEF;
        for (int j = 0; j < numPreds; j++) {
          MachineBasicBlock predBB = bb.predAt(j);
          // A predecessor never reached by the traversal has no number;
          // skip it (trove would silently return 0 for a missing key).
          if (!bb2Number.containsKey(predBB)) continue;
          int pred = bb2Number.get(predBB);
          if (doms[pred] == UNDEF) continue;
          newIdom = (newIdom == UNDEF) ? pred : intersect(pred, newIdom);
        }
        if (newIdom != UNDEF && doms[number] != newIdom) {
          doms[number] = newIdom;
          changed = true;
        }
      }
    }

    // Step#3: materialize the DomTreeNodeBase tree from the doms array.
    createDomTree();
    // For debug
    if (Util.DEBUG) dump();
  }

  /**
   * Walks the two fingers up the partially built dominator tree until they
   * meet. Relies on the invariant that an idom's number is larger than its
   * child's. Both fingers must refer to processed nodes.
   */
  private int intersect(int finger1, int finger2) {
    while (finger1 != finger2) {
      while (finger1 < finger2) finger1 = doms[finger1];
      while (finger2 < finger1) finger2 = doms[finger2];
    }
    return finger1;
  }

  /** Links the DomTreeNodeBase nodes according to the doms array. */
  private void createDomTree() {
    int e = doms.length;
    for (int i = 0; i < e; i++) {
      int idomIdx = doms[i];
      // Skip the entry block (its own idom) and unreachable nodes.
      if (i == idomIdx || idomIdx == UNDEF) continue;
      MachineBasicBlock idomBB = reversePostOrder.get(e - 1 - idomIdx);
      bb2DomTreeNode.get(reversePostOrder.get(e - 1 - i))
          .setIDom(bb2DomTreeNode.get(idomBB));
    }
    rootNodes = bb2DomTreeNode.get(reversePostOrder.get(0));
  }

  @Override
  public ArrayList<MachineBasicBlock> getRoots() {
    return roots;
  }

  @Override
  public DomTreeNodeBase<MachineBasicBlock> getRootNode() {
    return rootNodes;
  }

  @Override
  public DomTreeNodeBase<MachineBasicBlock> getTreeNodeForBlock(MachineBasicBlock bb) {
    return bb2DomTreeNode.get(bb);
  }

  @Override
  public boolean isPostDominators() {
    return false;
  }

  /**
   * Returns true if A dominates B (a node dominates itself), by walking B
   * up its idom chain.
   */
  @Override
  public boolean dominates(DomTreeNodeBase<MachineBasicBlock> A,
                           DomTreeNodeBase<MachineBasicBlock> B) {
    Util.assertion(A.getBlock().getParent() == fn
        && B.getBlock().getParent() == A.getBlock().getParent());
    if (A == B) return true;
    while (B != A && B != null) {
      B = B.getIDom();
    }
    return B != null;
  }

  /** Block-level variant of {@link #dominates(DomTreeNodeBase, DomTreeNodeBase)}. */
  @Override
  public boolean dominates(MachineBasicBlock mbb1, MachineBasicBlock mbb2) {
    Util.assertion(mbb1.getParent() == fn && mbb2.getParent() == mbb1.getParent());
    if (mbb1.equals(mbb2)) return true;
    int indexA = bb2Number.get(mbb1);
    int indexB = bb2Number.get(mbb2);
    // Walk mbb2 toward the root; stop at the entry (self-idom) or at an
    // unreachable node (UNDEF idom) instead of indexing doms with -1.
    while (indexB != indexA && doms[indexB] != UNDEF && indexB != doms[indexB]) {
      indexB = doms[indexB];
    }
    return indexB == indexA;
  }

  @Override
  public boolean strictDominate(DomTreeNodeBase<MachineBasicBlock> a,
                                DomTreeNodeBase<MachineBasicBlock> b) {
    return dominates(a, b) && a != b;
  }

  @Override
  public boolean strictDominate(MachineBasicBlock a, MachineBasicBlock b) {
    return dominates(a, b) && a != b;
  }

  @Override
  public boolean isReachableFromEntry(MachineBasicBlock bb) {
    // Blocks never reached by the traversal are not in bb2Number at all;
    // treat them as unreachable rather than relying on trove's no-entry
    // default of 0.
    if (!bb2Number.containsKey(bb)) return false;
    return doms[bb2Number.get(bb)] != UNDEF;
  }

  @Override
  public boolean isReachableFromEntry(DomTreeNodeBase<MachineBasicBlock> node) {
    return isReachableFromEntry(node.getBlock());
  }

  /** Returns the immediate dominator block of the given block. */
  @Override
  public MachineBasicBlock getIDom(MachineBasicBlock block) {
    return blockForNumber(doms[bb2Number.get(block)]);
  }

  /**
   * Returns the nearest block dominating both arguments, or null when the
   * blocks do not both belong to the analyzed function.
   */
  @Override
  public MachineBasicBlock findNearestCommonDominator(MachineBasicBlock bb1,
                                                      MachineBasicBlock bb2) {
    if (bb1 == null || bb2 == null) return null;
    // Both blocks must live in the function this tree was computed for.
    // (The original condition only rejected the pair when the parents
    // differed AND bb1 belonged to fn, letting foreign blocks through.)
    if (bb1.getParent() != bb2.getParent() || bb1.getParent() != fn) return null;
    int idx1 = bb2Number.get(bb1);
    int idx2 = bb2Number.get(bb2);
    while (idx1 != idx2) {
      while (idx1 < idx2) idx1 = doms[idx1];
      while (idx2 < idx1) idx2 = doms[idx2];
    }
    // idx1 is an RPO *number*; translate it back to a block. Indexing
    // reversePostOrder with the raw number (as the original did) returns
    // the wrong block, since numbers run opposite to list positions.
    return blockForNumber(idx1);
  }

  /** Removes a leaf block from the dominator information. */
  @Override
  public void eraseNode(MachineBasicBlock bb) {
    Util.assertion(bb != null);
    int index = bb2Number.get(bb);
    // Only leaves may be removed: no other node may name bb as its idom.
    boolean exist = false;
    for (int i : doms) {
      if (i == index) {
        exist = true;
        break;
      }
    }
    Util.assertion(!exist, "Can not remove non-leaf node");
    doms[index] = UNDEF;
    bb2Number.remove(bb);
    bb2DomTreeNode.remove(bb);
    roots.remove(bb);
    if (rootNodes.getBlock().equals(bb))
      rootNodes = null;
  }

  /**
   * Updates the tree after newBB was spliced in front of its single
   * successor.
   */
  @Override
  public void splitBlock(MachineBasicBlock newBB) {
    int e = newBB.getNumSuccessors();
    MachineBasicBlock succ = newBB.suxAt(0);
    Util.assertion(e == 1 && succ != null, "newBB must have a single successor");
    ArrayList<MachineBasicBlock> preds = newBB.getPredecessors();
    Util.assertion(!preds.isEmpty(), "No predecessors block!");

    // newBB dominates succ unless succ has another reachable predecessor
    // that is not itself dominated by succ.
    boolean newBBDominatesSucc = true;
    for (MachineBasicBlock p : succ.getPredecessors()) {
      if (p != newBB && !dominates(succ, p) && isReachableFromEntry(p)) {
        newBBDominatesSucc = false;
        break;
      }
    }

    // Find newBB's immediate dominator: the nearest common dominator of
    // all its reachable predecessors.
    MachineBasicBlock newBBIDom = null;
    int i = 0;
    for (; i < preds.size(); i++) {
      if (isReachableFromEntry(preds.get(i))) {
        newBBIDom = preds.get(i);
        break;
      }
    }
    // It's possible that none of the predecessors of NewBB are reachable;
    // in that case, NewBB itself is unreachable, so nothing needs to be
    // changed.
    if (newBBIDom == null) return;

    for (i += 1; i < preds.size(); i++) {
      if (isReachableFromEntry(preds.get(i)))
        // Fold each remaining reachable predecessor into the running
        // nearest-common-dominator. (The original passed newBB here,
        // which is not in the tree yet, so the NCD was meaningless.)
        newBBIDom = findNearestCommonDominator(newBBIDom, preds.get(i));
    }

    // Create a new dominator tree node, and set it as the idom of newBB.
    DomTreeNodeBase<MachineBasicBlock> newBBNode = addNewBlock(newBB, newBBIDom);

    // If newBB strictly dominates succ, it becomes succ's immediate
    // dominator.
    // NOTE(review): changeIDom's contract is "redirect children of oldIDom
    // to newIDom", which does not obviously match the intended
    // "set succ's idom to newBB" here — confirm against callers.
    if (newBBDominatesSucc) {
      DomTreeNodeBase<MachineBasicBlock> newBBSuccNode = getTreeNodeForBlock(succ);
      changeIDom(newBBSuccNode, newBBNode);
    }
  }

  /**
   * Registers a brand-new block with the given immediate dominator and
   * returns its tree node.
   */
  @Override
  public DomTreeNodeBase<MachineBasicBlock> addNewBlock(MachineBasicBlock bb,
                                                        MachineBasicBlock idom) {
    Util.assertion(bb != null && idom != null && bb2Number.containsKey(idom));
    // NOTE(review): if bb is not present in reversePostOrder (e.g. a block
    // created after recalculate()), indexOf returns -1 and bbIndex equals
    // doms.length, overflowing the array — confirm callers refresh the
    // traversal first.
    int bbIndex = reversePostOrder.size() - reversePostOrder.indexOf(bb) - 1;
    int idomIndex = bb2Number.get(idom);
    doms[bbIndex] = idomIndex;
    bb2Number.put(bb, bbIndex);
    DomTreeNodeBase<MachineBasicBlock> domBB = new DomTreeNodeBase<>(
        bb, bb2DomTreeNode.get(idom));
    bb2DomTreeNode.put(bb, domBB);
    return domBB;
  }

  /** Redirects every node whose idom is oldIDom to newIDom. */
  @Override
  public void changeIDom(DomTreeNodeBase<MachineBasicBlock> oldIDom,
                         DomTreeNodeBase<MachineBasicBlock> newIDom) {
    Util.assertion(bb2Number.containsKey(oldIDom.getBlock())
        && bb2Number.containsKey(newIDom.getBlock()));
    int oldIdomIndex = bb2Number.get(oldIDom.getBlock());
    int newIdomIndex = bb2Number.get(newIDom.getBlock());
    for (int idx = 0; idx < doms.length; idx++) {
      if (doms[idx] == oldIdomIndex)
        doms[idx] = newIdomIndex;
    }
    for (DomTreeNodeBase<MachineBasicBlock> domBB : bb2DomTreeNode.values()) {
      // The root's idom is null; guard before comparing.
      if (domBB.getIDom() != null && domBB.getIDom().equals(oldIDom))
        domBB.setIDom(newIDom);
    }
  }

  /** Block-level variant of {@link #changeIDom(DomTreeNodeBase, DomTreeNodeBase)}. */
  @Override
  public void changeIDom(MachineBasicBlock oldIDomBB, MachineBasicBlock newIDomBB) {
    Util.assertion(bb2Number.containsKey(oldIDomBB) && bb2Number.containsKey(newIDomBB));
    int oldIdomIndex = bb2Number.get(oldIDomBB);
    int newIdomIndex = bb2Number.get(newIDomBB);
    for (int idx = 0; idx < doms.length; idx++) {
      if (doms[idx] == oldIdomIndex)
        doms[idx] = newIdomIndex;
    }
    for (DomTreeNodeBase<MachineBasicBlock> node : bb2DomTreeNode.values()) {
      DomTreeNodeBase<MachineBasicBlock> idom = node.getIDom();
      // Re-parent the tree node instead of replacing the map entry: the
      // original overwrote the child's map entry with newIDom's node,
      // corrupting the block->node mapping. Also guard the null idom of
      // the root.
      if (idom != null && idom.getBlock().equals(oldIDomBB))
        node.setIDom(bb2DomTreeNode.get(newIDomBB));
    }
  }

  /** Prints the computed idom relation to stderr for debugging. */
  public void dump() {
    for (MachineBasicBlock bb : reversePostOrder) {
      if (bb.getBasicBlock().hasName())
        System.err.println("MBB_" + bb.getBasicBlock().getName());
      else
        System.err.printf("MBB_0x%x ", bb.hashCode());
    }
    System.err.println();
    for (int i = 0; i < doms.length; i++) {
      // i and doms[i] are RPO numbers; translate before printing. (The
      // original indexed reversePostOrder with the raw number and printed
      // the wrong blocks.)
      MachineBasicBlock src = blockForNumber(i);
      if (src.getBasicBlock().hasName())
        System.err.print(src.getBasicBlock().getName());
      else
        System.err.printf("BB_0x%x", src.hashCode());
      int idomIdx = doms[i];
      if (idomIdx != i && idomIdx != UNDEF) {
        MachineBasicBlock dest = blockForNumber(idomIdx);
        if (dest.getBasicBlock().hasName())
          System.err.printf("--->BB_%s", dest.getBasicBlock().getName());
        else
          System.err.printf("--->BB_0x%x", dest.hashCode());
      }
      System.err.println();
    }
  }
}
/* * Copyright (C) 2016 Singular Studios (a.k.a Atom Tecnologia) - www.opensingular.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.opensingular.form.wicket.mapper.tree; import org.apache.wicket.Component; import org.apache.wicket.ajax.AjaxRequestTarget; import org.apache.wicket.ajax.markup.html.form.AjaxButton; import org.apache.wicket.markup.head.IHeaderResponse; import org.apache.wicket.markup.head.JavaScriptReferenceHeaderItem; import org.apache.wicket.markup.head.OnDomReadyHeaderItem; import org.apache.wicket.markup.html.form.Form; import org.apache.wicket.markup.html.form.HiddenField; import org.apache.wicket.markup.html.panel.Panel; import org.apache.wicket.model.IModel; import org.apache.wicket.model.Model; import org.apache.wicket.model.util.ListModel; import org.apache.wicket.request.resource.PackageResourceReference; import org.json.JSONObject; import org.opensingular.form.SInstance; import org.opensingular.form.converter.SInstanceConverter; import org.opensingular.form.provider.ProviderContext; import org.opensingular.form.provider.TreeProvider; import org.opensingular.form.view.SViewTree; import org.opensingular.form.wicket.WicketBuildContext; import org.opensingular.lib.commons.lambda.IConsumer; import org.opensingular.lib.commons.lambda.ISupplier; import org.opensingular.lib.commons.util.Loggable; import org.springframework.util.CollectionUtils; import java.io.Serializable; import java.util.*; import java.util.stream.Collectors; 
@SuppressWarnings("unchecked")
public class SearchModalBodyTreePanel extends Panel implements Loggable {

    private static final String PANEL_SCRIPT = "SearchModalBodyTreePanel.js";

    private final IModel<List<? extends TreeNode>> nodes = new ListModel();
    private final IModel<String> nodeSelectedModel = new Model<>();
    // JSON init parameters handed to the client-side treeView.create() call.
    private final IModel<String> viewParams = new Model<>();
    private final HiddenField<String> nodeSelected = new HiddenField<>("nodeSelected", nodeSelectedModel);
    // Maps node id (as posted back in the hidden field) to its TreeNode.
    private final Map<String, TreeNode> cache = new HashMap<>();

    private final WicketBuildContext ctx;
    private final IConsumer<AjaxRequestTarget> selectCallback;
    private final IConsumer<AjaxRequestTarget> clearCallback;
    private final ISupplier<SViewTree> viewSupplier;

    /**
     * @param id             wicket component id
     * @param ctx            build context supplying the SInstance and view
     * @param selectCallback invoked after a node is selected and filled into the instance
     * @param clearCallback  invoked when the selection is cleared
     */
    SearchModalBodyTreePanel(String id, WicketBuildContext ctx,
            IConsumer<AjaxRequestTarget> selectCallback,
            IConsumer<AjaxRequestTarget> clearCallback) {
        super(id);
        this.ctx = ctx;
        this.viewSupplier = ctx.getViewSupplier(SViewTree.class);
        this.selectCallback = selectCallback;
        this.clearCallback = clearCallback;
    }

    @Override
    public void renderHead(IHeaderResponse response) {
        super.renderHead(response);
        final PackageResourceReference customJS = new PackageResourceReference(getClass(), PANEL_SCRIPT);
        response.render(JavaScriptReferenceHeaderItem.forReference(customJS));
        // viewParams is populated in onInitialize(), which runs before render.
        response.render(OnDomReadyHeaderItem.forScript("treeView.create(" + viewParams.getObject() + ")"));
    }

    @Override
    protected void onInitialize() {
        super.onInitialize();
        nodes.setObject(loadTree());
        populateParamsTree();
        Form<?> form = new Form<>("formHidden");
        form.setOutputMarkupId(false);
        form.add(nodeSelected);
        add(buildSelectButton());
        add(buildClearButton());
        add(form);
    }

    /** Serializes the tree data plus view flags into {@link #viewParams}. */
    private void populateParamsTree() {
        final SViewTree view = viewSupplier.get();
        final JSONObject json = new JSONObject();
        json.put("data", treeJson(nodes.getObject(), view.isOpen()));
        json.put("hidden", stringfyId(nodeSelected));
        json.put("showOnlyMatches", view.isShowOnlyMatches());
        json.put("showOnlyMatchesChildren", view.isShowOnlyMatchesChildren());
        json.put("onlyLeafSelected", view.isSelectOnlyLeafs());
        viewParams.setObject(json.toString());
    }

    /**
     * Builds the "selectNode" button: fills the picked node into the
     * instance and fires the select callback.
     * (Renamed from buildClearButton — the original method names were
     * swapped relative to the behavior/wicket-ids they carried.)
     */
    private AjaxButton buildSelectButton() {
        return new AjaxButton("selectNode") {
            @Override
            protected void onSubmit(AjaxRequestTarget target, Form<?> form) {
                if (nodeSelectedModel.getObject() != null) {
                    populateInstance(cache.get(nodeSelectedModel.getObject()));
                    selectCallback.accept(target);
                }
                nodeSelectedModel.setObject(null);
            }
        };
    }

    /** Builds the "clearNode" button: simply delegates to the clear callback. */
    private AjaxButton buildClearButton() {
        return new AjaxButton("clearNode") {
            @Override
            protected void onSubmit(AjaxRequestTarget target, Form<?> form) {
                clearCallback.accept(target);
            }
        };
    }

    /** Loads the root nodes from the attribute provider and caches them by id. */
    private List<? extends TreeNode> loadTree() {
        clearCache();
        TreeProvider<Serializable> provider = getInstance().asAtrProvider().getTreeProvider();
        List<Serializable> nodes = provider.load(ProviderContext.of(getInstance()));
        return nodes.stream().map(node -> new TreeNodeImpl(null, node, 0,
                getInstance().asAtrProvider().getIdFunction(),
                getInstance().asAtrProvider().getDisplayFunction(),
                provider::loadChildren))
                .map(this::cacheId)
                .collect(Collectors.toList());
    }

    /** Recursively registers the node and its children in {@link #cache}. */
    private TreeNode cacheId(TreeNode treeNode) {
        cache.put(treeNode.getId().toString(), treeNode);
        if (treeNode.hasChildren()) {
            treeNode.getChildrens().forEach(c -> cacheId((TreeNode) c));
        }
        return treeNode;
    }

    private void clearCache() {
        cache.clear();
    }

    /** Converts the chosen node's value into the backing SInstance. */
    private void populateInstance(TreeNode tree) {
        // ofNullable: the cache lookup may miss (e.g. a stale id posted
        // back), in which case there is nothing to fill in. Optional.of
        // would throw NullPointerException here.
        Optional<TreeNode> optional = Optional.ofNullable(tree);
        optional.ifPresent(treeNode -> {
                    SInstanceConverter converter = getInstance().asAtrProvider().getConverter();
                    if (converter != null) {
                        converter.fillInstance(getInstance(), treeNode.getValue());
                    }
                }
        );
    }

    private SInstance getInstance() {
        return ctx.getModel().getObject();
    }

    /** Serializes a single node (and, for non-leaves, its children) to JSON. */
    private JSONObject treeJson(TreeNode<? extends TreeNode> node, boolean open) {
        JSONObject json = new JSONObject();
        json.put("id", node.getId());
        json.put("text", node.getDisplayLabel());
        json.put("state", stateShowTree(open));
        if (node.isLeaf()) {
            json.put("type", "leaf");
        } else {
            json.put("type", "open");
            List<JSONObject> childs = childrenNodes(node, open);
            json.put("children", childs);
        }
        return json;
    }

    private List<JSONObject> childrenNodes(TreeNode<? extends TreeNode> node, boolean open) {
        List<JSONObject> childs = new ArrayList<>();
        node.getChildrens().forEach(t -> childs.add(treeJson(t, open)));
        return childs;
    }

    /** Builds the jsTree "state" object controlling whether nodes start expanded. */
    private JSONObject stateShowTree(boolean open) {
        JSONObject opened = new JSONObject();
        opened.put("opened", open);
        return opened;
    }

    private List<JSONObject> treeJson(List<? extends TreeNode> nodes, boolean open) {
        List<JSONObject> jsons = new ArrayList<>(nodes.size());
        nodes.forEach(n -> jsons.add(treeJson(n, open)));
        return jsons;
    }

    /** Returns the component's markup id wrapped in single quotes for JS embedding. */
    private String stringfyId(Component c) {
        return "'" + c.getMarkupId(true) + "'";
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.gemstone.gemfire.internal.util.concurrent;

import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;

/**
 * This ReadWriteLock is useful when different threads need to lock
 * and unlock the read lock. This is <b>NOT</b> a reentrant lock.
 *
 * @author sbawaska
 */
public class SemaphoreReadWriteLock implements ReadWriteLock {

  private final SemaphoreReadLock readLock;
  private final SemaphoreWriteLock writeLock;

  public SemaphoreReadWriteLock() {
    Semaphore writerSemaphore = new Semaphore(1);
    Semaphore readerSemaphore = new Semaphore(1);
    readLock = new SemaphoreReadLock(readerSemaphore, writerSemaphore);
    writeLock = new SemaphoreWriteLock(writerSemaphore);
  }

  @Override
  public Lock readLock() {
    return readLock;
  }

  @Override
  public Lock writeLock() {
    return writeLock;
  }

  /**
   * Shared lock: the first reader takes the writer semaphore, the last
   * reader releases it. The reader semaphore guards {@link #numReaders}.
   */
  public static class SemaphoreReadLock implements Lock {
    // Number of threads currently holding the read lock; only read/written
    // while holding readerSemaphore.
    private int numReaders = 0;
    private final Semaphore readerSemaphore;
    private final Semaphore writerSemaphore;

    public SemaphoreReadLock(Semaphore readerSemaphore, Semaphore writerSemaphore) {
      this.readerSemaphore = readerSemaphore;
      this.writerSemaphore = writerSemaphore;
    }

    @Override
    public void lock() {
      boolean interrupted = false;
      try {
        for (;;) {
          try {
            lockInterruptibly();
            break;
          } catch (InterruptedException e) {
            // Remember the interrupt and retry; restored in finally so the
            // caller still sees the interrupt status.
            interrupted = true;
          }
        }
      } finally {
        if (interrupted) Thread.currentThread().interrupt();
      }
    }

    @Override
    public void lockInterruptibly() throws InterruptedException {
      readerSemaphore.acquire();
      boolean acquired = false;
      try {
        numReaders++;
        // The first reader blocks writers for the whole read episode.
        if (numReaders == 1) {
          writerSemaphore.acquire();
        }
        acquired = true;
      } finally {
        if (!acquired) {
          // writerSemaphore.acquire() was interrupted: roll back the
          // reader count so the state stays consistent (the original left
          // numReaders incremented, letting later readers skip the writer
          // semaphore entirely).
          numReaders--;
        }
        readerSemaphore.release();
      }
    }

    @Override
    public boolean tryLock() {
      boolean interrupted = false;
      try {
        for (;;) {
          try {
            return tryLock(0, TimeUnit.MILLISECONDS);
          } catch (InterruptedException e) {
            interrupted = true;
          }
        }
      } finally {
        if (interrupted) Thread.currentThread().interrupt();
      }
    }

    @Override
    public boolean tryLock(long time, TimeUnit unit) throws InterruptedException {
      // NOTE: the timeout is applied to each semaphore separately, so the
      // total wait can be up to twice the requested time.
      if (readerSemaphore.tryAcquire(time, unit)) {
        int oldNumReaders = numReaders;
        numReaders++;
        if (numReaders == 1) {
          if (writerSemaphore.tryAcquire(time, unit)) {
            readerSemaphore.release();
            return true;
          } else {
            // A writer holds the lock: undo the count and give up.
            numReaders = oldNumReaders;
            readerSemaphore.release();
            return false;
          }
        } else {
          readerSemaphore.release();
          return true;
        }
      }
      return false;
    }

    @Override
    public void unlock() {
      boolean interrupted = false;
      try {
        for (;;) {
          try {
            readerSemaphore.acquire();
          } catch (InterruptedException e) {
            // Do NOT re-interrupt before retrying: acquire() would then
            // throw again immediately, spinning forever (the original
            // restored the interrupt inside the loop). Record it and
            // restore it once the unlock has completed.
            interrupted = true;
            continue;
          }
          numReaders--;
          // The unlock method is forgiving: extra unlocks clamp to zero.
          if (numReaders <= 0) {
            numReaders = 0;
            if (writerSemaphore.availablePermits() == 0) {
              writerSemaphore.release();
            }
          }
          readerSemaphore.release();
          break;
        }
      } finally {
        if (interrupted) Thread.currentThread().interrupt();
      }
    }

    @Override
    public Condition newCondition() {
      throw new UnsupportedOperationException();
    }
  }

  /** Exclusive lock: a thin wrapper over the single-permit writer semaphore. */
  public static class SemaphoreWriteLock implements Lock {
    private final Semaphore writerSemaphore;

    public SemaphoreWriteLock(Semaphore writerSemaphore) {
      this.writerSemaphore = writerSemaphore;
    }

    @Override
    public void lock() {
      boolean interrupted = false;
      try {
        for (;;) {
          try {
            lockInterruptibly();
            break;
          } catch (InterruptedException e) {
            interrupted = true;
          }
        }
      } finally {
        if (interrupted) Thread.currentThread().interrupt();
      }
    }

    @Override
    public void lockInterruptibly() throws InterruptedException {
      writerSemaphore.acquire();
    }

    @Override
    public boolean tryLock() {
      return writerSemaphore.tryAcquire();
    }

    @Override
    public boolean tryLock(long time, TimeUnit unit) throws InterruptedException {
      return writerSemaphore.tryAcquire(time, unit);
    }

    @Override
    public void unlock() {
      writerSemaphore.release();
    }

    @Override
    public Condition newCondition() {
      throw new UnsupportedOperationException();
    }
  }
}
/*
 * Licensed to The Apereo Foundation under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional
 * information regarding copyright ownership.
 *
 * The Apereo Foundation licenses this file to you under the Apache License,
 * Version 2.0, (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.tle.web.controls.resource;

import com.google.common.collect.Lists;
import com.tle.annotation.NonNullByDefault;
import com.tle.beans.item.ItemId;
import com.tle.beans.item.attachments.Attachment;
import com.tle.beans.item.attachments.CustomAttachment;
import com.tle.beans.item.attachments.IAttachment;
import com.tle.common.Check;
import com.tle.common.Pair;
import com.tle.common.wizard.controls.resource.ResourceSettings;
import com.tle.common.wizard.controls.resource.ResourceSettings.AllowedSelection;
import com.tle.core.guice.Bind;
import com.tle.core.i18n.BundleCache;
import com.tle.core.item.service.ItemService;
import com.tle.core.mimetypes.MimeTypeConstants;
import com.tle.core.services.item.relation.RelationModify;
import com.tle.core.services.item.relation.RelationOperationState;
import com.tle.core.services.item.relation.RelationService;
import com.tle.web.controls.universal.AbstractDetailsAttachmentHandler;
import com.tle.web.controls.universal.AttachmentHandlerLabel;
import com.tle.web.controls.universal.DialogRenderOptions;
import com.tle.web.controls.universal.UniversalControlState;
import com.tle.web.sections.SectionInfo;
import com.tle.web.sections.SectionTree;
import com.tle.web.sections.annotations.EventHandlerMethod;
import com.tle.web.sections.equella.annotation.PlugKey;
import com.tle.web.sections.events.RenderContext;
import com.tle.web.sections.js.JSCallAndReference;
import com.tle.web.sections.js.generic.function.PassThroughFunction;
import com.tle.web.sections.render.Label;
import com.tle.web.sections.render.SectionRenderable;
import com.tle.web.sections.render.TextLabel;
import com.tle.web.sections.result.util.BundleLabel;
import com.tle.web.sections.standard.model.HtmlLinkState;
import com.tle.web.sections.standard.renderers.ImageRenderer;
import com.tle.web.sections.standard.renderers.LinkRenderer;
import com.tle.web.selection.ParentFrameSelectionCallback;
import com.tle.web.selection.SelectedResource;
import com.tle.web.selection.SelectedResourceDetails;
import com.tle.web.selection.SelectionService;
import com.tle.web.selection.SelectionSession;
import com.tle.web.selection.home.SelectionHomeSelectable;
import com.tle.web.viewurl.AttachmentDetail;
import com.tle.web.viewurl.ItemSectionInfo;
import com.tle.web.viewurl.ViewableResource;
import com.tle.web.viewurl.attachments.AttachmentResourceService;
import com.tle.web.wizard.WizardState;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.inject.Inject;

/**
 * Universal-control attachment handler for "resource" custom attachments:
 * lets the user select existing repository items/attachments through a
 * selection session and stores the picked resource (uuid/version/type) in
 * a {@link CustomAttachment}, optionally recording an item relation.
 *
 * @author Aaron
 */
@SuppressWarnings("nls")
@Bind
@NonNullByDefault
public class ResourceHandler
    extends AbstractDetailsAttachmentHandler<
        ResourceHandler.ResourceHandlerModel, ResourceUniversalAttachment> {
  // Custom attachment type marker, plus the data keys stored on it.
  public static final String TYPE_RESOURCE = "resource";
  public static final String DATA_TYPE = "type";
  public static final String DATA_UUID = "uuid";
  public static final String DATA_VERSION = "version";

  @PlugKey("label.handler.name")
  private static Label LABEL_NAME;

  @PlugKey("label.handler.description")
  private static Label LABEL_DESCRIPTION;

  @PlugKey("label.title.edit")
  private static Label LABEL_TITLE_EDIT;

  @PlugKey("ressel.details.item.desc")
  private static Label ITEM_DESC;

  @PlugKey("ressel.details.item.viewlink")
  private static Label VIEW_LINK_LABEL;

  @Inject private SelectionService selectionService;
  @Inject private RelationService relationService;
  @Inject private SelectionHomeSelectable homeSelectable;
  @Inject private ItemService itemService;
  @Inject private BundleCache bundleCache;
  @Inject private AttachmentResourceService attachmentResourceService;

  // Client-side callback invoked by the selection frame with the results.
  private JSCallAndReference resultsCallback;
  private ResourceSettings resourceSettings;
  // Dialog-state key under which the pending selections are stashed.
  private String selectionsKey;

  @Override
  public String getHandlerId() {
    return "resourceHandler";
  }

  @Override
  public AttachmentHandlerLabel getLabel() {
    return new AttachmentHandlerLabel(LABEL_NAME, LABEL_DESCRIPTION);
  }

  /**
   * Renders the edit/details page for an existing resource attachment:
   * common details, thumbnail, optional item description and a view link.
   */
  @Override
  protected SectionRenderable renderDetails(
      RenderContext context, DialogRenderOptions renderOptions) {
    // Common details
    ResourceHandlerModel model = getModel(context);
    final CustomAttachment attachment = (CustomAttachment) getDetailsAttachment(context);
    ItemSectionInfo itemInfo = context.getAttributeForClass(ItemSectionInfo.class);
    final ViewableResource viewable =
        attachmentResourceService.getViewableResource(
            context, itemInfo.getViewableItem(), attachment);
    List<AttachmentDetail> commonAttachmentDetails = viewable.getCommonAttachmentDetails();
    addAttachmentDetails(context, commonAttachmentDetails);

    // Thumbnail (set thumb)
    ImageRenderer thumbRenderer =
        viewable.createStandardThumbnailRenderer(new TextLabel(attachment.getDescription()));
    model.setThumbnail(thumbRenderer.addClass("file-thumbnail"));

    // Additional details: for whole-item ("path"-type) selections, look up
    // the item and show its description. DATA_VERSION == 0 denotes
    // "always latest", so resolve it to the live version here.
    String type = (String) attachment.getData(DATA_TYPE);
    if (type.equals(Character.toString(SelectedResource.TYPE_PATH))
        && attachment.getUrl().isEmpty()) {
      int version = (Integer) attachment.getData(DATA_VERSION);
      String uuid = (String) attachment.getData(DATA_UUID);
      if (version == 0) {
        version = itemService.getLiveItemVersion(uuid);
      }
      ItemId itemId = new ItemId(uuid, version);
      Map<String, Object> allInfo = itemService.getItemInfo(itemId);
      if (!Check.isEmpty(allInfo)) {
        BundleLabel desc = new BundleLabel(allInfo.get("description_id"), "", bundleCache);
        model.addSpecificDetail(
            "itemdesc", new Pair<Label, Object>(ITEM_DESC, desc)); // Description
      }
    }

    // Link to view the underlying resource in a new window/tab.
    HtmlLinkState linkState = new HtmlLinkState(VIEW_LINK_LABEL, viewable.createCanonicalUrl());
    linkState.setTarget(HtmlLinkState.TARGET_BLANK);
    model.setViewlink(new LinkRenderer(linkState));

    return viewFactory.createResult("resource/resource-edit.ftl", this);
  }

  /**
   * Renders the "add" page: configures a selection session from the
   * control's ResourceSettings (what may be selected, which collections /
   * searches are allowed) and embeds its URL full-screen.
   */
  @Override
  protected SectionRenderable renderAdd(RenderContext context, DialogRenderOptions renderOptions) {
    SelectionSession session =
        new SelectionSession(new ParentFrameSelectionCallback(resultsCallback, false));
    final AllowedSelection as = resourceSettings.getAllowedSelection();
    session.setHomeSelectable("home");
    session.setSelectAttachments(as.isAttachments());
    session.setSelectItem(as.isItems());
    // session.setSelectPackage(as.isPackages());
    session.setSelectMultiple(isMultipleAllowed(context));
    session.setSelectDraft(true);
    session.setSkipCheckoutPage(resourceSettings.isSkipCheckoutPage());

    // For each restrictable category: allow everything unless the settings
    // restrict it, in which case pass the allowed subset along.
    session.setAllCollections(
        !resourceSettings.isRestricted(ResourceSettings.KEY_RESTRICT_COLLECTIONS));
    if (!session.isAllCollections()) {
      Set<String> collections =
          resourceSettings.getRestrictedTo(ResourceSettings.KEY_RESTRICT_COLLECTIONS);
      session.setCollectionUuids(collections);
    }

    session.setAllPowerSearches(
        !resourceSettings.isRestricted(ResourceSettings.KEY_RESTRICT_POWERSEARCHES));
    if (!session.isAllPowerSearches()) {
      Set<String> powerSearches =
          resourceSettings.getRestrictedTo(ResourceSettings.KEY_RESTRICT_POWERSEARCHES);
      session.setPowerSearchIds(powerSearches);
    }

    session.setAllContributionCollections(
        !resourceSettings.isRestricted(ResourceSettings.KEY_RESTRICT_CONTRIBUTION));
    if (!session.isAllContributionCollections()) {
      Set<String> contributeCol =
          resourceSettings.getRestrictedTo(ResourceSettings.KEY_RESTRICT_CONTRIBUTION);
      session.setContributionCollectionIds(contributeCol);
    }

    session.setAllDynamicCollections(
        !resourceSettings.isRestricted(ResourceSettings.KEY_RESTRICT_DYNACOLLECTION));
    if (!session.isAllDynamicCollections()) {
      Set<String> contributeCol =
          resourceSettings.getRestrictedTo(ResourceSettings.KEY_RESTRICT_DYNACOLLECTION);
      session.setDynamicCollectionIds(contributeCol);
    }

    SectionInfo forward = homeSelectable.createSectionInfo(context, session);
    selectionService.setupSelectionSession(forward, session);
    getModel(context).setIntegrationUrl(forward.getPublicBookmark().getHref());
    renderOptions.setFullscreen(true);
    return viewFactory.createResult("resource/resource-add.ftl", this);
  }

  @Override
  public Label getTitleLabel(RenderContext context, boolean editing) {
    return LABEL_TITLE_EDIT;
  }

  @Override
  public void onRegister(SectionTree tree, String parentId, UniversalControlState state) {
    super.onRegister(tree, parentId, state);
    resourceSettings = new ResourceSettings(state.getControlConfiguration());
    selectionsKey = getSectionId() + ":" + getHandlerId();
  }

  @Override
  public void registered(String id, SectionTree tree) {
    super.registered(id, tree);
    // Exposes the results(...) event handler as a JS function the
    // selection frame can invoke from the client.
    resultsCallback = new PassThroughFunction("r" + id, events.getSubmitValuesFunction("results"));
  }

  /** This handler only deals with CustomAttachments of type "resource". */
  @Override
  public boolean supports(IAttachment attachment) {
    if (attachment instanceof CustomAttachment) {
      CustomAttachment custom = (CustomAttachment) attachment;
      return custom.getType().equals(TYPE_RESOURCE);
    }
    return false;
  }

  /** Also removes any relations recorded against the removed attachment. */
  @Override
  public void remove(SectionInfo info, Attachment attachment, boolean willBeReplaced) {
    super.remove(info, attachment, willBeReplaced);
    RelationModify relOp = getRelationModifier();
    relOp.getState().deleteByResourceId(attachment.getUuid());
  }

  /**
   * Records an item relation for the selected resource, if the control is
   * configured with a relation type. Any existing relation of that type is
   * dropped first, i.e. the type is single-valued.
   */
  private void addRelation(SelectedResourceDetails resource, String resourceId) {
    String relationType = resourceSettings.getRelationType();
    if (!Check.isEmpty(relationType)) {
      RelationModify relOp = getRelationModifier();
      final RelationOperationState relationState = relOp.getState();
      relationState.deleteByType(relationType);
      relationState.add(
          new ItemId(resource.getUuid(), resource.getVersion()), relationType, resourceId);
    }
  }

  /**
   * Event handler invoked (via {@link #resultsCallback}) when the selection
   * session finishes; stashes the selections and saves or cancels the
   * dialog depending on whether anything was selected.
   */
  @EventHandlerMethod
  public void results(SectionInfo info, List<SelectedResourceDetails> selectedResources) {
    dialogState.setAttribute(info, selectionsKey, selectedResources);
    if (selectedResources.size() == 0) {
      dialogState.cancel(info);
    } else {
      dialogState.save(info);
    }
  }

  @Override
  protected void addOrReplace(
      SectionInfo info, ResourceUniversalAttachment attachment, String replacementUuid) {
    super.addOrReplace(info, attachment, replacementUuid);
    SelectedResourceDetails selection = attachment.getSelection();
    if (selection != null) {
      addRelation(selection, attachment.getAttachment().getUuid());
    }
  }

  @Override
  protected ResourceUniversalAttachment createUniversalAttachmentForEdit(
      SectionInfo info, Attachment attachment) {
    // No selection details when editing an existing attachment.
    return new ResourceUniversalAttachment(null, (CustomAttachment) attachment);
  }

  /** Converts the stashed selection results into universal attachments. */
  @Override
  protected List<ResourceUniversalAttachment> createUniversalAttachments(SectionInfo info) {
    List<ResourceUniversalAttachment> attachments = Lists.newArrayList();
    // NOTE(review): getAttribute may return null if results(...) never
    // ran for this dialog state — confirm the framework guarantees the
    // ordering before relying on it.
    List<SelectedResourceDetails> selectedResources =
        dialogState.getAttribute(info, selectionsKey);
    for (SelectedResourceDetails resource : selectedResources) {
      attachments.add(new ResourceUniversalAttachment(resource, makeAttachment(resource)));
    }
    return attachments;
  }

  /**
   * Builds the CustomAttachment for a selection. Version 0 is stored to
   * mean "track the latest version" when no relation type is configured
   * and the selection points at the latest version.
   */
  private CustomAttachment makeAttachment(SelectedResourceDetails resource) {
    final char type = resource.getType();
    final int version =
        Check.isEmpty(resourceSettings.getRelationType()) && resource.isLatest()
            ? 0
            : resource.getVersion();
    final String attachmentUuid = resource.getAttachmentUuid();
    final CustomAttachment attachment = new CustomAttachment();
    // For attachment selections the URL field carries the attachment uuid;
    // otherwise it carries the selected URL.
    if (type == SelectedResource.TYPE_ATTACHMENT) {
      attachment.setUrl(attachmentUuid);
    } else {
      attachment.setUrl(resource.getUrl());
    }
    attachment.setType(TYPE_RESOURCE);
    attachment.setDescription(resource.getTitle());
    attachment.setData(DATA_UUID, resource.getUuid());
    attachment.setData(DATA_VERSION, version);
    attachment.setData(DATA_TYPE, Character.toString(type));
    return attachment;
  }

  /**
   * Lazily creates (and caches in the wizard state) the RelationModify
   * save operation, pre-loading existing relations for non-new items.
   */
  private RelationModify getRelationModifier() {
    final WizardState state = dialogState.getRepository().getState();
    RelationModify relOp = (RelationModify) state.getWizardSaveOperation(RelationModify.NAME);
    if (relOp == null) {
      final RelationOperationState relState = new RelationOperationState();
      if (!state.isNewItem()) {
        relState.initForCurrent(relationService.getAllByFromItem(state.getItem()));
      }
      relOp = new RelationModify(relState);
      state.setWizardSaveOperation(RelationModify.NAME, relOp);
    }
    return relOp;
  }

  @Override
  public Object instantiateModel(SectionInfo info) {
    return new ResourceHandlerModel();
  }

  public JSCallAndReference getResultsCallback() {
    return resultsCallback;
  }

  /** View model: adds the selection-session URL to the base handler model. */
  public static class ResourceHandlerModel
      extends AbstractDetailsAttachmentHandler.AbstractAttachmentHandlerModel {
    private String integrationUrl;

    public String getIntegrationUrl() {
      return integrationUrl;
    }

    public void setIntegrationUrl(String integrationUrl) {
      this.integrationUrl = integrationUrl;
    }
  }

  @Override
  protected boolean validateAddPage(SectionInfo info) {
    // The add page is the embedded selection session; nothing to validate.
    return true;
  }

  @Override
  public String getMimeType(SectionInfo info) {
    return MimeTypeConstants.MIME_ITEM;
  }

  @Override
  public boolean canRestrictAttachments() {
    return false;
  }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.action.support.master; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.TimeoutClusterStateListener; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.MasterNotDiscoveredException; import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.*; /** * A base class for operations that needs to be performed on the master node. 
*/ public abstract class TransportMasterNodeOperationAction<Request extends MasterNodeOperationRequest, Response extends ActionResponse> extends TransportAction<Request, Response> { protected final TransportService transportService; protected final ClusterService clusterService; final String transportAction; final String executor; protected TransportMasterNodeOperationAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool) { super(settings, threadPool); this.transportService = transportService; this.clusterService = clusterService; this.transportAction = transportAction(); this.executor = executor(); transportService.registerHandler(transportAction, new TransportHandler()); } protected abstract String transportAction(); protected abstract String executor(); protected abstract Request newRequest(); protected abstract Response newResponse(); protected abstract void masterOperation(Request request, ClusterState state, ActionListener<Response> listener) throws ElasticsearchException; protected boolean localExecute(Request request) { return false; } protected ClusterBlockException checkBlock(Request request, ClusterState state) { return null; } protected void processBeforeDelegationToMaster(Request request, ClusterState state) { } @Override public void execute(Request request, ActionListener<Response> listener) { // since the callback is async, we typically can get called from within an event in the cluster service // or something similar, so make sure we are threaded so we won't block it. 
request.listenerThreaded(true); super.execute(request, listener); } @Override protected void doExecute(final Request request, final ActionListener<Response> listener) { innerExecute(request, listener, false); } private void innerExecute(final Request request, final ActionListener<Response> listener, final boolean retrying) { final ClusterState clusterState = clusterService.state(); final DiscoveryNodes nodes = clusterState.nodes(); if (nodes.localNodeMaster() || localExecute(request)) { // check for block, if blocked, retry, else, execute locally final ClusterBlockException blockException = checkBlock(request, clusterState); if (blockException != null) { if (!blockException.retryable()) { listener.onFailure(blockException); return; } clusterService.add(request.masterNodeTimeout(), new TimeoutClusterStateListener() { @Override public void postAdded() { ClusterBlockException blockException = checkBlock(request, clusterService.state()); if (blockException == null || !blockException.retryable()) { clusterService.remove(this); innerExecute(request, listener, false); } } @Override public void onClose() { clusterService.remove(this); listener.onFailure(blockException); } @Override public void onTimeout(TimeValue timeout) { clusterService.remove(this); listener.onFailure(blockException); } @Override public void clusterChanged(ClusterChangedEvent event) { ClusterBlockException blockException = checkBlock(request, event.state()); if (blockException == null || !blockException.retryable()) { clusterService.remove(this); innerExecute(request, listener, false); } } }); } else { try { threadPool.executor(executor).execute(new Runnable() { @Override public void run() { try { masterOperation(request, clusterService.state(), listener); } catch (Throwable e) { listener.onFailure(e); } } }); } catch (Throwable t) { listener.onFailure(t); } } } else { if (nodes.masterNode() == null) { if (retrying) { listener.onFailure(new MasterNotDiscoveredException()); } else { 
clusterService.add(request.masterNodeTimeout(), new TimeoutClusterStateListener() { @Override public void postAdded() { ClusterState clusterStateV2 = clusterService.state(); if (clusterStateV2.nodes().masterNodeId() != null) { // now we have a master, try and execute it... clusterService.remove(this); innerExecute(request, listener, true); } } @Override public void onClose() { clusterService.remove(this); listener.onFailure(new NodeClosedException(clusterService.localNode())); } @Override public void onTimeout(TimeValue timeout) { clusterService.remove(this); listener.onFailure(new MasterNotDiscoveredException("waited for [" + timeout + "]")); } @Override public void clusterChanged(ClusterChangedEvent event) { if (event.nodesDelta().masterNodeChanged()) { clusterService.remove(this); innerExecute(request, listener, true); } } }); } return; } processBeforeDelegationToMaster(request, clusterState); transportService.sendRequest(nodes.masterNode(), transportAction, request, new BaseTransportResponseHandler<Response>() { @Override public Response newInstance() { return newResponse(); } @Override public void handleResponse(Response response) { listener.onResponse(response); } @Override public String executor() { return ThreadPool.Names.SAME; } @Override public void handleException(final TransportException exp) { if (exp.unwrapCause() instanceof ConnectTransportException) { // we want to retry here a bit to see if a new master is elected clusterService.add(request.masterNodeTimeout(), new TimeoutClusterStateListener() { @Override public void postAdded() { ClusterState clusterStateV2 = clusterService.state(); if (!clusterState.nodes().masterNodeId().equals(clusterStateV2.nodes().masterNodeId())) { // master changes while adding the listener, try here clusterService.remove(this); innerExecute(request, listener, false); } } @Override public void onClose() { clusterService.remove(this); listener.onFailure(new NodeClosedException(clusterService.localNode())); } @Override 
public void onTimeout(TimeValue timeout) { clusterService.remove(this); listener.onFailure(new MasterNotDiscoveredException()); } @Override public void clusterChanged(ClusterChangedEvent event) { if (event.nodesDelta().masterNodeChanged()) { clusterService.remove(this); innerExecute(request, listener, false); } } }); } else { listener.onFailure(exp); } } }); } } private class TransportHandler extends BaseTransportRequestHandler<Request> { @Override public Request newInstance() { return newRequest(); } @Override public String executor() { return ThreadPool.Names.SAME; } @Override public void messageReceived(final Request request, final TransportChannel channel) throws Exception { // we just send back a response, no need to fork a listener request.listenerThreaded(false); execute(request, new ActionListener<Response>() { @Override public void onResponse(Response response) { try { channel.sendResponse(response); } catch (Throwable e) { onFailure(e); } } @Override public void onFailure(Throwable e) { try { channel.sendResponse(e); } catch (Exception e1) { logger.warn("Failed to send response", e1); } } }); } } }
/* * Jitsi, the OpenSource Java VoIP and Instant Messaging client. * * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.java.sip.communicator.impl.protocol.jabber; import java.util.*; import net.java.sip.communicator.impl.protocol.jabber.extensions.colibri.*; import net.java.sip.communicator.service.protocol.*; import net.java.sip.communicator.service.protocol.event.*; import net.java.sip.communicator.service.protocol.media.*; import net.java.sip.communicator.util.*; import org.jivesoftware.smack.*; import org.jivesoftware.smack.filter.*; import org.jivesoftware.smack.packet.*; /** * Implements <tt>OperationSetVideoBridge</tt> for Jabber. * * @author Yana Stamcheva * @author Lyubomir Marinov */ public class OperationSetVideoBridgeImpl implements OperationSetVideoBridge, PacketFilter, PacketListener, RegistrationStateChangeListener { /** * The <tt>Logger</tt> used by the <tt>OperationSetVideoBridgeImpl</tt> * class and its instances for logging output. */ private static final Logger logger = Logger.getLogger(OperationSetVideoBridgeImpl.class); /** * The <tt>ProtocolProviderService</tt> implementation which initialized * this instance, owns it and is often referred to as its parent. */ private final ProtocolProviderServiceJabberImpl protocolProvider; /** * Creates an instance of <tt>OperationSetVideoBridgeImpl</tt> by * specifying the parent <tt>ProtocolProviderService</tt> announcing this * operation set. 
* * @param protocolProvider the parent Jabber protocol provider */ public OperationSetVideoBridgeImpl( ProtocolProviderServiceJabberImpl protocolProvider) { this.protocolProvider = protocolProvider; this.protocolProvider.addRegistrationStateChangeListener(this); } /** * Implements {@link PacketFilter}. Determines whether this instance is * interested in a specific {@link Packet}. * <tt>OperationSetVideoBridgeImpl</tt> returns <tt>true</tt> if the * specified <tt>packet</tt> is a {@link ColibriConferenceIQ}; otherwise, * <tt>false</tt>. * * @param packet the <tt>Packet</tt> to be determined whether this instance * is interested in it * @return <tt>true</tt> if the specified <tt>packet</tt> is a * <tt>ColibriConferenceIQ</tt>; otherwise, <tt>false</tt> */ public boolean accept(Packet packet) { return (packet instanceof ColibriConferenceIQ); } /** * Creates a conference call with the specified callees as call peers via a * video bridge provided by the parent Jabber provider. * * @param callees the list of addresses that we should call * @return the newly created conference call containing all CallPeers * @throws OperationFailedException if establishing the conference call * fails * @throws OperationNotSupportedException if the provider does not have any * conferencing features. */ public Call createConfCall(String[] callees) throws OperationFailedException, OperationNotSupportedException { return protocolProvider .getOperationSet(OperationSetTelephonyConferencing.class) .createConfCall( callees, new MediaAwareCallConference(true)); } /** * Invites the callee represented by the specified uri to an already * existing call using a video bridge provided by the parent Jabber provider. * The difference between this method and createConfCall is that * inviteCalleeToCall allows a user to add new peers to an already * established conference. * * @param uri the callee to invite to an existing conf call. * @param call the call that we should invite the callee to. 
* @return the CallPeer object corresponding to the callee represented by * the specified uri. * @throws OperationFailedException if inviting the specified callee to the * specified call fails * @throws OperationNotSupportedException if allowing additional callees to * a pre-established call is not supported. */ public CallPeer inviteCalleeToCall(String uri, Call call) throws OperationFailedException, OperationNotSupportedException { return protocolProvider .getOperationSet(OperationSetTelephonyConferencing.class) .inviteCalleeToCall(uri, call); } /** * Indicates if there's an active video bridge available at this moment. The * Jabber provider may announce support for video bridge, but it should not * be used for calling until it becomes actually active. * * @return <tt>true</tt> to indicate that there's currently an active * available video bridge, <tt>false</tt> - otherwise */ public boolean isActive() { String jitsiVideobridge = protocolProvider.getJitsiVideobridge(); return ((jitsiVideobridge != null) && (jitsiVideobridge.length() > 0)); } /** * Notifies this instance that a specific <tt>ColibriConferenceIQ</tt> has * been received. * * @param conferenceIQ the <tt>ColibriConferenceIQ</tt> which has been * received */ private void processColibriConferenceIQ(ColibriConferenceIQ conferenceIQ) { /* * The application is not a Jitsi Videobridge server, it is a client. * Consequently, the specified ColibriConferenceIQ is sent to it in * relation to the part of the application's functionality which makes * requests to a Jitsi Videobridge server i.e. CallJabberImpl. * * Additionally, the method processColibriConferenceIQ is presently tasked * with processing ColibriConferenceIQ requests only. They are SET IQs * sent by the Jitsi Videobridge server to notify the application about * updates in the states of (colibri) conferences organized by the * application. 
*/ if (IQ.Type.SET.equals(conferenceIQ.getType()) && conferenceIQ.getID() != null) { OperationSetBasicTelephony<?> basicTelephony = protocolProvider.getOperationSet( OperationSetBasicTelephony.class); if (basicTelephony != null) { Iterator<? extends Call> i = basicTelephony.getActiveCalls(); while (i.hasNext()) { Call call = i.next(); if (call instanceof CallJabberImpl) { CallJabberImpl callJabberImpl = (CallJabberImpl) call; MediaAwareCallConference conference = callJabberImpl.getConference(); if ((conference != null) && conference.isJitsiVideobridge()) { /* * TODO We may want to disallow rogue CallJabberImpl * instances which may throw an exception to prevent * the conferenceIQ from reaching the CallJabberImpl * instance which it was meant for. */ if (callJabberImpl.processColibriConferenceIQ( conferenceIQ)) break; } } } } } } /** * Implements {@link PacketListener}. Notifies this instance that a specific * {@link Packet} (which this instance has already expressed interest into * by returning <tt>true</tt> from {@link #accept(Packet)}) has been * received. * * @param packet the <tt>Packet</tt> which has been received and which this * instance is given a chance to process */ public void processPacket(Packet packet) { /* * As we do elsewhere, acknowledge the receipt of the Packet first and * then go about our business with it. */ IQ iq = (IQ) packet; if (iq.getType() == IQ.Type.SET) protocolProvider.getConnection().sendPacket(IQ.createResultIQ(iq)); /* * Now that the acknowledging is out of the way, do go about our * business with the Packet. */ ColibriConferenceIQ conferenceIQ = (ColibriConferenceIQ) iq; boolean interrupted = false; try { processColibriConferenceIQ(conferenceIQ); } catch (Throwable t) { logger.error( "An error occurred during the processing of a " + packet.getClass().getName() + " packet", t); if (t instanceof InterruptedException) { /* * We cleared the interrupted state of the current Thread by * catching the InterruptedException. 
However, we do not really * care whether the current Thread has been interrupted - we * caught the InterruptedException because we want to swallow * any Throwable. Consequently, we should better restore the * interrupted state. */ interrupted = true; } else if (t instanceof ThreadDeath) throw (ThreadDeath) t; } if (interrupted) Thread.currentThread().interrupt(); } /** * {@inheritDoc} * * Implements {@link RegistrationStateChangeListener}. Notifies this * instance that there has been a change in the <tt>RegistrationState</tt> * of {@link #protocolProvider}. Subscribes this instance to * {@link ColibriConferenceIQ}s as soon as <tt>protocolProvider</tt> is * registered and unsubscribes it as soon as <tt>protocolProvider</tt> is * unregistered. */ public void registrationStateChanged(RegistrationStateChangeEvent ev) { RegistrationState registrationState = ev.getNewState(); if (RegistrationState.REGISTERED.equals(registrationState)) { protocolProvider.getConnection().addPacketListener(this, this); } else if (RegistrationState.UNREGISTERED.equals(registrationState)) { Connection connection = protocolProvider.getConnection(); if (connection != null) connection.removePacketListener(this); } } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.ws.security.processor; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.ws.security.WSConstants; import org.apache.ws.security.WSDataRef; import org.apache.ws.security.WSDocInfo; import org.apache.ws.security.WSPasswordCallback; import org.apache.ws.security.WSSConfig; import org.apache.ws.security.WSSecurityEngine; import org.apache.ws.security.WSSecurityEngineResult; import org.apache.ws.security.WSSecurityException; import org.apache.ws.security.components.crypto.Crypto; import org.apache.ws.security.message.token.SecurityTokenReference; import org.apache.ws.security.message.token.X509Security; import org.apache.ws.security.saml.SAML2KeyInfo; import org.apache.ws.security.saml.SAML2Util; import org.apache.ws.security.saml.SAMLKeyInfo; import org.apache.ws.security.saml.SAMLUtil; import org.apache.ws.security.util.Base64; import org.apache.ws.security.util.WSSecurityUtil; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.w3c.dom.Text; import javax.crypto.BadPaddingException; import javax.crypto.Cipher; import javax.crypto.IllegalBlockSizeException; 
import javax.crypto.SecretKey; import javax.security.auth.callback.Callback; import javax.security.auth.callback.CallbackHandler; import javax.security.auth.callback.UnsupportedCallbackException; import javax.xml.namespace.QName; import java.io.IOException; import java.security.PrivateKey; import java.security.cert.X509Certificate; import java.util.ArrayList; import java.util.Vector; public class EncryptedKeyProcessor implements Processor { private static Log log = LogFactory.getLog(EncryptedKeyProcessor.class.getName()); private static Log tlog = LogFactory.getLog("org.apache.ws.security.TIME"); private byte[] encryptedEphemeralKey; private byte[] decryptedBytes = null; private String encryptedKeyId = null; private X509Certificate cert = null; private String encryptedKeyTransportMethod = null; private WSDocInfo docInfo = null; public void handleToken( Element elem, Crypto crypto, Crypto decCrypto, CallbackHandler cb, WSDocInfo wsDocInfo, Vector returnResults, WSSConfig wsc ) throws WSSecurityException { if (log.isDebugEnabled()) { log.debug("Found encrypted key element"); } if (decCrypto == null) { throw new WSSecurityException(WSSecurityException.FAILURE, "noDecCryptoFile"); } if (cb == null) { throw new WSSecurityException(WSSecurityException.FAILURE, "noCallback"); } docInfo = wsDocInfo; ArrayList dataRefUris = handleEncryptedKey((Element) elem, cb, decCrypto); encryptedKeyId = elem.getAttributeNS(null, "Id"); WSSecurityEngineResult result = new WSSecurityEngineResult( WSConstants.ENCR, this.decryptedBytes, this.encryptedEphemeralKey, this.encryptedKeyId, dataRefUris, cert); result.put(WSSecurityEngineResult.TAG_ENCRYPTED_KEY_TRANSPORT_METHOD, this.encryptedKeyTransportMethod); returnResults.add( 0, result ); } public ArrayList handleEncryptedKey( Element xencEncryptedKey, CallbackHandler cb, Crypto crypto ) throws WSSecurityException { return handleEncryptedKey(xencEncryptedKey, cb, crypto, null); } public ArrayList handleEncryptedKey( Element 
xencEncryptedKey, PrivateKey privatekey ) throws WSSecurityException { return handleEncryptedKey(xencEncryptedKey, null, null, privatekey); } public ArrayList handleEncryptedKey( Element xencEncryptedKey, CallbackHandler cb, Crypto crypto, PrivateKey privateKey ) throws WSSecurityException { long t0 = 0, t1 = 0, t2 = 0; if (tlog.isDebugEnabled()) { t0 = System.currentTimeMillis(); } // need to have it to find the encrypted data elements in the envelope Document doc = xencEncryptedKey.getOwnerDocument(); // lookup xenc:EncryptionMethod, get the Algorithm attribute to determine // how the key was encrypted. Then check if we support the algorithm Node tmpE = null; // short living Element used for lookups only tmpE = (Element) WSSecurityUtil.getDirectChild( (Node) xencEncryptedKey, "EncryptionMethod", WSConstants.ENC_NS ); if (tmpE != null) { this.encryptedKeyTransportMethod = ((Element) tmpE).getAttribute("Algorithm"); } if (this.encryptedKeyTransportMethod == null) { throw new WSSecurityException(WSSecurityException.UNSUPPORTED_ALGORITHM, "noEncAlgo"); } Cipher cipher = WSSecurityUtil.getCipherInstance(this.encryptedKeyTransportMethod); // // Well, we can decrypt the session (symmetric) key. Now lookup CipherValue, this is the // value of the encrypted session key (session key usually is a symmetrical key that encrypts // the referenced content). 
This is a 2-step lookup // Element xencCipherValue = null; tmpE = (Element) WSSecurityUtil.getDirectChild( (Node) xencEncryptedKey, "CipherData", WSConstants.ENC_NS ); if (tmpE != null) { xencCipherValue = (Element) WSSecurityUtil.getDirectChild(tmpE, "CipherValue", WSConstants.ENC_NS); } if (xencCipherValue == null) { throw new WSSecurityException(WSSecurityException.INVALID_SECURITY, "noCipher"); } if (privateKey == null) { Element keyInfo = (Element) WSSecurityUtil.getDirectChild( (Node) xencEncryptedKey, "KeyInfo", WSConstants.SIG_NS ); String alias; if (keyInfo != null) { Element secRefToken = (Element) WSSecurityUtil.getDirectChild( keyInfo, "SecurityTokenReference", WSConstants.WSSE_NS ); // // EncryptedKey must a a STR as child of KeyInfo, KeyName // valid only for EncryptedData // // if (secRefToken == null) { // secRefToken = (Element) WSSecurityUtil.getDirectChild(keyInfo, // "KeyName", WSConstants.SIG_NS); // } if (secRefToken == null) { throw new WSSecurityException( WSSecurityException.INVALID_SECURITY, "noSecTokRef" ); } SecurityTokenReference secRef = new SecurityTokenReference(secRefToken); // // Well, at this point there are several ways to get the key. // Try to handle all of them :-). // alias = null; // // handle X509IssuerSerial here. First check if all elements are available, // get the appropriate data, check if all data is available. // If all is ok up to that point, look up the certificate alias according // to issuer name and serial number. // This method is recommended by OASIS WS-S specification, X509 profile // if (secRef.containsX509Data() || secRef.containsX509IssuerSerial()) { alias = secRef.getX509IssuerSerialAlias(crypto); if (log.isDebugEnabled()) { log.debug("X509IssuerSerial alias: " + alias); } } // // If wsse:KeyIdentifier found, then the public key of the attached cert was used to // encrypt the session (symmetric) key that encrypts the data. 
Extract the certificate // using the BinarySecurity token (was enhanced to handle KeyIdentifier too). // This method is _not_ recommended by OASIS WS-S specification, X509 profile // else if (secRef.containsKeyIdentifier()) { X509Certificate[] certs = null; if (WSConstants.WSS_SAML_KI_VALUE_TYPE.equals(secRef .getKeyIdentifierValueType())) { Element token = secRef.getKeyIdentifierTokenElement( doc, docInfo, cb); if (crypto == null) { throw new WSSecurityException( WSSecurityException.FAILURE, "noSigCryptoFile"); } SAMLKeyInfo samlKi = SAMLUtil.getSAMLKeyInfo(token, crypto, cb); certs = samlKi.getCerts(); } else if (WSConstants.WSS_SAML2_KI_VALUE_TYPE .equals(secRef.getKeyIdentifierValueType())) { Element token = secRef.getKeyIdentifierTokenElement( doc, docInfo, cb); if (crypto == null) { throw new WSSecurityException(0, "noSigCryptoFile"); } SAML2KeyInfo samlKi = SAML2Util.getSAML2KeyInfo(token, crypto, cb); certs = samlKi.getCerts(); } else { certs = secRef.getKeyIdentifier(crypto); } if (certs == null || certs.length < 1 || certs[0] == null) { throw new WSSecurityException( WSSecurityException.FAILURE, "noCertsFound", new Object[] {"decryption (KeyId)"} ); } // // Here we have the certificate. Now find the alias for it. Needed to identify // the private key associated with this certificate // alias = crypto.getAliasForX509Cert(certs[0]); cert = certs[0]; if (log.isDebugEnabled()) { log.debug("cert: " + certs[0]); log.debug("KeyIdentifier Alias: " + alias); } } else if (secRef.containsReference()) { Element bstElement = secRef.getTokenElement(doc, null, cb); // at this point ... 
check token type: Binary QName el = new QName(bstElement.getNamespaceURI(), bstElement.getLocalName()); if (el.equals(WSSecurityEngine.binaryToken)) { X509Security token = new X509Security(bstElement); String value = bstElement.getAttribute(WSSecurityEngine.VALUE_TYPE); if (!X509Security.X509_V3_TYPE.equals(value) || (token == null)) { throw new WSSecurityException( WSSecurityException.UNSUPPORTED_SECURITY_TOKEN, "unsupportedBinaryTokenType", new Object[] {"for decryption (BST)"} ); } cert = token.getX509Certificate(crypto); if (cert == null) { throw new WSSecurityException( WSSecurityException.FAILURE, "noCertsFound", new Object[] {"decryption"} ); } // // Here we have the certificate. Now find the alias for it. Needed to identify // the private key associated with this certificate // alias = crypto.getAliasForX509Cert(cert); if (log.isDebugEnabled()) { log.debug("BST Alias: " + alias); } } else { throw new WSSecurityException( WSSecurityException.UNSUPPORTED_SECURITY_TOKEN, "unsupportedBinaryTokenType", null ); } // // The following code is somewhat strange: the called crypto method gets // the keyname and searches for a certificate with an issuer's name that is // equal to this keyname. No serialnumber is used - IMHO this does // not identifies a certificate. In addition neither the WSS4J encryption // nor signature methods use this way to identify a certificate. Because of that // the next lines of code are disabled. 
// // } else if (secRef.containsKeyName()) { // alias = crypto.getAliasForX509Cert(secRef.getKeyNameValue()); // if (log.isDebugEnabled()) { // log.debug("KeyName alias: " + alias); // } } else { throw new WSSecurityException( WSSecurityException.INVALID_SECURITY, "unsupportedKeyId" ); } } else if (crypto.getDefaultX509Alias() != null) { alias = crypto.getDefaultX509Alias(); } else { throw new WSSecurityException(WSSecurityException.INVALID_SECURITY, "noKeyinfo"); } // // At this point we have all information necessary to decrypt the session // key: // - the Cipher object intialized with the correct methods // - The data that holds the encrypted session key // - the alias name for the private key // // Now use the callback here to get password that enables // us to read the private key // WSPasswordCallback pwCb = new WSPasswordCallback(alias, WSPasswordCallback.DECRYPT); try { Callback[] callbacks = new Callback[]{pwCb}; cb.handle(callbacks); } catch (IOException e) { throw new WSSecurityException( WSSecurityException.FAILURE, "noPassword", new Object[]{alias}, e ); } catch (UnsupportedCallbackException e) { throw new WSSecurityException( WSSecurityException.FAILURE, "noPassword", new Object[]{alias}, e ); } String password = pwCb.getPassword(); if (password == null) { throw new WSSecurityException( WSSecurityException.FAILURE, "noPassword", new Object[]{alias} ); } try { privateKey = crypto.getPrivateKey(alias, password); } catch (Exception e) { throw new WSSecurityException(WSSecurityException.FAILED_CHECK, null, null, e); } } try { cipher.init(Cipher.DECRYPT_MODE, privateKey); } catch (Exception e1) { throw new WSSecurityException(WSSecurityException.FAILED_CHECK, null, null, e1); } try { encryptedEphemeralKey = getDecodedBase64EncodedData(xencCipherValue); decryptedBytes = cipher.doFinal(encryptedEphemeralKey); } catch (IllegalStateException e2) { throw new WSSecurityException(WSSecurityException.FAILED_CHECK, null, null, e2); } catch 
(IllegalBlockSizeException e2) { throw new WSSecurityException(WSSecurityException.FAILED_CHECK, null, null, e2); } catch (BadPaddingException e2) { throw new WSSecurityException(WSSecurityException.FAILED_CHECK, null, null, e2); } if (tlog.isDebugEnabled()) { t1 = System.currentTimeMillis(); } // At this point we have the decrypted session (symmetric) key. According // to W3C XML-Enc this key is used to decrypt _any_ references contained in // the reference list // Now lookup the references that are encrypted with this key // Element refList = (Element) WSSecurityUtil.getDirectChild( (Node) xencEncryptedKey, "ReferenceList", WSConstants.ENC_NS ); ArrayList dataRefs = new ArrayList(); if (refList != null) { for (tmpE = refList.getFirstChild(); tmpE != null; tmpE = tmpE.getNextSibling() ) { if (tmpE.getNodeType() != Node.ELEMENT_NODE) { continue; } if (!tmpE.getNamespaceURI().equals(WSConstants.ENC_NS)) { continue; } if (tmpE.getLocalName().equals("DataReference")) { String dataRefURI = ((Element) tmpE).getAttribute("URI"); if (dataRefURI.charAt(0) == '#') { dataRefURI = dataRefURI.substring(1); } WSDataRef dataRef = decryptDataRef(doc, dataRefURI, decryptedBytes); dataRefs.add(dataRef); } } return dataRefs; } if (tlog.isDebugEnabled()) { t2 = System.currentTimeMillis(); tlog.debug( "XMLDecrypt: total= " + (t2 - t0) + ", get-sym-key= " + (t1 - t0) + ", decrypt= " + (t2 - t1) ); } return null; } /** * Method getDecodedBase64EncodedData * * @param element * @return a byte array containing the decoded data * @throws WSSecurityException */ public static byte[] getDecodedBase64EncodedData(Element element) throws WSSecurityException { StringBuffer sb = new StringBuffer(); NodeList children = element.getChildNodes(); int iMax = children.getLength(); for (int i = 0; i < iMax; i++) { Node curr = children.item(i); if (curr.getNodeType() == Node.TEXT_NODE) { sb.append(((Text) curr).getData()); } } String encodedData = sb.toString(); return Base64.decode(encodedData); } /** * 
Decrypt an EncryptedData element referenced by dataRefURI */ private WSDataRef decryptDataRef( Document doc, String dataRefURI, byte[] decryptedData ) throws WSSecurityException { if (log.isDebugEnabled()) { log.debug("found data reference: " + dataRefURI); } // // Find the encrypted data element referenced by dataRefURI // Element encryptedDataElement = ReferenceListProcessor.findEncryptedDataElement(doc, dataRefURI); // // Prepare the SecretKey object to decrypt EncryptedData // String symEncAlgo = X509Util.getEncAlgo(encryptedDataElement); SecretKey symmetricKey = WSSecurityUtil.prepareSecretKey(symEncAlgo, decryptedData); return ReferenceListProcessor.decryptEncryptedData( doc, dataRefURI, encryptedDataElement, symmetricKey, symEncAlgo ); } /** * Get the Id of the encrypted key element. * * @return The Id string */ public String getId() { return encryptedKeyId; } /** * Get the decrypted key. * * The encrypted key element contains an encrypted session key. The * security functions use the session key to encrypt contents of the message * with symmetrical encryption methods. * * @return The decrypted key. */ public byte[] getDecryptedBytes() { return decryptedBytes; } public byte[] getEncryptedEphemeralKey() { return encryptedEphemeralKey; } }
package com.dailystudio.simplenoterx.ui; import android.content.Context; import android.content.Intent; import android.support.v7.widget.RecyclerView; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import com.dailystudio.app.ui.AbsArrayItemViewHolder; import com.dailystudio.app.utils.ActivityLauncher; import com.dailystudio.simplenoterx.Constants; import com.dailystudio.simplenoterx.R; import com.dailystudio.simplenoterx.activity.EditNoteActivity; import com.dailystudio.simplenoterx.databaseobject.NoteObject; import com.hwangjr.rxbus.RxBus; import java.util.ArrayList; import java.util.List; /** * Created by nanye on 17/2/10. */ public class NotesAdapter extends RecyclerView.Adapter<NoteViewHolder> { private List<NoteObject> mNotes; private Context mContext; private boolean mInEditMode = false; public NotesAdapter(Context context) { mContext = context.getApplicationContext(); mNotes = new ArrayList<>(); } public void setNotes(List<NoteObject> notes) { mNotes.clear(); if (notes != null && notes.size() > 0) { mNotes.addAll(notes); } notifyDataSetChanged(); } public NoteObject getNoteAtPosition(int pos) { if (pos < 0 || pos >= mNotes.size()) { return null; } return mNotes.get(pos); } @Override public NoteViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { View view = LayoutInflater.from(mContext).inflate( R.layout.layout_note, null); return new NoteViewHolder(view); } @Override public void onBindViewHolder(NoteViewHolder holder, int position) { NoteObject noteObject = getNoteAtPosition(position); if (holder != null) { if (holder.itemView != null) { holder.itemView.setTag(new Integer(position)); holder.itemView.setOnClickListener(mItemClickListener); holder.itemView.setOnLongClickListener(mItemLongClickListener); } holder.bindNote(noteObject); } } @Override public int getItemCount() { return mNotes.size(); } private View.OnClickListener mItemClickListener = new View.OnClickListener() { @Override public void 
onClick(View v) { if (v == null) { return; } final Context context = v.getContext(); if (context == null) { return; } Object o = v.getTag(); if (o instanceof Integer == false) { return; } int position = (Integer)o; NoteObject noteObject = getNoteAtPosition(position); if (mInEditMode) { noteObject.setSelected(!noteObject.isSelected()); notifyItemChanged(position); } else { Intent i = new Intent(); i.setClass(context.getApplicationContext(), EditNoteActivity.class); i.putExtra(Constants.EXTRA_NOTE_ID, noteObject.getId()); ActivityLauncher.launchActivity(context, i); } } }; private View.OnLongClickListener mItemLongClickListener = new View.OnLongClickListener() { @Override public boolean onLongClick(View v) { if (v == null) { return false; } final Context context = v.getContext(); if (context == null) { return false; } Object o = v.getTag(); if (o instanceof Integer == false) { return false; } int position = (Integer)o; NoteObject noteObject = getNoteAtPosition(position); if (!mInEditMode) { noteObject.setSelected(true); notifyItemChanged(position); } RxBus.get().post(mInEditMode ? 
Constants.EditModeEvent.EVENT_LEAVE : Constants.EditModeEvent.EVENT_ENTER); return true; } }; public List<NoteObject> getNotes() { return getNotes(false); } public List<NoteObject> getNotes(boolean onlySelected) { List<NoteObject> notes = new ArrayList<>(); for (NoteObject noteObject: mNotes) { if (onlySelected && !noteObject.isSelected()) { continue; } notes.add(noteObject); } return notes; } public ArrayList<Integer> getNoteIds() { return getNoteIds(false); } public ArrayList<Integer> getNoteIds(boolean onlySelected) { ArrayList<Integer> noteIds = new ArrayList<>(); for (NoteObject noteObject: mNotes) { if (onlySelected && !noteObject.isSelected()) { continue; } noteIds.add(noteObject.getId()); } return noteIds; } public void setEditMode(boolean enabled) { mInEditMode = enabled; final List<NoteObject> notes = getNotes(); if (notes != null && !enabled) { for (NoteObject noteObject: notes) { noteObject.setSelected(false); } } notifyDataSetChanged(); } }
/*
 * Copyright (c) 1998, 2010, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 */

package java.util;
import java.lang.ref.WeakReference;
import java.lang.ref.ReferenceQueue;


/**
 * Hash table based implementation of the <tt>Map</tt> interface, with
 * <em>weak keys</em>.
 * An entry in a <tt>WeakHashMap</tt> will automatically be removed when
 * its key is no longer in ordinary use.  More precisely, the presence of a
 * mapping for a given key will not prevent the key from being discarded by the
 * garbage collector, that is, made finalizable, finalized, and then reclaimed.
 * When a key has been discarded its entry is effectively removed from the map,
 * so this class behaves somewhat differently from other <tt>Map</tt>
 * implementations.
 *
 * <p> Both null values and the null key are supported. This class has
 * performance characteristics similar to those of the <tt>HashMap</tt>
 * class, and has the same efficiency parameters of <em>initial capacity</em>
 * and <em>load factor</em>.
 *
 * <p> Like most collection classes, this class is not synchronized.
 * A synchronized <tt>WeakHashMap</tt> may be constructed using the
 * {@link Collections#synchronizedMap Collections.synchronizedMap}
 * method.
 *
 * <p> This class is intended primarily for use with key objects whose
 * <tt>equals</tt> methods test for object identity using the
 * <tt>==</tt> operator.  Once such a key is discarded it can never be
 * recreated, so it is impossible to do a lookup of that key in a
 * <tt>WeakHashMap</tt> at some later time and be surprised that its entry
 * has been removed.  This class will work perfectly well with key objects
 * whose <tt>equals</tt> methods are not based upon object identity, such
 * as <tt>String</tt> instances.  With such recreatable key objects,
 * however, the automatic removal of <tt>WeakHashMap</tt> entries whose
 * keys have been discarded may prove to be confusing.
 *
 * <p> The behavior of the <tt>WeakHashMap</tt> class depends in part upon
 * the actions of the garbage collector, so several familiar (though not
 * required) <tt>Map</tt> invariants do not hold for this class.  Because
 * the garbage collector may discard keys at any time, a
 * <tt>WeakHashMap</tt> may behave as though an unknown thread is silently
 * removing entries.  In particular, even if you synchronize on a
 * <tt>WeakHashMap</tt> instance and invoke none of its mutator methods, it
 * is possible for the <tt>size</tt> method to return smaller values over
 * time, for the <tt>isEmpty</tt> method to return <tt>false</tt> and
 * then <tt>true</tt>, for the <tt>containsKey</tt> method to return
 * <tt>true</tt> and later <tt>false</tt> for a given key, for the
 * <tt>get</tt> method to return a value for a given key but later return
 * <tt>null</tt>, for the <tt>put</tt> method to return
 * <tt>null</tt> and the <tt>remove</tt> method to return
 * <tt>false</tt> for a key that previously appeared to be in the map, and
 * for successive examinations of the key set, the value collection, and
 * the entry set to yield successively smaller numbers of elements.
 *
 * <p> Each key object in a <tt>WeakHashMap</tt> is stored indirectly as
 * the referent of a weak reference.  Therefore a key will automatically be
 * removed only after the weak references to it, both inside and outside of the
 * map, have been cleared by the garbage collector.
 *
 * <p> <strong>Implementation note:</strong> The value objects in a
 * <tt>WeakHashMap</tt> are held by ordinary strong references.  Thus care
 * should be taken to ensure that value objects do not strongly refer to their
 * own keys, either directly or indirectly, since that will prevent the keys
 * from being discarded.  Note that a value object may refer indirectly to its
 * key via the <tt>WeakHashMap</tt> itself; that is, a value object may
 * strongly refer to some other key object whose associated value object, in
 * turn, strongly refers to the key of the first value object.  If the values
 * in the map do not rely on the map holding strong references to them, one way
 * to deal with this is to wrap values themselves within
 * <tt>WeakReferences</tt> before
 * inserting, as in: <tt>m.put(key, new WeakReference(value))</tt>,
 * and then unwrapping upon each <tt>get</tt>.
 *
 * <p>The iterators returned by the <tt>iterator</tt> method of the collections
 * returned by all of this class's "collection view methods" are
 * <i>fail-fast</i>: if the map is structurally modified at any time after the
 * iterator is created, in any way except through the iterator's own
 * <tt>remove</tt> method, the iterator will throw a {@link
 * ConcurrentModificationException}.  Thus, in the face of concurrent
 * modification, the iterator fails quickly and cleanly, rather than risking
 * arbitrary, non-deterministic behavior at an undetermined time in the future.
 *
 * <p>Note that the fail-fast behavior of an iterator cannot be guaranteed
 * as it is, generally speaking, impossible to make any hard guarantees in the
 * presence of unsynchronized concurrent modification.  Fail-fast iterators
 * throw <tt>ConcurrentModificationException</tt> on a best-effort basis.
 * Therefore, it would be wrong to write a program that depended on this
 * exception for its correctness:  <i>the fail-fast behavior of iterators
 * should be used only to detect bugs.</i>
 *
 * <p>This class is a member of the
 * <a href="{@docRoot}/../technotes/guides/collections/index.html">
 * Java Collections Framework</a>.
 *
 * @param <K> the type of keys maintained by this map
 * @param <V> the type of mapped values
 *
 * @author      Doug Lea
 * @author      Josh Bloch
 * @author      Mark Reinhold
 * @since       1.2
 * @see         java.util.HashMap
 * @see         java.lang.ref.WeakReference
 */
public class WeakHashMap<K,V>
    extends AbstractMap<K,V>
    implements Map<K,V> {

    /**
     * The default initial capacity -- MUST be a power of two.
     */
    private static final int DEFAULT_INITIAL_CAPACITY = 16;

    /**
     * The maximum capacity, used if a higher value is implicitly specified
     * by either of the constructors with arguments.
     * MUST be a power of two <= 1<<30.
     */
    private static final int MAXIMUM_CAPACITY = 1 << 30;

    /**
     * The load factor used when none specified in constructor.
     */
    private static final float DEFAULT_LOAD_FACTOR = 0.75f;

    /**
     * The table, resized as necessary. Length MUST Always be a power of two.
     */
    Entry<K,V>[] table;

    /**
     * The number of key-value mappings contained in this weak hash map.
     * May silently shrink as the GC clears keys (see expungeStaleEntries).
     */
    private int size;

    /**
     * The next size value at which to resize (capacity * load factor).
     */
    private int threshold;

    /**
     * The load factor for the hash table.
     */
    private final float loadFactor;

    /**
     * Reference queue for cleared WeakEntries. Each Entry is registered with
     * this queue; the GC enqueues entries here once their key is reclaimed.
     */
    private final ReferenceQueue<Object> queue = new ReferenceQueue<>();

    /**
     * The number of times this WeakHashMap has been structurally modified.
     * Structural modifications are those that change the number of
     * mappings in the map or otherwise modify its internal structure
     * (e.g., rehash).  This field is used to make iterators on
     * Collection-views of the map fail-fast.
     *
     * @see ConcurrentModificationException
     */
    int modCount;

    /**
     * The default threshold of map capacity above which alternative hashing is
     * used for String keys. Alternative hashing reduces the incidence of
     * collisions due to weak hash code calculation for String keys.
     * <p/>
     * This value may be overridden by defining the system property
     * {@code jdk.map.althashing.threshold}. A property value of {@code 1}
     * forces alternative hashing to be used at all times whereas
     * {@code -1} value ensures that alternative hashing is never used.
     */
    static final int ALTERNATIVE_HASHING_THRESHOLD_DEFAULT = Integer.MAX_VALUE;

    /**
     * holds values which can't be initialized until after VM is booted.
     */
    private static class Holder {

        /**
         * Table capacity above which to switch to use alternative hashing.
         */
        static final int ALTERNATIVE_HASHING_THRESHOLD;

        static {
            // Read the override from the system property under doPrivileged so
            // this works even with a restrictive security manager installed.
            String altThreshold = java.security.AccessController.doPrivileged(
                new sun.security.action.GetPropertyAction(
                    "jdk.map.althashing.threshold"));

            int threshold;
            try {
                threshold = (null != altThreshold)
                        ? Integer.parseInt(altThreshold)
                        : ALTERNATIVE_HASHING_THRESHOLD_DEFAULT;

                // disable alternative hashing if -1
                if (threshold == -1) {
                    threshold = Integer.MAX_VALUE;
                }

                if (threshold < 0) {
                    throw new IllegalArgumentException("value must be positive integer.");
                }
            } catch(IllegalArgumentException failed) {
                throw new Error("Illegal value for 'jdk.map.althashing.threshold'", failed);
            }
            ALTERNATIVE_HASHING_THRESHOLD = threshold;
        }
    }

    /**
     * If {@code true} then perform alternate hashing to reduce the incidence of
     * collisions due to weak hash code calculation.
     */
    transient boolean useAltHashing;

    /**
     * A randomizing value associated with this instance that is applied to
     * hash code of keys to make hash collisions harder to find.
     */
    transient final int hashSeed = sun.misc.Hashing.randomHashSeed(this);

    // Typed allocation helper; isolates the one unavoidable unchecked cast
    // caused by generic array creation.
    @SuppressWarnings("unchecked")
    private Entry<K,V>[] newTable(int n) {
        return (Entry<K,V>[]) new Entry[n];
    }

    /**
     * Constructs a new, empty <tt>WeakHashMap</tt> with the given initial
     * capacity and the given load factor.
     *
     * @param  initialCapacity The initial capacity of the <tt>WeakHashMap</tt>
     * @param  loadFactor      The load factor of the <tt>WeakHashMap</tt>
     * @throws IllegalArgumentException if the initial capacity is negative,
     *         or if the load factor is nonpositive.
     */
    public WeakHashMap(int initialCapacity, float loadFactor) {
        if (initialCapacity < 0)
            throw new IllegalArgumentException("Illegal Initial Capacity: "+
                                               initialCapacity);
        if (initialCapacity > MAXIMUM_CAPACITY)
            initialCapacity = MAXIMUM_CAPACITY;

        if (loadFactor <= 0 || Float.isNaN(loadFactor))
            throw new IllegalArgumentException("Illegal Load factor: "+
                                               loadFactor);
        // Round the requested capacity up to the next power of two.
        int capacity = 1;
        while (capacity < initialCapacity)
            capacity <<= 1;
        table = newTable(capacity);
        this.loadFactor = loadFactor;
        threshold = (int)(capacity * loadFactor);
        useAltHashing = sun.misc.VM.isBooted() &&
                (capacity >= Holder.ALTERNATIVE_HASHING_THRESHOLD);
    }

    /**
     * Constructs a new, empty <tt>WeakHashMap</tt> with the given initial
     * capacity and the default load factor (0.75).
     *
     * @param  initialCapacity The initial capacity of the <tt>WeakHashMap</tt>
     * @throws IllegalArgumentException if the initial capacity is negative
     */
    public WeakHashMap(int initialCapacity) {
        this(initialCapacity, DEFAULT_LOAD_FACTOR);
    }

    /**
     * Constructs a new, empty <tt>WeakHashMap</tt> with the default initial
     * capacity (16) and load factor (0.75).
     */
    public WeakHashMap() {
        this(DEFAULT_INITIAL_CAPACITY, DEFAULT_LOAD_FACTOR);
    }

    /**
     * Constructs a new <tt>WeakHashMap</tt> with the same mappings as the
     * specified map.  The <tt>WeakHashMap</tt> is created with the default
     * load factor (0.75) and an initial capacity sufficient to hold the
     * mappings in the specified map.
     *
     * @param   m the map whose mappings are to be placed in this map
     * @throws  NullPointerException if the specified map is null
     * @since   1.3
     */
    public WeakHashMap(Map<? extends K, ? extends V> m) {
        this(Math.max((int) (m.size() / DEFAULT_LOAD_FACTOR) + 1,
                DEFAULT_INITIAL_CAPACITY),
             DEFAULT_LOAD_FACTOR);
        putAll(m);
    }

    // internal utilities

    /**
     * Value representing null keys inside tables.
     */
    private static final Object NULL_KEY = new Object();

    /**
     * Use NULL_KEY for key if it is null.
     */
    private static Object maskNull(Object key) {
        return (key == null) ? NULL_KEY : key;
    }

    /**
     * Returns internal representation of null key back to caller as null.
     */
    static Object unmaskNull(Object key) {
        return (key == NULL_KEY) ? null : key;
    }

    /**
     * Checks for equality of non-null reference x and possibly-null y.  By
     * default uses Object.equals.
     */
    private static boolean eq(Object x, Object y) {
        return x == y || x.equals(y);
    }

    /**
     * Retrieve object hash code and applies a supplemental hash function to the
     * result hash, which defends against poor quality hash functions.  This is
     * critical because HashMap uses power-of-two length hash tables, that
     * otherwise encounter collisions for hashCodes that do not differ
     * in lower bits.
     */
    int hash(Object k) {
        int h;
        if (useAltHashing) {
            h = hashSeed;
            if (k instanceof String) {
                // Strings get a dedicated seeded 32-bit hash under alt hashing.
                return sun.misc.Hashing.stringHash32((String) k);
            } else {
                h ^= k.hashCode();
            }
        } else  {
            h = k.hashCode();
        }

        // This function ensures that hashCodes that differ only by
        // constant multiples at each bit position have a bounded
        // number of collisions (approximately 8 at default load factor).
        h ^= (h >>> 20) ^ (h >>> 12);
        return h ^ (h >>> 7) ^ (h >>> 4);
    }

    /**
     * Returns index for hash code h.
     */
    private static int indexFor(int h, int length) {
        return h & (length-1);
    }

    /**
     * Expunges stale entries from the table.
     * Drains the reference queue of entries whose keys were reclaimed and
     * unlinks each from its bucket chain, decrementing size.
     */
    private void expungeStaleEntries() {
        for (Object x; (x = queue.poll()) != null; ) {
            // Synchronize on the queue so concurrent expunge passes (e.g. from
            // size() on another thread) do not corrupt a bucket chain.
            synchronized (queue) {
                @SuppressWarnings("unchecked")
                    Entry<K,V> e = (Entry<K,V>) x;
                int i = indexFor(e.hash, table.length);

                Entry<K,V> prev = table[i];
                Entry<K,V> p = prev;
                while (p != null) {
                    Entry<K,V> next = p.next;
                    if (p == e) {
                        if (prev == e)
                            table[i] = next;
                        else
                            prev.next = next;
                        // Must not null out e.next;
                        // stale entries may be in use by a HashIterator
                        e.value = null; // Help GC
                        size--;
                        break;
                    }
                    prev = p;
                    p = next;
                }
            }
        }
    }

    /**
     * Returns the table after first expunging stale entries.
     */
    private Entry<K,V>[] getTable() {
        expungeStaleEntries();
        return table;
    }

    /**
     * Returns the number of key-value mappings in this map.
     * This result is a snapshot, and may not reflect unprocessed
     * entries that will be removed before next attempted access
     * because they are no longer referenced.
     */
    public int size() {
        if (size == 0)
            return 0;
        expungeStaleEntries();
        return size;
    }

    /**
     * Returns <tt>true</tt> if this map contains no key-value mappings.
     * This result is a snapshot, and may not reflect unprocessed
     * entries that will be removed before next attempted access
     * because they are no longer referenced.
     */
    public boolean isEmpty() {
        return size() == 0;
    }

    /**
     * Returns the value to which the specified key is mapped,
     * or {@code null} if this map contains no mapping for the key.
     *
     * <p>More formally, if this map contains a mapping from a key
     * {@code k} to a value {@code v} such that {@code (key==null ? k==null :
     * key.equals(k))}, then this method returns {@code v}; otherwise
     * it returns {@code null}.  (There can be at most one such mapping.)
     *
     * <p>A return value of {@code null} does not <i>necessarily</i>
     * indicate that the map contains no mapping for the key; it's also
     * possible that the map explicitly maps the key to {@code null}.
     * The {@link #containsKey containsKey} operation may be used to
     * distinguish these two cases.
     *
     * @see #put(Object, Object)
     */
    public V get(Object key) {
        Object k = maskNull(key);
        int h = hash(k);
        Entry<K,V>[] tab = getTable();
        int index = indexFor(h, tab.length);
        Entry<K,V> e = tab[index];
        while (e != null) {
            if (e.hash == h && eq(k, e.get()))
                return e.value;
            e = e.next;
        }
        return null;
    }

    /**
     * Returns <tt>true</tt> if this map contains a mapping for the
     * specified key.
     *
     * @param  key   The key whose presence in this map is to be tested
     * @return <tt>true</tt> if there is a mapping for <tt>key</tt>;
     *         <tt>false</tt> otherwise
     */
    public boolean containsKey(Object key) {
        return getEntry(key) != null;
    }

    /**
     * Returns the entry associated with the specified key in this map.
     * Returns null if the map contains no mapping for this key.
     */
    Entry<K,V> getEntry(Object key) {
        Object k = maskNull(key);
        int h = hash(k);
        Entry<K,V>[] tab = getTable();
        int index = indexFor(h, tab.length);
        Entry<K,V> e = tab[index];
        while (e != null && !(e.hash == h && eq(k, e.get())))
            e = e.next;
        return e;
    }

    /**
     * Associates the specified value with the specified key in this map.
     * If the map previously contained a mapping for this key, the old
     * value is replaced.
     *
     * @param key key with which the specified value is to be associated.
     * @param value value to be associated with the specified key.
     * @return the previous value associated with <tt>key</tt>, or
     *         <tt>null</tt> if there was no mapping for <tt>key</tt>.
     *         (A <tt>null</tt> return can also indicate that the map
     *         previously associated <tt>null</tt> with <tt>key</tt>.)
     */
    public V put(K key, V value) {
        Object k = maskNull(key);
        int h = hash(k);
        Entry<K,V>[] tab = getTable();
        int i = indexFor(h, tab.length);

        for (Entry<K,V> e = tab[i]; e != null; e = e.next) {
            if (h == e.hash && eq(k, e.get())) {
                V oldValue = e.value;
                if (value != oldValue)
                    e.value = value;
                return oldValue;
            }
        }

        modCount++;
        Entry<K,V> e = tab[i];
        // New entry is registered with the reference queue so it can be
        // expunged once the key is reclaimed.
        tab[i] = new Entry<>(k, value, queue, h, e);
        if (++size >= threshold)
            resize(tab.length * 2);
        return null;
    }

    /**
     * Rehashes the contents of this map into a new array with a
     * larger capacity.  This method is called automatically when the
     * number of keys in this map reaches its threshold.
     *
     * If current capacity is MAXIMUM_CAPACITY, this method does not
     * resize the map, but sets threshold to Integer.MAX_VALUE.
     * This has the effect of preventing future calls.
     *
     * @param newCapacity the new capacity, MUST be a power of two;
     *        must be greater than current capacity unless current
     *        capacity is MAXIMUM_CAPACITY (in which case value
     *        is irrelevant).
     */
    void resize(int newCapacity) {
        Entry<K,V>[] oldTable = getTable();
        int oldCapacity = oldTable.length;
        if (oldCapacity == MAXIMUM_CAPACITY) {
            threshold = Integer.MAX_VALUE;
            return;
        }

        Entry<K,V>[] newTable = newTable(newCapacity);
        boolean oldAltHashing = useAltHashing;
        useAltHashing |= sun.misc.VM.isBooted() &&
                (newCapacity >= Holder.ALTERNATIVE_HASHING_THRESHOLD);
        // Entries must be rehashed when the alt-hashing mode flips, since
        // their cached hash values were computed under the old scheme.
        boolean rehash = oldAltHashing ^ useAltHashing;
        transfer(oldTable, newTable, rehash);
        table = newTable;

        /*
         * If ignoring null elements and processing ref queue caused massive
         * shrinkage, then restore old table.  This should be rare, but avoids
         * unbounded expansion of garbage-filled tables.
         */
        if (size >= threshold / 2) {
            threshold = (int)(newCapacity * loadFactor);
        } else {
            expungeStaleEntries();
            transfer(newTable, oldTable, false);
            table = oldTable;
        }
    }

    /** Transfers all entries from src to dest tables */
    private void transfer(Entry<K,V>[] src, Entry<K,V>[] dest, boolean rehash) {
        for (int j = 0; j < src.length; ++j) {
            Entry<K,V> e = src[j];
            src[j] = null;
            while (e != null) {
                Entry<K,V> next = e.next;
                Object key = e.get();
                if (key == null) {
                    // Key already reclaimed: drop the entry instead of moving it.
                    e.next = null;  // Help GC
                    e.value = null; //  "   "
                    size--;
                } else {
                    if (rehash) {
                        e.hash = hash(key);
                    }
                    int i = indexFor(e.hash, dest.length);
                    e.next = dest[i];
                    dest[i] = e;
                }
                e = next;
            }
        }
    }

    /**
     * Copies all of the mappings from the specified map to this map.
     * These mappings will replace any mappings that this map had for any
     * of the keys currently in the specified map.
     *
     * @param m mappings to be stored in this map.
     * @throws  NullPointerException if the specified map is null.
     */
    public void putAll(Map<? extends K, ? extends V> m) {
        int numKeysToBeAdded = m.size();
        if (numKeysToBeAdded == 0)
            return;

        /*
         * Expand the map if the map if the number of mappings to be added
         * is greater than or equal to threshold.  This is conservative; the
         * obvious condition is (m.size() + size) >= threshold, but this
         * condition could result in a map with twice the appropriate capacity,
         * if the keys to be added overlap with the keys already in this map.
         * By using the conservative calculation, we subject ourself
         * to at most one extra resize.
         */
        if (numKeysToBeAdded > threshold) {
            int targetCapacity = (int)(numKeysToBeAdded / loadFactor + 1);
            if (targetCapacity > MAXIMUM_CAPACITY)
                targetCapacity = MAXIMUM_CAPACITY;
            int newCapacity = table.length;
            while (newCapacity < targetCapacity)
                newCapacity <<= 1;
            if (newCapacity > table.length)
                resize(newCapacity);
        }

        for (Map.Entry<? extends K, ? extends V> e : m.entrySet())
            put(e.getKey(), e.getValue());
    }

    /**
     * Removes the mapping for a key from this weak hash map if it is present.
     * More formally, if this map contains a mapping from key <tt>k</tt> to
     * value <tt>v</tt> such that <code>(key==null ?  k==null :
     * key.equals(k))</code>, that mapping is removed.  (The map can contain
     * at most one such mapping.)
     *
     * <p>Returns the value to which this map previously associated the key,
     * or <tt>null</tt> if the map contained no mapping for the key.  A
     * return value of <tt>null</tt> does not <i>necessarily</i> indicate
     * that the map contained no mapping for the key; it's also possible
     * that the map explicitly mapped the key to <tt>null</tt>.
     *
     * <p>The map will not contain a mapping for the specified key once the
     * call returns.
     *
     * @param key key whose mapping is to be removed from the map
     * @return the previous value associated with <tt>key</tt>, or
     *         <tt>null</tt> if there was no mapping for <tt>key</tt>
     */
    public V remove(Object key) {
        Object k = maskNull(key);
        int h = hash(k);
        Entry<K,V>[] tab = getTable();
        int i = indexFor(h, tab.length);
        Entry<K,V> prev = tab[i];
        Entry<K,V> e = prev;

        while (e != null) {
            Entry<K,V> next = e.next;
            if (h == e.hash && eq(k, e.get())) {
                modCount++;
                size--;
                if (prev == e)
                    tab[i] = next;
                else
                    prev.next = next;
                return e.value;
            }
            prev = e;
            e = next;
        }

        return null;
    }

    /** Special version of remove needed by Entry set */
    boolean removeMapping(Object o) {
        if (!(o instanceof Map.Entry))
            return false;
        Entry<K,V>[] tab = getTable();
        Map.Entry<?,?> entry = (Map.Entry<?,?>)o;
        Object k = maskNull(entry.getKey());
        int h = hash(k);
        int i = indexFor(h, tab.length);
        Entry<K,V> prev = tab[i];
        Entry<K,V> e = prev;

        while (e != null) {
            Entry<K,V> next = e.next;
            // Matches on full key/value equality (Entry.equals), not key alone.
            if (h == e.hash && e.equals(entry)) {
                modCount++;
                size--;
                if (prev == e)
                    tab[i] = next;
                else
                    prev.next = next;
                return true;
            }
            prev = e;
            e = next;
        }

        return false;
    }

    /**
     * Removes all of the mappings from this map.
     * The map will be empty after this call returns.
     */
    public void clear() {
        // clear out ref queue. We don't need to expunge entries
        // since table is getting cleared.
        while (queue.poll() != null)
            ;

        modCount++;
        Arrays.fill(table, null);
        size = 0;

        // Allocation of array may have caused GC, which may have caused
        // additional entries to go stale.  Removing these entries from the
        // reference queue will make them eligible for reclamation.
        while (queue.poll() != null)
            ;
    }

    /**
     * Returns <tt>true</tt> if this map maps one or more keys to the
     * specified value.
     *
     * @param value value whose presence in this map is to be tested
     * @return <tt>true</tt> if this map maps one or more keys to the
     *         specified value
     */
    public boolean containsValue(Object value) {
        if (value==null)
            return containsNullValue();

        Entry<K,V>[] tab = getTable();
        for (int i = tab.length; i-- > 0;)
            for (Entry<K,V> e = tab[i]; e != null; e = e.next)
                if (value.equals(e.value))
                    return true;
        return false;
    }

    /**
     * Special-case code for containsValue with null argument
     */
    private boolean containsNullValue() {
        Entry<K,V>[] tab = getTable();
        for (int i = tab.length; i-- > 0;)
            for (Entry<K,V> e = tab[i]; e != null; e = e.next)
                if (e.value==null)
                    return true;
        return false;
    }

    /**
     * The entries in this hash table extend WeakReference, using its main ref
     * field as the key.
     */
    private static class Entry<K,V> extends WeakReference<Object> implements Map.Entry<K,V> {
        V value;
        int hash;
        Entry<K,V> next;

        /**
         * Creates new entry.
         */
        Entry(Object key, V value,
              ReferenceQueue<Object> queue,
              int hash, Entry<K,V> next) {
            super(key, queue);
            this.value = value;
            this.hash  = hash;
            this.next  = next;
        }

        @SuppressWarnings("unchecked")
        public K getKey() {
            return (K) WeakHashMap.unmaskNull(get());
        }

        public V getValue() {
            return value;
        }

        public V setValue(V newValue) {
            V oldValue = value;
            value = newValue;
            return oldValue;
        }

        public boolean equals(Object o) {
            if (!(o instanceof Map.Entry))
                return false;
            Map.Entry<?,?> e = (Map.Entry<?,?>)o;
            K k1 = getKey();
            Object k2 = e.getKey();
            if (k1 == k2 || (k1 != null && k1.equals(k2))) {
                V v1 = getValue();
                Object v2 = e.getValue();
                if (v1 == v2 || (v1 != null && v1.equals(v2)))
                    return true;
            }
            return false;
        }

        public int hashCode() {
            K k = getKey();
            V v = getValue();
            return ((k==null ? 0 : k.hashCode()) ^
                    (v==null ? 0 : v.hashCode()));
        }

        public String toString() {
            return getKey() + "=" + getValue();
        }
    }

    // Common machinery for key/value/entry iterators: walks the table from
    // the top bucket downward and pins keys with strong references so they
    // cannot be reclaimed mid-iteration.
    private abstract class HashIterator<T> implements Iterator<T> {
        private int index;
        private Entry<K,V> entry = null;
        private Entry<K,V> lastReturned = null;
        private int expectedModCount = modCount;

        /**
         * Strong reference needed to avoid disappearance of key
         * between hasNext and next
         */
        private Object nextKey = null;

        /**
         * Strong reference needed to avoid disappearance of key
         * between nextEntry() and any use of the entry
         */
        private Object currentKey = null;

        HashIterator() {
            index = isEmpty() ? 0 : table.length;
        }

        public boolean hasNext() {
            Entry<K,V>[] t = table;

            while (nextKey == null) {
                Entry<K,V> e = entry;
                int i = index;
                while (e == null && i > 0)
                    e = t[--i];
                entry = e;
                index = i;
                if (e == null) {
                    currentKey = null;
                    return false;
                }
                nextKey = e.get(); // hold on to key in strong ref
                if (nextKey == null)
                    // Entry went stale between enqueue and expunge; skip it.
                    entry = entry.next;
            }
            return true;
        }

        /** The common parts of next() across different types of iterators */
        protected Entry<K,V> nextEntry() {
            if (modCount != expectedModCount)
                throw new ConcurrentModificationException();
            if (nextKey == null && !hasNext())
                throw new NoSuchElementException();

            lastReturned = entry;
            entry = entry.next;
            currentKey = nextKey;
            nextKey = null;
            return lastReturned;
        }

        public void remove() {
            if (lastReturned == null)
                throw new IllegalStateException();
            if (modCount != expectedModCount)
                throw new ConcurrentModificationException();

            WeakHashMap.this.remove(currentKey);
            expectedModCount = modCount;
            lastReturned = null;
            currentKey = null;
        }

    }

    // Concrete iterators differ only in what they project from each entry.
    private class ValueIterator extends HashIterator<V> {
        public V next() {
            return nextEntry().value;
        }
    }

    private class KeyIterator extends HashIterator<K> {
        public K next() {
            return nextEntry().getKey();
        }
    }

    private class EntryIterator extends HashIterator<Map.Entry<K,V>> {
        public Map.Entry<K,V> next() {
            return nextEntry();
        }
    }

    // Views

    // Cached entry-set view; keySet and values caches are inherited from
    // AbstractMap.
    private transient Set<Map.Entry<K,V>> entrySet = null;

    /**
     * Returns a {@link Set} view of the keys contained in this map.
     * The set is backed by the map, so changes to the map are
     * reflected in the set, and vice-versa.  If the map is modified
     * while an iteration over the set is in progress (except through
     * the iterator's own <tt>remove</tt> operation), the results of
     * the iteration are undefined.  The set supports element removal,
     * which removes the corresponding mapping from the map, via the
     * <tt>Iterator.remove</tt>, <tt>Set.remove</tt>,
     * <tt>removeAll</tt>, <tt>retainAll</tt>, and <tt>clear</tt>
     * operations.  It does not support the <tt>add</tt> or <tt>addAll</tt>
     * operations.
     */
    public Set<K> keySet() {
        Set<K> ks = keySet;
        return (ks != null ? ks : (keySet = new KeySet()));
    }

    // Key view; delegates all mutation back to the enclosing map.
    private class KeySet extends AbstractSet<K> {
        public Iterator<K> iterator() {
            return new KeyIterator();
        }

        public int size() {
            return WeakHashMap.this.size();
        }

        public boolean contains(Object o) {
            return containsKey(o);
        }

        public boolean remove(Object o) {
            if (containsKey(o)) {
                WeakHashMap.this.remove(o);
                return true;
            }
            else
                return false;
        }

        public void clear() {
            WeakHashMap.this.clear();
        }
    }

    /**
     * Returns a {@link Collection} view of the values contained in this map.
     * The collection is backed by the map, so changes to the map are
     * reflected in the collection, and vice-versa.  If the map is
     * modified while an iteration over the collection is in progress
     * (except through the iterator's own <tt>remove</tt> operation),
     * the results of the iteration are undefined.  The collection
     * supports element removal, which removes the corresponding
     * mapping from the map, via the <tt>Iterator.remove</tt>,
     * <tt>Collection.remove</tt>, <tt>removeAll</tt>,
     * <tt>retainAll</tt> and <tt>clear</tt> operations.  It does not
     * support the <tt>add</tt> or <tt>addAll</tt> operations.
     */
    public Collection<V> values() {
        Collection<V> vs = values;
        return (vs != null) ? vs : (values = new Values());
    }

    // Value view; delegates all mutation back to the enclosing map.
    private class Values extends AbstractCollection<V> {
        public Iterator<V> iterator() {
            return new ValueIterator();
        }

        public int size() {
            return WeakHashMap.this.size();
        }

        public boolean contains(Object o) {
            return containsValue(o);
        }

        public void clear() {
            WeakHashMap.this.clear();
        }
    }

    /**
     * Returns a {@link Set} view of the mappings contained in this map.
     * The set is backed by the map, so changes to the map are
     * reflected in the set, and vice-versa.  If the map is modified
     * while an iteration over the set is in progress (except through
     * the iterator's own <tt>remove</tt> operation, or through the
     * <tt>setValue</tt> operation on a map entry returned by the
     * iterator) the results of the iteration are undefined.  The set
     * supports element removal, which removes the corresponding
     * mapping from the map, via the <tt>Iterator.remove</tt>,
     * <tt>Set.remove</tt>, <tt>removeAll</tt>, <tt>retainAll</tt> and
     * <tt>clear</tt> operations.  It does not support the
     * <tt>add</tt> or <tt>addAll</tt> operations.
     */
    public Set<Map.Entry<K,V>> entrySet() {
        Set<Map.Entry<K,V>> es = entrySet;
        return es != null ? es : (entrySet = new EntrySet());
    }

    // Entry view; toArray snapshots entries eagerly (deepCopy) so returned
    // entries stay usable even if keys are reclaimed afterwards.
    private class EntrySet extends AbstractSet<Map.Entry<K,V>> {
        public Iterator<Map.Entry<K,V>> iterator() {
            return new EntryIterator();
        }

        public boolean contains(Object o) {
            if (!(o instanceof Map.Entry))
                return false;
            Map.Entry<?,?> e = (Map.Entry<?,?>)o;
            Entry<K,V> candidate = getEntry(e.getKey());
            return candidate != null && candidate.equals(e);
        }

        public boolean remove(Object o) {
            return removeMapping(o);
        }

        public int size() {
            return WeakHashMap.this.size();
        }

        public void clear() {
            WeakHashMap.this.clear();
        }

        // Snapshot of the entries as strong SimpleEntry copies.
        private List<Map.Entry<K,V>> deepCopy() {
            List<Map.Entry<K,V>> list = new ArrayList<>(size());
            for (Map.Entry<K,V> e : this)
                list.add(new AbstractMap.SimpleEntry<>(e));
            return list;
        }

        public Object[] toArray() {
            return deepCopy().toArray();
        }

        public <T> T[] toArray(T[] a) {
            return deepCopy().toArray(a);
        }
    }
}
/** * Licensed to Apereo under one or more contributor license * agreements. See the NOTICE file distributed with this work * for additional information regarding copyright ownership. * Apereo licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a * copy of the License at the following location: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apereo.portal.portlets.portletadmin; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.portlet.ActionRequest; import javax.portlet.ActionResponse; import javax.portlet.PortletMode; import javax.portlet.PortletRequest; import javax.portlet.PortletSession; import javax.portlet.WindowState; import javax.servlet.ServletContext; import javax.servlet.http.HttpServletRequest; import javax.xml.bind.JAXBElement; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang3.Validate; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.pluto.container.PortletContainerException; import org.apache.pluto.container.driver.PortalDriverContainerServices; import org.apache.pluto.container.driver.PortletRegistryService; import 
org.apache.pluto.container.om.portlet.DisplayName; import org.apache.pluto.container.om.portlet.PortletApplicationDefinition; import org.apache.pluto.container.om.portlet.PortletDefinition; import org.apache.pluto.container.om.portlet.Supports; import org.apereo.portal.EntityIdentifier; import org.apereo.portal.api.portlet.DelegateState; import org.apereo.portal.api.portlet.DelegationActionResponse; import org.apereo.portal.api.portlet.PortletDelegationDispatcher; import org.apereo.portal.api.portlet.PortletDelegationLocator; import org.apereo.portal.channel.IPortletPublishingService; import org.apereo.portal.groups.IEntityGroup; import org.apereo.portal.groups.IGroupMember; import org.apereo.portal.layout.dlm.remoting.IGroupListHelper; import org.apereo.portal.layout.dlm.remoting.JsonEntityBean; import org.apereo.portal.portlet.PortletUtils; import org.apereo.portal.portlet.dao.jpa.PortletDefinitionImpl; import org.apereo.portal.portlet.dao.jpa.PortletPreferenceImpl; import org.apereo.portal.portlet.delegation.jsp.RenderPortletTag; import org.apereo.portal.portlet.om.IPortletDefinition; import org.apereo.portal.portlet.om.IPortletPreference; import org.apereo.portal.portlet.om.IPortletType; import org.apereo.portal.portlet.om.IPortletWindowId; import org.apereo.portal.portlet.om.PortletCategory; import org.apereo.portal.portlet.om.PortletLifecycleState; import org.apereo.portal.portlet.registry.IPortletCategoryRegistry; import org.apereo.portal.portlet.registry.IPortletDefinitionRegistry; import org.apereo.portal.portlet.registry.IPortletTypeRegistry; import org.apereo.portal.portlet.rendering.IPortletRenderer; import org.apereo.portal.portletpublishing.xml.MultiValuedPreferenceInputType; import org.apereo.portal.portletpublishing.xml.Parameter; import org.apereo.portal.portletpublishing.xml.ParameterInputType; import org.apereo.portal.portletpublishing.xml.PortletPublishingDefinition; import org.apereo.portal.portletpublishing.xml.Preference; import 
org.apereo.portal.portletpublishing.xml.PreferenceInputType; import org.apereo.portal.portletpublishing.xml.SingleValuedPreferenceInputType; import org.apereo.portal.portletpublishing.xml.Step; import org.apereo.portal.portlets.Attribute; import org.apereo.portal.portlets.BooleanAttribute; import org.apereo.portal.portlets.StringListAttribute; import org.apereo.portal.portlets.fragmentadmin.FragmentAdministrationHelper; import org.apereo.portal.portlets.groupselector.EntityEnum; import org.apereo.portal.portlets.portletadmin.xmlsupport.IChannelPublishingDefinitionDao; import org.apereo.portal.security.AuthorizationPrincipalHelper; import org.apereo.portal.security.IAuthorizationPrincipal; import org.apereo.portal.security.IAuthorizationService; import org.apereo.portal.security.IPermission; import org.apereo.portal.security.IPermissionManager; import org.apereo.portal.security.IPerson; import org.apereo.portal.security.IUpdatingPermissionManager; import org.apereo.portal.security.PermissionHelper; import org.apereo.portal.services.GroupService; import org.apereo.portal.url.IPortalUrlBuilder; import org.apereo.portal.url.IPortalUrlProvider; import org.apereo.portal.url.IPortletUrlBuilder; import org.apereo.portal.url.UrlType; import org.apereo.portal.utils.ComparableExtractingComparator; import org.apereo.portal.utils.Tuple; import org.apereo.portal.xml.PortletDescriptor; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.web.context.ServletContextAware; import org.springframework.webflow.context.ExternalContext; /** * Helper methods for the portlet administration workflow. 
*
 * @author Jen Bourey, jbourey@unicon.net
 */
@Service
public final class PortletAdministrationHelper implements ServletContextAware {

    private final Log logger = LogFactory.getLog(this.getClass());

    // fname used to locate the Fragment Administration portlet when building its URL
    private static final String PORTLET_FNAME_FRAGMENT_ADMIN_PORTLET = "fragment-admin";

    // The two per-principal activities managed by the Permissions step of the wizard
    public static final String[] PORTLET_SUBSCRIBE_ACTIVITIES = { IPermission.PORTLET_SUBSCRIBER_ACTIVITY, IPermission.PORTLET_BROWSE_ACTIVITY };

    /*
     * Autowired beans listed alphabetically by type
     */

    @Autowired
    private FragmentAdministrationHelper fragmentAdminHelper;

    @Autowired
    private IAuthorizationService authorizationService;

    @Autowired
    private IChannelPublishingDefinitionDao portletPublishingDefinitionDao;

    @Autowired
    private IGroupListHelper groupListHelper;

    @Autowired
    private IPortalUrlProvider urlProvider;

    @Autowired
    private IPortletCategoryRegistry portletCategoryRegistry;

    @Autowired
    private IPortletDefinitionRegistry portletDefinitionRegistry;

    @Autowired
    private IPortletPublishingService portletPublishingService;

    @Autowired
    private IPortletTypeRegistry portletTypeRegistry;

    @Autowired
    private PortalDriverContainerServices portalDriverContainerServices;

    @Autowired
    private PortletDelegationLocator portletDelegationLocator;

    private ServletContext servletContext;

    @Override
    public void setServletContext(ServletContext servletContext) {
        this.servletContext = servletContext;
    }

    /**
     * Construct a new PortletDefinitionForm for the given IPortletDefinition id.
     * If a PortletDefinition matching this ID already exists, the form will
     * be pre-populated with the PortletDefinition's current configuration. If
     * the PortletDefinition does not yet exist, a new default form will be
     * created.
* * @param person user that is required to have related lifecycle permission * @param portletId identifier for the portlet definition * @return {@PortletDefinitionForm} with set values based on portlet definition * or default category and principal if no definition is found */ public PortletDefinitionForm createPortletDefinitionForm(IPerson person, String portletId) { IPortletDefinition def = portletDefinitionRegistry.getPortletDefinition(portletId); // create the new form final PortletDefinitionForm form; if (def != null) { // if this is a pre-existing portlet, set the category and permissions form = new PortletDefinitionForm(def); form.setId(def.getPortletDefinitionId().getStringId()); // create a JsonEntityBean for each current category and add it // to our form bean's category list Set<PortletCategory> categories = portletCategoryRegistry.getParentCategories(def); for (PortletCategory cat : categories) { form.addCategory(new JsonEntityBean(cat)); } addSubscribePermissionsToForm(def, form); } else { form = createNewPortletDefinitionForm(); } /* TODO: Service-Layer Security Reboot (great need of refactoring with a community-approved plan in place) */ // User must have SOME FORM of lifecycle permission over AT LEAST ONE // category in which this portlet resides; lifecycle permissions are // hierarchical, so we'll test with the weakest. if (!hasLifecyclePermission(person, PortletLifecycleState.CREATED, form.getCategories())) { logger.warn("User '" + person.getUserName() + "' attempted to edit the following portlet without MANAGE permission: " + def); throw new SecurityException("Not Authorized"); } return form; } /* * Add to the form SUBSCRIBE and BROWSE activity permissions, along with their principals, * assigned to the portlet. 
*/ private void addSubscribePermissionsToForm(IPortletDefinition def, PortletDefinitionForm form) { final String portletTargetId = PermissionHelper.permissionTargetIdForPortletDefinition(def); /* We are concerned with PORTAL_SUBSCRIBE system */ final IPermissionManager pm = authorizationService.newPermissionManager(IPermission.PORTAL_SUBSCRIBE); for (String activity : PORTLET_SUBSCRIBE_ACTIVITIES) { /* Obtain the principals that have permission for the activity on this portlet */ final IAuthorizationPrincipal[] principals = pm.getAuthorizedPrincipals(activity, portletTargetId) ; for (IAuthorizationPrincipal principal : principals) { JsonEntityBean principalBean; // first assume this is a group IEntityGroup group = GroupService.findGroup(principal.getKey()); if (group != null) { // principal is a group principalBean = new JsonEntityBean(group, EntityEnum.GROUP); } else { // not a group, so it must be a person IGroupMember member = authorizationService.getGroupMember(principal); principalBean = new JsonEntityBean(member, EntityEnum.PERSON); // set the name String name = groupListHelper.lookupEntityName(principalBean); principalBean.setName(name); } /* Make sure we capture the principal just once*/ if (!form.getPrincipals().contains(principalBean)) { form.addPrincipal(principalBean); } form.addPermission(principalBean.getTypeAndIdHash() + "_" + activity); } } } /* * Create a {@code PortletDefinitionForm} and pre-populate it with default categories and principal permissions. 
*/
private PortletDefinitionForm createNewPortletDefinitionForm() {
    PortletDefinitionForm form = new PortletDefinitionForm();

    // pre-populate with top-level category
    final IEntityGroup portletCategoriesGroup = GroupService.getDistinguishedGroup(GroupService.PORTLET_CATEGORIES);
    form.addCategory(new JsonEntityBean(portletCategoriesGroup, groupListHelper.getEntityType(portletCategoriesGroup)));

    // pre-populate with top-level group
    final IEntityGroup everyoneGroup = GroupService.getDistinguishedGroup(GroupService.EVERYONE);
    JsonEntityBean everyoneBean = new JsonEntityBean(everyoneGroup, groupListHelper.getEntityType(everyoneGroup));
    form.addPrincipal(everyoneBean);
    // grant the default principal (Everyone) both SUBSCRIBE and BROWSE
    for (String activity : PORTLET_SUBSCRIBE_ACTIVITIES) {
        form.addPermission(everyoneBean.getTypeAndIdHash() + "_" + activity);
    }

    return form;
}

/**
 * Persist a new or edited PortletDefinition from a form, replacing existing values.
 *
 * @param publisher {@code IPerson} that requires permission to save this definition
 * @param form form data to persist
 * @return new {@code PortletDefinitionForm} for this portlet ID
 */
public PortletDefinitionForm savePortletRegistration(IPerson publisher, PortletDefinitionForm form) throws Exception {

    /* TODO: Service-Layer Security Reboot (great need of refactoring with a community-approved plan in place) */

    // User must have the selected lifecycle permission over AT LEAST ONE
    // category in which this portlet resides. (This is the same check that
    // is made when the user enters the lifecycle-selection step in the wizard.)
if (!hasLifecyclePermission(publisher, form.getLifecycleState(), form.getCategories())) {
    logger.warn("User '" + publisher.getUserName() +
            "' attempted to save the following portlet without the selected MANAGE permission: " + form);
    throw new SecurityException("Not Authorized");
}

if (!form.isNew()) {
    // User must have the previous lifecycle permission
    // in AT LEAST ONE previous category as well
    IPortletDefinition def = this.portletDefinitionRegistry.getPortletDefinition(form.getId());
    Set<PortletCategory> categories = portletCategoryRegistry.getParentCategories(def);
    SortedSet<JsonEntityBean> categoryBeans = new TreeSet<>();
    for (PortletCategory cat : categories) {
        categoryBeans.add(new JsonEntityBean(cat));
    }
    if (!hasLifecyclePermission(publisher, def.getLifecycleState(), categoryBeans)) {
        logger.warn("User '" + publisher.getUserName() +
                "' attempted to save the following portlet without the previous MANAGE permission: " + form);
        throw new SecurityException("Not Authorized");
    }
}

if (form.isNew() || portletDefinitionRegistry.getPortletDefinition(form.getId()).getType().getId() != form.getTypeId()) {
    // User must have access to the selected CPD if s/he selected it in this interaction
    final int selectedTypeId = form.getTypeId();
    final PortletPublishingDefinition cpd = portletPublishingDefinitionDao.getChannelPublishingDefinition(selectedTypeId);
    final Map<IPortletType, PortletPublishingDefinition> allowableCpds = this.getAllowableChannelPublishingDefinitions(publisher);
    if (!allowableCpds.containsValue(cpd)) {
        logger.warn("User '" + publisher.getUserName() +
                "' attempted to administer the following portlet without the selected " +
                IPermission.PORTLET_MANAGER_SELECT_PORTLET_TYPE + " permission: " + form);
        throw new SecurityException("Not Authorized");
    }
}

// create the principal array from the form's principal list -- only principals with permissions
final Set<IGroupMember> subscribePrincipalSet = new HashSet<>(form.getPrincipals().size());
final Set<IGroupMember> browsePrincipalSet = new HashSet<>(form.getPrincipals().size());
for (JsonEntityBean bean : form.getPrincipals()) {
    final String subscribePerm = bean.getTypeAndIdHash() + "_" + IPermission.PORTLET_SUBSCRIBER_ACTIVITY;
    final String browsePerm = bean.getTypeAndIdHash() + "_" + IPermission.PORTLET_BROWSE_ACTIVITY;
    final EntityEnum entityEnum = bean.getEntityType();
    // resolve the bean back to a concrete group or person group-member
    final IGroupMember principal = entityEnum.isGroup()
            ? (GroupService.findGroup(bean.getId()))
            : (GroupService.getGroupMember(bean.getId(), entityEnum.getClazz()));
    if (form.getPermissions().contains(subscribePerm)) {
        subscribePrincipalSet.add(principal);
    }
    if (form.getPermissions().contains(browsePerm)) {
        browsePrincipalSet.add(principal);
    }
}

// create the category list from the form's category bean list
List<PortletCategory> categories = new ArrayList<>();
for (JsonEntityBean category : form.getCategories()) {
    String id = category.getId();
    // category ids may arrive prefixed with "cat"; strip the prefix before lookup
    String iCatID = id.startsWith("cat") ? id.substring(3) : id;
    categories.add(portletCategoryRegistry.getPortletCategory(iCatID));
}

final IPortletType portletType = portletTypeRegistry.getPortletType(form.getTypeId());
if (portletType == null) {
    throw new IllegalArgumentException("No IPortletType exists for ID " + form.getTypeId());
}

IPortletDefinition portletDef;
if (form.getId() == null) {
    // brand-new portlet definition
    portletDef = new PortletDefinitionImpl(
            portletType,
            form.getFname(),
            form.getName(),
            form.getTitle(),
            form.getApplicationId(),
            form.getPortletName(),
            form.isFramework());
} else {
    // update the existing definition in place
    portletDef = portletDefinitionRegistry.getPortletDefinition(form.getId());
    portletDef.setType(portletType);
    portletDef.setFName(form.getFname());
    portletDef.setName(form.getName());
    portletDef.setTitle(form.getTitle());
    portletDef.getPortletDescriptorKey().setWebAppName(form.getApplicationId());
    portletDef.getPortletDescriptorKey().setPortletName(form.getPortletName());
    portletDef.getPortletDescriptorKey().setFrameworkPortlet(form.isFramework());
}
portletDef.setDescription(form.getDescription());
portletDef.setTimeout(form.getTimeout());

// Make parameters (NB: these are different from preferences) in the
// portletDef reflect the state of the form, in case any have changed.
for (String key : form.getParameters().keySet()) {
    String value = form.getParameters().get(key).getValue();
    if (!StringUtils.isBlank(value)) {
        portletDef.addParameter(key, value);
    }
}
portletDef.addParameter(IPortletDefinition.EDITABLE_PARAM, Boolean.toString(form.isEditable()));
portletDef.addParameter(IPortletDefinition.CONFIGURABLE_PARAM, Boolean.toString(form.isConfigurable()));
portletDef.addParameter(IPortletDefinition.HAS_HELP_PARAM, Boolean.toString(form.isHasHelp()));
portletDef.addParameter(IPortletDefinition.HAS_ABOUT_PARAM, Boolean.toString(form.isHasAbout()));

// Now add portlet preferences
List<IPortletPreference> preferenceList = new ArrayList<>();
for (String key : form.getPortletPreferences().keySet()) {
    List<String> prefValues = form.getPortletPreferences().get(key).getValue();
    if (prefValues != null && prefValues.size() > 0) {
        String[] values = prefValues.toArray(new String[prefValues.size()]);
        BooleanAttribute readOnly = form.getPortletPreferenceReadOnly().get(key);
        preferenceList.add(new PortletPreferenceImpl(key, readOnly.getValue(), values));
    }
}
portletDef.setPortletPreferences(preferenceList);

// Lastly update the PortletDefinition's lifecycle state & lifecycle-related metadata
updateLifecycleState(form, portletDef, publisher);

// The final parameter of IGroupMembers is used to set the initial SUBSCRIBE permission set
portletPublishingService.savePortletDefinition(portletDef, publisher, categories, new ArrayList<>(subscribePrincipalSet));
//updatePermissions(portletDef, subscribePrincipalSet, IPermission.PORTLET_SUBSCRIBER_ACTIVITY);
updatePermissions(portletDef, browsePrincipalSet, IPermission.PORTLET_BROWSE_ACTIVITY);

return this.createPortletDefinitionForm(publisher, portletDef.getPortletDefinitionId().getStringId());
}

/*
 * Update permissions for activity
for portlet definition. Adds new principals' permissions passed in and removes
 * principals' permissions if not in the list for the given activity.
 */
private void updatePermissions(IPortletDefinition def, Set<IGroupMember> newPrincipals, String activity) {
    final String portletTargetId = PermissionHelper.permissionTargetIdForPortletDefinition(def);

    /* We are concerned with PORTAL_SUBSCRIBE system */
    final IUpdatingPermissionManager pm = authorizationService.newUpdatingPermissionManager(IPermission.PORTAL_SUBSCRIBE);

    /* Create the new permissions array */
    final List<IPermission> newPermissions = new ArrayList<>();
    for (final IGroupMember newPrincipal : newPrincipals) {
        final IAuthorizationPrincipal authorizationPrincipal = authorizationService.newPrincipal(newPrincipal);
        final IPermission permission = pm.newPermission(authorizationPrincipal);
        permission.setType(IPermission.PERMISSION_TYPE_GRANT);
        permission.setActivity(activity);
        permission.setTarget(portletTargetId);
        newPermissions.add(permission);
    }

    /* Remove former permissions for this portlet / activity */
    final IPermission[] oldPermissions = pm.getPermissions(activity, portletTargetId);
    pm.removePermissions(oldPermissions);

    /* Add the new permissions */
    pm.addPermissions(newPermissions.toArray(new IPermission[newPermissions.size()]));
}

/**
 * Delete the portlet with the given portlet ID.
 *
 * @param person the person removing the portlet
 * @param form form describing the portlet to remove
 */
public void removePortletRegistration(IPerson person, PortletDefinitionForm form) {

    /* TODO: Service-Layer Security Reboot (great need of refactoring with a community-approved plan in place) */
    // Arguably a check here is redundant since -- in the current
    // portlet-manager webflow -- you can't get to this point in the
    // conversation without first obtaining a PortletDefinitionForm; but
    // it makes sense to check permissions here as well since the route(s)
    // to reach this method could evolve in the future.
// Let's enforce the policy that you may only delete a portlet thet's // currently in a lifecycle state you have permission to MANAGE. // (They're hierarchical.) if (!hasLifecyclePermission(person, form.getLifecycleState(), form.getCategories())) { logger.warn("User '" + person.getUserName() + "' attempted to remove portlet '" + form.getFname() + "' without the proper MANAGE permission"); throw new SecurityException("Not Authorized"); } IPortletDefinition def = portletDefinitionRegistry.getPortletDefinition(form.getId()); /* * It's very important to remove portlets via the portletPublishingService * because that API cleans up details like category memberships and permissions. */ portletPublishingService.removePortletDefinition(def, person); } /** * Check if the link to the Fragment admin portlet should display in the status message. * * Checks that the portlet is new, that the portlet has been published and that * the user has necessary permissions to go to the fragment admin page. * * @param person the person publishing/editing the portlet * @param form the portlet being editted * @param portletId the id of the saved portlet * @return true If all three conditions are met */ public boolean shouldDisplayLayoutLink(IPerson person, PortletDefinitionForm form, String portletId) { if (!form.isNew()) { return false; } // only include the "do layout" link for published portlets. if (form.getLifecycleState() != PortletLifecycleState.PUBLISHED) { return false; } // check that the user can edit at least 1 fragment. Map<String, String> layouts = fragmentAdminHelper.getAuthorizedDlmFragments(person.getUserName()); if (layouts == null || layouts.isEmpty()) { return false; } // check that the user has subscribe priv. IAuthorizationPrincipal authPrincipal = authorizationService.newPrincipal(person.getUserName(), EntityEnum.PERSON.getClazz()); if (!authPrincipal.canSubscribe(portletId)) { return false; } return true; } /** * Get the link to the fragment admin portlet. 
*
 * @param request the current http request.
 * @return the portlet link
 */
public String getFragmentAdminURL(HttpServletRequest request) {
    // Target the Fragment Administration portlet by fname, forcing VIEW mode and MAXIMIZED state
    IPortalUrlBuilder builder = urlProvider.getPortalUrlBuilderByPortletFName(request, PORTLET_FNAME_FRAGMENT_ADMIN_PORTLET, UrlType.RENDER);
    IPortletUrlBuilder portletUrlBuilder = builder.getTargetedPortletUrlBuilder();
    portletUrlBuilder.setPortletMode(PortletMode.VIEW);
    portletUrlBuilder.setWindowState(WindowState.MAXIMIZED);

    return builder.getUrlString();
}

/**
 * Get a list of the key names of the currently-set arbitrary portlet
 * preferences.
 *
 * @param form the form whose preferences are inspected
 * @return names of set preferences that are not declared in the portlet's CPD
 */
public Set<String> getArbitraryPortletPreferenceNames(PortletDefinitionForm form) {
    // set default values for all portlet parameters
    PortletPublishingDefinition cpd = this.portletPublishingDefinitionDao.getChannelPublishingDefinition(form.getTypeId());
    Set<String> currentPrefs = new HashSet<String>();
    currentPrefs.addAll(form.getPortletPreferences().keySet());
    // preferences declared by the CPD are not "arbitrary" -- remove them from the result
    for (Step step : cpd.getSteps()) {
        if (step.getPreferences() != null) {
            for (Preference pref : step.getPreferences()) {
                currentPrefs.remove(pref.getName());
            }
        }
    }
    return currentPrefs;
}

/**
 * If the portlet is a portlet and if one of the supported portlet modes is {@link IPortletRenderer#CONFIG}
 */
public boolean supportsConfigMode(PortletDefinitionForm form) {
    final Tuple<String, String> portletDescriptorKeys = this.getPortletDescriptorKeys(form);
    if (portletDescriptorKeys == null) {
        return false;
    }
    final String portletAppId = portletDescriptorKeys.first;
    final String portletName = portletDescriptorKeys.second;

    final PortletRegistryService portletRegistryService = this.portalDriverContainerServices.getPortletRegistryService();
    final PortletDefinition portletDescriptor;
    try {
        portletDescriptor = portletRegistryService.getPortlet(portletAppId, portletName);
    }
    catch (PortletContainerException e) {
        this.logger.warn("Failed to load portlet descriptor for appId='" + portletAppId + "', portletName='" +
portletName + "'", e);
        return false;
    }

    if (portletDescriptor == null) {
        return false;
    }

    //Iterate over supported portlet modes, this ignores the content types for now
    final List<? extends Supports> supports = portletDescriptor.getSupports();
    for (final Supports support : supports) {
        final List<String> portletModes = support.getPortletModes();
        for (final String portletMode : portletModes) {
            if (IPortletRenderer.CONFIG.equals(PortletUtils.getPortletMode(portletMode))) {
                return true;
            }
        }
    }
    return false;
}

// Matches submitted field names of the form "<type>['<name>'].value", capturing type and name
private static final Pattern PARAM_PATTERN = Pattern.compile("^([^\\[]+)\\['([^\\']+)'\\]\\.value$");

public void cleanOptions(PortletDefinitionForm form, PortletRequest request) {
    // Add permission parameters to permissions collection
    form.clearPermissions();
    for (String activity : PORTLET_SUBSCRIBE_ACTIVITIES) {
        addPermissionsFromRequestToForm(form, request, activity);
    }

    //Names of valid preferences and parameters
    final Set<String> preferenceNames = new HashSet<String>();
    final Set<String> parameterNames = new HashSet<String>();

    //Read all of the submitted channel parameter and portlet preference names from the request
    for (final Enumeration<String> e = request.getParameterNames(); e.hasMoreElements();) {
        final String name = e.nextElement();
        final Matcher nameMatcher = PARAM_PATTERN.matcher(name);
        if (nameMatcher.matches()) {
            final String paramType = nameMatcher.group(1);
            final String paramName = nameMatcher.group(2);

            if ("portletPreferences".equals(paramType)) {
                preferenceNames.add(paramName);
            }
            else if ("parameters".equals(paramType)) {
                parameterNames.add(paramName);
            }
        }
    }

    //Add all of the parameter and preference names that have default values in the CPD into the valid name sets
    final PortletPublishingDefinition cpd = this.portletPublishingDefinitionDao.getChannelPublishingDefinition(form.getTypeId());
    for (final Step step : cpd.getSteps()) {
        final List<Parameter> parameters = step.getParameters();
        if (parameters != null) {
            for (final Parameter parameter : parameters) {
                final JAXBElement<? extends ParameterInputType> parameterInput = parameter.getParameterInput();
                if (parameterInput != null) {
                    final ParameterInputType parameterInputType = parameterInput.getValue();
                    if (parameterInputType != null && parameterInputType.getDefault() != null) {
                        parameterNames.add(parameter.getName());
                    }
                }
            }
        }

        final List<Preference> preferences = step.getPreferences();
        if (preferences != null) {
            for (final Preference preference : preferences) {
                final JAXBElement<? extends PreferenceInputType> preferenceInput = preference.getPreferenceInput();
                final PreferenceInputType preferenceInputType = preferenceInput.getValue();
                if (preferenceInputType instanceof MultiValuedPreferenceInputType) {
                    final MultiValuedPreferenceInputType multiValuedPreferenceInputType = (MultiValuedPreferenceInputType)preferenceInputType;
                    final List<String> defaultValues = multiValuedPreferenceInputType.getDefaults();
                    if (defaultValues != null && !defaultValues.isEmpty()) {
                        preferenceNames.add(preference.getName());
                    }
                }
                else if (preferenceInputType instanceof SingleValuedPreferenceInputType) {
                    final SingleValuedPreferenceInputType SingleValuedPreferenceInputType = (SingleValuedPreferenceInputType)preferenceInputType;
                    if (SingleValuedPreferenceInputType.getDefault() != null) {
                        preferenceNames.add(preference.getName());
                    }
                }
            }
        }
    }

    //Remove portlet preferences from the form object that were not part of this request or defined in the CPD
    // - do it only if portlet doesn't support configMode
    if (!this.supportsConfigMode(form)) {
        final Map<String, StringListAttribute> portletPreferences = form.getPortletPreferences();
        final Map<String, BooleanAttribute> portletPreferencesOverrides = form.getPortletPreferenceReadOnly();

        for (final Iterator<Entry<String, StringListAttribute>> portletPreferenceEntryItr = portletPreferences.entrySet().iterator(); portletPreferenceEntryItr.hasNext();) {
            final Map.Entry<String, StringListAttribute> portletPreferenceEntry = portletPreferenceEntryItr.next();
            final String key = portletPreferenceEntry.getKey();
            final StringListAttribute valueAttr = portletPreferenceEntry.getValue();
            if (!preferenceNames.contains(key) || valueAttr == null) {
                portletPreferenceEntryItr.remove();
                portletPreferencesOverrides.remove(key);
            } else {
                // strip null entries; drop the preference entirely if nothing remains
                final List<String> values = valueAttr.getValue();
                for (final Iterator<String> iter = values.iterator(); iter.hasNext();) {
                    String value = iter.next();
                    if (value == null) {
                        iter.remove();
                    }
                }

                if (values.size() == 0) {
                    portletPreferenceEntryItr.remove();
                    portletPreferencesOverrides.remove(key);
                }
            }
        }
    }

    final Map<String, Attribute> parameters = form.getParameters();
    for (final Iterator<Entry<String, Attribute>> parameterEntryItr = parameters.entrySet().iterator(); parameterEntryItr.hasNext();) {
        final Entry<String, Attribute> parameterEntry = parameterEntryItr.next();
        final String key = parameterEntry.getKey();
        final Attribute value = parameterEntry.getValue();

        if (!parameterNames.contains(key) || value == null || StringUtils.isBlank(value.getValue())) {
            parameterEntryItr.remove();
        }
    }
}

private void addPermissionsFromRequestToForm(PortletDefinitionForm form, PortletRequest request, String activity) {
    // request parameter names ending in "_<activity>" are permission grants for that activity
    final String ending = "_" + activity;
    for (final String name : request.getParameterMap().keySet()) {
        if (name.endsWith(ending)) {
            form.addPermission(name);
        }
    }
}

/**
 * Retrieve the list of portlet application contexts currently available in
 * this portlet container.
* * @return list of portlet context */ public List<PortletApplicationDefinition> getPortletApplications() { final PortletRegistryService portletRegistryService = portalDriverContainerServices.getPortletRegistryService(); final List<PortletApplicationDefinition> contexts = new ArrayList<PortletApplicationDefinition>(); for (final Iterator<String> iter = portletRegistryService.getRegisteredPortletApplicationNames(); iter.hasNext();) { final String applicationName = iter.next(); final PortletApplicationDefinition applicationDefninition; try { applicationDefninition = portletRegistryService.getPortletApplication(applicationName); } catch (PortletContainerException e) { throw new RuntimeException("Failed to load PortletApplicationDefinition for '" + applicationName + "'"); } final List<? extends PortletDefinition> portlets = applicationDefninition.getPortlets(); Collections.sort(portlets, new ComparableExtractingComparator<PortletDefinition, String>(String.CASE_INSENSITIVE_ORDER) { @Override protected String getComparable(PortletDefinition o) { final List<? extends DisplayName> displayNames = o.getDisplayNames(); if (displayNames != null && displayNames.size() > 0) { return displayNames.get(0).getDisplayName(); } return o.getPortletName(); } }); contexts.add(applicationDefninition); } Collections.sort(contexts, new ComparableExtractingComparator<PortletApplicationDefinition, String>(String.CASE_INSENSITIVE_ORDER) { @Override protected String getComparable(PortletApplicationDefinition o) { final String portletContextName = o.getName(); if (portletContextName != null) { return portletContextName; } final String applicationName = o.getContextPath(); if ("/".equals(applicationName)) { return "ROOT"; } if (applicationName.startsWith("/")) { return applicationName.substring(1); } return applicationName; } }); return contexts; } /** * Get a portlet descriptor matching the current portlet definition form. 
* If the current form does not represent a portlet, the application or * portlet name fields are blank, or the portlet description cannot be * retrieved, the method will return <code>null</code>. * * @param form * @return */ public PortletDefinition getPortletDescriptor(PortletDefinitionForm form) { final Tuple<String, String> portletDescriptorKeys = this.getPortletDescriptorKeys(form); if (portletDescriptorKeys == null) { return null; } final String portletAppId = portletDescriptorKeys.first; final String portletName = portletDescriptorKeys.second; final PortletRegistryService portletRegistryService = portalDriverContainerServices.getPortletRegistryService(); try { PortletDefinition portletDD = portletRegistryService.getPortlet(portletAppId, portletName); return portletDD; } catch (PortletContainerException e) { e.printStackTrace(); return null; } } /** * Pre-populate a new {@link PortletDefinitionForm} with information from * the {@link PortletDefinition}. * * @param form */ public void loadDefaultsFromPortletDefinitionIfNew(PortletDefinitionForm form) { if (!form.isNew()) { // Get out; we only prepopulate new portlets return; } // appName/portletName must be set at this point Validate.notBlank(form.getApplicationId(), "ApplicationId not set"); Validate.notBlank(form.getPortletName(), "PortletName not set"); final PortletRegistryService portletRegistryService = portalDriverContainerServices.getPortletRegistryService(); final PortletDefinition portletDef; try { portletDef = portletRegistryService.getPortlet(form.getApplicationId(), form.getPortletName()); } catch (PortletContainerException e) { this.logger.warn("Failed to load portlet descriptor for appId='" + form.getApplicationId() + "', portletName='" + form.getPortletName() + "'", e); return; } form.setTitle(portletDef.getPortletName()); form.setName(portletDef.getPortletName()); for (Supports supports : portletDef.getSupports()) { for (String mode : supports.getPortletModes()) { if 
("edit".equalsIgnoreCase(mode)) { form.setEditable(true); } else if ("help".equalsIgnoreCase(mode)) { form.setHasHelp(true); } else if ("config".equalsIgnoreCase(mode)) { form.setConfigurable(true); } } } } public PortletLifecycleState[] getLifecycleStates() { return PortletLifecycleState.values(); } public Set<PortletLifecycleState> getAllowedLifecycleStates(IPerson person, SortedSet<JsonEntityBean> categories) { Set<PortletLifecycleState> states = new TreeSet<PortletLifecycleState>(); if (hasLifecyclePermission(person, PortletLifecycleState.MAINTENANCE, categories)) { states.add(PortletLifecycleState.CREATED); states.add(PortletLifecycleState.APPROVED); states.add(PortletLifecycleState.EXPIRED); states.add(PortletLifecycleState.PUBLISHED); states.add(PortletLifecycleState.MAINTENANCE); } else if (hasLifecyclePermission(person, PortletLifecycleState.EXPIRED, categories)) { states.add(PortletLifecycleState.CREATED); states.add(PortletLifecycleState.APPROVED); states.add(PortletLifecycleState.EXPIRED); states.add(PortletLifecycleState.PUBLISHED); } else if (hasLifecyclePermission(person, PortletLifecycleState.PUBLISHED, categories)) { states.add(PortletLifecycleState.CREATED); states.add(PortletLifecycleState.APPROVED); states.add(PortletLifecycleState.PUBLISHED); } else if (hasLifecyclePermission(person, PortletLifecycleState.APPROVED, categories)) { states.add(PortletLifecycleState.CREATED); states.add(PortletLifecycleState.APPROVED); } else if (hasLifecyclePermission(person, PortletLifecycleState.CREATED, categories)) { states.add(PortletLifecycleState.CREATED); } return states; } public boolean hasLifecyclePermission(IPerson person, PortletLifecycleState state, SortedSet<JsonEntityBean> categories) { EntityIdentifier ei = person.getEntityIdentifier(); IAuthorizationPrincipal ap = authorizationService.newPrincipal(ei.getKey(), ei.getType()); final String activity; switch (state) { case APPROVED: { activity = IPermission.PORTLET_MANAGER_APPROVED_ACTIVITY; break; } 
case CREATED: { activity = IPermission.PORTLET_MANAGER_CREATED_ACTIVITY; break; } case PUBLISHED: { activity = IPermission.PORTLET_MANAGER_ACTIVITY; break; } case EXPIRED: { activity = IPermission.PORTLET_MANAGER_EXPIRED_ACTIVITY; break; } case MAINTENANCE: { activity = IPermission.PORTLET_MANAGER_MAINTENANCE_ACTIVITY; break; } default: { throw new IllegalArgumentException(""); } } if (ap.hasPermission(IPermission.PORTAL_PUBLISH, activity, IPermission.ALL_PORTLETS_TARGET)) { logger.debug("Found permission for category ALL_PORTLETS and lifecycle state " + state.toString()); return true; } for (JsonEntityBean category : categories) { if (ap.canManage(state, category.getId())) { logger.debug("Found permission for category " + category.getName() + " and lifecycle state " + state.toString()); return true; } } logger.debug("No permission for lifecycle state " + state.toString()); return false; } public IPortletWindowId getDelegateWindowId(ExternalContext externalContext, String fname) { final PortletRequest nativeRequest = (PortletRequest)externalContext.getNativeRequest(); final PortletSession portletSession = nativeRequest.getPortletSession(); return (IPortletWindowId)portletSession.getAttribute(RenderPortletTag.DEFAULT_SESSION_KEY_PREFIX + fname); } public boolean configModeAction(ExternalContext externalContext, String fname) throws IOException { final ActionRequest actionRequest = (ActionRequest)externalContext.getNativeRequest(); final ActionResponse actionResponse = (ActionResponse)externalContext.getNativeResponse(); final IPortletWindowId portletWindowId = this.getDelegateWindowId(externalContext, fname); if (portletWindowId == null) { throw new IllegalStateException("Cannot execute configModeAciton without a delegate window ID in the session for key: " + RenderPortletTag.DEFAULT_SESSION_KEY_PREFIX + fname); } final PortletDelegationDispatcher requestDispatcher = this.portletDelegationLocator.getRequestDispatcher(actionRequest, portletWindowId); final 
DelegationActionResponse delegationResponse = requestDispatcher.doAction(actionRequest, actionResponse); final String redirectLocation = delegationResponse.getRedirectLocation(); final DelegateState delegateState = delegationResponse.getDelegateState(); if (redirectLocation != null || (delegationResponse.getPortletMode() != null && !IPortletRenderer.CONFIG.equals(delegationResponse.getPortletMode())) || !IPortletRenderer.CONFIG.equals(delegateState.getPortletMode())) { //The portlet sent a redirect OR changed it's mode away from CONFIG, assume it is done return true; } return false; } /** * updates the editPortlet form with the portletType * of the first (and only) portletDefinition passed in through the Map of * portlet definitions. * @param portletDefinitions * @param form * @return PortletPublishingDefinition of the first portlet definition in the * list, null if the list is empty or has more than one element. */ public PortletPublishingDefinition updateFormForSinglePortletType( Map<IPortletType, PortletPublishingDefinition> portletDefinitions, PortletDefinitionForm form) { if (portletDefinitions.size() != 1) { return null; } IPortletType portletType = portletDefinitions.keySet().iterator().next(); form.setTypeId(portletType.getId()); PortletPublishingDefinition cpd = portletPublishingDefinitionDao.getChannelPublishingDefinition(portletType.getId()); form.setChannelPublishingDefinition(cpd); return cpd; } public boolean offerPortletSelection(PortletDefinitionForm form) { final IPortletType portletType = this.portletTypeRegistry.getPortletType(form.getTypeId()); final PortletPublishingDefinition portletPublishingDefinition = this.portletPublishingDefinitionDao.getChannelPublishingDefinition(portletType.getId()); final PortletDescriptor portletDescriptor = portletPublishingDefinition.getPortletDescriptor(); if (portletDescriptor == null) { return true; } final Boolean isFramework = portletDescriptor.isIsFramework(); if (isFramework != null && isFramework) { 
form.setFramework(isFramework); } else { final String webAppName = portletDescriptor.getWebAppName(); form.setApplicationId(webAppName); } final String portletName = portletDescriptor.getPortletName(); form.setPortletName(portletName); return false; } public Map<IPortletType, PortletPublishingDefinition> getAllowableChannelPublishingDefinitions(IPerson user) { Map<IPortletType, PortletPublishingDefinition> rslt; final Map<IPortletType, PortletPublishingDefinition> rawMap = portletPublishingDefinitionDao.getChannelPublishingDefinitions(); final IAuthorizationPrincipal principal = AuthorizationPrincipalHelper.principalFromUser(user); if (principal.hasPermission(IPermission.PORTAL_PUBLISH, IPermission.PORTLET_MANAGER_SELECT_PORTLET_TYPE, IPermission.ALL_PORTLET_TYPES)) { // Send the whole collection back... rslt = rawMap; } else { // Filter the collection by permissions... rslt = new HashMap<IPortletType, PortletPublishingDefinition>(); for (Map.Entry<IPortletType, PortletPublishingDefinition> y : rawMap.entrySet()) { if (principal.hasPermission(IPermission.PORTAL_PUBLISH, IPermission.PORTLET_MANAGER_SELECT_PORTLET_TYPE, y.getKey().getName())) { rslt.put(y.getKey(), y.getValue()); } } } return rslt; } protected Tuple<String, String> getPortletDescriptorKeys(PortletDefinitionForm form) { if (form.getPortletName() == null || (form.getApplicationId() == null && !form.isFramework())) { return null; } final String portletAppId; if (form.isFramework()) { portletAppId = this.servletContext.getContextPath(); } else { portletAppId = form.getApplicationId(); } final String portletName = form.getPortletName(); return new Tuple<String, String>(portletAppId, portletName); } private void updateLifecycleState(PortletDefinitionForm form, IPortletDefinition portletDef, IPerson publisher) { /* * Manage the metadata for each possible lifecycle state in turn... 
*/ Date now = new Date(); // Will be entered as the timestamp for states that we trigger PortletLifecycleState selectedLifecycleState = form.getLifecycleState(); /* * APPROVED */ if (selectedLifecycleState.isEqualToOrAfter(PortletLifecycleState.APPROVED)) { // We are the 'approver' if it isn't previously approved... if (portletDef.getApprovalDate() == null) { portletDef.setApproverId(publisher.getID()); portletDef.setApprovalDate(now); } if (selectedLifecycleState.equals(PortletLifecycleState.APPROVED) && form.getPublishDate() != null // Permissions check required (of course) to use the auto-publish feature && hasLifecyclePermission(publisher, PortletLifecycleState.PUBLISHED, form.getCategories())) { // We are also the 'publisher' if we scheduled the portlet for (future) publication... portletDef.setPublishDate(form.getPublishDateTime()); portletDef.setPublisherId(publisher.getID()); } } else { // Clear previous approval fields, if present... portletDef.setApprovalDate(null); portletDef.setApproverId(-1); } /* * PUBLISHED */ if (selectedLifecycleState.isEqualToOrAfter(PortletLifecycleState.PUBLISHED)) { // We are the 'publisher' if it isn't previously published or the publish time hasn't hit yet... if (portletDef.getPublishDate() == null || portletDef.getPublishDate().after(now)) { portletDef.setPublisherId(publisher.getID()); portletDef.setPublishDate(now); } if (selectedLifecycleState.equals(PortletLifecycleState.PUBLISHED) && form.getExpirationDate() != null // Permissions check required (of course) to use the auto-expire feature && hasLifecyclePermission(publisher, PortletLifecycleState.EXPIRED, form.getCategories())) { // We are also the 'expirer' if we scheduled the portlet for (future) expiration... 
portletDef.setExpirationDate(form.getExpirationDateTime()); portletDef.setExpirerId(publisher.getID()); } } else if (!selectedLifecycleState.equals(PortletLifecycleState.APPROVED) || form.getPublishDate() == null){ // Clear previous publishing fields, if present... portletDef.setPublishDate(null); portletDef.setPublisherId(-1); } /* * EXPIRED */ if (selectedLifecycleState.equals(PortletLifecycleState.EXPIRED)) { // We are only the 'expirer' if we specifically choose EXPIRED // (MAINTENANCE mode is not considered expired) portletDef.setExpirerId(publisher.getID()); portletDef.setExpirationDate(now); } else if (!selectedLifecycleState.equals(PortletLifecycleState.PUBLISHED) || form.getExpirationDate() == null) { // Clear previous expiration fields, if present... portletDef.setExpirationDate(null); portletDef.setExpirerId(-1); } /* * MAINTENANCE */ if (selectedLifecycleState.equals(PortletLifecycleState.MAINTENANCE)) { // We are placing the portlet into MAINTENANCE mode; // an admin will restore it (manually) when available portletDef.addParameter(PortletLifecycleState.MAINTENANCE_MODE_PARAMETER_NAME, "true"); } else { // Otherwise we must remove the MAINTENANCE flag, if present portletDef.removeParameter(PortletLifecycleState.MAINTENANCE_MODE_PARAMETER_NAME); } } }
package com.oskopek.transport.planners.sequential.state;

import com.google.common.collect.Lists;
import com.oskopek.transport.model.domain.action.Action;
import com.oskopek.transport.model.problem.*;
import com.oskopek.transport.model.problem.Package;
import com.oskopek.transport.model.state.PlanState;
import org.apache.commons.lang3.builder.HashCodeBuilder;

import java.util.*;

/**
 * An immutable {@link PlanState} variant used for planning.
 * <p>
 * Wrapper of a problem used in planning: does not use standard hash code and equals,
 * but assumes the graph (and other things, like the action object names) do not change and does not compare them.
 */
public class ImmutablePlanState {

    private final Action action;
    private final ImmutablePlanState lastState;
    private final int totalTime;
    private final Problem problem;

    // Lazily computed planning hash; 0 is the "not yet computed" sentinel. If the real
    // hash happens to be 0 it is simply recomputed on every call, which is benign.
    private int planningHashCode;

    /**
     * Default start constructor. Creates the initial (root) state with no
     * predecessor and zero elapsed time.
     *
     * @param problem the problem
     */
    public ImmutablePlanState(Problem problem) {
        this.problem = problem;
        this.lastState = null;
        this.action = null;
        this.totalTime = 0;
    }

    /**
     * Default constructor.
     *
     * @param problem the problem
     * @param lastState the last state (must not be null; use {@link #ImmutablePlanState(Problem)} for roots)
     * @param action the last action (must not be null)
     */
    public ImmutablePlanState(Problem problem, ImmutablePlanState lastState, Action action) {
        this.problem = problem;
        this.lastState = lastState;
        this.action = action;
        totalTime = lastState.totalTime + action.getDuration().getCost();
    }

    /**
     * Get the action.
     *
     * @return the action that led to this state (null for the initial state)
     */
    public Action getAction() {
        return action;
    }

    /**
     * Get the underlying problem instance.
     *
     * @return the problem
     */
    public Problem getProblem() {
        return problem;
    }

    /**
     * Get the pre-calculated total time it takes this plan to reach this state.
     *
     * @return the total time
     */
    public int getTotalTime() {
        return totalTime;
    }

    /**
     * Get the actions, in plan order (oldest first).
     *
     * @return the actions
     */
    public List<Action> getAllActionsInList() {
        return Lists.reverse(Lists.newArrayList(getAllActionsReversed()));
    }

    /**
     * Get the actions, newest first.
     *
     * @return an iterator over the actions, walking back toward the initial state
     */
    public Iterator<Action> getAllActionsReversed() {
        return new ReversedActionIterator(this);
    }

    /**
     * Applies the specified action and returns the new state.
     *
     * @param action the action to apply
     * @return the updated state or empty if the preconditions or the effects were not valid in the resulting state
     */
    public Optional<ImmutablePlanState> apply(Action action) {
        return applyPreconditions(problem, action).flatMap(p -> applyEffects(p, action))
                .map(p -> new ImmutablePlanState(p, this, action));
    }

    /**
     * Applies the specified action's preconditions and returns the new state.
     *
     * @param problem the problem
     * @param action the action's preconditions to apply
     * @return the updated state or empty if the preconditions were not valid before application
     */
    private static Optional<Problem> applyPreconditions(Problem problem, Action action) {
        if (!action.arePreconditionsValid(problem)) {
            return Optional.empty();
        }
        return Optional.of(action.applyPreconditions(problem));
    }

    /**
     * Applies the specified action's effects and returns the new state.
     *
     * @param problem the problem
     * @param action the action's effects to apply
     * @return the updated state or empty if the effects were not valid after application
     */
    private static Optional<Problem> applyEffects(Problem problem, Action action) {
        Problem newProblem = action.applyEffects(problem);
        if (!action.areEffectsValid(newProblem)) {
            return Optional.empty();
        }
        return Optional.of(newProblem);
    }

    /**
     * Check if this state is a goal state.
     *
     * @return true iff this state is a goal state, i.e. if all packages and vehicles are at their targets, if specified
     */
    public boolean isGoalState() { // TODO: merge this into the PlanState interface
        for (Package p : getProblem().getAllPackages()) {
            if (!p.getTarget().equals(p.getLocation())) {
                return false;
            }
        }
        for (Vehicle v : getProblem().getAllVehicles()) {
            Location target = v.getTarget();
            // A vehicle without a declared target may end up anywhere.
            if (target != null && !target.equals(v.getLocation())) {
                return false;
            }
        }
        return true;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof ImmutablePlanState)) {
            return false;
        }
        ImmutablePlanState that = (ImmutablePlanState) o;
        return equalsDuringPlanning(that);
    }

    /**
     * Internal implementation of a fast assumption-dependent equals. For example, totally omits the graph
     * from comparison.
     *
     * @param other the other state to compare
     * @return true iff they are equal, assuming all assumptions hold
     */
    private boolean equalsDuringPlanning(ImmutablePlanState other) {
        return problem.getAllPackages().equals(other.problem.getAllPackages())
                && problem.getAllVehicles().equals(other.problem.getAllVehicles());
    }

    @Override
    public int hashCode() {
        if (planningHashCode == 0) {
            planningHashCode = hashCodeDuringPlanning(problem);
        }
        return planningHashCode;
    }

    /**
     * Internal implementation of a fast assumption-dependent hash code.
     *
     * @param problem the problem to calculate a hash code for
     * @return the hash code
     */
    private static int hashCodeDuringPlanning(Problem problem) {
        HashCodeBuilder builder = new HashCodeBuilder(13, 17);
        for (Vehicle vehicle : problem.getVehicleMap().values()) {
            builder.append(vehicle.getName());
            String location = vehicle.getLocation().getName();
            builder.append(location);
            // Packages carried by the vehicle hash at the vehicle's location.
            vehicle.getPackageList().forEach(p -> packageHashCodeDuringPlanning(builder, p, location));
        }
        for (Package pkg : problem.getPackageMap().values()) {
            if (pkg.getLocation() != null) {
                packageHashCodeDuringPlanning(builder, pkg, pkg.getLocation().getName());
            }
        }
        return builder.toHashCode();
    }

    /**
     * Util method for calculating a packages hash code fast. Assumption-dependent.
     *
     * @param builder the builder to append to
     * @param pkg the package
     * @param location the location of the package
     */
    private static void packageHashCodeDuringPlanning(HashCodeBuilder builder, Package pkg, String location) {
        builder.append(pkg.getName());
        builder.append(location);
    }

    /**
     * Reverse iterator jumping from the current state back, returning actions on the way.
     */
    private static final class ReversedActionIterator implements Iterator<Action> {

        private ImmutablePlanState current;

        /**
         * Default constructor.
         *
         * @param begin the current state
         */
        ReversedActionIterator(ImmutablePlanState begin) {
            current = begin;
        }

        @Override
        public boolean hasNext() {
            return current.lastState != null;
        }

        @Override
        public Action next() {
            // BUGFIX: honor the Iterator contract. Previously an exhausted iterator
            // returned the initial state's null action and then NPE'd in hasNext().
            if (!hasNext()) {
                throw new NoSuchElementException("No more actions before the initial state");
            }
            Action retVal = current.action;
            current = current.lastState;
            return retVal;
        }
    }
}
/* * Copyright (C) 2008 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.primitives; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkElementIndex; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkPositionIndexes; import com.google.common.annotations.Beta; import com.google.common.annotations.GwtCompatible; import com.google.common.base.Converter; import java.io.Serializable; import java.util.AbstractList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.RandomAccess; /** * Static utility methods pertaining to {@code int} primitives, that are not * already found in either {@link Integer} or {@link Arrays}. * * <p>See the Guava User Guide article on <a href= * "http://code.google.com/p/guava-libraries/wiki/PrimitivesExplained"> * primitive utilities</a>. * * @author Kevin Bourrillion * @since 1.0 */ @GwtCompatible(emulated = true) public final class Ints { private Ints() {} /** * The number of bytes required to represent a primitive {@code int} * value. */ public static final int BYTES = Integer.SIZE / Byte.SIZE; /** * The largest power of two that can be represented as an {@code int}. 
* * @since 10.0 */ public static final int MAX_POWER_OF_TWO = 1 << (Integer.SIZE - 2); /** * Returns a hash code for {@code value}; equal to the result of invoking * {@code ((Integer) value).hashCode()}. * * @param value a primitive {@code int} value * @return a hash code for the value */ public static int hashCode(int value) { return value; } /** * Returns the {@code int} value that is equal to {@code value}, if possible. * * @param value any value in the range of the {@code int} type * @return the {@code int} value that equals {@code value} * @throws IllegalArgumentException if {@code value} is greater than {@link * Integer#MAX_VALUE} or less than {@link Integer#MIN_VALUE} */ public static int checkedCast(long value) { int result = (int) value; if (result != value) { // don't use checkArgument here, to avoid boxing throw new IllegalArgumentException("Out of range: " + value); } return result; } /** * Returns the {@code int} nearest in value to {@code value}. * * @param value any {@code long} value * @return the same value cast to {@code int} if it is in the range of the * {@code int} type, {@link Integer#MAX_VALUE} if it is too large, * or {@link Integer#MIN_VALUE} if it is too small */ public static int saturatedCast(long value) { if (value > Integer.MAX_VALUE) { return Integer.MAX_VALUE; } if (value < Integer.MIN_VALUE) { return Integer.MIN_VALUE; } return (int) value; } /** * Compares the two specified {@code int} values. The sign of the value * returned is the same as that of {@code ((Integer) a).compareTo(b)}. * * <p><b>Note:</b> projects using JDK 7 or later should use the equivalent * {@link Integer#compare} method instead. * * @param a the first {@code int} to compare * @param b the second {@code int} to compare * @return a negative value if {@code a} is less than {@code b}; a positive * value if {@code a} is greater than {@code b}; or zero if they are equal */ public static int compare(int a, int b) { return (a < b) ? -1 : ((a > b) ? 
1 : 0); } /** * Returns {@code true} if {@code target} is present as an element anywhere in * {@code array}. * * @param array an array of {@code int} values, possibly empty * @param target a primitive {@code int} value * @return {@code true} if {@code array[i] == target} for some value of {@code * i} */ public static boolean contains(int[] array, int target) { for (int value : array) { if (value == target) { return true; } } return false; } /** * Returns the index of the first appearance of the value {@code target} in * {@code array}. * * @param array an array of {@code int} values, possibly empty * @param target a primitive {@code int} value * @return the least index {@code i} for which {@code array[i] == target}, or * {@code -1} if no such index exists. */ public static int indexOf(int[] array, int target) { return indexOf(array, target, 0, array.length); } // TODO(kevinb): consider making this public private static int indexOf( int[] array, int target, int start, int end) { for (int i = start; i < end; i++) { if (array[i] == target) { return i; } } return -1; } /** * Returns the start position of the first occurrence of the specified {@code * target} within {@code array}, or {@code -1} if there is no such occurrence. * * <p>More formally, returns the lowest index {@code i} such that {@code * java.util.Arrays.copyOfRange(array, i, i + target.length)} contains exactly * the same elements as {@code target}. 
* * @param array the array to search for the sequence {@code target} * @param target the array to search for as a sub-sequence of {@code array} */ public static int indexOf(int[] array, int[] target) { checkNotNull(array, "array"); checkNotNull(target, "target"); if (target.length == 0) { return 0; } outer: for (int i = 0; i < array.length - target.length + 1; i++) { for (int j = 0; j < target.length; j++) { if (array[i + j] != target[j]) { continue outer; } } return i; } return -1; } /** * Returns the index of the last appearance of the value {@code target} in * {@code array}. * * @param array an array of {@code int} values, possibly empty * @param target a primitive {@code int} value * @return the greatest index {@code i} for which {@code array[i] == target}, * or {@code -1} if no such index exists. */ public static int lastIndexOf(int[] array, int target) { return lastIndexOf(array, target, 0, array.length); } // TODO(kevinb): consider making this public private static int lastIndexOf( int[] array, int target, int start, int end) { for (int i = end - 1; i >= start; i--) { if (array[i] == target) { return i; } } return -1; } /** * Returns the least value present in {@code array}. * * @param array a <i>nonempty</i> array of {@code int} values * @return the value present in {@code array} that is less than or equal to * every other value in the array * @throws IllegalArgumentException if {@code array} is empty */ public static int min(int... array) { checkArgument(array.length > 0); int min = array[0]; for (int i = 1; i < array.length; i++) { if (array[i] < min) { min = array[i]; } } return min; } /** * Returns the greatest value present in {@code array}. * * @param array a <i>nonempty</i> array of {@code int} values * @return the value present in {@code array} that is greater than or equal to * every other value in the array * @throws IllegalArgumentException if {@code array} is empty */ public static int max(int... 
array) { checkArgument(array.length > 0); int max = array[0]; for (int i = 1; i < array.length; i++) { if (array[i] > max) { max = array[i]; } } return max; } /** * Returns the values from each provided array combined into a single array. * For example, {@code concat(new int[] {a, b}, new int[] {}, new * int[] {c}} returns the array {@code {a, b, c}}. * * @param arrays zero or more {@code int} arrays * @return a single array containing all the values from the source arrays, in * order */ public static int[] concat(int[]... arrays) { int length = 0; for (int[] array : arrays) { length += array.length; } int[] result = new int[length]; int pos = 0; for (int[] array : arrays) { System.arraycopy(array, 0, result, pos, array.length); pos += array.length; } return result; } private static final class IntConverter extends Converter<String, Integer> implements Serializable { static final IntConverter INSTANCE = new IntConverter(); @Override protected Integer doForward(String value) { return Integer.decode(value); } @Override protected String doBackward(Integer value) { return value.toString(); } @Override public String toString() { return "Ints.stringConverter()"; } private Object readResolve() { return INSTANCE; } private static final long serialVersionUID = 1; } /** * Returns a serializable converter object that converts between strings and * integers using {@link Integer#decode} and {@link Integer#toString()}. * * @since 16.0 */ @Beta public static Converter<String, Integer> stringConverter() { return IntConverter.INSTANCE; } /** * Returns an array containing the same values as {@code array}, but * guaranteed to be of a specified minimum length. If {@code array} already * has a length of at least {@code minLength}, it is returned directly. * Otherwise, a new array of size {@code minLength + padding} is returned, * containing the values of {@code array}, and zeroes in the remaining places. 
   *
   * @param array the source array
   * @param minLength the minimum length the returned array must guarantee
   * @param padding an extra amount to "grow" the array by if growth is necessary
   * @throws IllegalArgumentException if {@code minLength} or {@code padding} is negative
   * @return an array containing the values of {@code array}, with guaranteed minimum length
   *     {@code minLength}
   */
  public static int[] ensureCapacity(int[] array, int minLength, int padding) {
    checkArgument(minLength >= 0, "Invalid minLength: %s", minLength);
    checkArgument(padding >= 0, "Invalid padding: %s", padding);
    // Only copy when the array is actually too short; otherwise hand back the same instance.
    return (array.length < minLength) ? copyOf(array, minLength + padding) : array;
  }

  // Arrays.copyOf() requires Java 6
  private static int[] copyOf(int[] original, int length) {
    int[] copy = new int[length];
    // Copies min(original.length, length) elements; any tail of the new array stays zero-filled.
    System.arraycopy(original, 0, copy, 0, Math.min(original.length, length));
    return copy;
  }

  /**
   * Returns a string containing the supplied {@code int} values separated by {@code separator}.
   * For example, {@code join("-", 1, 2, 3)} returns the string {@code "1-2-3"}.
   *
   * @param separator the text that should appear between consecutive values in the resulting
   *     string (but not at the start or end)
   * @param array an array of {@code int} values, possibly empty
   */
  public static String join(String separator, int... array) {
    checkNotNull(separator);
    if (array.length == 0) {
      return "";
    }

    // For pre-sizing a builder, just get the right order of magnitude
    StringBuilder builder = new StringBuilder(array.length * 5);
    builder.append(array[0]);
    for (int i = 1; i < array.length; i++) {
      builder.append(separator).append(array[i]);
    }
    return builder.toString();
  }

  /**
   * Returns a comparator that compares two {@code int} arrays lexicographically. That is, it
   * compares, using {@link #compare(int, int)}), the first pair of values that follow any common
   * prefix, or when one array is a prefix of the other, treats the shorter array as the lesser.
   * For example, {@code [] < [1] < [1, 2] < [2]}.
   *
   * <p>The returned comparator is inconsistent with {@link Object#equals(Object)} (since arrays
   * support only identity equality), but it is consistent with
   * {@link Arrays#equals(int[], int[])}.
   *
   * @see <a href="http://en.wikipedia.org/wiki/Lexicographical_order">Lexicographical order
   *     article at Wikipedia</a>
   * @since 2.0
   */
  public static Comparator<int[]> lexicographicalComparator() {
    return LexicographicalComparator.INSTANCE;
  }

  // Stateless singleton comparator implemented as an enum (serialization-safe singleton idiom).
  private enum LexicographicalComparator implements Comparator<int[]> {
    INSTANCE;

    @Override
    public int compare(int[] left, int[] right) {
      // Compare the common prefix element by element.
      int minLength = Math.min(left.length, right.length);
      for (int i = 0; i < minLength; i++) {
        int result = Ints.compare(left[i], right[i]);
        if (result != 0) {
          return result;
        }
      }
      // Common prefix equal: the shorter array is the lesser.
      return left.length - right.length;
    }
  }

  /**
   * Returns an array containing each value of {@code collection}, converted to a {@code int}
   * value in the manner of {@link Number#intValue}.
   *
   * <p>Elements are copied from the argument collection as if by {@code collection.toArray()}.
   * Calling this method is as thread-safe as calling that method.
   *
   * @param collection a collection of {@code Number} instances
   * @return an array containing the same values as {@code collection}, in the same order,
   *     converted to primitives
   * @throws NullPointerException if {@code collection} or any of its elements is null
   * @since 1.0 (parameter was {@code Collection<Integer>} before 12.0)
   */
  public static int[] toArray(Collection<? extends Number> collection) {
    // Fast path: our own list view can copy its backing range directly, no boxing round-trip.
    if (collection instanceof IntArrayAsList) {
      return ((IntArrayAsList) collection).toIntArray();
    }
    Object[] boxedArray = collection.toArray();
    int len = boxedArray.length;
    int[] array = new int[len];
    for (int i = 0; i < len; i++) {
      // checkNotNull for GWT (do not optimize)
      array[i] = ((Number) checkNotNull(boxedArray[i])).intValue();
    }
    return array;
  }

  /**
   * Returns a fixed-size list backed by the specified array, similar to
   * {@link Arrays#asList(Object[])}. The list supports {@link List#set(int, Object)}, but any
   * attempt to set a value to {@code null} will result in a {@link NullPointerException}.
   *
   * <p>The returned list maintains the values, but not the identities, of {@code Integer} objects
   * written to or read from it. For example, whether {@code list.get(0) == list.get(0)} is true
   * for the returned list is unspecified.
   *
   * @param backingArray the array to back the list
   * @return a list view of the array
   */
  public static List<Integer> asList(int... backingArray) {
    if (backingArray.length == 0) {
      return Collections.emptyList();
    }
    return new IntArrayAsList(backingArray);
  }

  // Live view over a half-open range [start, end) of an int[]; writes through to the array.
  @GwtCompatible
  private static class IntArrayAsList extends AbstractList<Integer>
      implements RandomAccess, Serializable {
    final int[] array;
    final int start;
    final int end;

    IntArrayAsList(int[] array) {
      this(array, 0, array.length);
    }

    IntArrayAsList(int[] array, int start, int end) {
      this.array = array;
      this.start = start;
      this.end = end;
    }

    @Override
    public int size() {
      return end - start;
    }

    @Override
    public boolean isEmpty() {
      // Never empty: asList() returns Collections.emptyList() for a zero-length array,
      // and subList() returns emptyList() for an empty range.
      return false;
    }

    @Override
    public Integer get(int index) {
      checkElementIndex(index, size());
      return array[start + index];
    }

    @Override
    public boolean contains(Object target) {
      // Overridden to prevent a ton of boxing
      return (target instanceof Integer) && Ints.indexOf(array, (Integer) target, start, end) != -1;
    }

    @Override
    public int indexOf(Object target) {
      // Overridden to prevent a ton of boxing
      if (target instanceof Integer) {
        int i = Ints.indexOf(array, (Integer) target, start, end);
        if (i >= 0) {
          return i - start;
        }
      }
      return -1;
    }

    @Override
    public int lastIndexOf(Object target) {
      // Overridden to prevent a ton of boxing
      if (target instanceof Integer) {
        int i = Ints.lastIndexOf(array, (Integer) target, start, end);
        if (i >= 0) {
          return i - start;
        }
      }
      return -1;
    }

    @Override
    public Integer set(int index, Integer element) {
      checkElementIndex(index, size());
      int oldValue = array[start + index];
      // checkNotNull for GWT (do not optimize)
      array[start + index] = checkNotNull(element);
      return oldValue;
    }

    @Override
    public List<Integer> subList(int fromIndex, int toIndex) {
      int size = size();
      checkPositionIndexes(fromIndex, toIndex, size);
      if (fromIndex == toIndex) {
        return Collections.emptyList();
      }
      return new IntArrayAsList(array, start + fromIndex, start + toIndex);
    }

    @Override
    public boolean equals(Object object) {
      if (object == this) {
        return true;
      }
      if (object instanceof IntArrayAsList) {
        // Fast path: compare the two backing ranges directly without boxing.
        IntArrayAsList that = (IntArrayAsList) object;
        int size = size();
        if (that.size() != size) {
          return false;
        }
        for (int i = 0; i < size; i++) {
          if (array[start + i] != that.array[that.start + i]) {
            return false;
          }
        }
        return true;
      }
      // Fall back to AbstractList's element-wise comparison for other List types.
      return super.equals(object);
    }

    @Override
    public int hashCode() {
      // Mirrors List.hashCode() contract: 31 * acc + elementHash over the range.
      int result = 1;
      for (int i = start; i < end; i++) {
        result = 31 * result + Ints.hashCode(array[i]);
      }
      return result;
    }

    @Override
    public String toString() {
      // size() >= 1 here (see isEmpty), so array[start] is always valid.
      StringBuilder builder = new StringBuilder(size() * 5);
      builder.append('[').append(array[start]);
      for (int i = start + 1; i < end; i++) {
        builder.append(", ").append(array[i]);
      }
      return builder.append(']').toString();
    }

    int[] toIntArray() {
      // Arrays.copyOfRange() is not available under GWT
      int size = size();
      int[] result = new int[size];
      System.arraycopy(array, start, result, 0, size);
      return result;
    }

    private static final long serialVersionUID = 0;
  }

  // Maps an ASCII code point to its digit value (0-35) or -1 if it is not a digit.
  private static final byte[] asciiDigits = new byte[128];

  static {
    Arrays.fill(asciiDigits, (byte) -1);
    for (int i = 0; i <= 9; i++) {
      asciiDigits['0' + i] = (byte) i;
    }
    // NOTE(review): the bound `i <= 26` also maps 'A'+26 ('[') and 'a'+26 ('{') to value 36;
    // presumably harmless because callers reject digits >= radix (radix <= 36) — confirm.
    for (int i = 0; i <= 26; i++) {
      asciiDigits['A' + i] = (byte) (10 + i);
      asciiDigits['a' + i] = (byte) (10 + i);
    }
  }

  private static int digit(char c) {
    return (c < 128) ? asciiDigits[c] : -1;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.core; import static org.apache.solr.core.CoreContainer.CORE_DISCOVERY_COMPLETE; import static org.apache.solr.core.CoreContainer.INITIAL_CORE_LOAD_COMPLETE; import static org.apache.solr.core.CoreContainer.LOAD_COMPLETE; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.core.StringContains.containsString; import com.google.common.collect.ImmutableMap; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.Writer; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.List; import java.util.Properties; import org.apache.commons.io.FileUtils; import org.apache.lucene.util.IOUtils; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.junit.After; import org.junit.BeforeClass; import org.junit.Test; public class TestCoreDiscovery extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { initCore(); } private final Path solrHomeDirectory = createTempDir(); private void setMeUp(String 
alternateCoreDir) throws Exception { String xmlStr = SOLR_XML; if (alternateCoreDir != null) { xmlStr = xmlStr.replace( "<solr>", "<solr> <str name=\"coreRootDirectory\">" + alternateCoreDir + "</str> "); } File tmpFile = new File(solrHomeDirectory.toFile(), SolrXmlConfig.SOLR_XML_FILE); FileUtils.write(tmpFile, xmlStr, IOUtils.UTF_8); } private void setMeUp() throws Exception { setMeUp(null); } private Properties makeCoreProperties( String name, boolean isTransient, boolean loadOnStartup, String... extraProps) { Properties props = new Properties(); props.put(CoreDescriptor.CORE_NAME, name); props.put(CoreDescriptor.CORE_SCHEMA, "schema-tiny.xml"); props.put(CoreDescriptor.CORE_CONFIG, "solrconfig-minimal.xml"); props.put(CoreDescriptor.CORE_TRANSIENT, Boolean.toString(isTransient)); props.put(CoreDescriptor.CORE_LOADONSTARTUP, Boolean.toString(loadOnStartup)); props.put(CoreDescriptor.CORE_DATADIR, "${core.dataDir:stuffandnonsense}"); for (String extra : extraProps) { String[] parts = extra.split("="); props.put(parts[0], parts[1]); } return props; } private void addCoreWithProps(Properties stockProps, File propFile) throws Exception { if (!propFile.getParentFile().exists()) propFile.getParentFile().mkdirs(); Writer out = new OutputStreamWriter(new FileOutputStream(propFile), StandardCharsets.UTF_8); try { stockProps.store(out, null); } finally { out.close(); } addConfFiles(new File(propFile.getParent(), "conf")); } private void addCoreWithProps(String name, Properties stockProps) throws Exception { File propFile = new File( new File(solrHomeDirectory.toFile(), name), CorePropertiesLocator.PROPERTIES_FILENAME); File parent = propFile.getParentFile(); assertTrue("Failed to mkdirs for " + parent.getAbsolutePath(), parent.mkdirs()); addCoreWithProps(stockProps, propFile); } private void addConfFiles(File confDir) throws Exception { String top = SolrTestCaseJ4.TEST_HOME() + "/collection1/conf"; assertTrue("Failed to mkdirs for " + confDir.getAbsolutePath(), 
confDir.mkdirs()); FileUtils.copyFile(new File(top, "schema-tiny.xml"), new File(confDir, "schema-tiny.xml")); FileUtils.copyFile( new File(top, "solrconfig-minimal.xml"), new File(confDir, "solrconfig-minimal.xml")); FileUtils.copyFile( new File(top, "solrconfig.snippet.randomindexconfig.xml"), new File(confDir, "solrconfig.snippet.randomindexconfig.xml")); } private CoreContainer init() { final CoreContainer container = new CoreContainer(solrHomeDirectory, new Properties()); try { container.load(); } catch (Exception e) { container.shutdown(); throw e; } long status = container.getStatus(); assertTrue("Load complete flag should be set", (status & LOAD_COMPLETE) == LOAD_COMPLETE); assertTrue( "Core discovery should be complete", (status & CORE_DISCOVERY_COMPLETE) == CORE_DISCOVERY_COMPLETE); assertTrue( "Initial core loading should be complete", (status & INITIAL_CORE_LOAD_COMPLETE) == INITIAL_CORE_LOAD_COMPLETE); return container; } @After public void after() throws Exception {} // Test the basic setup, create some dirs with core.properties files in them, but solr.xml has // discoverCores set and insure that we find all the cores and can load them. @Test @SuppressWarnings({"try"}) public void testDiscovery() throws Exception { setMeUp(); // name, isLazy, loadOnStartup addCoreWithProps("core1", makeCoreProperties("core1", false, true, "dataDir=core1")); addCoreWithProps("core2", makeCoreProperties("core2", false, false, "dataDir=core2")); // I suspect what we're adding in here is a "configset" rather than a schema or solrconfig. 
// addCoreWithProps("lazy1", makeCoreProperties("lazy1", true, false, "dataDir=lazy1")); CoreContainer cc = init(); try { TestLazyCores.checkLoadedCores(cc, "core1"); TestLazyCores.checkCoresNotLoaded(cc, "lazy1", "core2"); // force loading of core2 and lazy1 by getting them from the CoreContainer try (SolrCore core1 = cc.getCore("core1"); SolrCore core2 = cc.getCore("core2"); SolrCore lazy1 = cc.getCore("lazy1")) { // Let's assert we did the right thing for implicit properties too. CoreDescriptor desc = core1.getCoreDescriptor(); assertEquals("core1", desc.getName()); // This is too long and ugly to put in. Besides, it varies. assertNotNull(desc.getInstanceDir()); assertEquals("core1", desc.getDataDir()); assertEquals("solrconfig-minimal.xml", desc.getConfigName()); assertEquals("schema-tiny.xml", desc.getSchemaName()); TestLazyCores.checkLoadedCores(cc, "core1", "core2", "lazy1"); // Can we persist an existing core's properties? // Insure we can persist a new properties file if we want. CoreDescriptor cd1 = core1.getCoreDescriptor(); Properties persistable = cd1.getPersistableUserProperties(); persistable.setProperty("bogusprop", "bogusval"); cc.getCoresLocator().persist(cc, cd1); File propFile = new File( new File(solrHomeDirectory.toFile(), "core1"), CorePropertiesLocator.PROPERTIES_FILENAME); Properties newProps = new Properties(); try (InputStreamReader is = new InputStreamReader(new FileInputStream(propFile), StandardCharsets.UTF_8)) { newProps.load(is); } // is it there? assertEquals( "Should have persisted bogusprop to disk", "bogusval", newProps.getProperty("bogusprop")); // is it in the user properties? 
CorePropertiesLocator cpl = new CorePropertiesLocator(solrHomeDirectory); List<CoreDescriptor> cores = cpl.discover(cc); boolean found = false; for (CoreDescriptor cd : cores) { if (cd.getName().equals("core1")) { found = true; assertEquals( "Should have persisted bogusprop to disk in user properties", "bogusval", cd.getPersistableUserProperties().getProperty("bogusprop")); break; } } assertTrue("Should have found core descriptor for core1", found); } } finally { cc.shutdown(); } } @Test public void testPropFilePersistence() throws Exception { setMeUp(); // Test that an existing core.properties file is _not_ deleted if the core fails to load. Properties badProps = makeCoreProperties("corep1", false, true); badProps.setProperty(CoreDescriptor.CORE_SCHEMA, "not-there.xml"); addCoreWithProps("corep1", badProps); // Sanity check that a core did get loaded addCoreWithProps("corep2", makeCoreProperties("corep2", false, true)); Path coreP1PropFile = Paths.get(solrHomeDirectory.toString(), "corep1", "core.properties"); assertTrue( "Core.properties file should exist for before core load failure core corep1", Files.exists(coreP1PropFile)); CoreContainer cc = init(); try { Exception thrown = expectThrows(SolrCoreInitializationException.class, () -> cc.getCore("corep1")); assertTrue(thrown.getMessage().contains("init failure")); try (SolrCore sc = cc.getCore("corep2")) { assertNotNull("Core corep2 should be loaded", sc); } assertTrue( "Core.properties file should still exist for core corep1", Files.exists(coreP1PropFile)); // Creating a core successfully should create a core.properties file Path corePropFile = Paths.get(solrHomeDirectory.toString(), "corep3", "core.properties"); assertFalse("Should not be a properties file yet", Files.exists(corePropFile)); cc.create("corep3", ImmutableMap.of("configSet", "minimal")); assertTrue("Should be a properties file for newly created core", Files.exists(corePropFile)); // Failing to create a core should _not_ leave a core.properties 
file hanging around. corePropFile = Paths.get(solrHomeDirectory.toString(), "corep4", "core.properties"); assertFalse("Should not be a properties file yet for corep4", Files.exists(corePropFile)); thrown = expectThrows( SolrException.class, () -> { cc.create( "corep4", ImmutableMap.of( CoreDescriptor.CORE_NAME, "corep4", CoreDescriptor.CORE_SCHEMA, "not-there.xml", CoreDescriptor.CORE_CONFIG, "solrconfig-minimal.xml", CoreDescriptor.CORE_TRANSIENT, "false", CoreDescriptor.CORE_LOADONSTARTUP, "true")); }); assertTrue(thrown.getMessage().contains("Can't find resource")); assertFalse( "Failed corep4 should not have left a core.properties file around", Files.exists(corePropFile)); // Finally, just for yucks, let's determine that a this create path also leaves a prop file. corePropFile = Paths.get(solrHomeDirectory.toString(), "corep5", "core.properties"); assertFalse("Should not be a properties file yet for corep5", Files.exists(corePropFile)); cc.create("corep5", ImmutableMap.of("configSet", "minimal")); assertTrue( "corep5 should have left a core.properties file on disk", Files.exists(corePropFile)); } finally { cc.shutdown(); } } // Insure that if the number of transient cores that are loaded on startup is greater than the // cache size that Solr "does the right thing". Which means // 1> stop loading cores after transient cache size is reached, in this case that magic number is // 3 one non-transient and two transient. // 2> still loads cores as time passes. // // This seems like a silly test, but it hangs forever on 4.10 so let's guard against it in future. // The behavior has gone away with the removal of the complexity around the old-style solr.xml // files. // // NOTE: The order that cores are loaded depends upon how the core discovery is traversed. I don't // think we can make the test depend on that order, so after load just insure that the cores // counts are correct. 
@Test public void testTooManyTransientCores() throws Exception { setMeUp(); // name, isLazy, loadOnStartup addCoreWithProps("coreLOS", makeCoreProperties("coreLOS", false, true, "dataDir=coreLOS")); addCoreWithProps("coreT1", makeCoreProperties("coreT1", true, true, "dataDir=coreT1")); addCoreWithProps("coreT2", makeCoreProperties("coreT2", true, true, "dataDir=coreT2")); addCoreWithProps("coreT3", makeCoreProperties("coreT3", true, true, "dataDir=coreT3")); addCoreWithProps("coreT4", makeCoreProperties("coreT4", true, true, "dataDir=coreT4")); addCoreWithProps("coreT5", makeCoreProperties("coreT5", true, true, "dataDir=coreT5")); addCoreWithProps("coreT6", makeCoreProperties("coreT6", true, true, "dataDir=coreT6")); // Do this specially since we need to search. final CoreContainer cc = new CoreContainer(solrHomeDirectory, new Properties()); try { cc.load(); // Just check that the proper number of cores are loaded since making the test depend on order // would be fragile assertEquals( "There should only be 3 cores loaded, coreLOS and two coreT? 
cores", 3, cc.getLoadedCoreNames().size()); SolrCore c1 = cc.getCore("coreT1"); assertNotNull("Core T1 should NOT BE NULL", c1); SolrCore c2 = cc.getCore("coreT2"); assertNotNull("Core T2 should NOT BE NULL", c2); SolrCore c3 = cc.getCore("coreT3"); assertNotNull("Core T3 should NOT BE NULL", c3); SolrCore c4 = cc.getCore("coreT4"); assertNotNull("Core T4 should NOT BE NULL", c4); SolrCore c5 = cc.getCore("coreT5"); assertNotNull("Core T5 should NOT BE NULL", c5); SolrCore c6 = cc.getCore("coreT6"); assertNotNull("Core T6 should NOT BE NULL", c6); c1.close(); c2.close(); c3.close(); c4.close(); c5.close(); c6.close(); } finally { cc.shutdown(); } } @Test public void testDuplicateNames() throws Exception { setMeUp(); // name, isLazy, loadOnStartup addCoreWithProps("core1", makeCoreProperties("core1", false, true)); addCoreWithProps("core2", makeCoreProperties("core2", false, false, "name=core1")); SolrException thrown = expectThrows( SolrException.class, () -> { CoreContainer cc = null; try { cc = init(); } finally { if (cc != null) cc.shutdown(); } }); final String message = thrown.getMessage(); assertTrue( "Wrong exception thrown on duplicate core names", message.indexOf("Found multiple cores with the name [core1]") != -1); assertTrue( File.separator + "core1 should have been mentioned in the message: " + message, message.indexOf(File.separator + "core1") != -1); assertTrue( File.separator + "core2 should have been mentioned in the message:" + message, message.indexOf(File.separator + "core2") != -1); } @Test public void testAlternateCoreDir() throws Exception { File alt = createTempDir().toFile(); setMeUp(alt.getAbsolutePath()); addCoreWithProps( makeCoreProperties("core1", false, true, "dataDir=core1"), new File(alt, "core1" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME)); addCoreWithProps( makeCoreProperties("core2", false, false, "dataDir=core2"), new File(alt, "core2" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME)); CoreContainer 
cc = init(); try (SolrCore core1 = cc.getCore("core1"); SolrCore core2 = cc.getCore("core2")) { assertNotNull(core1); assertNotNull(core2); } finally { cc.shutdown(); } } @Test public void testAlternateRelativeCoreDir() throws Exception { String relative = "relativeCoreDir"; setMeUp(relative); // two cores under the relative directory addCoreWithProps( makeCoreProperties("core1", false, true, "dataDir=core1"), solrHomeDirectory .resolve(relative) .resolve("core1") .resolve(CorePropertiesLocator.PROPERTIES_FILENAME) .toFile()); addCoreWithProps( makeCoreProperties("core2", false, false, "dataDir=core2"), solrHomeDirectory .resolve(relative) .resolve("core2") .resolve(CorePropertiesLocator.PROPERTIES_FILENAME) .toFile()); // one core *not* under the relative directory addCoreWithProps( makeCoreProperties("core0", false, true, "datadir=core0"), solrHomeDirectory .resolve("core0") .resolve(CorePropertiesLocator.PROPERTIES_FILENAME) .toFile()); CoreContainer cc = init(); try (SolrCore core1 = cc.getCore("core1"); SolrCore core2 = cc.getCore("core2")) { assertNotNull(core1); assertNotNull(core2); assertNull(cc.getCore("core0")); SolrCore core3 = cc.create("core3", ImmutableMap.of("configSet", "minimal")); assertThat(core3.getCoreDescriptor().getInstanceDir().toString(), containsString("relative")); } finally { cc.shutdown(); } } @Test public void testNoCoreDir() throws Exception { File noCoreDir = createTempDir().toFile(); setMeUp(noCoreDir.getAbsolutePath()); addCoreWithProps( makeCoreProperties("core1", false, true), new File(noCoreDir, "core1" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME)); addCoreWithProps( makeCoreProperties("core2", false, false), new File(noCoreDir, "core2" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME)); CoreContainer cc = init(); try (SolrCore core1 = cc.getCore("core1"); SolrCore core2 = cc.getCore("core2")) { assertNotNull(core1); assertNotNull(core2); } finally { cc.shutdown(); } } @Test public void 
testCoreDirCantRead() throws Exception { File coreDir = solrHomeDirectory.toFile(); setMeUp(coreDir.getAbsolutePath()); addCoreWithProps( makeCoreProperties("core1", false, true), new File(coreDir, "core1" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME)); // Insure that another core is opened successfully addCoreWithProps( makeCoreProperties("core2", false, false, "dataDir=core2"), new File(coreDir, "core2" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME)); File toSet = new File(coreDir, "core1"); assumeTrue( "Cannot make " + toSet + " non-readable. Test aborted.", toSet.setReadable(false, false)); assumeFalse("Appears we are a super user, skip test", toSet.canRead()); CoreContainer cc = init(); try (SolrCore core1 = cc.getCore("core1"); SolrCore core2 = cc.getCore("core2")) { assertNull(core1); assertNotNull(core2); } finally { cc.shutdown(); } // So things can be cleaned up by the framework! toSet.setReadable(true, false); } @Test public void testNonCoreDirCantRead() throws Exception { File coreDir = solrHomeDirectory.toFile(); setMeUp(coreDir.getAbsolutePath()); addCoreWithProps( makeCoreProperties("core1", false, true), new File(coreDir, "core1" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME)); addCoreWithProps( makeCoreProperties("core2", false, false, "dataDir=core2"), new File(coreDir, "core2" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME)); File toSet = new File(solrHomeDirectory.toFile(), "cantReadDir"); assertTrue( "Should have been able to make directory '" + toSet.getAbsolutePath() + "' ", toSet.mkdirs()); assumeTrue( "Cannot make " + toSet + " non-readable. 
Test aborted.", toSet.setReadable(false, false)); assumeFalse("Appears we are a super user, skip test", toSet.canRead()); CoreContainer cc = init(); try (SolrCore core1 = cc.getCore("core1"); SolrCore core2 = cc.getCore("core2")) { // Should be able to open the perfectly valid core1 despite a non-readable directory assertNotNull(core1); assertNotNull(core2); } finally { cc.shutdown(); } // So things can be cleaned up by the framework! toSet.setReadable(true, false); } @Test public void testFileCantRead() throws Exception { File coreDir = solrHomeDirectory.toFile(); setMeUp(coreDir.getAbsolutePath()); addCoreWithProps( makeCoreProperties("core1", false, true), new File(coreDir, "core1" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME)); File toSet = new File(solrHomeDirectory.toFile(), "cantReadFile"); assertTrue( "Should have been able to make file '" + toSet.getAbsolutePath() + "' ", toSet.createNewFile()); assumeTrue( "Cannot make " + toSet + " non-readable. Test aborted.", toSet.setReadable(false, false)); CoreContainer cc = init(); try (SolrCore core1 = cc.getCore("core1")) { assertNotNull(core1); // Should still be able to create core despite r/o file. } finally { cc.shutdown(); } // So things can be cleaned up by the framework! 
toSet.setReadable(true, false); } @Test public void testSolrHomeDoesntExist() throws Exception { File homeDir = solrHomeDirectory.toFile(); IOUtils.rm(homeDir.toPath()); CoreContainer cc = null; try { cc = init(); } catch (SolrException ex) { assertTrue( "Core init doesn't report if solr home directory doesn't exist " + ex.getMessage(), ex.getMessage().contains("Error reading core root directory")); } finally { if (cc != null) { cc.shutdown(); } } } @Test public void testSolrHomeNotReadable() throws Exception { File homeDir = solrHomeDirectory.toFile(); setMeUp(homeDir.getAbsolutePath()); addCoreWithProps( makeCoreProperties("core1", false, true), new File(homeDir, "core1" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME)); assumeTrue( "Cannot make " + homeDir + " non-readable. Test aborted.", homeDir.setReadable(false, false)); assumeFalse("Appears we are a super user, skip test", homeDir.canRead()); Exception thrown = expectThrows( Exception.class, () -> { CoreContainer cc = null; try { cc = init(); } finally { if (cc != null) cc.shutdown(); } }); assertThat(thrown.getMessage(), containsString("Error reading core root directory")); // So things can be cleaned up by the framework! 
homeDir.setReadable(true, false); } // For testing whether finding a solr.xml overrides looking at solr.properties private static final String SOLR_XML = "<solr> " + "<int name=\"transientCacheSize\">2</int> " + "<str name=\"configSetBaseDir\">" + Paths.get(TEST_HOME()).resolve("configsets").toString() + "</str>" + "<solrcloud> " + "<str name=\"hostContext\">solrprop</str> " + "<int name=\"zkClientTimeout\">20</int> " + "<str name=\"host\">222.333.444.555</str> " + "<int name=\"hostPort\">6000</int> " + "</solrcloud> " + "</solr>"; @Test public void testRootDirectoryResolution() { NodeConfig config = SolrXmlConfig.fromString( solrHomeDirectory, "<solr><str name=\"coreRootDirectory\">relative</str></solr>"); assertThat( config.getCoreRootDirectory().toString(), containsString(solrHomeDirectory.toAbsolutePath().toString())); NodeConfig absConfig = SolrXmlConfig.fromString( solrHomeDirectory, "<solr><str name=\"coreRootDirectory\">/absolute</str></solr>"); assertThat( absConfig.getCoreRootDirectory().toString(), not(containsString(solrHomeDirectory.toAbsolutePath().toString()))); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hive.ql.io.orc;

import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;

import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.orc.CompressionKind;
import org.apache.orc.TypeDescription;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.io.AcidOutputFormat;
import org.apache.hadoop.hive.ql.io.AcidUtils;
import org.apache.hadoop.hive.ql.io.IOConstants;
import org.apache.hadoop.hive.ql.io.RecordUpdater;
import org.apache.hadoop.hive.ql.io.StatsProvidingRecordWriter;
import org.apache.hadoop.hive.ql.io.orc.OrcSerde.OrcSerdeRow;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordWriter;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.util.Progressable;

/**
 * A Hive OutputFormat for ORC files.
 */
public class OrcOutputFormat extends FileOutputFormat<NullWritable, OrcSerdeRow>
                        implements AcidOutputFormat<NullWritable, OrcSerdeRow> {

  private static final Logger LOG = LoggerFactory.getLogger(OrcOutputFormat.class);

  /**
   * Writes OrcSerdeRows to an ORC file, creating the underlying ORC writer lazily on the
   * first row so that the object inspector can be taken from the row itself.
   */
  private static class OrcRecordWriter
      implements RecordWriter<NullWritable, OrcSerdeRow>,
                 StatsProvidingRecordWriter {
    private Writer writer = null;
    private final Path path;
    private final OrcFile.WriterOptions options;
    private final SerDeStats stats;

    OrcRecordWriter(Path path, OrcFile.WriterOptions options) {
      this.path = path;
      this.options = options;
      this.stats = new SerDeStats();
    }

    @Override
    public void write(NullWritable nullWritable,
                      OrcSerdeRow row) throws IOException {
      if (writer == null) {
        init(row);
      }
      writer.addRow(row.getRow());
    }

    @Override
    public void write(Writable row) throws IOException {
      OrcSerdeRow serdeRow = (OrcSerdeRow) row;
      if (writer == null) {
        init(serdeRow);
      }
      writer.addRow(serdeRow.getRow());
    }

    @Override
    public void close(Reporter reporter) throws IOException {
      close(true);
    }

    @Override
    public void close(boolean b) throws IOException {
      if (writer == null) {
        // we are closing a file without writing any data in it
        FileSystem fs = options.getFileSystem() == null
            ? path.getFileSystem(options.getConfiguration()) : options.getFileSystem();
        fs.createNewFile(path);
        return;
      }
      writer.close();
    }

    @Override
    public SerDeStats getStats() {
      // writer may still be null if close() ran before any row was written.
      stats.setRawDataSize(null == writer ? 0 : writer.getRawDataSize());
      stats.setRowCount(null == writer ? 0 : writer.getNumberOfRows());
      return stats;
    }

    /** Creates the real ORC writer, using the first row's inspector for the schema. */
    private void init(OrcSerdeRow serdeRow) throws IOException {
      options.inspector(serdeRow.getInspector());
      writer = OrcFile.createWriter(path, options);
      if (options.isCompaction()) {
        AcidUtils.OrcAcidVersion.setAcidVersionInDataFile(writer);
      }
    }
  }

  /**
   * Builds writer options from the job conf and table properties; when column names/types are
   * present in the properties, derives and sets an explicit ORC struct schema.
   */
  private OrcFile.WriterOptions getOptions(JobConf conf, Properties props) {
    OrcFile.WriterOptions result = OrcFile.writerOptions(props, conf);
    if (props != null) {
      final String columnNameProperty = props.getProperty(IOConstants.COLUMNS);
      final String columnTypeProperty = props.getProperty(IOConstants.COLUMNS_TYPES);
      if (columnNameProperty != null && !columnNameProperty.isEmpty()
          && columnTypeProperty != null && !columnTypeProperty.isEmpty()) {
        List<String> columnNames;
        List<TypeInfo> columnTypes;
        final String columnNameDelimiter = props.containsKey(serdeConstants.COLUMN_NAME_DELIMITER)
            ? props.getProperty(serdeConstants.COLUMN_NAME_DELIMITER)
            : String.valueOf(SerDeUtils.COMMA);
        if (columnNameProperty.length() == 0) {
          columnNames = new ArrayList<String>();
        } else {
          columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter));
        }

        if (columnTypeProperty.length() == 0) {
          columnTypes = new ArrayList<TypeInfo>();
        } else {
          columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
        }

        TypeDescription schema = TypeDescription.createStruct();
        for (int i = 0; i < columnNames.size(); ++i) {
          schema.addField(columnNames.get(i), OrcInputFormat.convertTypeInfo(columnTypes.get(i)));
        }
        // Parameterized logging — no string concat unless debug is actually enabled.
        LOG.debug("ORC schema = {}", schema);
        result.setSchema(schema);
      }
    }
    return result;
  }

  @Override
  public RecordWriter<NullWritable, OrcSerdeRow>
  getRecordWriter(FileSystem fileSystem, JobConf conf, String name,
                  Progressable reporter) throws IOException {
    return new OrcRecordWriter(new Path(name), getOptions(conf, null));
  }

  @Override
  public StatsProvidingRecordWriter
  getHiveRecordWriter(JobConf conf,
                      Path path,
                      Class<? extends Writable> valueClass,
                      boolean isCompressed,
                      Properties tableProperties,
                      Progressable reporter) throws IOException {
    return new OrcRecordWriter(path, getOptions(conf, tableProperties));
  }

  /**
   * A debugging RecordUpdater that only prints what would be written to the options' dummy
   * stream; it never touches the file system. Made static — it reads no enclosing-instance
   * state, so the hidden outer reference of a non-static inner class was unnecessary.
   */
  private static class DummyOrcRecordUpdater implements RecordUpdater {
    private final Path path;
    private final ObjectInspector inspector;
    private final PrintStream out;

    private DummyOrcRecordUpdater(Path path, Options options) {
      this.path = path;
      this.inspector = options.getInspector();
      this.out = options.getDummyStream();
    }

    @Override
    public void insert(long currentWriteId, Object row) throws IOException {
      out.println("insert " + path + " currWriteId: " + currentWriteId +
          " obj: " + stringifyObject(row, inspector));
    }

    @Override
    public void update(long currentWriteId, Object row) throws IOException {
      out.println("update " + path + " currWriteId: " + currentWriteId +
          " obj: " + stringifyObject(row, inspector));
    }

    @Override
    public void delete(long currentWriteId, Object row) throws IOException {
      out.println("delete " + path + " currWriteId: " + currentWriteId + " obj: " + row);
    }

    @Override
    public void flush() throws IOException {
      out.println("flush " + path);
    }

    @Override
    public void close(boolean abort) throws IOException {
      out.println("close " + path);
    }

    @Override
    public SerDeStats getStats() {
      return null;
    }

    @Override
    public long getBufferedRowCount() {
      return 0;
    }

    /** Recursively renders a struct/primitive value into {@code buffer} for debug output. */
    private void stringifyObject(StringBuilder buffer,
                                 Object obj,
                                 ObjectInspector inspector) throws IOException {
      if (inspector instanceof StructObjectInspector) {
        buffer.append("{ ");
        StructObjectInspector soi = (StructObjectInspector) inspector;
        boolean isFirst = true;
        for (StructField field : soi.getAllStructFieldRefs()) {
          if (isFirst) {
            isFirst = false;
          } else {
            buffer.append(", ");
          }
          buffer.append(field.getFieldName());
          buffer.append(": ");
          stringifyObject(buffer, soi.getStructFieldData(obj, field),
              field.getFieldObjectInspector());
        }
        buffer.append(" }");
      } else if (inspector instanceof PrimitiveObjectInspector) {
        PrimitiveObjectInspector poi = (PrimitiveObjectInspector) inspector;
        // Guard against null primitives — the unguarded toString() would NPE.
        Object primitive = poi.getPrimitiveJavaObject(obj);
        buffer.append(primitive == null ? "null" : primitive.toString());
      } else {
        buffer.append("*unknown*");
      }
    }

    private String stringifyObject(Object obj,
                                   ObjectInspector inspector) throws IOException {
      StringBuilder buffer = new StringBuilder();
      stringifyObject(buffer, obj, inspector);
      return buffer.toString();
    }

    @Override
    public Path getUpdatedFilePath() {
      return null;
    }
  }

  @Override
  public RecordUpdater getRecordUpdater(Path path,
                                        Options options) throws IOException {
    if (options.getDummyStream() != null) {
      return new DummyOrcRecordUpdater(path, options);
    } else {
      return new OrcRecordUpdater(path, options);
    }
  }

  @Override
  public org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter
     getRawRecordWriter(Path path, Options options) throws IOException {
    final Path filename = AcidUtils.createFilename(path, options);
    final OrcFile.WriterOptions opts =
        OrcFile.writerOptions(options.getTableProperties(), options.getConfiguration());
    if (!options.isWritingBase()) {
      // Deltas are small and written often: small buffers/stripes, no padding.
      opts.bufferSize(OrcRecordUpdater.DELTA_BUFFER_SIZE)
          .stripeSize(OrcRecordUpdater.DELTA_STRIPE_SIZE)
          .blockPadding(false);
      if (!MetastoreConf.getBoolVar(options.getConfiguration(),
          MetastoreConf.ConfVars.COMPACTOR_MINOR_STATS_COMPRESSION)) {
        opts.compress(CompressionKind.NONE).rowIndexStride(0);
      }
    }
    final OrcRecordUpdater.KeyIndexBuilder watcher =
        new OrcRecordUpdater.KeyIndexBuilder("compactor");
    opts.inspector(options.getInspector())
        .callback(watcher);
    final Writer writer = OrcFile.createWriter(filename, opts);
    AcidUtils.OrcAcidVersion.setAcidVersionInDataFile(writer);

    return new org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter() {
      @Override
      public void write(Writable w) throws IOException {
        OrcStruct orc = (OrcStruct) w;
        // Feed the acid key index builder before handing the row to the ORC writer.
        watcher.addKey(
            ((IntWritable) orc.getFieldValue(OrcRecordUpdater.OPERATION)).get(),
            ((LongWritable) orc.getFieldValue(OrcRecordUpdater.ORIGINAL_WRITEID)).get(),
            ((IntWritable) orc.getFieldValue(OrcRecordUpdater.BUCKET)).get(),
            ((LongWritable) orc.getFieldValue(OrcRecordUpdater.ROW_ID)).get());
        writer.addRow(w);
      }

      @Override
      public void close(boolean abort) throws IOException {
        writer.close();
      }
    };
  }
}
/** * Copyright (c) 2015-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. */ package com.facebook.react.bridge; import javax.annotation.Nullable; import java.util.concurrent.CopyOnWriteArraySet; import android.app.Activity; import android.content.Context; import android.content.ContextWrapper; import android.content.Intent; import android.os.Bundle; import android.view.LayoutInflater; import com.facebook.infer.annotation.Assertions; import com.facebook.react.bridge.queue.ReactQueueConfiguration; import com.facebook.react.bridge.queue.MessageQueueThread; /** * Abstract ContextWrapper for Android applicaiton or activity {@link Context} and * {@link CatalystInstance} */ public class ReactContext extends ContextWrapper { private final CopyOnWriteArraySet<LifecycleEventListener> mLifecycleEventListeners = new CopyOnWriteArraySet<>(); private final CopyOnWriteArraySet<ActivityEventListener> mActivityEventListeners = new CopyOnWriteArraySet<>(); private @Nullable CatalystInstance mCatalystInstance; private @Nullable LayoutInflater mInflater; private @Nullable MessageQueueThread mUiMessageQueueThread; private @Nullable MessageQueueThread mNativeModulesMessageQueueThread; private @Nullable MessageQueueThread mJSMessageQueueThread; private @Nullable NativeModuleCallExceptionHandler mNativeModuleCallExceptionHandler; private @Nullable Activity mCurrentActivity; public ReactContext(Context base) { super(base); } /** * Set and initialize CatalystInstance for this Context. This should be called exactly once. 
*/ public void initializeWithInstance(CatalystInstance catalystInstance) { if (catalystInstance == null) { throw new IllegalArgumentException("CatalystInstance cannot be null."); } if (mCatalystInstance != null) { throw new IllegalStateException("ReactContext has been already initialized"); } mCatalystInstance = catalystInstance; ReactQueueConfiguration queueConfig = catalystInstance.getReactQueueConfiguration(); mUiMessageQueueThread = queueConfig.getUIQueueThread(); mNativeModulesMessageQueueThread = queueConfig.getNativeModulesQueueThread(); mJSMessageQueueThread = queueConfig.getJSQueueThread(); } public void setNativeModuleCallExceptionHandler( @Nullable NativeModuleCallExceptionHandler nativeModuleCallExceptionHandler) { mNativeModuleCallExceptionHandler = nativeModuleCallExceptionHandler; } // We override the following method so that views inflated with the inflater obtained from this // context return the ReactContext in #getContext(). The default implementation uses the base // context instead, so it couldn't be cast to ReactContext. 
// TODO: T7538796 Check requirement for Override of getSystemService ReactContext @Override public Object getSystemService(String name) { if (LAYOUT_INFLATER_SERVICE.equals(name)) { if (mInflater == null) { mInflater = LayoutInflater.from(getBaseContext()).cloneInContext(this); } return mInflater; } return getBaseContext().getSystemService(name); } /** * @return handle to the specified JS module for the CatalystInstance associated with this Context */ public <T extends JavaScriptModule> T getJSModule(Class<T> jsInterface) { if (mCatalystInstance == null) { throw new RuntimeException("Trying to invoke JS before CatalystInstance has been set!"); } return mCatalystInstance.getJSModule(jsInterface); } public <T extends JavaScriptModule> T getJSModule(ExecutorToken executorToken, Class<T> jsInterface) { if (mCatalystInstance == null) { throw new RuntimeException("Trying to invoke JS before CatalystInstance has been set!"); } return mCatalystInstance.getJSModule(executorToken, jsInterface); } /** * @return the instance of the specified module interface associated with this ReactContext. 
*/ public <T extends NativeModule> T getNativeModule(Class<T> nativeModuleInterface) { if (mCatalystInstance == null) { throw new RuntimeException("Trying to invoke JS before CatalystInstance has been set!"); } return mCatalystInstance.getNativeModule(nativeModuleInterface); } public CatalystInstance getCatalystInstance() { return Assertions.assertNotNull(mCatalystInstance); } public boolean hasActiveCatalystInstance() { return mCatalystInstance != null && !mCatalystInstance.isDestroyed(); } public void addLifecycleEventListener(LifecycleEventListener listener) { mLifecycleEventListeners.add(listener); } public void removeLifecycleEventListener(LifecycleEventListener listener) { mLifecycleEventListeners.remove(listener); } public void addActivityEventListener(ActivityEventListener listener) { mActivityEventListeners.add(listener); } public void removeActivityEventListener(ActivityEventListener listener) { mActivityEventListeners.remove(listener); } /** * Should be called by the hosting Fragment in {@link Fragment#onResume} */ public void onHostResume(@Nullable Activity activity) { UiThreadUtil.assertOnUiThread(); mCurrentActivity = activity; for (LifecycleEventListener listener : mLifecycleEventListeners) { listener.onHostResume(); } } /** * Should be called by the hosting Fragment in {@link Fragment#onPause} */ public void onHostPause() { UiThreadUtil.assertOnUiThread(); for (LifecycleEventListener listener : mLifecycleEventListeners) { listener.onHostPause(); } mCurrentActivity = null; } /** * Should be called by the hosting Fragment in {@link Fragment#onDestroy} */ public void onHostDestroy() { UiThreadUtil.assertOnUiThread(); for (LifecycleEventListener listener : mLifecycleEventListeners) { listener.onHostDestroy(); } } /** * Destroy this instance, making it unusable. 
*/ public void destroy() { UiThreadUtil.assertOnUiThread(); if (mCatalystInstance != null) { mCatalystInstance.destroy(); } } /** * Should be called by the hosting Fragment in {@link Fragment#onActivityResult} */ public void onActivityResult(int requestCode, int resultCode, Intent data) { for (ActivityEventListener listener : mActivityEventListeners) { listener.onActivityResult(requestCode, resultCode, data); } } public void assertOnUiQueueThread() { Assertions.assertNotNull(mUiMessageQueueThread).assertIsOnThread(); } public boolean isOnUiQueueThread() { return Assertions.assertNotNull(mUiMessageQueueThread).isOnThread(); } public void runOnUiQueueThread(Runnable runnable) { Assertions.assertNotNull(mUiMessageQueueThread).runOnQueue(runnable); } public void assertOnNativeModulesQueueThread() { Assertions.assertNotNull(mNativeModulesMessageQueueThread).assertIsOnThread(); } public boolean isOnNativeModulesQueueThread() { return Assertions.assertNotNull(mNativeModulesMessageQueueThread).isOnThread(); } public void runOnNativeModulesQueueThread(Runnable runnable) { Assertions.assertNotNull(mNativeModulesMessageQueueThread).runOnQueue(runnable); } public void assertOnJSQueueThread() { Assertions.assertNotNull(mJSMessageQueueThread).assertIsOnThread(); } public boolean isOnJSQueueThread() { return Assertions.assertNotNull(mJSMessageQueueThread).isOnThread(); } public void runOnJSQueueThread(Runnable runnable) { Assertions.assertNotNull(mJSMessageQueueThread).runOnQueue(runnable); } /** * Passes the given exception to the current * {@link com.facebook.react.bridge.NativeModuleCallExceptionHandler} if one exists, rethrowing * otherwise. 
*/ public void handleException(RuntimeException e) { if (mCatalystInstance != null && !mCatalystInstance.isDestroyed() && mNativeModuleCallExceptionHandler != null) { mNativeModuleCallExceptionHandler.handleException(e); } else { throw e; } } public boolean hasCurrentActivity() { return mCurrentActivity != null; } /** * Same as {@link Activity#startActivityForResult(Intent, int)}, this just redirects the call to * the current activity. Returns whether the activity was started, as this might fail if this * was called before the context is in the right state. */ public boolean startActivityForResult(Intent intent, int code, Bundle bundle) { Assertions.assertNotNull(mCurrentActivity); mCurrentActivity.startActivityForResult(intent, code, bundle); return true; } /** * Get the activity to which this context is currently attached, or {@code null} if not attached. * DO NOT HOLD LONG-LIVED REFERENCES TO THE OBJECT RETURNED BY THIS METHOD, AS THIS WILL CAUSE * MEMORY LEAKS. */ /* package */ @Nullable Activity getCurrentActivity() { return mCurrentActivity; } }
/*
 * Orika - simpler, better and faster Java bean mapping
 *
 * Copyright (C) 2011-2013 Orika authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ma.glasnost.orika.test.converter;

import java.math.BigDecimal;
import java.math.BigInteger;

import ma.glasnost.orika.MapperFacade;
import ma.glasnost.orika.MapperFactory;
import ma.glasnost.orika.MappingException;
import ma.glasnost.orika.converter.builtin.NumericConverters.BigDecimalToDoubleConverter;
import ma.glasnost.orika.converter.builtin.NumericConverters.BigDecimalToFloatConverter;
import ma.glasnost.orika.converter.builtin.NumericConverters.BigIntegerToIntegerConverter;
import ma.glasnost.orika.converter.builtin.NumericConverters.BigIntegerToLongConverter;
import ma.glasnost.orika.converter.builtin.NumericConverters.DoubleToIntegerConverter;
import ma.glasnost.orika.converter.builtin.NumericConverters.DoubleToLongConverter;
import ma.glasnost.orika.converter.builtin.NumericConverters.DoubleToShortConverter;
import ma.glasnost.orika.converter.builtin.NumericConverters.FloatToIntegerConverter;
import ma.glasnost.orika.converter.builtin.NumericConverters.FloatToLongConverter;
import ma.glasnost.orika.converter.builtin.NumericConverters.FloatToShortConverter;
import ma.glasnost.orika.converter.builtin.NumericConverters.IntegerToShortConverter;
import ma.glasnost.orika.converter.builtin.NumericConverters.LongToIntegerConverter;
import ma.glasnost.orika.converter.builtin.NumericConverters.LongToShortConverter;
import ma.glasnost.orika.test.MappingUtil;

import org.junit.Assert;
import org.junit.Test;

/**
 * Tests for the built-in {@code NumericConverters}, which convert between commonly
 * used numeric representations:
 * <ul>
 * <li>java.math.BigDecimal / java.math.BigInteger
 * <li>java.lang.Long / java.lang.Integer / java.lang.Short
 * <li>java.lang.Double / java.lang.Float
 * </ul>
 * Each mapping is verified round-trip in both directions. Converters constructed with
 * {@code false} (overflow disallowed) are expected to throw {@link MappingException}
 * when the source value cannot fit in the target type.
 * <p>
 * (Previous class javadoc was copy-pasted from the date/time converter tests and
 * described types this class never uses.)
 *
 * @author matt.deboer@gmail.com
 *
 */
public class NumericConvertersTestCase {

    // Tolerance used for floating-point equality assertions.
    private static final double DELTA = 0.000000001;

    @Test
    public void testBigDecimalToDoubleConverter() {
        MapperFactory factory = MappingUtil.getMapperFactory();
        factory.getConverterFactory().registerConverter(new BigDecimalToDoubleConverter());
        MapperFacade mapper = factory.getMapperFacade();

        BigDecimal bd = new BigDecimal("5423.51478");
        Double db = mapper.map(bd, Double.class);
        Assert.assertEquals(bd.doubleValue(), db.doubleValue(), 0.00001d);

        BigDecimal reverse = mapper.map(db, BigDecimal.class);
        Assert.assertEquals(bd.doubleValue(), reverse.doubleValue(), 0.00001d);
    }

    @Test
    public void testBigDecimalToFloatConverter() {
        MapperFactory factory = MappingUtil.getMapperFactory();
        factory.getConverterFactory().registerConverter(new BigDecimalToFloatConverter());
        MapperFacade mapper = factory.getMapperFacade();

        BigDecimal bd = new BigDecimal("5423.51");
        Float ft = mapper.map(bd, Float.class);
        Assert.assertEquals(bd.floatValue(), ft.floatValue(), 0.01d);

        BigDecimal reverse = mapper.map(ft, BigDecimal.class);
        Assert.assertEquals(bd.doubleValue(), reverse.doubleValue(), 0.01d);
    }

    @Test
    public void testBigIntegerToLongConverter() {
        MapperFactory factory = MappingUtil.getMapperFactory();
        factory.getConverterFactory().registerConverter(new BigIntegerToLongConverter(false));
        MapperFacade mapper = factory.getMapperFacade();

        BigInteger bi = new BigInteger("" + Long.MAX_VALUE);
        Long lg = mapper.map(bi, Long.class);
        Assert.assertEquals(bi.longValue(), lg.longValue());

        BigInteger reverse = mapper.map(lg, BigInteger.class);
        Assert.assertEquals(bi.longValue(), reverse.longValue());
    }

    @Test
    public void testBigIntegerToIntegerConverter() {
        MapperFactory factory = MappingUtil.getMapperFactory();
        factory.getConverterFactory().registerConverter(new BigIntegerToIntegerConverter(false));
        MapperFacade mapper = factory.getMapperFacade();

        BigInteger bi = new BigInteger("" + Integer.MAX_VALUE);
        Integer i = mapper.map(bi, Integer.class);
        Assert.assertEquals(bi.longValue(), i.longValue());

        BigInteger reverse = mapper.map(i, BigInteger.class);
        Assert.assertEquals(bi.longValue(), reverse.longValue());
    }

    // "1" + Long.MAX_VALUE produces a value wider than long, so the (false) converter must throw.
    @Test(expected = MappingException.class)
    public void testBigIntegerToLongConverter_Overflow() {
        MapperFactory factory = MappingUtil.getMapperFactory();
        factory.getConverterFactory().registerConverter(new BigIntegerToLongConverter(false));
        MapperFacade mapper = factory.getMapperFacade();

        BigInteger bi = new BigInteger("1" + Long.MAX_VALUE);
        Long lg = mapper.map(bi, Long.class);
        Assert.assertEquals(bi.longValue(), lg.longValue());

        BigInteger reverse = mapper.map(lg, BigInteger.class);
        Assert.assertEquals(bi.longValue(), reverse.longValue());
    }

    @Test(expected = MappingException.class)
    public void testBigIntegerToIntegerConverter_Overflow() {
        MapperFactory factory = MappingUtil.getMapperFactory();
        factory.getConverterFactory().registerConverter(new BigIntegerToIntegerConverter(false));
        MapperFacade mapper = factory.getMapperFacade();

        BigInteger bi = new BigInteger("1" + Long.MAX_VALUE);
        Integer i = mapper.map(bi, Integer.class);
        Assert.assertEquals(bi.longValue(), i.longValue());

        BigInteger reverse = mapper.map(i, BigInteger.class);
        Assert.assertEquals(bi.longValue(), reverse.longValue());
    }

    @Test
    public void testLongToShortConverter() {
        MapperFactory factory = MappingUtil.getMapperFactory();
        factory.getConverterFactory().registerConverter(new LongToShortConverter(false));
        MapperFacade mapper = factory.getMapperFacade();

        Long value = (long) Short.MAX_VALUE;
        Short result = mapper.map(value, Short.class);
        Assert.assertEquals(value.longValue(), result.longValue());

        Long reverse = mapper.map(result, Long.class);
        Assert.assertEquals(result.longValue(), reverse.longValue());
    }

    @Test
    public void testLongToIntegerConverter() {
        MapperFactory factory = MappingUtil.getMapperFactory();
        factory.getConverterFactory().registerConverter(new LongToIntegerConverter(false));
        MapperFacade mapper = factory.getMapperFacade();

        Long value = (long) Integer.MAX_VALUE;
        Integer result = mapper.map(value, Integer.class);
        Assert.assertEquals(value.longValue(), result.longValue());

        Long reverse = mapper.map(result, Long.class);
        Assert.assertEquals(result.longValue(), reverse.longValue());
    }

    @Test
    public void testIntegerToShortConverter() {
        MapperFactory factory = MappingUtil.getMapperFactory();
        factory.getConverterFactory().registerConverter(new IntegerToShortConverter(false));
        MapperFacade mapper = factory.getMapperFacade();

        Integer value = (int) Short.MAX_VALUE;
        Short result = mapper.map(value, Short.class);
        Assert.assertEquals(value.intValue(), result.intValue());

        Integer reverse = mapper.map(result, Integer.class);
        Assert.assertEquals(result.intValue(), reverse.intValue());
    }

    @Test
    public void testDoubleToShortConverter() {
        MapperFactory factory = MappingUtil.getMapperFactory();
        factory.getConverterFactory().registerConverter(new DoubleToShortConverter(false));
        MapperFacade mapper = factory.getMapperFacade();

        Double value = (double) Short.MAX_VALUE;
        Short result = mapper.map(value, Short.class);
        Assert.assertEquals(value.doubleValue(), result.doubleValue(), DELTA);

        Double reverse = mapper.map(result, Double.class);
        Assert.assertEquals(result.doubleValue(), reverse.doubleValue(), DELTA);
    }

    @Test
    public void testDoubleToIntegerConverter() {
        MapperFactory factory = MappingUtil.getMapperFactory();
        factory.getConverterFactory().registerConverter(new DoubleToIntegerConverter(false));
        MapperFacade mapper = factory.getMapperFacade();

        Double value = (double) Integer.MAX_VALUE;
        Integer result = mapper.map(value, Integer.class);
        Assert.assertEquals(value.doubleValue(), result.doubleValue(), DELTA);

        Double reverse = mapper.map(result, Double.class);
        Assert.assertEquals(result.doubleValue(), reverse.doubleValue(), DELTA);
    }

    @Test
    public void testDoubleToLongConverter() {
        MapperFactory factory = MappingUtil.getMapperFactory();
        factory.getConverterFactory().registerConverter(new DoubleToLongConverter(false));
        MapperFacade mapper = factory.getMapperFacade();

        Double value = (double) Long.MAX_VALUE;
        Long result = mapper.map(value, Long.class);
        Assert.assertEquals(value.doubleValue(), result.doubleValue(), DELTA);

        Double reverse = mapper.map(result, Double.class);
        Assert.assertEquals(result.doubleValue(), reverse.doubleValue(), DELTA);
    }

    @Test
    public void testFloatToShortConverter() {
        MapperFactory factory = MappingUtil.getMapperFactory();
        factory.getConverterFactory().registerConverter(new FloatToShortConverter(false));
        MapperFacade mapper = factory.getMapperFacade();

        Float value = (float) Short.MAX_VALUE;
        Short result = mapper.map(value, Short.class);
        Assert.assertEquals(value.floatValue(), result.floatValue(), DELTA);

        Float reverse = mapper.map(result, Float.class);
        Assert.assertEquals(result.floatValue(), reverse.floatValue(), DELTA);
    }

    @Test
    public void testFloatToIntegerConverter() {
        MapperFactory factory = MappingUtil.getMapperFactory();
        factory.getConverterFactory().registerConverter(new FloatToIntegerConverter(false));
        MapperFacade mapper = factory.getMapperFacade();

        Float value = (float) Integer.MAX_VALUE;
        Integer result = mapper.map(value, Integer.class);
        Assert.assertEquals(value.floatValue(), result.floatValue(), DELTA);

        Float reverse = mapper.map(result, Float.class);
        Assert.assertEquals(result.floatValue(), reverse.floatValue(), DELTA);
    }

    @Test
    public void testFloatToLongConverter() {
        MapperFactory factory = MappingUtil.getMapperFactory();
        factory.getConverterFactory().registerConverter(new FloatToLongConverter(false));
        MapperFacade mapper = factory.getMapperFacade();

        Float value = (float) Long.MAX_VALUE;
        Long result = mapper.map(value, Long.class);
        Assert.assertEquals(value.floatValue(), result.floatValue(), DELTA);

        Float reverse = mapper.map(result, Float.class);
        Assert.assertEquals(result.floatValue(), reverse.floatValue(), DELTA);
    }

    // ~ overflow exceptions

    @Test(expected = MappingException.class)
    public void testLongToShortConverter_overflow() {
        MapperFactory factory = MappingUtil.getMapperFactory();
        factory.getConverterFactory().registerConverter(new LongToShortConverter(false));
        MapperFacade mapper = factory.getMapperFacade();

        Long value = (long) Short.MAX_VALUE + 1;
        Short result = mapper.map(value, Short.class);
        Assert.assertEquals(value.longValue(), result.longValue());

        Long reverse = mapper.map(result, Long.class);
        Assert.assertEquals(result.longValue(), reverse.longValue());
    }

    @Test(expected = MappingException.class)
    public void testLongToIntegerConverter_overflow() {
        MapperFactory factory = MappingUtil.getMapperFactory();
        factory.getConverterFactory().registerConverter(new LongToIntegerConverter(false));
        MapperFacade mapper = factory.getMapperFacade();

        Long value = (long) Integer.MAX_VALUE + 1;
        Integer result = mapper.map(value, Integer.class);
        Assert.assertEquals(value.longValue(), result.longValue());

        Long reverse = mapper.map(result, Long.class);
        Assert.assertEquals(result.longValue(), reverse.longValue());
    }

    @Test(expected = MappingException.class)
    public void testIntegerToShortConverter_overflow() {
        MapperFactory factory = MappingUtil.getMapperFactory();
        factory.getConverterFactory().registerConverter(new IntegerToShortConverter(false));
        MapperFacade mapper = factory.getMapperFacade();

        Integer value = Short.MAX_VALUE + 1;
        Short result = mapper.map(value, Short.class);
        Assert.assertEquals(value.intValue(), result.intValue());

        Integer reverse = mapper.map(result, Integer.class);
        Assert.assertEquals(result.intValue(), reverse.intValue());
    }

    @Test(expected = MappingException.class)
    public void testDoubleToShortConverter_overflow() {
        MapperFactory factory = MappingUtil.getMapperFactory();
        factory.getConverterFactory().registerConverter(new DoubleToShortConverter(false));
        MapperFacade mapper = factory.getMapperFacade();

        Double value = (double) Short.MAX_VALUE + 1;
        Short result = mapper.map(value, Short.class);
        Assert.assertEquals(value.doubleValue(), result.doubleValue(), DELTA);

        Double reverse = mapper.map(result, Double.class);
        Assert.assertEquals(result.doubleValue(), reverse.doubleValue(), DELTA);
    }

    @Test(expected = MappingException.class)
    public void testDoubleToIntegerConverter_overflow() {
        MapperFactory factory = MappingUtil.getMapperFactory();
        factory.getConverterFactory().registerConverter(new DoubleToIntegerConverter(false));
        MapperFacade mapper = factory.getMapperFacade();

        Double value = (double) Integer.MAX_VALUE + 1;
        Integer result = mapper.map(value, Integer.class);
        Assert.assertEquals(value.doubleValue(), result.doubleValue(), DELTA);

        Double reverse = mapper.map(result, Double.class);
        Assert.assertEquals(result.doubleValue(), reverse.doubleValue(), DELTA);
    }

    // +10000.0 (not +1) because a double near Long.MAX_VALUE cannot represent a +1 step.
    @Test(expected = MappingException.class)
    public void testDoubleToLongConverter_overflow() {
        MapperFactory factory = MappingUtil.getMapperFactory();
        factory.getConverterFactory().registerConverter(new DoubleToLongConverter(false));
        MapperFacade mapper = factory.getMapperFacade();

        Double value = Long.MAX_VALUE + 10000.0;
        Long result = mapper.map(value, Long.class);
        Assert.assertEquals(value.doubleValue(), result.doubleValue(), DELTA);

        Double reverse = mapper.map(result, Double.class);
        Assert.assertEquals(result.doubleValue(), reverse.doubleValue(), DELTA);
    }

    @Test(expected = MappingException.class)
    public void testFloatToShortConverter_overflow() {
        MapperFactory factory = MappingUtil.getMapperFactory();
        factory.getConverterFactory().registerConverter(new FloatToShortConverter(false));
        MapperFacade mapper = factory.getMapperFacade();

        Float value = (Short.MAX_VALUE) * 1.1f;
        Short result = mapper.map(value, Short.class);
        Assert.assertEquals(value.floatValue(), result.floatValue(), DELTA);

        Float reverse = mapper.map(result, Float.class);
        Assert.assertEquals(result.floatValue(), reverse.floatValue(), DELTA);
    }

    @Test(expected = MappingException.class)
    public void testFloatToIntegerConverter_overflow() {
        MapperFactory factory = MappingUtil.getMapperFactory();
        factory.getConverterFactory().registerConverter(new FloatToIntegerConverter(false));
        MapperFacade mapper = factory.getMapperFacade();

        Float value = (Integer.MAX_VALUE) * 1.1f;
        Integer result = mapper.map(value, Integer.class);
        Assert.assertEquals(value.floatValue(), result.floatValue(), DELTA);

        Float reverse = mapper.map(result, Float.class);
        Assert.assertEquals(result.floatValue(), reverse.floatValue(), DELTA);
    }

    @Test(expected = MappingException.class)
    public void testFloatToLongConverter_overflow() {
        MapperFactory factory = MappingUtil.getMapperFactory();
        factory.getConverterFactory().registerConverter(new FloatToLongConverter(false));
        MapperFacade mapper = factory.getMapperFacade();

        Float value = (Long.MAX_VALUE) * 1.1f;
        Long result = mapper.map(value, Long.class);
        Assert.assertEquals(value.floatValue(), result.floatValue(), DELTA);

        Float reverse = mapper.map(result, Float.class);
        Assert.assertEquals(result.floatValue(), reverse.floatValue(), DELTA);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.felix.ipojo.test.scenarios.service.dependency; import java.util.Properties; import org.apache.felix.ipojo.ComponentInstance; import org.apache.felix.ipojo.architecture.Architecture; import org.apache.felix.ipojo.architecture.InstanceDescription; import org.apache.felix.ipojo.junit4osgi.OSGiTestCase; import org.apache.felix.ipojo.test.scenarios.service.dependency.service.CheckService; import org.apache.felix.ipojo.test.scenarios.util.Utils; import org.osgi.framework.ServiceReference; public class ProxiedDelayedOptionalMultipleDependencies extends OSGiTestCase { ComponentInstance instance1, instance2, instance3, instance4; ComponentInstance fooProvider1, fooProvider2; public void setUp() { try { Properties i1 = new Properties(); i1.put("instance.name","Simple"); instance1 = Utils.getFactoryByName(getContext(), "ProxiedSimpleOptionalMultipleCheckServiceProvider").createComponentInstance(i1); instance1.stop(); Properties i2 = new Properties(); i2.put("instance.name","Void"); instance2 = Utils.getFactoryByName(getContext(), "ProxiedVoidOptionalMultipleCheckServiceProvider").createComponentInstance(i2); instance2.stop(); Properties i3 = new Properties(); 
i3.put("instance.name","Object"); instance3 = Utils.getFactoryByName(getContext(), "ProxiedObjectOptionalMultipleCheckServiceProvider").createComponentInstance(i3); instance3.stop(); Properties i4 = new Properties(); i4.put("instance.name","Ref"); instance4 = Utils.getFactoryByName(getContext(), "ProxiedRefOptionalMultipleCheckServiceProvider").createComponentInstance(i4); instance4.stop(); Properties prov = new Properties(); prov.put("instance.name","FooProvider1"); fooProvider1 = Utils.getFactoryByName(getContext(), "FooProviderType-1").createComponentInstance(prov); Properties prov2 = new Properties(); prov2.put("instance.name","FooProvider2"); fooProvider2 = Utils.getFactoryByName(getContext(), "FooProviderType-1").createComponentInstance(prov2); } catch(Exception e) { fail(e.getMessage()); } } public void tearDown() { instance1.dispose(); instance2.dispose(); instance3.dispose(); instance4.dispose(); fooProvider1.dispose(); fooProvider2.dispose(); instance1 = null; instance2 = null; instance3 = null; instance4 = null; fooProvider1 = null; fooProvider2 = null; } public void testSimple() { instance1.start(); ServiceReference arch_ref = Utils.getServiceReferenceByName(getContext(), Architecture.class.getName(), instance1.getInstanceName()); assertNotNull("Check architecture availability", arch_ref); InstanceDescription id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription(); assertTrue("Check instance validity - 1", id.getState() == ComponentInstance.VALID); ServiceReference cs_ref = Utils.getServiceReferenceByName(getContext(), CheckService.class.getName(), instance1.getInstanceName()); assertNotNull("Check CheckService availability", cs_ref); CheckService cs = (CheckService) getContext().getService(cs_ref); Properties props = cs.getProps(); //Check properties assertTrue("check CheckService invocation - 0", ((Boolean)props.get("result")).booleanValue()); assertEquals("check void bind invocation - 0", 
((Integer)props.get("voidB")).intValue(), 0); assertEquals("check void unbind callback invocation - 0", ((Integer)props.get("voidU")).intValue(), 0); assertEquals("check object bind callback invocation - 0", ((Integer)props.get("objectB")).intValue(), 0); assertEquals("check object unbind callback invocation - 0", ((Integer)props.get("objectU")).intValue(), 0); assertEquals("check ref bind callback invocation - 0", ((Integer)props.get("refB")).intValue(), 0); assertEquals("check ref unbind callback invocation - 0", ((Integer)props.get("refU")).intValue(), 0); assertEquals("Check FS invocation (int) - 0", ((Integer)props.get("int")).intValue(), 2); assertEquals("Check FS invocation (long) - 0", ((Long)props.get("long")).longValue(), 2); assertEquals("Check FS invocation (double) - 0", ((Double)props.get("double")).doubleValue(), 2.0); fooProvider1.stop(); id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription(); assertTrue("Check instance validity - 2", id.getState() == ComponentInstance.VALID); cs = (CheckService) getContext().getService(cs_ref); props = cs.getProps(); //Check properties assertTrue("check CheckService invocation - 3", ((Boolean)props.get("result")).booleanValue()); // True, it still one provider. 
assertEquals("check void bind invocation - 3", ((Integer)props.get("voidB")).intValue(), 0); assertEquals("check void unbind callback invocation - 3", ((Integer)props.get("voidU")).intValue(), 0); assertEquals("check object bind callback invocation - 3", ((Integer)props.get("objectB")).intValue(), 0); assertEquals("check object unbind callback invocation - 3", ((Integer)props.get("objectU")).intValue(), 0); assertEquals("check ref bind callback invocation - 3", ((Integer)props.get("refB")).intValue(), 0); assertEquals("check ref unbind callback invocation - 3", ((Integer)props.get("refU")).intValue(), 0); assertEquals("Check FS invocation (int) - 3", ((Integer)props.get("int")).intValue(), 1); assertEquals("Check FS invocation (long) - 3", ((Long)props.get("long")).longValue(), 1); assertEquals("Check FS invocation (double) - 3", ((Double)props.get("double")).doubleValue(), 1.0); fooProvider2.stop(); id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription(); assertTrue("Check instance validity - 5", id.getState() == ComponentInstance.VALID); cs = (CheckService) getContext().getService(cs_ref); props = cs.getProps(); //Check properties assertFalse("check CheckService invocation - 4", ((Boolean)props.get("result")).booleanValue()); // False, no more provider. 
// NOTE(review): this chunk resumes inside a test method whose beginning lies
// above this excerpt; the statements up to the first closing brace below are
// that method's final assertion group and teardown.
//
// NOTE(review): these tests appear to use the JUnit-3 TestCase API (unqualified
// assertEquals/assertTrue) -- TODO confirm. The assertEquals calls pass the
// observed value as the 2nd argument and the expected constant as the 3rd,
// which is the reverse of assertEquals(message, expected, actual); this does
// not affect pass/fail, only the wording of failure messages.
//
// NOTE(review): the message suffixes ("- 0", "- 3", "- 4", "- 5") look
// copy-pasted and are not always consistent with the test phase they label
// (e.g. the final groups in testObject/testRef reuse "- 0"); left as-is since
// they are runtime strings.

        // Final state of the preceding test: both providers stopped, so no
        // void/object/ref callback has fired for this instance and the cached
        // FS counters are back to zero.
        assertEquals("check void bind invocation - 4", ((Integer)props.get("voidB")).intValue(), 0);
        assertEquals("check void unbind callback invocation - 4", ((Integer)props.get("voidU")).intValue(), 0);
        assertEquals("check object bind callback invocation - 4", ((Integer)props.get("objectB")).intValue(), 0);
        assertEquals("check object unbind callback invocation - 4", ((Integer)props.get("objectU")).intValue(), 0);
        assertEquals("check ref bind callback invocation - 4", ((Integer)props.get("refB")).intValue(), 0);
        assertEquals("check ref unbind callback invocation - 4", ((Integer)props.get("refU")).intValue(), 0);
        assertEquals("Check FS invocation (int) - 4", ((Integer)props.get("int")).intValue(), 0);
        assertEquals("Check FS invocation (long) - 4", ((Long)props.get("long")).longValue(), 0);
        assertEquals("Check FS invocation (double) - 4", ((Double)props.get("double")).doubleValue(), 0.0);
        // Teardown: release both service references and stop the instance under test.
        id = null;
        cs = null;
        getContext().ungetService(arch_ref);
        getContext().ungetService(cs_ref);
        instance1.stop();
    }

    /**
     * Checks a dependency whose bind/unbind callbacks take no argument
     * ("void" callbacks): only voidB/voidU counters must move as the two
     * foo providers are stopped one after the other.
     */
    public void testVoid() {
        instance2.start();
        // Look up the Architecture service of the instance under test to observe its lifecycle state.
        ServiceReference arch_ref = Utils.getServiceReferenceByName(getContext(), Architecture.class.getName(), instance2.getInstanceName());
        assertNotNull("Check architecture availability", arch_ref);
        InstanceDescription id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
        assertTrue("Check instance invalidity - 1", id.getState() == ComponentInstance.VALID); // NOTE(review): message says "invalidity" but VALID is asserted -- presumably a stale message.
        ServiceReference cs_ref = Utils.getServiceReferenceByName(getContext(), CheckService.class.getName(), instance2.getInstanceName());
        assertNotNull("Check CheckService availability", cs_ref);
        CheckService cs = (CheckService) getContext().getService(cs_ref);
        Properties props = cs.getProps();
        // Phase 0: both providers bound -> two void-bind callbacks, no unbinds,
        // and the FS counters reflect two invocations.
        assertTrue("check CheckService invocation - 0", ((Boolean)props.get("result")).booleanValue());
        assertEquals("check void bind invocation - 0", ((Integer)props.get("voidB")).intValue(), 2);
        assertEquals("check void unbind callback invocation - 0", ((Integer)props.get("voidU")).intValue(), 0);
        assertEquals("check object bind callback invocation - 0", ((Integer)props.get("objectB")).intValue(), 0);
        assertEquals("check object unbind callback invocation - 0", ((Integer)props.get("objectU")).intValue(), 0);
        assertEquals("check ref bind callback invocation - 0", ((Integer)props.get("refB")).intValue(), 0);
        assertEquals("check ref unbind callback invocation - 0", ((Integer)props.get("refU")).intValue(), 0);
        assertEquals("Check FS invocation (int) - 0", ((Integer)props.get("int")).intValue(), 2);
        assertEquals("Check FS invocation (long) - 0", ((Long)props.get("long")).longValue(), 2);
        assertEquals("Check FS invocation (double) - 0", ((Double)props.get("double")).doubleValue(), 2.0);
        // Phase 1: stop the first provider; the instance stays VALID and one
        // void-unbind callback is recorded.
        fooProvider1.stop();
        id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
        assertTrue("Check instance validity - 2", id.getState() == ComponentInstance.VALID);
        cs = (CheckService) getContext().getService(cs_ref);
        props = cs.getProps();
        assertTrue("check CheckService invocation - 3", ((Boolean)props.get("result")).booleanValue()); // True: one provider is still bound
        assertEquals("check void bind invocation - 3", ((Integer)props.get("voidB")).intValue(), 2);
        assertEquals("check void unbind callback invocation - 3", ((Integer)props.get("voidU")).intValue(), 1);
        assertEquals("check object bind callback invocation - 3", ((Integer)props.get("objectB")).intValue(), 0);
        assertEquals("check object unbind callback invocation - 3", ((Integer)props.get("objectU")).intValue(), 0);
        assertEquals("check ref bind callback invocation - 3", ((Integer)props.get("refB")).intValue(), 0);
        assertEquals("check ref unbind callback invocation - 3", ((Integer)props.get("refU")).intValue(), 0);
        assertEquals("Check FS invocation (int) - 3", ((Integer)props.get("int")).intValue(), 1);
        assertEquals("Check FS invocation (long) - 3", ((Long)props.get("long")).longValue(), 1);
        assertEquals("Check FS invocation (double) - 3", ((Double)props.get("double")).doubleValue(), 1.0);
        // Phase 2: stop the second provider; both void-unbind callbacks have
        // now fired and the FS counters drop to zero.
        fooProvider2.stop();
        id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
        assertTrue("Check instance validity - 3", id.getState() == ComponentInstance.VALID);
        cs = (CheckService) getContext().getService(cs_ref);
        props = cs.getProps();
        assertFalse("check CheckService invocation - 4", ((Boolean)props.get("result")).booleanValue()); // False : no provider
        assertEquals("check void bind invocation - 4", ((Integer)props.get("voidB")).intValue(), 2);
        assertEquals("check void unbind callback invocation - 4", ((Integer)props.get("voidU")).intValue(), 2);
        assertEquals("check object bind callback invocation - 4", ((Integer)props.get("objectB")).intValue(), 0);
        assertEquals("check object unbind callback invocation - 4", ((Integer)props.get("objectU")).intValue(), 0);
        assertEquals("check ref bind callback invocation - 4", ((Integer)props.get("refB")).intValue(), 0);
        assertEquals("check ref unbind callback invocation - 4", ((Integer)props.get("refU")).intValue(), 0);
        assertEquals("Check FS invocation (int) - 4", ((Integer)props.get("int")).intValue(), 0);
        assertEquals("Check FS invocation (long) - 4", ((Long)props.get("long")).longValue(), 0);
        assertEquals("Check FS invocation (double) - 4", ((Double)props.get("double")).doubleValue(), 0.0);
        // Teardown.
        id = null;
        cs = null;
        getContext().ungetService(arch_ref);
        getContext().ungetService(cs_ref);
        instance2.stop();
    }

    /**
     * Checks a dependency whose bind/unbind callbacks take the service object
     * as argument: only objectB/objectU counters must move as the two foo
     * providers are stopped one after the other.
     */
    public void testObject() {
        instance3.start();
        ServiceReference arch_ref = Utils.getServiceReferenceByName(getContext(), Architecture.class.getName(), instance3.getInstanceName());
        assertNotNull("Check architecture availability", arch_ref);
        InstanceDescription id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
        assertTrue("Check instance invalidity - 1", id.getState() == ComponentInstance.VALID); // NOTE(review): message says "invalidity" but VALID is asserted.
        ServiceReference cs_ref = Utils.getServiceReferenceByName(getContext(), CheckService.class.getName(), instance3.getInstanceName());
        assertNotNull("Check CheckService availability", cs_ref);
        CheckService cs = (CheckService) getContext().getService(cs_ref);
        Properties props = cs.getProps();
        // Phase 0: both providers bound -> two object-bind callbacks only.
        assertTrue("check CheckService invocation - 0", ((Boolean)props.get("result")).booleanValue());
        assertEquals("check void bind invocation - 0", ((Integer)props.get("voidB")).intValue(), 0);
        assertEquals("check void unbind callback invocation - 0", ((Integer)props.get("voidU")).intValue(), 0);
        assertEquals("check object bind callback invocation - 0", ((Integer)props.get("objectB")).intValue(), 2);
        assertEquals("check object unbind callback invocation - 0", ((Integer)props.get("objectU")).intValue(), 0);
        assertEquals("check ref bind callback invocation - 0", ((Integer)props.get("refB")).intValue(), 0);
        assertEquals("check ref unbind callback invocation - 0", ((Integer)props.get("refU")).intValue(), 0);
        assertEquals("Check FS invocation (int) - 0", ((Integer)props.get("int")).intValue(), 2);
        assertEquals("Check FS invocation (long) - 0", ((Long)props.get("long")).longValue(), 2);
        assertEquals("Check FS invocation (double) - 0", ((Double)props.get("double")).doubleValue(), 2.0);
        // Phase 1: stop the first provider -> one object-unbind callback.
        fooProvider1.stop();
        id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
        assertTrue("Check instance validity - 2", id.getState() == ComponentInstance.VALID);
        cs = (CheckService) getContext().getService(cs_ref);
        props = cs.getProps();
        assertTrue("check CheckService invocation - 3", ((Boolean)props.get("result")).booleanValue());
        assertEquals("check void bind invocation - 3", ((Integer)props.get("voidB")).intValue(), 0);
        assertEquals("check void unbind callback invocation - 3", ((Integer)props.get("voidU")).intValue(), 0);
        assertEquals("check object bind callback invocation - 3", ((Integer)props.get("objectB")).intValue(), 2);
        assertEquals("check object unbind callback invocation - 3", ((Integer)props.get("objectU")).intValue(), 1);
        assertEquals("check ref bind callback invocation - 3", ((Integer)props.get("refB")).intValue(), 0);
        assertEquals("check ref unbind callback invocation - 3", ((Integer)props.get("refU")).intValue(), 0);
        assertEquals("Check FS invocation (int) - 3", ((Integer)props.get("int")).intValue(), 1);
        assertEquals("Check FS invocation (long) - 3", ((Long)props.get("long")).longValue(), 1);
        assertEquals("Check FS invocation (double) - 3", ((Double)props.get("double")).doubleValue(), 1.0);
        // Phase 2: stop the second provider -> both object-unbind callbacks fired.
        fooProvider2.stop();
        id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
        assertTrue("Check instance validity - 5", id.getState() == ComponentInstance.VALID);
        cs = (CheckService) getContext().getService(cs_ref);
        props = cs.getProps();
        assertFalse("check CheckService invocation - 0", ((Boolean)props.get("result")).booleanValue()); // False : no provider
        assertEquals("check void bind invocation - 0", ((Integer)props.get("voidB")).intValue(), 0);
        assertEquals("check void unbind callback invocation - 0", ((Integer)props.get("voidU")).intValue(), 0);
        assertEquals("check object bind callback invocation - 0", ((Integer)props.get("objectB")).intValue(), 2);
        assertEquals("check object unbind callback invocation - 0", ((Integer)props.get("objectU")).intValue(), 2);
        assertEquals("check ref bind callback invocation - 0", ((Integer)props.get("refB")).intValue(), 0);
        assertEquals("check ref unbind callback invocation - 0", ((Integer)props.get("refU")).intValue(), 0);
        assertEquals("Check FS invocation (int) - 0", ((Integer)props.get("int")).intValue(), 0);
        assertEquals("Check FS invocation (long) - 0", ((Long)props.get("long")).longValue(), 0);
        assertEquals("Check FS invocation (double) - 0", ((Double)props.get("double")).doubleValue(), 0.0);
        // Teardown.
        id = null;
        cs = null;
        getContext().ungetService(arch_ref);
        getContext().ungetService(cs_ref);
        instance3.stop();
    }

    /**
     * Checks a dependency whose bind/unbind callbacks take the
     * ServiceReference as argument: only refB/refU counters must move as the
     * two foo providers are stopped one after the other.
     */
    public void testRef() {
        instance4.start();
        ServiceReference arch_ref = Utils.getServiceReferenceByName(getContext(), Architecture.class.getName(), instance4.getInstanceName());
        assertNotNull("Check architecture availability", arch_ref);
        InstanceDescription id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
        assertTrue("Check instance invalidity - 1", id.getState() == ComponentInstance.VALID); // NOTE(review): message says "invalidity" but VALID is asserted.
        ServiceReference cs_ref = Utils.getServiceReferenceByName(getContext(), CheckService.class.getName(), instance4.getInstanceName());
        assertNotNull("Check CheckService availability", cs_ref);
        CheckService cs = (CheckService) getContext().getService(cs_ref);
        Properties props = cs.getProps();
        // Phase 0: both providers bound -> two ref-bind callbacks only.
        assertTrue("check CheckService invocation - 0", ((Boolean)props.get("result")).booleanValue());
        assertEquals("check void bind invocation - 0", ((Integer)props.get("voidB")).intValue(), 0);
        assertEquals("check void unbind callback invocation - 0", ((Integer)props.get("voidU")).intValue(), 0);
        assertEquals("check object bind callback invocation - 0", ((Integer)props.get("objectB")).intValue(), 0);
        assertEquals("check object unbind callback invocation - 0", ((Integer)props.get("objectU")).intValue(), 0);
        assertEquals("check ref bind callback invocation - 0", ((Integer)props.get("refB")).intValue(), 2);
        assertEquals("check ref unbind callback invocation - 0", ((Integer)props.get("refU")).intValue(), 0);
        assertEquals("Check FS invocation (int) - 0", ((Integer)props.get("int")).intValue(), 2);
        assertEquals("Check FS invocation (long) - 0", ((Long)props.get("long")).longValue(), 2);
        assertEquals("Check FS invocation (double) - 0", ((Double)props.get("double")).doubleValue(), 2.0);
        // Phase 1: stop the first provider -> one ref-unbind callback.
        fooProvider1.stop();
        id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
        assertTrue("Check instance validity - 2", id.getState() == ComponentInstance.VALID);
        cs = (CheckService) getContext().getService(cs_ref);
        props = cs.getProps();
        assertTrue("check CheckService invocation - 3", ((Boolean)props.get("result")).booleanValue());
        assertEquals("check void bind invocation - 3", ((Integer)props.get("voidB")).intValue(), 0);
        assertEquals("check void unbind callback invocation - 3", ((Integer)props.get("voidU")).intValue(), 0);
        assertEquals("check object bind callback invocation - 3", ((Integer)props.get("objectB")).intValue(), 0);
        assertEquals("check object unbind callback invocation - 3", ((Integer)props.get("objectU")).intValue(), 0);
        assertEquals("check ref bind callback invocation - 3", ((Integer)props.get("refB")).intValue(), 2);
        assertEquals("check ref unbind callback invocation - 3", ((Integer)props.get("refU")).intValue(), 1);
        assertEquals("Check FS invocation (int) - 3", ((Integer)props.get("int")).intValue(), 1);
        assertEquals("Check FS invocation (long) - 3", ((Long)props.get("long")).longValue(), 1);
        assertEquals("Check FS invocation (double) - 3", ((Double)props.get("double")).doubleValue(), 1.0);
        // Phase 2: stop the second provider -> both ref-unbind callbacks fired.
        fooProvider2.stop();
        id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
        assertTrue("Check instance validity - 3", id.getState() == ComponentInstance.VALID);
        cs = (CheckService) getContext().getService(cs_ref);
        props = cs.getProps();
        assertFalse("check CheckService invocation - 0", ((Boolean)props.get("result")).booleanValue()); // False : no provider
        assertEquals("check void bind invocation - 0", ((Integer)props.get("voidB")).intValue(), 0);
        assertEquals("check void unbind callback invocation - 0", ((Integer)props.get("voidU")).intValue(), 0);
        assertEquals("check object bind callback invocation - 0", ((Integer)props.get("objectB")).intValue(), 0);
        assertEquals("check object unbind callback invocation - 0", ((Integer)props.get("objectU")).intValue(), 0);
        assertEquals("check ref bind callback invocation - 0", ((Integer)props.get("refB")).intValue(), 2);
        assertEquals("check ref unbind callback invocation - 0", ((Integer)props.get("refU")).intValue(), 2);
        assertEquals("Check FS invocation (int) - 0", ((Integer)props.get("int")).intValue(), 0);
        assertEquals("Check FS invocation (long) - 0", ((Long)props.get("long")).longValue(), 0);
        assertEquals("Check FS invocation (double) - 0", ((Double)props.get("double")).doubleValue(), 0.0);
        // Teardown.
        id = null;
        cs = null;
        getContext().ungetService(arch_ref);
        getContext().ungetService(cs_ref);
        instance4.stop();
    }
}
/* * Copyright (c) 2012, JInterval Project. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * Redistributions in * binary form must reproduce the above copyright notice, this list of * conditions and the following disclaimer in the documentation and/or other * materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ package net.java.jinterval.p1788; import java.lang.reflect.Type; import java.math.BigInteger; import java.util.Arrays; import net.java.jinterval.interval.Decoration; import net.java.jinterval.interval.MidRad; import net.java.jinterval.interval.set.IntvlPartOfNaI; import net.java.jinterval.interval.set.MulRevPair; import net.java.jinterval.interval.set.SetInterval; import net.java.jinterval.interval.set.SetIntervalContext; import net.java.jinterval.interval.set.SetIntervalOps; import net.java.jinterval.rational.BinaryValueSet; import net.java.jinterval.rational.ExtendedRational; import net.java.jinterval.text2interval.gen.GenP1788; import net.java.jinterval.text2interval.gen.GenP1788Base; import org.bridj.Platform; /** * */ public enum Operation { empty(GenP1788.opEmpty) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.empty(); } }, entire(GenP1788.opEntire) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.entire(); } }, nai(GenP1788.opNaI) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.nai(); } }, convertType(GenP1788.opConvertType) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.hull((SetInterval) a[0]); } }, numsToInterval(GenP1788.opNumsToInterval) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.numsToInterval((ExtendedRational) a[0], (ExtendedRational) a[1]); } }, numsDecToInterval(GenP1788.opNumsDecToInterval) { @Override void eval(Object[] contexts, Object[] r, Object... 
a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.numsDecToInterval((ExtendedRational) a[0], (ExtendedRational) a[1], (Decoration) a[2]); } }, textToInterval(GenP1788.opTextToInterval) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.textToInterval((String) a[0]); } }, textToDecoratedInterval(GenP1788.opTextToDecoratedInterval) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.textToDecoratedInterval((String) a[0]); } }, pos(GenP1788.opPos) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.hull((SetInterval) a[0]); } }, neg(GenP1788.opNeg) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.neg((SetInterval) a[0]); } }, add(GenP1788.opAdd) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.add((SetInterval) a[0], (SetInterval) a[1]); } }, sub(GenP1788.opSub) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.sub((SetInterval) a[0], (SetInterval) a[1]); } }, mul(GenP1788.opMul) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.mul((SetInterval) a[0], (SetInterval) a[1]); } }, div(GenP1788.opDiv) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.div((SetInterval) a[0], (SetInterval) a[1]); } }, recip(GenP1788.opRecip) { @Override void eval(Object[] contexts, Object[] r, Object... 
a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.recip((SetInterval) a[0]); } }, sqr(GenP1788.opSqr) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.sqr((SetInterval) a[0]); } }, sqrt(GenP1788.opSqrt) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.sqrt((SetInterval) a[0]); } }, fma(GenP1788.opFma) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.fma((SetInterval) a[0], (SetInterval) a[1], (SetInterval) a[2]); } }, pown(GenP1788.opPown) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.pown((SetInterval) a[0], (BigInteger) a[1]); } }, pow(GenP1788.opPow) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.pow((SetInterval) a[0], (SetInterval) a[1]); } }, exp(GenP1788.opExp) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.exp((SetInterval) a[0]); } }, exp2(GenP1788.opExp2) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.exp2((SetInterval) a[0]); } }, exp10(GenP1788.opExp10) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.exp10((SetInterval) a[0]); } }, log(GenP1788.opLog) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.log((SetInterval) a[0]); } }, log2(GenP1788.opLog2) { @Override void eval(Object[] contexts, Object[] r, Object... 
a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.log2((SetInterval) a[0]); } }, log10(GenP1788.opLog10) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.log10((SetInterval) a[0]); } }, sin(GenP1788.opSin) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.sin((SetInterval) a[0]); } }, cos(GenP1788.opCos) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.cos((SetInterval) a[0]); } }, tan(GenP1788.opTan) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.tan((SetInterval) a[0]); } }, asin(GenP1788.opAsin) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.asin((SetInterval) a[0]); } }, acos(GenP1788.opAcos) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.acos((SetInterval) a[0]); } }, atan(GenP1788.opAtan) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.atan((SetInterval) a[0]); } }, atan2(GenP1788.opAtan2) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.atan2((SetInterval) a[0], (SetInterval) a[1]); } }, sinh(GenP1788.opSinh) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.sinh((SetInterval) a[0]); } }, cosh(GenP1788.opCosh) { @Override void eval(Object[] contexts, Object[] r, Object... 
a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.cosh((SetInterval) a[0]); } }, tanh(GenP1788.opTanh) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.tanh((SetInterval) a[0]); } }, asinh(GenP1788.opAsinh) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.asinh((SetInterval) a[0]); } }, acosh(GenP1788.opAcosh) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.acosh((SetInterval) a[0]); } }, atanh(GenP1788.opAtanh) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.atanh((SetInterval) a[0]); } }, sign(GenP1788.opSign) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.sign((SetInterval) a[0]); } }, ceil(GenP1788.opCeil) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.ceil((SetInterval) a[0]); } }, floor(GenP1788.opFloor) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.floor((SetInterval) a[0]); } }, trunc(GenP1788.opTrunc) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.trunc((SetInterval) a[0]); } }, roundTiesToEven(GenP1788.opRoundTiesToEven) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.roundTiesToEven((SetInterval) a[0]); } }, roundTiesToAway(GenP1788.opRoundTiesToAway) { @Override void eval(Object[] contexts, Object[] r, Object... 
a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.roundTiesToAway((SetInterval) a[0]); } }, abs(GenP1788.opAbs) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.abs((SetInterval) a[0]); } }, min(GenP1788.opMin) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.min((SetInterval) a[0], (SetInterval) a[1]); } }, max(GenP1788.opMax) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.max((SetInterval) a[0], (SetInterval) a[1]); } }, mulRevToPair(GenP1788.opMulRevToPair) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; MulRevPair pair = ctx.mulRevToPair((SetInterval) a[0], (SetInterval) a[1]); r[0] = pair.first; r[1] = pair.second; } }, sqrRev(GenP1788.opSqrRev) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.sqrRev((SetInterval) a[0], (SetInterval) a[1]); } }, sqrRevEntire(GenP1788.opSqrRevEntire) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.sqrRev((SetInterval) a[0], ENTIRE); } }, absRev(GenP1788.opAbsRev) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.absRev((SetInterval) a[0], (SetInterval) a[1]); } }, absRevEntire(GenP1788.opAbsRevEntire) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.absRev((SetInterval) a[0], ENTIRE); } }, pownRev(GenP1788.opPownRev) { @Override void eval(Object[] contexts, Object[] r, Object... 
a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.pownRev((SetInterval) a[0], (SetInterval) a[1], (BigInteger) a[2]); } }, pownRevEntire(GenP1788.opPownRevEntire) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.pownRev((SetInterval) a[0], ENTIRE, (BigInteger) a[1]); } }, sinRev(GenP1788.opSinRev) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.sinRev((SetInterval) a[0], (SetInterval) a[1]); } }, sinRevEntire(GenP1788.opSinRevEntire) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.sinRev((SetInterval) a[0], ENTIRE); } }, cosRev(GenP1788.opCosRev) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.cosRev((SetInterval) a[0], (SetInterval) a[1]); } }, cosRevEntire(GenP1788.opCosRevEntire) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.cosRev((SetInterval) a[0], ENTIRE); } }, tanRev(GenP1788.opTanRev) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.tanRev((SetInterval) a[0], (SetInterval) a[1]); } }, tanRevEntire(GenP1788.opTanRevEntire) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.tanRev((SetInterval) a[0], ENTIRE); } }, coshRev(GenP1788.opCoshRev) { @Override void eval(Object[] contexts, Object[] r, Object... 
a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.coshRev((SetInterval) a[0], (SetInterval) a[1]); } }, coshRevEntire(GenP1788.opCoshRevEntire) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.coshRev((SetInterval) a[0], ENTIRE); } }, mulRev(GenP1788.opMulRev) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.mulRev((SetInterval) a[0], (SetInterval) a[1], (SetInterval) a[2]); } }, mulRevEntire(GenP1788.opMulRevEntire) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.mulRev((SetInterval) a[0], (SetInterval) a[1], ENTIRE); } }, powRev1(GenP1788.opPowRev1) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.powRev1((SetInterval) a[0], (SetInterval) a[1], (SetInterval) a[2]); } }, powRev1Entire(GenP1788.opPowRev1Entire) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.powRev1((SetInterval) a[0], (SetInterval) a[1], ENTIRE); } }, powRev2(GenP1788.opPowRev2) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.powRev2((SetInterval) a[0], (SetInterval) a[1], (SetInterval) a[2]); } }, powRev2Entire(GenP1788.opPowRev2Entire) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.powRev2((SetInterval) a[0], (SetInterval) a[1], ENTIRE); } }, atan2Rev1(GenP1788.opAtan2Rev1) { @Override void eval(Object[] contexts, Object[] r, Object... 
a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.atan2Rev1((SetInterval) a[0], (SetInterval) a[1], (SetInterval) a[2]); } }, atan2Rev1Entire(GenP1788.opAtan2Rev1Entire) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.atan2Rev1((SetInterval) a[0], (SetInterval) a[1], ENTIRE); } }, atan2Rev2(GenP1788.opAtan2Rev2) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.atan2Rev2((SetInterval) a[0], (SetInterval) a[1], (SetInterval) a[2]); } }, atan2Rev2Entire(GenP1788.opAtan2Rev2Entire) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.atan2Rev2((SetInterval) a[0], (SetInterval) a[1], ENTIRE); } }, cancelMinus(GenP1788.opCancelMinus) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.cancelMinus((SetInterval) a[0], (SetInterval) a[1]); } }, cancelPlus(GenP1788.opCancelPlus) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.cancelPlus((SetInterval) a[0], (SetInterval) a[1]); } }, intersection(GenP1788.opIntersection) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.intersection((SetInterval) a[0], (SetInterval) a[1]); } }, convexHull(GenP1788.opConvexHull) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.convexHull((SetInterval) a[0], (SetInterval) a[1]); } }, inf(GenP1788.opInf) { @Override void eval(Object[] contexts, Object[] r, Object... 
a) { BinaryValueSet numberFormat = (BinaryValueSet) contexts[0]; SetInterval x = (SetInterval) a[0]; r[0] = x.isNaI() ? null : x.inf(numberFormat); } }, sup(GenP1788.opSup) { @Override void eval(Object[] contexts, Object[] r, Object... a) { BinaryValueSet numberFormat = (BinaryValueSet) contexts[0]; SetInterval x = (SetInterval) a[0]; r[0] = x.isNaI() ? null : x.sup(numberFormat); } }, mid(GenP1788.opMid) { @Override void eval(Object[] contexts, Object[] r, Object... a) { BinaryValueSet numberFormat = (BinaryValueSet) contexts[0]; SetInterval x = (SetInterval) a[0]; r[0] = x.isEmpty() ? null : x.mid(numberFormat); } }, rad(GenP1788.opRad) { @Override void eval(Object[] contexts, Object[] r, Object... a) { BinaryValueSet numberFormat = (BinaryValueSet) contexts[0]; SetInterval x = (SetInterval) a[0]; r[0] = x.isEmpty() ? null : x.rad(numberFormat); } }, wid(GenP1788.opWid) { @Override void eval(Object[] contexts, Object[] r, Object... a) { BinaryValueSet numberFormat = (BinaryValueSet) contexts[0]; SetInterval x = (SetInterval) a[0]; r[0] = x.isEmpty() ? null : x.wid(numberFormat); } }, mag(GenP1788.opMag) { @Override void eval(Object[] contexts, Object[] r, Object... a) { BinaryValueSet numberFormat = (BinaryValueSet) contexts[0]; SetInterval x = (SetInterval) a[0]; r[0] = x.isEmpty() ? null : x.mag(numberFormat); } }, mig(GenP1788.opMig) { @Override void eval(Object[] contexts, Object[] r, Object... a) { BinaryValueSet numberFormat = (BinaryValueSet) contexts[0]; SetInterval x = (SetInterval) a[0]; r[0] = x.isEmpty() ? null : x.mig(numberFormat); } }, midRad(GenP1788.opMidRad) { @Override void eval(Object[] contexts, Object[] r, Object... 
a) { BinaryValueSet midNumberFormat = (BinaryValueSet) contexts[0]; BinaryValueSet radNumberFormat = (BinaryValueSet) contexts[1]; SetInterval x = (SetInterval) a[0]; if (x.isEmpty()) { r[0] = r[1] = null; } else { MidRad midRad = x.midRad(midNumberFormat, radNumberFormat); r[0] = midRad.mid; r[1] = midRad.rad; } } }, isEmpty(GenP1788.opIsEmpty) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetInterval x = (SetInterval) a[0]; r[0] = !x.isNaI() && x.isEmpty(); } }, isEntire(GenP1788.opIsEntire) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetInterval x = (SetInterval) a[0]; r[0] = !x.isNaI() && x.isEntire(); } }, isNaI(GenP1788.opIsNaI) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetInterval x = (SetInterval) a[0]; r[0] = x.isNaI(); } }, isCommonInterval(GenP1788.opIsCommonInterval) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetInterval x = (SetInterval) a[0]; r[0] = !x.isNaI() && x.isCommonInterval(); } }, isSingleton(GenP1788.opIsSingleton) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetInterval x = (SetInterval) a[0]; r[0] = !x.isNaI() && x.isSingleton(); } }, isMember(GenP1788.opIsMember) { @Override void eval(Object[] contexts, Object[] r, Object... a) { ExtendedRational m = (ExtendedRational) a[0]; SetInterval x = (SetInterval) a[1]; r[0] = m != null && !x.isNaI() && x.isMember(m); } }, equal(GenP1788.opEqual) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetInterval x = (SetInterval) a[0]; SetInterval y = (SetInterval) a[1]; r[0] = !x.isNaI() && !y.isNaI() && x.equal(y); } }, subset(GenP1788.opSubset) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetInterval x = (SetInterval) a[0]; SetInterval y = (SetInterval) a[1]; r[0] = !x.isNaI() && !y.isNaI() && x.subset(y); } }, less(GenP1788.opLess) { @Override void eval(Object[] contexts, Object[] r, Object... 
a) { SetInterval x = (SetInterval) a[0]; SetInterval y = (SetInterval) a[1]; r[0] = !x.isNaI() && !y.isNaI() && x.less(y); } }, precedes(GenP1788.opPrecedes) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetInterval x = (SetInterval) a[0]; SetInterval y = (SetInterval) a[1]; r[0] = !x.isNaI() && !y.isNaI() && x.precedes(y); } }, interior(GenP1788.opInterior) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetInterval x = (SetInterval) a[0]; SetInterval y = (SetInterval) a[1]; r[0] = !x.isNaI() && !y.isNaI() && x.interior(y); } }, strictLess(GenP1788.opStrictLess) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetInterval x = (SetInterval) a[0]; SetInterval y = (SetInterval) a[1]; r[0] = !x.isNaI() && !y.isNaI() && x.strictLess(y); } }, strictPrecedes(GenP1788.opStrictPrecedes) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetInterval x = (SetInterval) a[0]; SetInterval y = (SetInterval) a[1]; r[0] = !x.isNaI() && !y.isNaI() && x.strictPrecedes(y); } }, disjoint(GenP1788.opDisjoint) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetInterval x = (SetInterval) a[0]; SetInterval y = (SetInterval) a[1]; r[0] = !x.isNaI() && !y.isNaI() && x.disjoint(y); } }, overlap(GenP1788.opOverlap) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetInterval x = (SetInterval) a[0]; SetInterval y = (SetInterval) a[1]; r[0] = !x.isNaI() && !y.isNaI() ? x.overlap(y) : null; } }, newDec(GenP1788.opNewDec) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.newDec((SetInterval) a[0]); } }, intervalPart(GenP1788.opIntervalPart) { @Override void eval(Object[] contexts, Object[] r, Object... 
a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; SetInterval x = (SetInterval) a[0]; if (x.isNaI()) { throw new IntvlPartOfNaI(); } r[0] = ctx.newDec((SetInterval) a[0]); } }, decorationPart(GenP1788.opDecorationPart) { @Override void eval(Object[] contexts, Object[] r, Object... a) { r[0] = ((SetInterval) a[0]).getDecoration(); } }, setDec(GenP1788.opSetDec) { @Override void eval(Object[] contexts, Object[] r, Object... a) { SetIntervalContext ctx = (SetIntervalContext) contexts[0]; r[0] = ctx.setDec((SetInterval) a[0], (Decoration) a[1]); } }, intervalToExact(GenP1788.opIntervalToExact) { @Override void eval(Object[] contexts, Object[] r, Object... a) { BinaryValueSet valueSet = (BinaryValueSet) contexts[0]; r[0] = ((SetInterval) a[0]).intervalToExact(valueSet); } }, intervalToDecoratedExact(GenP1788.opIntervalToExact) { @Override void eval(Object[] contexts, Object[] r, Object... a) { BinaryValueSet valueSet = (BinaryValueSet) contexts[0]; r[0] = ((SetInterval) a[0]).intervalToExactDecorated(valueSet); } }; private final GenP1788Base.Operation oper; private final Type[] parameters; private Operation(GenP1788Base.Operation oper) { this.oper = oper; parameters = new Type[oper.results.length + oper.args.length]; Arrays.fill(parameters, Platform.is64Bits() ? long.class : int.class); } GenP1788Base.Operation getBaseOperation() { return oper; } Type[] getParameters() { return parameters; } abstract void eval(Object[] contexts, Object[] r, Object... a); private static SetInterval ENTIRE = SetIntervalOps.nums2(ExtendedRational.NEGATIVE_INFINITY, ExtendedRational.POSITIVE_INFINITY); }
/** * Copyright (C) 2009 Aisino Corporation Inc. * * No.18A, Xingshikou street, Haidian District,Beijing * All rights reserved. * * This software is the confidential and proprietary information of * Aisino Corporation Inc. ("Confidential Information"). You shall not * disclose such Confidential Information and shall use it only in * accordance with the terms of the license agreement you entered into * with Aisino. */ package org.amote.client; import java.io.IOException; import java.net.InetAddress; import java.net.SocketException; import java.net.UnknownHostException; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import java.util.HashMap; import java.util.Timer; import java.util.TimerTask; import org.amote.client.android.ScreenInfo; import org.amote.utils.SensorHub; import org.gmote.common.DataReceiverIF; import org.gmote.common.ISuccessResponse; import org.gmote.common.MulticastClient; import org.gmote.common.MulticastClient.ServerFoundHandler; import org.gmote.common.Protocol.Command; import org.gmote.common.Protocol.CommandEvent; import org.gmote.common.ServerInfo; import org.gmote.common.ServerOutOfDateException; import org.gmote.common.TcpConnection; import org.gmote.common.packet.AbstractPacket; import org.gmote.common.packet.CommandPacket; import org.gmote.common.packet.SensorStatePacket; import org.gmote.common.packet.SimplePacket; import org.gmote.common.security.AuthenticationException; import org.gmote.common.security.AuthenticationHandler; import android.content.Intent; import android.hardware.Sensor; import android.os.Handler; import android.os.Message; import android.util.Log; /** * Wrapper class responsible for finding and communicating with the server. * * @author Aisino * */ public class Remote implements DataReceiverIF { // Current version of the Gmote Client. 
We don't use the value that is in // the // manifest since its possible that we don't have access to this value (for // example, when the program crashes and gets restarted by android) public static final String GMOTE_CLIENT_VERSION = "2.0.2"; public static final String MINIMUM_SERVER_VERSION = "2.0.0"; // Response codes public static final int NORMAL = 0; public static final int CONNECTION_FAILURE = 1; public static final int CONNECTING = 2; public static final int CONNECTED = 6; public static final int SEARCHING = 3; public static final int AUTHENTICATION_FAILURE = 4; public static final int SERVER_LIST_ADD_SERVER = 5; public static final int SERVER_LIST_DONE = 6; public static final int SERVER_OUT_OF_DATE = 7; public static final int IP_LIST_SAVE = 8; public static final int LONG_PRESS_UP_CONFIRM = 9; public static final String FILE_NAME = "IP.txt"; /** heart beat Interval 10 min */ private static final int HEARTBEATTIME = 10*1000*60; // Timing constants public static final int MAX_ATTEMPTS = 3; // number of connection attempts // before report giving up public static final int TIMEOUT = 3000; // milliseconds server connection // timeout public static final int FINDSERVERS_TIMEOUT = 6500; // milliseconds private static final String DEBUG_TAG = "Remote"; private ServerInfo server = null; public String password = ""; private static Remote remote = new Remote(); private Handler callback; private TcpConnection con = null; private Thread worker = null; private BlockingQueue<AbstractPacket> packetQueue = new LinkedBlockingQueue<AbstractPacket>(200); InetAddress serverInetAddress = null; public Boolean flag = true; // add the heartbeat function by zhangdawei private Timer mTimer; private TimerTask mTimerTask; private Remote() { // Start a new thread that will send packets for us. worker = new Thread(new PacketSender()); worker.start(); // add by zhangdawei, in order to solve the disconnection after several minutes. 
mTimer = new Timer(true); mTimerTask = new TimerTask() { public void run() { if (remote.isConnected()) { remote.queuePacket(new SimplePacket(Command.BEATHEART)); System.out.println("###############BeatHeart Packet queued!"); } } }; mTimer.schedule(mTimerTask, 0, HEARTBEATTIME); System.out.println("BeatHeart Timer StarTing!"); // end add } private void setCallback(Handler callback) { this.callback = callback; } public static synchronized Remote getInstance(Handler handler) { remote.setCallback(handler); return remote; } public static Remote getInstance() { return remote; } public synchronized void setServer(ServerInfo serverInfo) { server = serverInfo; Log.d(DEBUG_TAG, "Gmote# set server to: " + server.getServer() + ":" + server.getPort()); try { if (serverInfo == null || serverInfo.getIp() == null) { serverInetAddress = null; } else { serverInetAddress = InetAddress.getByName(serverInfo.getIp()); } } catch (final UnknownHostException e) { Log.e(DEBUG_TAG, e.getMessage(), e); serverInetAddress = null; } disconnect(); } public InetAddress getServerInetAddress() { return serverInetAddress; } protected synchronized void disconnect() { if (con != null) { con.closeConnection(); con = null; } packetQueue.clear(); } public synchronized void setPassword(String newPassword) { password = newPassword; Log.d(DEBUG_TAG, "Remote# set password"); } public void detach() { callback = null; } public String getServerString() { if (server != null) return server.toString(); return ""; } public String getServerIp() { if (server != null) return server.getIp(); return ""; } public int getServerPort() { if (server != null) { return server.getPort(); } return 8851; } public int getServerUdpPort() { if (server != null) { return server.getUdpPort(); } return ServerInfo.DEFAULT_UDP_PORT; } public synchronized boolean isConnected() { return con == null ? 
false : con.isConnected(); } public synchronized boolean connect(boolean ignoreErrors) { if (callback == null) { Log.w(DEBUG_TAG, "Callback is null in connect()"); return false; } if(flag) { flag = false; callback.sendEmptyMessage(CONNECTING); } if (server == null) { Log.w(DEBUG_TAG, "Server was null in connect"); disconnect(); if (!ignoreErrors) { callback.sendEmptyMessage(CONNECTION_FAILURE); } flag = true; return false; } for (int i = 0; i < MAX_ATTEMPTS && callback != null; i++) { try { connectToServer(); if (callback != null) { callback.sendEmptyMessage(CONNECTED); } return true; } catch (IOException e) { Log.e(DEBUG_TAG, "Connection attempt " + i + " failed: " + e.getMessage(), e); flag = true; } catch (AuthenticationException e) { Log.e(DEBUG_TAG, "Authentication failure: " + e.getMessage(), e); disconnect(); if (callback != null) { callback.sendEmptyMessage(AUTHENTICATION_FAILURE); } else { Log.w(DEBUG_TAG, "Authentication failure with callback = null. We won't be able to notify anyone"); } return false; } catch (ServerOutOfDateException e) { Log.e(DEBUG_TAG, "Server out of date error: " + e.getMessage(), e); flag = true; if (callback != null) { callback.sendMessage(Message.obtain(callback, SERVER_OUT_OF_DATE, e.getServerVersion())); return true; } else { Log.e(DEBUG_TAG, "The server is out of date, but no callback was found. This means we won't be able to notify the user of the current error."); disconnect(); return false; } } } Log.w(DEBUG_TAG, "Failed to connect after " + MAX_ATTEMPTS + " attempts. 
Aborting."); if (callback != null) { if (!ignoreErrors) { callback.sendEmptyMessage(CONNECTION_FAILURE); } } else { Log.w(DEBUG_TAG, "Connection failure, and call back is null"); } disconnect(); flag = true; return false; } private synchronized void connectToServer() throws IOException, AuthenticationException, ServerOutOfDateException { con = new TcpConnection(new AuthenticationHandler(GMOTE_CLIENT_VERSION, MINIMUM_SERVER_VERSION),successResponse); Log.i(DEBUG_TAG, "Connecting to server: " + server.getIp() + ":" + server.getPort() + ":" + password); con.connectToServerAsync(server.getPort(), server.getIp(), (DataReceiverIF) Remote.this, TIMEOUT, password); } /** push the packet into the packet sending queue. */ protected synchronized void queuePacket(AbstractPacket packet) { try { packetQueue.put(packet); } catch (InterruptedException e) { Log.e(DEBUG_TAG, e.getMessage(), e); } } /** handle the data received from server, by TCP. */ public void handleReceiveData(final AbstractPacket reply, final TcpConnection connection) { if (callback != null) { Log.w(DEBUG_TAG,"==========remote handleRecieveData1"); switch (reply.getCommand()) { case BEATHEART_REPLY: System.out.println("======heart beat from server Pong! 
Pong!"); break; case COMMAND_EVENT: int sensorType = ((CommandPacket) reply).getType(); CommandEvent ce = ((CommandPacket) reply).getCommandEvent(); if (ce == CommandEvent.CMD_ENABLE){ Log.i(DEBUG_TAG, "get server TCP packet: enable Sensor"); SensorHub.getInstance().saveSensorState(new Integer(sensorType), true); } else if (ce == CommandEvent.CMD_DISABLE){ Log.i(DEBUG_TAG, "get server TCP packet: disable Sensor"); SensorHub.getInstance().saveSensorState(new Integer(sensorType), false); } else { Log.e(DEBUG_TAG, "get server TCP packet: unknown command packet!"); } break; case SENSOR_STATE_EVENT: SensorHub.getInstance().saveSensorState(((SensorStatePacket) reply).getSensorStateHashMap()) ; Log.i(DEBUG_TAG, "get sensor State hash map!"); break; default: callback.sendMessage(Message.obtain(callback, -1, reply)); break; } } else { Log.w(DEBUG_TAG, "Received a packet, but call back is null, " + "so I won't be able to deliver it to anyone."); } } public void getServerList(Handler findServerCallback) { Thread serverFinder = new Thread(new ServerFinder(findServerCallback)); serverFinder.start(); } protected class ServerFinder implements Runnable { private Handler findServerCallback; public ServerFinder(Handler findServerCallback) { this.findServerCallback = findServerCallback; } public void run() { Log.e(DEBUG_TAG, "Creating MC"); MulticastClient mc = new MulticastClient(); final ServerFoundHandler serverFoundHandler = new ServerFoundHandler() { public void onServerFound(final ServerInfo server) { if (findServerCallback != null) { findServerCallback.sendMessage(Message.obtain( findServerCallback, SERVER_LIST_ADD_SERVER, server)); } else { Log.w(DEBUG_TAG, "Find Server callback was null. 
We can't notify anyone that we found a new server."); } } }; mc.findServers(FINDSERVERS_TIMEOUT, serverFoundHandler); Log.e(DEBUG_TAG, "Got Servers"); if (findServerCallback != null) { findServerCallback.sendMessage(Message.obtain( findServerCallback, SERVER_LIST_DONE)); } else { Log.w(DEBUG_TAG, "Find Server callback was null. We can't notify anyone that find server has finished."); } } } /** basic thread class, for send Packet. */ class PacketSender implements Runnable { public void run() { AbstractPacket packet; while (true) { // Get the packet that is at the head of the queue, waiting if // necessary. try { packet = packetQueue.take(); } catch (final InterruptedException e) { Log.w(DEBUG_TAG, e.getMessage(), e); packet = null; } catch (final Exception e) { Log.e(DEBUG_TAG, e.getMessage(), e); packet = null; createNewQueue(); } if (packet != null) { try { sendPacketToServer(packet); } catch (final Exception e) { Log.d(DEBUG_TAG, "Send packet failed. " + e.getMessage(), e); disconnect(); } } } } /** recreate the queue if problem occured. */ private synchronized void createNewQueue() { packetQueue = new LinkedBlockingQueue<AbstractPacket>(); } /** * send Packet to server. * @param packet * @throws IOException */ private synchronized void sendPacketToServer(final AbstractPacket packet) throws IOException { // Try to connect. // We'll try this twice since the connection may be down but we // don't know about it. 
Log.e(DEBUG_TAG,"========================client sendPacketToServer"); boolean tryAgain = false; do { if (con != null || connect(false)) { try { con.sendPacket(packet); tryAgain = false; } catch (final IOException e) { Log.e(DEBUG_TAG, e.getMessage(), e); disconnect(); tryAgain = (tryAgain == false); if (!tryAgain) { if (callback != null) { callback.sendEmptyMessage(CONNECTION_FAILURE); } else { Log.e(DEBUG_TAG, "Unable to notify client of io error in send packet since callback is null"); } } } } else { tryAgain = false; } } while (tryAgain); } } public String getSessionId() { if (con == null) { return null; } return con.getSessionId(); } public ServerInfo getServer() { return server; } ISuccessResponse successResponse = new ISuccessResponse(){ public void CheckSuccess(AbstractPacket packet) { Log.i(DEBUG_TAG, "&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&CheckSuccess running!"); callback.sendMessage(Message.obtain(callback, IP_LIST_SAVE, server)); flag = true; final SimplePacket sp = (SimplePacket) packet; ScreenInfo.handleScreenType(sp.getWidth(), sp.getHeight(), sp.getdensityDpi()); } }; }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.test.spring.junit5;

import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
import java.util.StringJoiner;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;

import org.apache.camel.ExtendedCamelContext;
import org.apache.camel.api.management.ManagedCamelContext;
import org.apache.camel.api.management.mbean.ManagedCamelContextMBean;
import org.apache.camel.component.mock.InterceptSendToMockEndpointStrategy;
import org.apache.camel.impl.DefaultCamelContext;
import org.apache.camel.impl.debugger.DefaultDebugger;
import org.apache.camel.spi.Breakpoint;
import org.apache.camel.spi.Debugger;
import org.apache.camel.spi.EventNotifier;
import org.apache.camel.spi.PropertiesComponent;
import org.apache.camel.spring.SpringCamelContext;
import org.apache.camel.test.junit5.CamelTestSupport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.config.BeanPostProcessor;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.core.annotation.AnnotationUtils;

/**
 * Static utility that applies the Camel Spring test annotations
 * (e.g. {@code @ExcludeRoutes}, {@code @DisableJmx}, {@code @EnableRouteCoverage},
 * {@code @MockEndpoints}) to the Spring application context under test.
 * All state is configured via static hooks on {@link DefaultCamelContext}
 * or via the {@link CamelSpringTestHelper} strategy callbacks.
 */
public final class CamelAnnotationsHandler {

    private static final Logger LOGGER = LoggerFactory.getLogger(CamelAnnotationsHandler.class);

    // Utility class: not meant to be instantiated.
    private CamelAnnotationsHandler() {
    }

    /**
     * Cleanup/restore global state to defaults / pre-test values after the test setup is complete.
     *
     * @param testClass the test class being executed
     */
    public static void cleanup(Class<?> testClass) {
        DefaultCamelContext.clearOptions();
    }

    /**
     * Handles @ExcludeRoutes to make it easier to exclude other routes when testing with Spring.
     *
     * @param context the initialized Spring context
     * @param testClass the test class being executed
     */
    public static void handleExcludeRoutes(ConfigurableApplicationContext context, Class<?> testClass) {
        String key = SpringCamelContext.EXCLUDE_ROUTES;
        String exists = System.getProperty(key);
        if (exists != null) {
            // The legacy JVM-property mechanism is ignored; warn so users migrate.
            LOGGER.warn("The JVM property " + key + " is set, but not supported anymore.");
        }
        if (testClass.isAnnotationPresent(ExcludeRoutes.class)) {
            Class<?>[] routes = testClass.getAnnotation(ExcludeRoutes.class).value();
            // need to setup this as a JVM system property
            StringJoiner routesBuilder = new StringJoiner(",");
            for (Class<?> clazz : routes) {
                routesBuilder.add(clazz.getName());
            }
            String value = routesBuilder.toString();
            LOGGER.info("@ExcludeRoutes annotation found. Setting up JVM property {}={}", key, value);
            DefaultCamelContext.setExcludeRoutes(value);
        }
    }

    /**
     * Handles disabling of JMX on Camel contexts based on {@link DisableJmx}.
     * When the test class carries neither annotation, JMX is disabled by default
     * because plain tests do not need it; {@link EnableRouteCoverage} keeps JMX on
     * since coverage collection requires it.
     *
     * @param context the initialized Spring context
     * @param testClass the test class being executed
     */
    public static void handleDisableJmx(ConfigurableApplicationContext context, Class<?> testClass) {
        if (testClass.isAnnotationPresent(DisableJmx.class)) {
            if (testClass.getAnnotation(DisableJmx.class).value()) {
                LOGGER.info("Disabling Camel JMX globally as DisableJmx annotation was found and disableJmx is set to true.");
                DefaultCamelContext.setDisableJmx(true);
            } else {
                LOGGER.info("Enabling Camel JMX as DisableJmx annotation was found and disableJmx is set to false.");
                DefaultCamelContext.setDisableJmx(false);
            }
        } else if (!testClass.isAnnotationPresent(EnableRouteCoverage.class)) {
            // route coverage need JMX so do not disable it by default
            LOGGER.info(
                    "Disabling Camel JMX globally for tests by default. Use the DisableJMX annotation to override the default setting.");
            DefaultCamelContext.setDisableJmx(true);
        } else {
            LOGGER.info("Enabling Camel JMX as EnableRouteCoverage is used.");
            DefaultCamelContext.setDisableJmx(false);
        }
    }

    /**
     * Handles enabling of route coverage on Camel contexts based on
     * {@link EnableRouteCoverage}: installs a {@link RouteCoverageEventNotifier}
     * on every Spring Camel context so coverage is recorded per test method.
     * (NOTE(review): javadoc previously copy-pasted the JMX description.)
     *
     * @param context the initialized Spring context
     * @param testClass the test class being executed
     * @param testMethod resolves the currently executing test method name
     *                   (raw {@link Function} — assumed Function&lt;Object, String&gt;; TODO confirm and parameterize)
     */
    public static void handleRouteCoverage(ConfigurableApplicationContext context, Class<?> testClass, Function testMethod)
            throws Exception {
        if (testClass.isAnnotationPresent(EnableRouteCoverage.class)) {
            System.setProperty(CamelTestSupport.ROUTE_COVERAGE_ENABLED, "true");

            CamelSpringTestHelper.doToSpringCamelContexts(context, new CamelSpringTestHelper.DoToSpringCamelContextsStrategy() {

                @Override
                public void execute(String contextName, SpringCamelContext camelContext) throws Exception {
                    LOGGER.info("Enabling RouteCoverage");
                    EventNotifier notifier = new RouteCoverageEventNotifier(testClass.getName(), testMethod);
                    camelContext.addService(notifier, true);
                    camelContext.getManagementStrategy().addEventNotifier(notifier);
                }
            });
        }
    }

    /**
     * Dumps the collected route coverage for the current test method, resets the
     * JMX statistics, and removes the coverage event notifier so Camel shutdown
     * does not dump a second time.
     *
     * @param context the initialized Spring context
     * @param testClass the test class being executed
     * @param testMethod resolves the currently executing test method name
     */
    public static void handleRouteCoverageDump(ConfigurableApplicationContext context, Class<?> testClass, Function testMethod)
            throws Exception {
        if (testClass.isAnnotationPresent(EnableRouteCoverage.class)) {
            CamelSpringTestHelper.doToSpringCamelContexts(context, new CamelSpringTestHelper.DoToSpringCamelContextsStrategy() {

                @Override
                public void execute(String contextName, SpringCamelContext camelContext) throws Exception {
                    LOGGER.debug("Dumping RouteCoverage");
                    String testMethodName = (String) testMethod.apply(this);
                    RouteCoverageDumper.dumpRouteCoverage(camelContext, testClass.getName(), testMethodName);

                    // reset JMX statistics
                    ManagedCamelContextMBean managedCamelContext
                            = camelContext.getExtension(ManagedCamelContext.class).getManagedCamelContext();
                    if (managedCamelContext != null) {
                        LOGGER.debug("Resetting JMX statistics for RouteCoverage");
                        managedCamelContext.reset(true);
                    }

                    // turn off dumping one more time by removing the event listener (which would dump as well when Camel is stopping)
                    // but this method was explicit invoked to dump such as from afterTest callbacks from JUnit.
                    RouteCoverageEventNotifier eventNotifier = camelContext.hasService(RouteCoverageEventNotifier.class);
                    if (eventNotifier != null) {
                        camelContext.getManagementStrategy().removeEventNotifier(eventNotifier);
                        camelContext.removeService(eventNotifier);
                    }
                }
            });
        }
    }

    /**
     * Installs debugger breakpoints supplied by test methods annotated with
     * {@link ProvidesBreakpoint}. Each provider must be a public static
     * no-argument method returning a {@link Breakpoint}; violations raise
     * {@link IllegalArgumentException}.
     *
     * @param context the initialized Spring context
     * @param testClass the test class being executed
     */
    public static void handleProvidesBreakpoint(ConfigurableApplicationContext context, Class<?> testClass) throws Exception {
        Collection<Method> methods = CamelSpringTestHelper.getAllMethods(testClass);
        final List<Breakpoint> breakpoints = new LinkedList<>();

        for (Method method : methods) {
            if (AnnotationUtils.findAnnotation(method, ProvidesBreakpoint.class) != null) {
                Class<?>[] argTypes = method.getParameterTypes();
                if (argTypes.length != 0) {
                    throw new IllegalArgumentException(
                            "Method [" + method.getName()
                                                       + "] is annotated with ProvidesBreakpoint but is not a no-argument method.");
                } else if (!Breakpoint.class.isAssignableFrom(method.getReturnType())) {
                    throw new IllegalArgumentException(
                            "Method [" + method.getName()
                                                       + "] is annotated with ProvidesBreakpoint but does not return a Breakpoint.");
                } else if (!Modifier.isStatic(method.getModifiers())) {
                    throw new IllegalArgumentException(
                            "Method [" + method.getName()
                                                       + "] is annotated with ProvidesBreakpoint but is not static.");
                } else if (!Modifier.isPublic(method.getModifiers())) {
                    throw new IllegalArgumentException(
                            "Method [" + method.getName()
                                                       + "] is annotated with ProvidesBreakpoint but is not public.");
                }

                try {
                    breakpoints.add((Breakpoint) method.invoke(null));
                } catch (Exception e) {
                    throw new RuntimeException(
                            "Method [" + method.getName()
                                               + "] threw exception during evaluation.",
                            e);
                }
            }
        }

        if (breakpoints.size() != 0) {
            CamelSpringTestHelper.doToSpringCamelContexts(context, new CamelSpringTestHelper.DoToSpringCamelContextsStrategy() {

                public void execute(String contextName, SpringCamelContext camelContext) throws Exception {
                    Debugger debugger = camelContext.getDebugger();
                    if (debugger == null) {
                        debugger = new DefaultDebugger();
                        camelContext.setDebugger(debugger);
                    }
                    for (Breakpoint breakpoint : breakpoints) {
                        LOGGER.info("Adding Breakpoint [{}] to CamelContext with name [{}].", breakpoint, contextName);
                        debugger.addBreakpoint(breakpoint);
                    }
                }
            });
        }
    }

    /**
     * Handles updating shutdown timeouts on Camel contexts based on {@link ShutdownTimeout}.
     *
     * @param context the initialized Spring context
     * @param testClass the test class being executed
     */
    public static void handleShutdownTimeout(ConfigurableApplicationContext context, Class<?> testClass) throws Exception {
        final int shutdownTimeout;
        final TimeUnit shutdownTimeUnit;
        if (testClass.isAnnotationPresent(ShutdownTimeout.class)) {
            shutdownTimeout = testClass.getAnnotation(ShutdownTimeout.class).value();
            shutdownTimeUnit = testClass.getAnnotation(ShutdownTimeout.class).timeUnit();
        } else {
            // Default: 10 seconds when the annotation is absent.
            shutdownTimeout = 10;
            shutdownTimeUnit = TimeUnit.SECONDS;
        }

        CamelSpringTestHelper.doToSpringCamelContexts(context, new CamelSpringTestHelper.DoToSpringCamelContextsStrategy() {

            public void execute(String contextName, SpringCamelContext camelContext) throws Exception {
                LOGGER.info("Setting shutdown timeout to [{} {}] on CamelContext with name [{}].", shutdownTimeout,
                        shutdownTimeUnit, contextName);
                camelContext.getShutdownStrategy().setTimeout(shutdownTimeout);
                camelContext.getShutdownStrategy().setTimeUnit(shutdownTimeUnit);
            }
        });
    }

    /**
     * Handles auto-intercepting of endpoints with mocks based on {@link MockEndpoints}.
     *
     * @param context the initialized Spring context
     * @param testClass the test class being executed
     */
    public static void handleMockEndpoints(ConfigurableApplicationContext context, Class<?> testClass) throws Exception {
        if (testClass.isAnnotationPresent(MockEndpoints.class)) {
            final String mockEndpoints = testClass.getAnnotation(MockEndpoints.class).value();
            CamelSpringTestHelper.doToSpringCamelContexts(context, new CamelSpringTestHelper.DoToSpringCamelContextsStrategy() {

                public void execute(String contextName, SpringCamelContext camelContext) throws Exception {
                    LOGGER.info("Enabling auto mocking of endpoints matching pattern [{}] on CamelContext with name [{}].",
                            mockEndpoints, contextName);
                    camelContext.adapt(ExtendedCamelContext.class)
                            .registerEndpointCallback(new InterceptSendToMockEndpointStrategy(mockEndpoints));
                }
            });
        }
    }

    /**
     * Handles auto-intercepting of endpoints with mocks based on {@link MockEndpointsAndSkip} and skipping the original
     * endpoint.
     *
     * @param context the initialized Spring context
     * @param testClass the test class being executed
     */
    public static void handleMockEndpointsAndSkip(ConfigurableApplicationContext context, Class<?> testClass) throws Exception {
        if (testClass.isAnnotationPresent(MockEndpointsAndSkip.class)) {
            final String mockEndpoints = testClass.getAnnotation(MockEndpointsAndSkip.class).value();
            CamelSpringTestHelper.doToSpringCamelContexts(context, new CamelSpringTestHelper.DoToSpringCamelContextsStrategy() {

                public void execute(String contextName, SpringCamelContext camelContext) throws Exception {
                    // resolve the property place holders of the mockEndpoints
                    String mockEndpointsValue = camelContext.resolvePropertyPlaceholders(mockEndpoints);
                    LOGGER.info(
                            "Enabling auto mocking and skipping of endpoints matching pattern [{}] on CamelContext with name [{}].",
                            mockEndpointsValue, contextName);
                    camelContext.adapt(ExtendedCamelContext.class)
                            .registerEndpointCallback(new InterceptSendToMockEndpointStrategy(mockEndpointsValue, true));
                }
            });
        }
    }

    /**
     * Handles override this method to include and override properties with the Camel
     * {@link org.apache.camel.component.properties.PropertiesComponent}.
     * Providers are public static no-argument methods annotated with
     * {@link UseOverridePropertiesWithPropertiesComponent} returning {@link Properties};
     * all returned properties are merged and installed on the PropertiesComponent bean.
     *
     * @param context the initialized Spring context
     * @param testClass the test class being executed
     */
    public static void handleUseOverridePropertiesWithPropertiesComponent(
            ConfigurableApplicationContext context, Class<?> testClass)
            throws Exception {
        Collection<Method> methods = CamelSpringTestHelper.getAllMethods(testClass);
        final List<Properties> properties = new LinkedList<>();

        for (Method method : methods) {
            if (AnnotationUtils.findAnnotation(method, UseOverridePropertiesWithPropertiesComponent.class) != null) {
                Class<?>[] argTypes = method.getParameterTypes();
                if (argTypes.length > 0) {
                    throw new IllegalArgumentException(
                            "Method [" + method.getName()
                                                       + "] is annotated with UseOverridePropertiesWithPropertiesComponent but is not a no-argument method.");
                } else if (!Properties.class.isAssignableFrom(method.getReturnType())) {
                    throw new IllegalArgumentException(
                            "Method [" + method.getName()
                                                       + "] is annotated with UseOverridePropertiesWithPropertiesComponent but does not return a java.util.Properties.");
                } else if (!Modifier.isStatic(method.getModifiers())) {
                    throw new IllegalArgumentException(
                            "Method [" + method.getName()
                                                       + "] is annotated with UseOverridePropertiesWithPropertiesComponent but is not static.");
                } else if (!Modifier.isPublic(method.getModifiers())) {
                    throw new IllegalArgumentException(
                            "Method [" + method.getName()
                                                       + "] is annotated with UseOverridePropertiesWithPropertiesComponent but is not public.");
                }

                try {
                    properties.add((Properties) method.invoke(null));
                } catch (Exception e) {
                    throw new RuntimeException(
                            "Method [" + method.getName()
                                               + "] threw exception during evaluation.",
                            e);
                }
            }
        }

        Properties extra = new Properties();
        for (Properties prop : properties) {
            extra.putAll(prop);
        }
        if (!extra.isEmpty()) {
            // Install a BeanPostProcessor so the overrides are applied to the
            // PropertiesComponent as soon as Spring creates the bean.
            context.addBeanFactoryPostProcessor(beanFactory -> beanFactory.addBeanPostProcessor(new BeanPostProcessor() {
                @Override
                public Object postProcessBeforeInitialization(Object bean, String beanName) {
                    if (bean instanceof PropertiesComponent) {
                        PropertiesComponent pc = (PropertiesComponent) bean;
                        LOGGER.info("Using {} properties to override any existing properties on the PropertiesComponent",
                                extra.size());
                        pc.setOverrideProperties(extra);
                    }
                    return bean;
                }
            }));
        }
    }

    /**
     * Handles starting of Camel contexts based on {@link UseAdviceWith} and other state in the JVM.
     *
     * @param context the initialized Spring context
     * @param testClass the test class being executed
     */
    public static void handleCamelContextStartup(ConfigurableApplicationContext context, Class<?> testClass) throws Exception {
        boolean skip = "true".equalsIgnoreCase(System.getProperty("skipStartingCamelContext"));
        if (skip) {
            LOGGER.info("Skipping starting CamelContext(s) as system property skipStartingCamelContext is set to be true.");
        } else if (testClass.isAnnotationPresent(UseAdviceWith.class)) {
            if (testClass.getAnnotation(UseAdviceWith.class).value()) {
                LOGGER.info(
                        "Skipping starting CamelContext(s) as UseAdviceWith annotation was found and isUseAdviceWith is set to true.");
                skip = true;
            } else {
                LOGGER.info(
                        "Starting CamelContext(s) as UseAdviceWith annotation was found, but isUseAdviceWith is set to false.");
                skip = false;
            }
        }

        if (!skip) {
            CamelSpringTestHelper.doToSpringCamelContexts(context, new CamelSpringTestHelper.DoToSpringCamelContextsStrategy() {

                public void execute(
                        String contextName,
                        SpringCamelContext camelContext)
                        throws Exception {
                    if (!camelContext.isStarted()) {
                        LOGGER.info("Starting CamelContext with name [{}].", contextName);
                        camelContext.start();
                    } else {
                        LOGGER.debug("CamelContext with name [{}] already started.", contextName);
                    }
                }
            });
        }
    }
}
// Copyright (C) 2009 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.httpd.rpc.patch; import com.google.gerrit.common.data.CommentDetail; import com.google.gerrit.common.data.PatchScript; import com.google.gerrit.common.data.PatchScript.DisplayMethod; import com.google.gerrit.prettify.common.EditList; import com.google.gerrit.prettify.common.SparseFileContent; import com.google.gerrit.reviewdb.AccountDiffPreference; import com.google.gerrit.reviewdb.Change; import com.google.gerrit.reviewdb.Patch; import com.google.gerrit.reviewdb.PatchLineComment; import com.google.gerrit.reviewdb.AccountDiffPreference.Whitespace; import com.google.gerrit.server.FileTypeRegistry; import com.google.gerrit.server.patch.PatchListEntry; import com.google.gerrit.server.patch.Text; import com.google.inject.Inject; import eu.medsea.mimeutil.MimeType; import eu.medsea.mimeutil.MimeUtil2; import org.eclipse.jgit.diff.Edit; import org.eclipse.jgit.errors.CorruptObjectException; import org.eclipse.jgit.errors.IncorrectObjectTypeException; import org.eclipse.jgit.errors.MissingObjectException; import org.eclipse.jgit.lib.Constants; import org.eclipse.jgit.lib.FileMode; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectReader; import org.eclipse.jgit.lib.Repository; import org.eclipse.jgit.revwalk.RevTree; import org.eclipse.jgit.revwalk.RevWalk; import org.eclipse.jgit.treewalk.TreeWalk; import java.io.IOException; 
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

/**
 * Builds a {@link PatchScript} (the client-side representation of a file diff)
 * for a single file between two tree/commit states, honoring the user's diff
 * preferences (context size, whitespace handling, syntax highlighting) and
 * making sure all inline comment locations stay visible in the packed output.
 */
class PatchScriptBuilder {
  /** Context value used to mean "send the whole file". */
  static final int MAX_CONTEXT = 5000000;

  /** Line-count threshold above which expensive features are disabled. */
  static final int BIG_FILE = 9000;

  // Orders edits by their starting line on side A so hunks can be packed
  // in file order.
  private static final Comparator<Edit> EDIT_SORT = new Comparator<Edit>() {
    @Override
    public int compare(final Edit o1, final Edit o2) {
      return o1.getBeginA() - o2.getBeginA();
    }
  };

  private Repository db;
  private ObjectReader reader;
  private Change change;
  private AccountDiffPreference diffPrefs;
  private boolean againstParent;
  private ObjectId aId;
  private ObjectId bId;

  private final Side a;
  private final Side b;
  private List<Edit> edits;
  private final FileTypeRegistry registry;
  private int context;

  @Inject
  PatchScriptBuilder(final FileTypeRegistry ftr) {
    a = new Side();
    b = new Side();
    registry = ftr;
  }

  void setRepository(final Repository r) {
    db = r;
  }

  void setChange(final Change c) {
    this.change = c;
  }

  /** Installs the user's diff preferences and clamps the context to MAX_CONTEXT. */
  void setDiffPrefs(final AccountDiffPreference dp) {
    diffPrefs = dp;

    context = diffPrefs.getContext();
    if (context == AccountDiffPreference.WHOLE_FILE_CONTEXT) {
      context = MAX_CONTEXT;
    } else if (context > MAX_CONTEXT) {
      context = MAX_CONTEXT;
    }
  }

  /**
   * Selects the two object ids to diff.
   *
   * @param ap true if side A is the parent commit of side B
   */
  void setTrees(final boolean ap, final ObjectId a, final ObjectId b) {
    againstParent = ap;
    aId = a;
    bId = b;
  }

  /**
   * Produces the PatchScript for the given file entry; opens (and always
   * releases) an ObjectReader for the duration of the build.
   */
  PatchScript toPatchScript(final PatchListEntry content,
      final boolean intralineDifference, final CommentDetail comments,
      final List<Patch> history) throws IOException {
    reader = db.newObjectReader();
    try {
      return build(content, intralineDifference, comments, history);
    } finally {
      reader.release();
    }
  }

  // Resolves both sides, injects dummy edits for comment locations, then
  // packs the content lines actually needed by the client.
  private PatchScript build(final PatchListEntry content,
      final boolean intralineDifference, final CommentDetail comments,
      final List<Patch> history) throws IOException {
    a.path = oldName(content);
    b.path = newName(content);

    a.resolve(null, aId);
    b.resolve(a, bId);

    edits = new ArrayList<Edit>(content.getEdits());
    ensureCommentsVisible(comments);

    boolean hugeFile = false;
    if (a.mode == FileMode.GITLINK || b.mode == FileMode.GITLINK) {
      // Submodule links carry no file content to pack.

    } else if (a.src == b.src && a.size() <= context
        && content.getEdits().isEmpty()) {
      // Odd special case; the files are identical (100% rename or copy)
      // and the user has asked for context that is larger than the file.
      // Send them the entire file, with an empty edit after the last line.
      //
      for (int i = 0; i < a.size(); i++) {
        a.addLine(i);
      }
      edits = new ArrayList<Edit>(1);
      edits.add(new Edit(a.size(), a.size()));

    } else {
      if (BIG_FILE < Math.max(a.size(), b.size())) {
        // IF the file is really large, we disable things to avoid choking
        // the browser client.
        //
        diffPrefs.setContext((short) Math.min(25, context));
        diffPrefs.setSyntaxHighlighting(false);
        context = diffPrefs.getContext();
        hugeFile = true;

      } else if (diffPrefs.isSyntaxHighlighting()) {
        // In order to syntax highlight the file properly we need to
        // give the client the complete file contents. So force our
        // context temporarily to the complete file size.
        //
        context = MAX_CONTEXT;
      }

      packContent(diffPrefs.getIgnoreWhitespace() != Whitespace.IGNORE_NONE);
    }

    return new PatchScript(change.getKey(), content.getChangeType(), content
        .getOldName(), content.getNewName(), a.fileMode, b.fileMode, content
        .getHeaderLines(), diffPrefs, a.dst, b.dst, edits, a.displayMethod,
        b.displayMethod, comments, history, hugeFile, intralineDifference);
  }

  // Path to read for side A; null when the file did not exist before (ADDED).
  // For DELETED/MODIFIED the new name is used because the path is unchanged.
  private static String oldName(final PatchListEntry entry) {
    switch (entry.getChangeType()) {
      case ADDED:
        return null;
      case DELETED:
      case MODIFIED:
        return entry.getNewName();
      case COPIED:
      case RENAMED:
      default:
        return entry.getOldName();
    }
  }

  // Path to read for side B; null when the file no longer exists (DELETED).
  private static String newName(final PatchListEntry entry) {
    switch (entry.getChangeType()) {
      case DELETED:
        return null;
      case ADDED:
      case MODIFIED:
      case COPIED:
      case RENAMED:
      default:
        return entry.getNewName();
    }
  }

  /**
   * Adds zero-length Edits at each commented line so packContent keeps the
   * surrounding region visible even if no real edit touches it.
   */
  private void ensureCommentsVisible(final CommentDetail comments) {
    if (comments.getCommentsA().isEmpty() && comments.getCommentsB().isEmpty()) {
      // No comments, no additional dummy edits are required.
      //
      return;
    }

    // Construct empty Edit blocks around each location where a comment is.
    // This will force the later packContent method to include the regions
    // containing comments, potentially combining those regions together if
    // they have overlapping contexts. UI renders will also be able to make
    // correct hunks from this, but because the Edit is empty they will not
    // style it specially.
    //
    final List<Edit> empty = new ArrayList<Edit>();
    int lastLine;

    lastLine = -1;
    for (PatchLineComment plc : comments.getCommentsA()) {
      final int a = plc.getLine();
      if (lastLine != a) {
        // Comment lines are 1-based; edits are 0-based.
        final int b = mapA2B(a - 1);
        if (0 <= b) {
          safeAdd(empty, new Edit(a - 1, b));
        }
        lastLine = a;
      }
    }

    lastLine = -1;
    for (PatchLineComment plc : comments.getCommentsB()) {
      final int b = plc.getLine();
      if (lastLine != b) {
        final int a = mapB2A(b - 1);
        if (0 <= a) {
          safeAdd(empty, new Edit(a, b - 1));
        }
        lastLine = b;
      }
    }

    // Sort the final list by the index in A, so packContent can combine
    // them correctly later.
    //
    edits.addAll(empty);
    Collections.sort(edits, EDIT_SORT);
  }

  // Adds toAdd only when no existing edit already covers that position on
  // either side, to avoid redundant dummy edits.
  private void safeAdd(final List<Edit> empty, final Edit toAdd) {
    final int a = toAdd.getBeginA();
    final int b = toAdd.getBeginB();
    for (final Edit e : edits) {
      if (e.getBeginA() <= a && a <= e.getEndA()) {
        return;
      }
      if (e.getBeginB() <= b && b <= e.getEndB()) {
        return;
      }
    }
    empty.add(toAdd);
  }

  // Maps a 0-based line on side A to the corresponding line on side B,
  // or -1 when the line falls inside an edited region (no counterpart).
  private int mapA2B(final int a) {
    if (edits.isEmpty()) {
      // Magic special case of an unmodified file.
      //
      return a;
    }

    for (int i = 0; i < edits.size(); i++) {
      final Edit e = edits.get(i);
      if (a < e.getBeginA()) {
        if (i == 0) {
          // Special case of context at start of file.
          //
          return a;
        }
        return e.getBeginB() - (e.getBeginA() - a);
      }
      if (e.getBeginA() <= a && a <= e.getEndA()) {
        return -1;
      }
    }

    final Edit last = edits.get(edits.size() - 1);
    return last.getBeginB() + (a - last.getEndA());
  }

  // Inverse of mapA2B: maps a 0-based line on side B to side A, or -1 when
  // the line falls inside an edited region.
  private int mapB2A(final int b) {
    if (edits.isEmpty()) {
      // Magic special case of an unmodified file.
      //
      return b;
    }

    for (int i = 0; i < edits.size(); i++) {
      final Edit e = edits.get(i);
      if (b < e.getBeginB()) {
        if (i == 0) {
          // Special case of context at start of file.
          //
          return b;
        }
        return e.getBeginA() - (e.getBeginB() - b);
      }
      if (e.getBeginB() <= b && b <= e.getEndB()) {
        return -1;
      }
    }

    final Edit last = edits.get(edits.size() - 1);
    return last.getBeginA() + (b - last.getEndB());
  }

  /**
   * Copies into each side's sparse destination only the lines the client
   * needs: context lines around hunks plus the edited lines themselves.
   */
  private void packContent(boolean ignoredWhitespace) {
    EditList list = new EditList(edits, context, a.size(), b.size());
    for (final EditList.Hunk hunk : list.getHunks()) {
      while (hunk.next()) {
        if (hunk.isContextLine()) {
          final String lineA = a.src.getLine(hunk.getCurA());
          a.dst.addLine(hunk.getCurA(), lineA);

          if (ignoredWhitespace) {
            // If we ignored whitespace in some form, also get the line
            // from b when it does not exactly match the line from a.
            //
            final String lineB = b.src.getLine(hunk.getCurB());
            if (!lineA.equals(lineB)) {
              b.dst.addLine(hunk.getCurB(), lineB);
            }
          }
          hunk.incBoth();
          continue;
        }

        if (hunk.isDeletedA()) {
          a.addLine(hunk.getCurA());
          hunk.incA();
        }

        if (hunk.isInsertedB()) {
          b.addLine(hunk.getCurB());
          hunk.incB();
        }
      }
    }
  }

  /**
   * One side (old or new) of the comparison: resolves the blob for a path
   * within a tree, detects its MIME type / display method, and accumulates
   * the packed destination lines.
   */
  private class Side {
    String path;
    ObjectId id;
    FileMode mode;
    byte[] srcContent;
    Text src;
    MimeType mimeType = MimeUtil2.UNKNOWN_MIME_TYPE;
    DisplayMethod displayMethod = DisplayMethod.DIFF;
    PatchScript.FileMode fileMode = PatchScript.FileMode.FILE;
    final SparseFileContent dst = new SparseFileContent();

    int size() {
      return src != null ? src.size() : 0;
    }

    void addLine(int line) {
      dst.addLine(line, src.getLine(line));
    }

    /**
     * Loads this side's content from the given commit/tree, reusing the
     * other side's data when both resolve to the identical blob and mode.
     * The synthetic commit-message "file" is handled specially.
     */
    void resolve(final Side other, final ObjectId within) throws IOException {
      try {
        final boolean reuse;
        if (Patch.COMMIT_MSG.equals(path)) {
          if (againstParent && (aId == within || within.equals(aId))) {
            // The parent side of a commit-message diff is always empty.
            id = ObjectId.zeroId();
            src = Text.EMPTY;
            srcContent = Text.NO_BYTES;
            mode = FileMode.MISSING;
            displayMethod = DisplayMethod.NONE;
          } else {
            id = within;
            src = Text.forCommit(db, reader, within);
            srcContent = src.getContent();
            if (src == Text.EMPTY) {
              mode = FileMode.MISSING;
              displayMethod = DisplayMethod.NONE;
            } else {
              mode = FileMode.REGULAR_FILE;
            }
          }
          reuse = false;
        } else {
          final TreeWalk tw = find(within);

          id = tw != null ? tw.getObjectId(0) : ObjectId.zeroId();
          mode = tw != null ? tw.getFileMode(0) : FileMode.MISSING;
          reuse = other != null && other.id.equals(id) && other.mode == mode;

          if (reuse) {
            srcContent = other.srcContent;

          } else if (mode.getObjectType() == Constants.OBJ_BLOB) {
            srcContent = Text.asByteArray(db.open(id, Constants.OBJ_BLOB));

          } else {
            srcContent = Text.NO_BYTES;
          }

          if (reuse) {
            mimeType = other.mimeType;
            displayMethod = other.displayMethod;
            src = other.src;

          } else if (srcContent.length > 0 && FileMode.SYMLINK != mode) {
            mimeType = registry.getMimeType(path, srcContent);
            if ("image".equals(mimeType.getMediaType())
                && registry.isSafeInline(mimeType)) {
              displayMethod = DisplayMethod.IMG;
            }
          }
        }

        if (mode == FileMode.MISSING) {
          displayMethod = DisplayMethod.NONE;
        }

        if (!reuse) {
          if (srcContent == Text.NO_BYTES) {
            src = Text.EMPTY;
          } else {
            src = new Text(srcContent);
          }
        }

        if (srcContent.length > 0 && srcContent[srcContent.length - 1] != '\n') {
          dst.setMissingNewlineAtEnd(true);
        }
        dst.setSize(size());
        dst.setPath(path);

        if (mode == FileMode.SYMLINK) {
          fileMode = PatchScript.FileMode.SYMLINK;
        } else if (mode == FileMode.GITLINK) {
          fileMode = PatchScript.FileMode.GITLINK;
        }
      } catch (IOException err) {
        // Re-wrap with the object name and path for a useful message.
        throw new IOException("Cannot read " + within.name() + ":" + path, err);
      }
    }

    // Locates this side's path within the given tree; null when the path is
    // absent or this side has no path (added/deleted file).
    private TreeWalk find(final ObjectId within) throws MissingObjectException,
        IncorrectObjectTypeException, CorruptObjectException, IOException {
      if (path == null || within == null) {
        return null;
      }
      final RevWalk rw = new RevWalk(reader);
      final RevTree tree = rw.parseTree(within);
      return TreeWalk.forPath(reader, path, tree);
    }
  }
}
package io.github.taverna_extras.component.registry; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.io.File; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import io.github.taverna_extras.component.api.ComponentException; import io.github.taverna_extras.component.api.Family; import io.github.taverna_extras.component.api.License; import io.github.taverna_extras.component.api.SharingPolicy; import io.github.taverna_extras.component.api.Version; import io.github.taverna_extras.component.api.profile.Profile; /** * A ComponentRegistry contains ComponentFamilies and ComponentProfiles. 
* * @author David Withers */ public abstract class ComponentRegistry implements io.github.taverna_extras.component.api.Registry { protected Map<String, Family> familyCache = new HashMap<>(); protected List<Profile> profileCache = new ArrayList<>(); protected List<SharingPolicy> permissionCache = new ArrayList<>(); protected List<License> licenseCache = new ArrayList<>(); private URL registryBase; protected ComponentRegistry(URL registryBase) throws ComponentException { this.registryBase = registryBase; } protected ComponentRegistry(File fileDir) throws ComponentException { try { this.registryBase = fileDir.toURI().toURL(); } catch (MalformedURLException e) { throw new ComponentException(e); } } @Override public final List<Family> getComponentFamilies() throws ComponentException { checkFamilyCache(); return new ArrayList<Family>(familyCache.values()); } private void checkFamilyCache() throws ComponentException { synchronized (familyCache) { if (familyCache.isEmpty()) populateFamilyCache(); } } protected abstract void populateFamilyCache() throws ComponentException; @Override public final Family getComponentFamily(String familyName) throws ComponentException { checkFamilyCache(); return familyCache.get(familyName); } @Override public final Family createComponentFamily(String familyName, Profile componentProfile, String description, License license, SharingPolicy sharingPolicy) throws ComponentException { if (familyName == null) throw new ComponentException( "Component family name must not be null"); if (componentProfile == null) throw new ComponentException("Component profile must not be null"); if (getComponentFamily(familyName) != null) throw new ComponentException("Component family already exists"); Family result = internalCreateComponentFamily(familyName, componentProfile, description, license, sharingPolicy); checkFamilyCache(); synchronized (familyCache) { familyCache.put(familyName, result); } return result; } protected abstract Family 
internalCreateComponentFamily(String familyName, Profile componentProfile, String description, License license, SharingPolicy sharingPolicy) throws ComponentException; @Override public final void removeComponentFamily(Family componentFamily) throws ComponentException { if (componentFamily != null) { checkFamilyCache(); synchronized (familyCache) { familyCache.remove(componentFamily.getName()); } internalRemoveComponentFamily(componentFamily); } } protected abstract void internalRemoveComponentFamily(Family componentFamily) throws ComponentException; @Override public final URL getRegistryBase() { return registryBase; } @Override public final String getRegistryBaseString() { String urlString = getRegistryBase().toString(); if (urlString.endsWith("/")) urlString = urlString.substring(0, urlString.length() - 1); return urlString; } private void checkProfileCache() throws ComponentException { synchronized (profileCache) { if (profileCache.isEmpty()) populateProfileCache(); } } protected abstract void populateProfileCache() throws ComponentException; @Override public final List<Profile> getComponentProfiles() throws ComponentException { checkProfileCache(); return profileCache; } @Override public final Profile getComponentProfile(String id) throws ComponentException { // TODO use a map instead of a *linear search*... 
for (Profile p : getComponentProfiles()) if (p.getId().equals(id)) return p; return null; } @Override public final Profile addComponentProfile(Profile componentProfile, License license, SharingPolicy sharingPolicy) throws ComponentException { if (componentProfile == null) { throw new ComponentException("componentProfile is null"); } Profile result = null; checkProfileCache(); for (Profile p : getComponentProfiles()) if (p.getId().equals(componentProfile.getId())) { result = p; break; } if (result == null) { result = internalAddComponentProfile(componentProfile, license, sharingPolicy); synchronized (profileCache) { profileCache.add(result); } } return result; } protected abstract Profile internalAddComponentProfile( Profile componentProfile, License license, SharingPolicy sharingPolicy) throws ComponentException; private void checkPermissionCache() { synchronized (permissionCache) { if (permissionCache.isEmpty()) populatePermissionCache(); } } protected abstract void populatePermissionCache(); @Override public final List<SharingPolicy> getPermissions() throws ComponentException { checkPermissionCache(); return permissionCache; } private void checkLicenseCache() { synchronized (licenseCache) { if (licenseCache.isEmpty()) populateLicenseCache(); } } protected abstract void populateLicenseCache(); @Override public final List<License> getLicenses() throws ComponentException { checkLicenseCache(); return licenseCache; } protected License getLicenseByAbbreviation(String licenseString) throws ComponentException { checkLicenseCache(); for (License l : getLicenses()) if (l.getAbbreviation().equals(licenseString)) return l; return null; } @Override public abstract License getPreferredLicense() throws ComponentException; @Override public abstract Set<Version.ID> searchForComponents(String prefixString, String text) throws ComponentException; @Override public String toString() { String[] names = getClass().getName().split("\\."); return names[names.length-1] + ": " + 
registryBase; } }
/** * */ package nz.co.senanque.dbloader; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.io.FilenameFilter; import java.io.IOException; import java.io.Reader; import java.io.StringReader; import java.sql.Connection; import java.sql.Driver; import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; import java.text.MessageFormat; import java.util.Properties; import java.util.Set; import java.util.TreeSet; import org.apache.commons.lang.time.DurationFormatUtils; import org.apache.tools.ant.Task; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.util.StringUtils; /** * @author roger * */ public class RunDatabaseScripts extends Task { private static final Logger logger = LoggerFactory.getLogger(RunDatabaseScripts.class); String m_dir; String m_driver; String m_jdbcURL; boolean m_drop; boolean m_debug; String m_systemPassword; String m_systemUser = "SYSTEM"; public String getSystemUser() { return m_systemUser; } public void setSystemUser(String systemUser) { m_systemUser = systemUser; } String m_defaultTableSpace = "USERS"; String m_currentFile = ""; String m_delimiter = "-"; public String getDelimiter() { return m_delimiter; } public void setDelimiter(String delimiter) { m_delimiter = delimiter; } String[] m_ignoreCommands = new String[]{"PROMPT","EXIT","--","SET","REM","COMMIT"}; private int m_lineCount=0; private int m_errorCount=0; private boolean m_ignoreConstraints; public boolean isDebug() { return m_debug; } public void setDebug(boolean debug) { m_debug = debug; } public String getDefaultTableSpace() { return m_defaultTableSpace; } public void setDefaultTableSpace(String defaultTableSpace) { m_defaultTableSpace = defaultTableSpace; } public String getDir() { return m_dir; } public void setDir(String dir) { m_dir = dir; } public String getDriver() { return m_driver; } public void setDriver(String 
driver) { m_driver = driver; } public String getJdbcURL() { return m_jdbcURL; } public void setJdbcURL(String jdbcURL) { m_jdbcURL = jdbcURL; } public boolean isDrop() { return m_drop; } public void setDrop(boolean drop) { m_drop = drop; } public String getSystemPassword() { return m_systemPassword; } public void setSystemPassword(String systemPassword) { m_systemPassword = systemPassword; } public void execute() { long start = System.currentTimeMillis(); registerDriver(); File dir = new File(getDir()); if (!dir.isDirectory()) { throw new RuntimeException(getDir()+" is not a directory"); } File[] files = dir.listFiles(new FilenameFilter(){ public boolean accept(File arg0, String arg1) { if (arg1.toUpperCase().endsWith("SQL")) { return true; } return false; }}); Set<String> sqlFiles = new TreeSet<String>(); Set<String> users = new TreeSet<String>(); for (File f: files) { String name = f.getName(); try { String userName = extractUserName(name); sqlFiles.add(name); users.add(userName); } catch (Exception e) { logger.warn("Problem with {} - ignoring {}",name,e.getMessage()); } } if (m_drop) { dropUsers(users); } for (String f: sqlFiles) { m_currentFile = f; processFile(getDir()+"/"+f,f); } logger.info(MessageFormat.format("{0,number,integer} files processed in {1}", files.length,DurationFormatUtils.formatDuration(System.currentTimeMillis()-start,"HH:mm:ss.SS"))); if (m_errorCount>0) { throw new RuntimeException(MessageFormat.format("Error count: {0,number,integer}. 
See log for details",m_errorCount)); } } private void displayVersion() { try { Properties p = new Properties(); p.load(this.getClass().getResourceAsStream("madura-dbloader")); System.out.println("madura-dbloader "+p.getProperty("build.version")); } catch (IOException e) { // ignore } } private void registerDriver() { try { Driver driver = (Driver)Class.forName(getDriver()).newInstance(); DriverManager.registerDriver(driver); } catch (Exception e) { throw new RuntimeException(e); } } private String extractUserName(String name) { String delimiter = getDelimiter(); int i = name.indexOf(delimiter); if (i == -1) { throw new RuntimeException(MessageFormat.format("{0} missing first {1}",name,delimiter)); } int i1 = name.indexOf(delimiter,i+1); if (i1 == -1) { throw new RuntimeException(MessageFormat.format("{0} missing second {1}",name,delimiter)); } String ret = name.substring(i+1,i1); return ret; } private void processFile(String f, String shortF) { String user = extractUserName(f); String password = user.toLowerCase(); if (user.equals(getSystemUser())) { password = getSystemPassword(); } logger.info(MessageFormat.format("processing file: {0}",f)); Connection connection = getJDBCConnection(user,password); try { connection.setAutoCommit(false); processScript(connection, new FileReader(f),false,shortF); connection.commit(); connection.close(); } catch (Exception e) { logger.error(e.getMessage(),e); } } private void processScript(Connection connection, Reader reader, boolean keepGoingOnError,String fileName) { Statement statement=null; try { statement = connection.createStatement(); } catch (SQLException e1) { throw new RuntimeException(e1); } m_lineCount = 0; BufferedReader br = new BufferedReader(reader); String s = getNextStatement(br); while (s != null) { try { String s1 = s.trim().toUpperCase(); logger.debug(s); if ((m_lineCount % 1000) == 0) { logger.info(MessageFormat.format("processing line: {0,number,integer} of {1}",m_lineCount,m_currentFile)); } boolean 
ignoreCommand = false; if (!StringUtils.hasLength(s)) { ignoreCommand = true; } for (String ignore: m_ignoreCommands) { if (s1.startsWith(ignore)) { ignoreCommand = true; break; } } if (m_ignoreConstraints) { if (s1.startsWith("ALTER TABLE") && s1.indexOf("ADD CONSTRAINT")>0) { ignoreCommand = true; } } if (!ignoreCommand) { boolean b = statement.execute(s); if (b) { // We have a resultSet ResultSet rs = statement.getResultSet(); ResultSetMetaData rsmd = rs.getMetaData(); int columnCount = rsmd.getColumnCount(); { StringBuilder sb = new StringBuilder(); for (int i=1;i<=columnCount;i++) { sb.append(rsmd.getColumnName(i)); sb.append(","); } debug(sb); } while (rs.next()) { StringBuilder sb = new StringBuilder(); for (int i=1;i<=columnCount;i++) { sb.append(rs.getString(i)); sb.append(","); } debug(sb); } rs.close(); } } } catch (SQLException e) { // e.printStackTrace(); String s1 = e.getMessage(); String s2 = MessageFormat.format("error at line: {0,number,integer} of {1}: {2}\n{3}",new Object[]{m_lineCount,fileName,s1,s}); if (!s1.startsWith("ORA-00955")) { if (!keepGoingOnError) { logger.error(s2); m_errorCount++; break; } } logger.warn(s2); } s = getNextStatement(br); } try { statement.close(); } catch (SQLException e) { e.printStackTrace(); } } private void debug(Object message) { if (m_debug) { logger.info("[debug] "+message.toString()); } } private String getNextStatement(BufferedReader br) { StringBuilder ret = new StringBuilder(); try { String line = getNextLine(br); while (line != null) { line = line.trim(); String s = line.toUpperCase(); ret.append(line); ret.append(" "); if (s.startsWith("CREATE OR REPLACE FUNCTION")) { huntForEnd(br,ret); } if (s.startsWith("CREATE OR REPLACE PROCEDURE")) { huntForEnd(br,ret); } if (s.startsWith("CREATE OR REPLACE TRIGGER")) { huntForEnd(br,ret); } if (ret.toString().endsWith("; ")) { break; } line = getNextLine(br); } if (ret.length()==0) { return null; } return ret.substring(0, ret.length()-2).toString(); } catch 
(IOException e) { throw new RuntimeException(e); } } private void huntForEnd(BufferedReader br, StringBuilder ret) { try { String line = getNextLine(br); while (line != null) { line = line.trim(); if (!line.startsWith("--")) { if (line.equals("/")) { ret.append("; "); break; } ret.append(line); ret.append(" "); } line = getNextLine(br); } } catch (IOException e) { throw new RuntimeException(e); } } private String getNextLine(BufferedReader br) throws IOException { String line = br.readLine(); m_lineCount++; while (line != null) { String s = line.trim().toUpperCase(); if (!s.startsWith("--") && !s.startsWith("REM") && !s.startsWith("#") && !s.startsWith("//")) { int dash = line.indexOf("--"); if (dash != -1) { String newLine = line.substring(0,dash); int singleQuotes = StringUtils.countOccurrencesOf(newLine, "'"); int doubleQuotes = StringUtils.countOccurrencesOf(newLine, "\""); if ((singleQuotes % 2) == 0 && (doubleQuotes %2 == 0)) { // only if the quotes balance. line = newLine; } } return line; } line = br.readLine(); m_lineCount++; } return null; } private void dropUsers(Set<String> users) { Connection connection = getJDBCConnection("SYSTEM",getSystemPassword()); logger.info(MessageFormat.format("dropping users: {0}",users.toString())); StringBuilder sb = new StringBuilder(); for (String user: users) { if (user.equals(getSystemUser())) { continue; } // echo "drop user $CurUser cascade;" > tmprecreateorauser.sql sb.append("drop user "+user+" cascade;\n"); } processScript(connection, new StringReader(sb.toString()),true, "internal-drop users"); logger.info("dropped users"); logger.info(MessageFormat.format("recreating users: {0}",users.toString())); sb = new StringBuilder(); for (String user: users) { if (user.equals(getSystemUser())) { continue; } // echo "CREATE USER \"$CurUser\" PROFILE \"DEFAULT\" " >> tmprecreateorauser.sql // echo "IDENTIFIED BY \"$CurPassword\" DEFAULT TABLESPACE \"$DEFAULTTABLESPACE\" " >> tmprecreateorauser.sql // echo "ACCOUNT UNLOCK;" 
>> tmprecreateorauser.sql // echo "GRANT \"CONNECT\", \"RESOURCE\" TO \"$CurUser\"; " >> tmprecreateorauser.sql // echo "GRANT CREATE SESSION, CREATE TABLE, CREATE VIEW, CREATE PROCEDURE, CREATE TRIGGER, CREATE SEQUENCE TO \"$CurUser\";" >> tmprecreateorauser.sql // echo "quit" >> tmprecreateorauser.sql sb.append("create user "+user+" profile \"DEFAULT\" \n"); sb.append(" identified by "+user.toLowerCase()+" default tablespace \""+getDefaultTableSpace()+"\" \n"); sb.append(" account unlock;\n"); sb.append("grant \"CONNECT\",\"RESOURCE\" to "+user+";\n"); sb.append("grant CREATE SESSION, CREATE TABLE, CREATE VIEW, CREATE PROCEDURE, CREATE TRIGGER, CREATE SEQUENCE to "+user+";\n"); } processScript(connection, new StringReader(sb.toString()), false, "internal-create users"); try { connection.close(); } catch (SQLException e) { logger.debug(MessageFormat.format("error on connection close: {0}",e.getMessage())); } logger.info("dropping/recreating users complete"); } private Connection getJDBCConnection(String user, String password) { try { debug(MessageFormat.format("connecting to {0} {1}", getJdbcURL(),user)); return DriverManager.getConnection(getJdbcURL(), user, password); } catch (SQLException e) { throw new RuntimeException(e); } } public boolean isIgnoreConstraints() { return m_ignoreConstraints; } public void setIgnoreConstraints(boolean ignoreConstraints) { m_ignoreConstraints = ignoreConstraints; } }
/* * Copyright 2013 Rackspace * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.rackspacecloud.blueflood.outputs.serializers; import com.google.common.collect.Collections2; import com.google.common.collect.Sets; import com.rackspacecloud.blueflood.outputs.formats.MetricData; import com.rackspacecloud.blueflood.outputs.serializers.BasicRollupsOutputSerializer.MetricStat; import com.rackspacecloud.blueflood.outputs.utils.PlotRequestParser; import com.rackspacecloud.blueflood.types.BasicRollup; import com.rackspacecloud.blueflood.types.CounterRollup; import com.rackspacecloud.blueflood.types.Points; import com.rackspacecloud.blueflood.exceptions.SerializationException; import com.rackspacecloud.blueflood.types.SimpleNumber; import org.json.simple.JSONArray; import org.json.simple.JSONObject; import org.junit.Assert; import org.junit.Test; import java.util.HashSet; import java.util.Set; public class JSONBasicRollupOutputSerializerTest { private final Set<MetricStat> filterStats; public JSONBasicRollupOutputSerializerTest() { filterStats = new HashSet<MetricStat>(); filterStats.add(MetricStat.AVERAGE); filterStats.add(MetricStat.MIN); filterStats.add(MetricStat.MAX); } @Test public void testTransformRollupDataAtFullRes() throws Exception { final JSONBasicRollupsOutputSerializer serializer = new JSONBasicRollupsOutputSerializer(); final MetricData metricData = new MetricData(FakeMetricDataGenerator.generateFakeFullResPoints(), "unknown", MetricData.Type.NUMBER); JSONObject 
metricDataJSON = serializer.transformRollupData(metricData, filterStats); final JSONArray data = (JSONArray) metricDataJSON.get("values"); // Assert that we have some data to test Assert.assertTrue(data.size() > 0); for (int i = 0; i < data.size(); i++) { final JSONObject dataJSON = (JSONObject) data.get(i); final Points.Point<SimpleNumber> point = (Points.Point<SimpleNumber>) metricData.getData().getPoints().get(dataJSON.get("timestamp")); Assert.assertEquals(point.getData().getValue(), dataJSON.get("average")); Assert.assertEquals(point.getData().getValue(), dataJSON.get("min")); Assert.assertEquals(point.getData().getValue(), dataJSON.get("max")); // Assert that variance isn't present Assert.assertNull(dataJSON.get("variance")); // Assert numPoints isn't present Assert.assertNull(dataJSON.get("numPoints")); } } @Test public void testTransformRollupDataForCoarserGran() throws Exception { final JSONBasicRollupsOutputSerializer serializer = new JSONBasicRollupsOutputSerializer(); final MetricData metricData = new MetricData(FakeMetricDataGenerator.generateFakeRollupPoints(), "unknown", MetricData.Type.NUMBER); Set<MetricStat> filters = new HashSet<MetricStat>(); filters.add(MetricStat.AVERAGE); filters.add(MetricStat.MIN); filters.add(MetricStat.MAX); filters.add(MetricStat.NUM_POINTS); JSONObject metricDataJSON = serializer.transformRollupData(metricData, filters); final JSONArray data = (JSONArray) metricDataJSON.get("values"); // Assert that we have some data to test Assert.assertTrue(data.size() > 0); for (int i = 0; i < data.size(); i++) { final JSONObject dataJSON = (JSONObject) data.get(i); final Points.Point point = (Points.Point) metricData.getData().getPoints().get(dataJSON.get("timestamp")); long numPoints = ((BasicRollup) point.getData()).getCount(); Assert.assertEquals(numPoints, dataJSON.get("numPoints")); if (numPoints == 0) { Assert.assertNull(dataJSON.get("average")); Assert.assertNull(dataJSON.get("min")); Assert.assertNull(dataJSON.get("max")); } 
else {
  // Non-empty rollups must serialize their true aggregates.
  Assert.assertEquals(((BasicRollup) point.getData()).getAverage(), dataJSON.get("average"));
  Assert.assertEquals(((BasicRollup) point.getData()).getMaxValue(), dataJSON.get("max"));
  Assert.assertEquals(((BasicRollup) point.getData()).getMinValue(), dataJSON.get("min"));
}
// Assert that variance isn't present
Assert.assertNull(dataJSON.get("variance"));
}
}

/**
 * String metrics serialize as a bare "value" with no numeric aggregates.
 */
@Test
public void testTransformRollupDataString() throws SerializationException {
  final JSONBasicRollupsOutputSerializer serializer = new JSONBasicRollupsOutputSerializer();
  final MetricData metricData =
      new MetricData(FakeMetricDataGenerator.generateFakeStringPoints(), "unknown", MetricData.Type.STRING);
  JSONObject metricDataJSON = serializer.transformRollupData(metricData, filterStats);
  final JSONArray data = (JSONArray) metricDataJSON.get("values");
  // Assert that we have some data to test
  Assert.assertTrue(data.size() > 0);
  for (int i = 0; i < data.size(); i++) {
    final JSONObject dataJSON = (JSONObject) data.get(i);
    final Points.Point point =
        (Points.Point) metricData.getData().getPoints().get(dataJSON.get("timestamp"));
    // The string payload comes through verbatim under "value"; numeric stats are absent.
    Assert.assertEquals(point.getData(), dataJSON.get("value"));
    Assert.assertNull(dataJSON.get("average"));
    Assert.assertNull(dataJSON.get("min"));
    Assert.assertNull(dataJSON.get("max"));
    Assert.assertNull(dataJSON.get("variance"));
  }
}

/**
 * Counter rollups serialized with the default counter filter expose numPoints
 * but not rate.
 */
@Test
public void testCounters() throws Exception {
  final JSONBasicRollupsOutputSerializer serializer = new JSONBasicRollupsOutputSerializer();
  final MetricData metricData = new MetricData(
      FakeMetricDataGenerator.generateFakeCounterRollupPoints(),
      "unknown",
      MetricData.Type.NUMBER);
  JSONObject metricDataJSON = serializer.transformRollupData(metricData, PlotRequestParser.DEFAULT_COUNTER);
  final JSONArray data = (JSONArray)metricDataJSON.get("values");
  Assert.assertEquals(5, data.size());
  for (int i = 0; i < data.size(); i++) {
    final JSONObject dataJSON = (JSONObject)data.get(i);
    Assert.assertNotNull(dataJSON.get("numPoints"));
    // Fake generator emits counts of i + 1000 per point (assumption based on the
    // assertion below — confirm against FakeMetricDataGenerator).
    Assert.assertEquals((long) (i
+ 1000), dataJSON.get("numPoints")); Assert.assertNull(dataJSON.get("rate")); } } @Test public void testGauges() throws Exception { final JSONBasicRollupsOutputSerializer serializer = new JSONBasicRollupsOutputSerializer(); final MetricData metricData = new MetricData( FakeMetricDataGenerator.generateFakeGaugeRollups(), "unknown", MetricData.Type.NUMBER); JSONObject metricDataJSON = serializer.transformRollupData(metricData, PlotRequestParser.DEFAULT_GAUGE); final JSONArray data = (JSONArray)metricDataJSON.get("values"); Assert.assertEquals(5, data.size()); for (int i = 0; i < data.size(); i++) { final JSONObject dataJSON = (JSONObject)data.get(i); Assert.assertNotNull(dataJSON.get("numPoints")); Assert.assertEquals(1L, dataJSON.get("numPoints")); Assert.assertNotNull("latest"); Assert.assertEquals(i, dataJSON.get("latest")); // other fields were filtered out. Assert.assertNull(dataJSON.get(MetricStat.AVERAGE.toString())); Assert.assertNull(dataJSON.get(MetricStat.VARIANCE.toString())); Assert.assertNull(dataJSON.get(MetricStat.MIN.toString())); Assert.assertNull(dataJSON.get(MetricStat.MAX.toString())); } } @Test public void testSets() throws Exception { final JSONBasicRollupsOutputSerializer serializer = new JSONBasicRollupsOutputSerializer(); final MetricData metricData = new MetricData( FakeMetricDataGenerator.generateFakeSetRollupPoints(), "unknown", MetricData.Type.NUMBER); JSONObject metricDataJSON = serializer.transformRollupData(metricData, PlotRequestParser.DEFAULT_SET); final JSONArray data = (JSONArray)metricDataJSON.get("values"); Assert.assertEquals(5, data.size()); for (int i = 0; i < data.size(); i++) { final JSONObject dataJSON = (JSONObject)data.get(i); Assert.assertNotNull(dataJSON.get("numPoints")); Assert.assertEquals(Sets.newHashSet(i, i % 2, i / 2).size(), dataJSON.get("numPoints")); } } @Test public void setTimers() throws Exception { final JSONBasicRollupsOutputSerializer serializer = new JSONBasicRollupsOutputSerializer(); final MetricData 
metricData = new MetricData( FakeMetricDataGenerator.generateFakeTimerRollups(), "unknown", MetricData.Type.NUMBER);
JSONObject metricDataJSON = serializer.transformRollupData(metricData, PlotRequestParser.DEFAULT_TIMER);
final JSONArray data = (JSONArray)metricDataJSON.get("values");
Assert.assertEquals(5, data.size());
for (int i = 0; i < data.size(); i++) {
  final JSONObject dataJSON = (JSONObject)data.get(i);
  // Presence-only checks: timer serialization must include numPoints, average
  // and rate; exact values are not pinned here.
  Assert.assertNotNull(dataJSON.get("numPoints"));
  Assert.assertNotNull(dataJSON.get("average"));
  Assert.assertNotNull(dataJSON.get("rate"));
}
}
}
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.idea.maven.project; import com.intellij.execution.configurations.ParametersList; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleManager; import com.intellij.openapi.module.ModuleType; import com.intellij.openapi.module.StdModuleTypes; import com.intellij.openapi.project.Project; import com.intellij.openapi.roots.OrderEnumerator; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.io.BufferExposingByteArrayOutputStream; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.CharsetToolkit; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.util.Consumer; import com.intellij.util.containers.ContainerUtil; import gnu.trove.THashSet; import org.jdom.Element; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.idea.maven.importing.MavenAnnotationProcessorsModuleService; import org.jetbrains.idea.maven.importing.MavenExtraArtifactType; import org.jetbrains.idea.maven.importing.MavenImporter; import org.jetbrains.idea.maven.model.*; import org.jetbrains.idea.maven.plugins.api.MavenModelPropertiesPatcher; import 
org.jetbrains.idea.maven.server.MavenEmbedderWrapper;
import org.jetbrains.idea.maven.server.NativeMavenProjectHolder;
import org.jetbrains.idea.maven.utils.*;
import org.jetbrains.idea.maven.utils.MavenJDOMUtil;
import org.jetbrains.jps.util.JpsPathUtil;

import java.io.*;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;

import static com.intellij.openapi.roots.OrderEnumerator.orderEntries;

/**
 * Represents a single Maven project (one pom file) inside the IDE.
 * All mutable data lives in a {@code State} snapshot held in a volatile field;
 * updates replace the whole snapshot rather than mutating it in place.
 */
public class MavenProject {
  // Per-project user-data keys for lazily computed, cacheable values.
  private static final Key<MavenArtifactIndex> DEPENDENCIES_CACHE_KEY = Key.create("MavenProject.DEPENDENCIES_CACHE_KEY");
  private static final Key<List<String>> FILTERS_CACHE_KEY = Key.create("MavenProject.FILTERS_CACHE_KEY");

  /**
   * Known per-project config files (.mvn/maven.config, .mvn/jvm.config) and the
   * value to report when the file is absent.
   */
  public enum ConfigFileKind {
    MAVEN_CONFIG(MavenConstants.MAVEN_CONFIG_RELATIVE_PATH, "true"),
    JVM_CONFIG(MavenConstants.JVM_CONFIG_RELATIVE_PATH, "");

    // Cache key is unique per enum constant ("MavenProject.MAVEN_CONFIG", ...).
    final Key<Map<String, String>> CACHE_KEY = Key.create("MavenProject." + name());
    final String myRelativeFilePath;
    final String myValueIfMissing;

    ConfigFileKind(String relativeFilePath, String valueIfMissing) {
      myRelativeFilePath = relativeFilePath;
      myValueIfMissing = valueIfMissing;
    }
  }

  @NotNull private final VirtualFile myFile;
  // Volatile snapshot: readers grab the reference once; writers swap in a clone.
  @NotNull private volatile State myState = new State();

  /** Annotation-processing mode derived from the compiler configuration. */
  public enum ProcMode {BOTH, ONLY, NONE}

  /**
   * Deserializes a MavenProject previously written by {@link #write}.
   * Returns null when the pom file no longer exists on disk.
   *
   * @throws IOException on stream corruption or when the State class is missing.
   */
  @Nullable
  public static MavenProject read(DataInputStream in) throws IOException {
    String path = in.readUTF();
    int length = in.readInt();
    byte[] bytes = new byte[length];
    in.readFully(bytes); // should read full byte content first!!!
VirtualFile file = LocalFileSystem.getInstance().findFileByPath(path);
// Bytes were consumed above even on failure, keeping the stream positioned
// correctly for subsequent reads.
if (file == null) return null;

ByteArrayInputStream bs = new ByteArrayInputStream(bytes);
ObjectInputStream os = new ObjectInputStream(bs);
try {
  try {
    MavenProject result = new MavenProject(file);
    result.myState = (State)os.readObject();
    return result;
  }
  catch (ClassNotFoundException e) {
    // NOTE(review): new IOException(e) would be equivalent; kept as-is.
    IOException ioException = new IOException();
    ioException.initCause(e);
    throw ioException;
  }
}
finally {
  os.close();
  bs.close();
}
}

/**
 * Serializes this project as (path, length, java-serialized State) so that
 * {@link #read(DataInputStream)} can restore it.
 */
public void write(@NotNull DataOutputStream out) throws IOException {
  out.writeUTF(getPath());

  BufferExposingByteArrayOutputStream bs = new BufferExposingByteArrayOutputStream();
  ObjectOutputStream os = new ObjectOutputStream(bs);
  try {
    os.writeObject(myState);
    // Length prefix first so read() can pre-consume the exact byte count.
    out.writeInt(bs.size());
    out.write(bs.getInternalBuffer(), 0, bs.size());
  }
  finally {
    os.close();
    bs.close();
  }
}

public MavenProject(@NotNull VirtualFile file) {
  myFile = file;
}

/**
 * Applies the result of reading/resolving the pom to this project.
 * Builds a new State from {@code readerResult}, swaps it in, and returns the
 * diff between old and new state.
 *
 * @param updateLastReadStamp bump the read counter (true for a fresh read)
 * @param resetArtifacts      drop previously accumulated resolved artifacts
 * @param resetProfiles       replace (rather than merge) the known profile ids
 */
@NotNull
MavenProjectChanges set(@NotNull MavenProjectReaderResult readerResult,
                        @NotNull MavenGeneralSettings settings,
                        boolean updateLastReadStamp,
                        boolean resetArtifacts,
                        boolean resetProfiles) {
  State newState = myState.clone();

  if (updateLastReadStamp) newState.myLastReadStamp = myState.myLastReadStamp + 1;

  newState.myReadingProblems = readerResult.readingProblems;
  newState.myLocalRepository = settings.getEffectiveLocalRepository();
  newState.myActivatedProfilesIds = readerResult.activatedProfiles;

  MavenModel model = readerResult.mavenModel;

  newState.myMavenId = model.getMavenId();
  if (model.getParent() != null) {
    newState.myParentId = model.getParent().getMavenId();
  }

  newState.myPackaging = model.getPackaging();
  newState.myName = model.getName();

  newState.myFinalName = model.getBuild().getFinalName();
  newState.myDefaultGoal = model.getBuild().getDefaultGoal();

  newState.myBuildDirectory = model.getBuild().getDirectory();
  newState.myOutputDirectory = model.getBuild().getOutputDirectory();
  newState.myTestOutputDirectory =
model.getBuild().getTestOutputDirectory();

doSetFolders(newState, readerResult);

newState.myFilters = model.getBuild().getFilters();
newState.myProperties = model.getProperties();

doSetResolvedAttributes(newState, readerResult, resetArtifacts);

// Let plugin-specific patchers adjust properties based on the plugins in use.
MavenModelPropertiesPatcher.patch(newState.myProperties, newState.myPlugins);

newState.myModulesPathsAndNames = collectModulePathsAndNames(model, getDirectory());
Collection<String> newProfiles = collectProfilesIds(model.getProfiles());

if (resetProfiles || newState.myProfilesIds == null) {
  newState.myProfilesIds = newProfiles;
}
else {
  // Merge: keep previously seen profile ids and add newly discovered ones.
  Set<String> mergedProfiles = new THashSet<>(newState.myProfilesIds);
  mergedProfiles.addAll(newProfiles);
  newState.myProfilesIds = new ArrayList<>(mergedProfiles);
}

newState.myModelMap = readerResult.nativeModelMap;

return setState(newState);
}

/** Swaps in the new state and returns the computed diff against the old one. */
private MavenProjectChanges setState(State newState) {
  MavenProjectChanges changes = myState.getChanges(newState);
  myState = newState;
  return changes;
}

/**
 * Copies resolution results (repositories, dependencies, plugins, extensions,
 * annotation processors, unresolved ids) from the reader result into {@code state}.
 * When {@code reset} is false, previously known entries are preserved and the
 * new ones are merged in (LinkedHashSet keeps order and de-duplicates).
 */
private static void doSetResolvedAttributes(State state,
                                            MavenProjectReaderResult readerResult,
                                            boolean reset) {
  MavenModel model = readerResult.mavenModel;

  Set<MavenId> newUnresolvedArtifacts = new THashSet<>();
  LinkedHashSet<MavenRemoteRepository> newRepositories = new LinkedHashSet<>();
  LinkedHashSet<MavenArtifact> newDependencies = new LinkedHashSet<>();
  LinkedHashSet<MavenArtifactNode> newDependencyTree = new LinkedHashSet<>();
  LinkedHashSet<MavenPlugin> newPlugins = new LinkedHashSet<>();
  LinkedHashSet<MavenArtifact> newExtensions = new LinkedHashSet<>();
  LinkedHashSet<MavenArtifact> newAnnotationProcessors = new LinkedHashSet<>();

  if (!reset) {
    // Seed with existing values so a partial re-resolve does not lose data.
    if (state.myUnresolvedArtifactIds != null) newUnresolvedArtifacts.addAll(state.myUnresolvedArtifactIds);
    if (state.myRemoteRepositories != null) newRepositories.addAll(state.myRemoteRepositories);
    if (state.myDependencies != null) newDependencies.addAll(state.myDependencies);
    if (state.myDependencyTree != null)
newDependencyTree.addAll(state.myDependencyTree);
    if (state.myPlugins != null) newPlugins.addAll(state.myPlugins);
    if (state.myExtensions != null) newExtensions.addAll(state.myExtensions);
    if (state.myAnnotationProcessors != null) newAnnotationProcessors.addAll(state.myAnnotationProcessors);
  }

  newUnresolvedArtifacts.addAll(readerResult.unresolvedArtifactIds);
  newRepositories.addAll(model.getRemoteRepositories());
  newDependencyTree.addAll(model.getDependencyTree());
  newDependencies.addAll(model.getDependencies());
  newPlugins.addAll(model.getPlugins());
  newExtensions.addAll(model.getExtensions());
  // NOTE(review): newAnnotationProcessors gets no additions from the model here;
  // external processors appear to arrive via addAnnotationProcessors() instead.

  state.myUnresolvedArtifactIds = newUnresolvedArtifacts;
  state.myRemoteRepositories = new ArrayList<>(newRepositories);
  state.myDependencies = new ArrayList<>(newDependencies);
  state.myDependencyTree = new ArrayList<>(newDependencyTree);
  state.myPlugins = new ArrayList<>(newPlugins);
  state.myExtensions = new ArrayList<>(newExtensions);
  state.myAnnotationProcessors = new ArrayList<>(newAnnotationProcessors);
}

/** Re-applies only the folder layout (sources/resources) from a reader result. */
private MavenProjectChanges setFolders(MavenProjectReaderResult readerResult) {
  State newState = myState.clone();
  doSetFolders(newState, readerResult);
  return setState(newState);
}

/** Copies source/test-source/resource folder lists from the model into the state. */
private static void doSetFolders(State newState, MavenProjectReaderResult readerResult) {
  MavenModel model = readerResult.mavenModel;
  newState.mySources = model.getBuild().getSources();
  newState.myTestSources = model.getBuild().getTestSources();

  newState.myResources = model.getBuild().getResources();
  newState.myTestResources = model.getBuild().getTestResources();
}

/**
 * Maps absolute module pom paths to module names, resolving each relative
 * module path against this project's directory.
 */
private Map<String, String> collectModulePathsAndNames(MavenModel mavenModel, String baseDir) {
  String basePath = baseDir + "/";
  Map<String, String> result = new LinkedHashMap<>();
  for (Map.Entry<String, String> each : collectModulesRelativePathsAndNames(mavenModel, basePath).entrySet()) {
    result.put(new Path(basePath + each.getKey()).getPath(), each.getValue());
  }
  return result;
}

private Map<String, String>
collectModulesRelativePathsAndNames(MavenModel mavenModel, String basePath) {
  String extension = StringUtil.notNullize(myFile.getExtension());
  LinkedHashMap<String, String> result = new LinkedHashMap<>();
  for (String name : mavenModel.getModules()) {
    name = name.trim();

    if (name.length() == 0) continue;

    String originalName = name;
    // module name can be relative and contain either / or \\ separators
    name = FileUtil.toSystemIndependentName(name);

    if (!name.endsWith('.' + extension)) {
      // Module points at a directory: append the conventional pom file name
      // using this project's file extension (e.g. "pom.xml").
      if (!name.endsWith("/")) name += "/";
      name += MavenConstants.POM_EXTENSION + '.' + extension;
    }
    else {
      // Name already looks like a pom file, but it may still be a directory
      // that merely ends with ".xml" — in that case descend into it.
      String systemDependentName = FileUtil.toSystemDependentName(basePath + name);
      if (new File(systemDependentName).isDirectory()) {
        name += "/" + MavenConstants.POM_XML;
      }
    }

    result.put(name, originalName);
  }
  return result;
}

/** Collects the ids of the given profiles; empty list when profiles is null. */
private static Collection<String> collectProfilesIds(Collection<MavenProfile> profiles) {
  if (profiles == null) return Collections.emptyList();

  Set<String> result = new THashSet<>(profiles.size());
  for (MavenProfile each : profiles) {
    result.add(each.getId());
  }
  return result;
}

public long getLastReadStamp() { return myState.myLastReadStamp; }

@NotNull public VirtualFile getFile() { return myFile; }

@NotNull public String getPath() { return myFile.getPath(); }

@NotNull public String getDirectory() { return myFile.getParent().getPath(); }

@NotNull public VirtualFile getDirectoryFile() { return myFile.getParent(); }

@Nullable public VirtualFile getProfilesXmlFile() { return MavenUtil.findProfilesXmlFile(myFile); }

@Nullable public File getProfilesXmlIoFile() { return MavenUtil.getProfilesXmlIoFile(myFile); }

public boolean hasReadingProblems() { return !myState.myReadingProblems.isEmpty(); }

@Nullable public String getName() { return myState.myName; }

/** Project name, falling back to the artifactId when the name is blank. */
@NotNull public String getDisplayName() {
  State state = myState;
  if (StringUtil.isEmptyOrSpaces(state.myName)) {
    return StringUtil.notNullize(state.myMavenId.getArtifactId());
  }
  return state.myName;
}

@NotNull public
Map<String, String> getModelMap() { return myState.myModelMap; }

@NotNull public MavenId getMavenId() { return myState.myMavenId; }

@Nullable public MavenId getParentId() { return myState.myParentId; }

@NotNull public String getPackaging() { return myState.myPackaging; }

@NotNull public String getFinalName() { return myState.myFinalName; }

@Nullable public String getDefaultGoal() { return myState.myDefaultGoal; }

@NotNull public String getBuildDirectory() { return myState.myBuildDirectory; }

/** target/generated-sources or target/generated-test-sources. */
@NotNull public String getGeneratedSourcesDirectory(boolean testSources) {
  return getBuildDirectory() + (testSources ? "/generated-test-sources" : "/generated-sources");
}

/**
 * Output directory for generated annotation-processor sources.
 * When proc is disabled for maven-compiler-plugin, the org.bsc.maven
 * maven-processor-plugin may be doing the processing instead — honor its
 * configured (or default) output directory in that case; otherwise fall back
 * to the compiler plugin's generated[Test]SourcesDirectory setting.
 */
@NotNull public String getAnnotationProcessorDirectory(boolean testSources) {
  if (getProcMode() == ProcMode.NONE) {
    MavenPlugin bscMavenPlugin = findPlugin("org.bsc.maven", "maven-processor-plugin");
    Element cfg = getPluginGoalConfiguration(bscMavenPlugin, testSources ? "process-test" : "process");
    if (bscMavenPlugin != null && cfg == null) {
      // Plugin present but goal not configured: use its well-known defaults.
      return getBuildDirectory() + (testSources ? "/generated-sources/apt-test" : "/generated-sources/apt");
    }
    if (cfg != null) {
      String out = MavenJDOMUtil.findChildValueByPath(cfg, "outputDirectory");
      if (out == null) {
        out = MavenJDOMUtil.findChildValueByPath(cfg, "defaultOutputDirectory");
        if (out == null) {
          return getBuildDirectory() + (testSources ? "/generated-sources/apt-test" : "/generated-sources/apt");
        }
      }

      // Relative paths are resolved against the project directory.
      if (!new File(out).isAbsolute()) {
        out = getDirectory() + '/' + out;
      }

      return out;
    }
  }

  String def = getGeneratedSourcesDirectory(testSources) + (testSources ? "/test-annotations" : "/annotations");
  return MavenJDOMUtil.findChildValueByPath(
    getCompilerConfig(), testSources ?
"generatedTestSourcesDirectory" : "generatedSourcesDirectory", def);
}

/**
 * Determines the annotation-processing mode from maven-compiler-plugin config:
 * the &lt;proc&gt; element wins; otherwise -proc:none / -proc:only in
 * compilerArgument or compilerArgs. Defaults to BOTH (compile + process).
 */
@NotNull public ProcMode getProcMode() {
  // Execution-specific config (default-compile) takes precedence over the
  // plugin-level configuration.
  Element compilerConfiguration = getPluginExecutionConfiguration("org.apache.maven.plugins", "maven-compiler-plugin", "default-compile");
  if (compilerConfiguration == null) {
    compilerConfiguration = getCompilerConfig();
  }

  if (compilerConfiguration == null) {
    return ProcMode.BOTH;
  }

  Element procElement = compilerConfiguration.getChild("proc");
  if (procElement != null) {
    String procMode = procElement.getValue();
    return ("only".equalsIgnoreCase(procMode)) ? ProcMode.ONLY : ("none".equalsIgnoreCase(procMode)) ? ProcMode.NONE : ProcMode.BOTH;
  }

  String compilerArgument = compilerConfiguration.getChildTextTrim("compilerArgument");
  if ("-proc:none".equals(compilerArgument)) {
    return ProcMode.NONE;
  }
  if ("-proc:only".equals(compilerArgument)) {
    return ProcMode.ONLY;
  }

  Element compilerArguments = compilerConfiguration.getChild("compilerArgs");
  if (compilerArguments != null) {
    for (Element element : compilerArguments.getChildren()) {
      String arg = element.getValue();
      if ("-proc:none".equals(arg)) {
        return ProcMode.NONE;
      }
      if ("-proc:only".equals(arg)) {
        return ProcMode.ONLY;
      }
    }
  }

  return ProcMode.BOTH;
}

/**
 * Collects -Akey=value annotation-processor options, either from the compiler
 * plugin config or (when proc is NONE) from the org.bsc.maven processor plugin.
 */
public Map<String, String> getAnnotationProcessorOptions() {
  Element compilerConfig = getCompilerConfig();
  if (compilerConfig == null) {
    return Collections.emptyMap();
  }
  if (getProcMode() != MavenProject.ProcMode.NONE) {
    return getAnnotationProcessorOptionsFromCompilerConfig(compilerConfig);
  }
  MavenPlugin bscMavenPlugin = findPlugin("org.bsc.maven", "maven-processor-plugin");
  if (bscMavenPlugin != null) {
    return getAnnotationProcessorOptionsFromProcessorPlugin(bscMavenPlugin);
  }
  return Collections.emptyMap();
}

/**
 * Extracts -A options from compilerArgument, compilerArgs/arg elements and
 * compilerArguments/-Akey child elements of the compiler configuration.
 */
private static Map<String, String> getAnnotationProcessorOptionsFromCompilerConfig(Element compilerConfig) {
  Map<String, String> res = new LinkedHashMap<>();

  String compilerArgument = compilerConfig.getChildText("compilerArgument");
addAnnotationProcessorOptionFomrParametersString(compilerArgument, res);

Element compilerArgs = compilerConfig.getChild("compilerArgs");
if (compilerArgs != null) {
  for (Element e : compilerArgs.getChildren()) {
    // Only <arg> children carry compiler arguments.
    if (!StringUtil.equals(e.getName(), "arg")) continue;
    String arg = e.getTextTrim();
    addAnnotationProcessorOption(arg, res);
  }
}

Element compilerArguments = compilerConfig.getChild("compilerArguments");
if (compilerArguments != null) {
  for (Element e : compilerArguments.getChildren()) {
    String name = e.getName();
    name = StringUtil.trimStart(name, "-");

    // Element form: <Akey>value</Akey> (optionally written as <-Akey>).
    if (name.length() > 1 && name.charAt(0) == 'A') {
      res.put(name.substring(1), e.getTextTrim());
    }
  }
}
return res;
}

// NOTE(review): "Fomr" is a typo for "From"; left unrenamed because the class
// continues beyond this chunk and other call sites may exist.
/** Splits a whitespace-separated argument string and harvests its -A options. */
private static void addAnnotationProcessorOptionFomrParametersString(String compilerArguments, Map<String, String> res) {
  if (!StringUtil.isEmptyOrSpaces(compilerArguments)) {
    ParametersList parametersList = new ParametersList();
    parametersList.addParametersString(compilerArguments);

    for (String param : parametersList.getParameters()) {
      addAnnotationProcessorOption(param, res);
    }
  }
}

/**
 * Parses a single "-Akey=value" (or "-Akey") compiler argument into the map.
 * Non -A arguments are ignored. The '=' search starts at index 3 so the key is
 * never empty.
 */
private static void addAnnotationProcessorOption(String compilerArg, Map<String, String> optionsMap) {
  if (compilerArg == null || compilerArg.trim().isEmpty()) return;

  if (compilerArg.startsWith("-A")) {
    int idx = compilerArg.indexOf('=', 3);
    if (idx >= 0) {
      optionsMap.put(compilerArg.substring(2, idx), compilerArg.substring(idx + 1));
    } else {
      optionsMap.put(compilerArg.substring(2), "");
    }
  }
}

/**
 * Extracts processor options from the org.bsc.maven maven-processor-plugin:
 * its "process" goal config (falling back to the plugin-level config), reading
 * both compilerArguments and the <options> element.
 */
private static Map<String, String> getAnnotationProcessorOptionsFromProcessorPlugin(MavenPlugin bscMavenPlugin) {
  Element cfg = bscMavenPlugin.getGoalConfiguration("process");
  if (cfg == null) {
    cfg = bscMavenPlugin.getConfigurationElement();
  }
  LinkedHashMap<String, String> res = new LinkedHashMap<>();
  if (cfg != null) {
    String compilerArguments = cfg.getChildText("compilerArguments");
    addAnnotationProcessorOptionFomrParametersString(compilerArguments, res);

    final Element optionsElement = cfg.getChild("options");
    if
(optionsElement != null) {
      // <options><key>value</key>...</options> — each child is one option.
      for (Element option : optionsElement.getChildren()) {
        res.put(option.getName(), option.getText());
      }
    }
  }
  return res;
}

/**
 * Annotation processor class names explicitly declared in the build:
 * from maven-compiler-plugin's &lt;annotationProcessors&gt; when proc is on,
 * otherwise from the org.bsc.maven processor plugin's &lt;processors&gt;.
 * Returns null when there is no compiler configuration at all.
 */
@Nullable public List<String> getDeclaredAnnotationProcessors() {
  Element compilerConfig = getCompilerConfig();
  if (compilerConfig == null) {
    return null;
  }
  List<String> result = new ArrayList<>();
  if (getProcMode() != MavenProject.ProcMode.NONE) {
    Element processors = compilerConfig.getChild("annotationProcessors");
    if (processors != null) {
      for (Element element : processors.getChildren("annotationProcessor")) {
        String processorClassName = element.getTextTrim();
        if (!processorClassName.isEmpty()) {
          result.add(processorClassName);
        }
      }
    }
  }
  else {
    MavenPlugin bscMavenPlugin = findPlugin("org.bsc.maven", "maven-processor-plugin");
    if (bscMavenPlugin != null) {
      Element bscCfg = bscMavenPlugin.getGoalConfiguration("process");
      if (bscCfg == null) {
        bscCfg = bscMavenPlugin.getConfigurationElement();
      }
      if (bscCfg != null) {
        Element bscProcessors = bscCfg.getChild("processors");
        if (bscProcessors != null) {
          for (Element element : bscProcessors.getChildren("processor")) {
            String processorClassName = element.getTextTrim();
            if (!processorClassName.isEmpty()) {
              result.add(processorClassName);
            }
          }
        }
      }
    }
  }
  return result;
}

@NotNull public String getOutputDirectory() { return myState.myOutputDirectory; }

@NotNull public String getTestOutputDirectory() { return myState.myTestOutputDirectory; }

@NotNull public List<String> getSources() { return myState.mySources; }

@NotNull public List<String> getTestSources() { return myState.myTestSources; }

@NotNull public List<MavenResource> getResources() { return myState.myResources; }

@NotNull public List<MavenResource> getTestResources() { return myState.myTestResources; }

@NotNull public List<String> getFilters() { return myState.myFilters; }

/**
 * Filter property files: files configured for properties-maven-plugin's
 * read-project-properties goal, plus the build's &lt;filters&gt;. Result is
 * cached under FILTERS_CACHE_KEY (getCachedValue/putCachedValue are defined
 * elsewhere in this class — presumably backed by the user-data cache).
 */
public List<String> getFilterPropertiesFiles() {
  List<String> res = getCachedValue(FILTERS_CACHE_KEY);
  if (res == null) {
    Element propCfg =
getPluginGoalConfiguration("org.codehaus.mojo", "properties-maven-plugin", "read-project-properties");
    if (propCfg != null) {
      Element files = propCfg.getChild("files");
      if (files != null) {
        res = new ArrayList<>();

        for (Element file : files.getChildren("file")) {
          File f = new File(file.getValue());
          // Relative file entries resolve against the project directory.
          if (!f.isAbsolute()) {
            f = new File(getDirectory(), file.getValue());
          }
          res.add(f.getAbsolutePath());
        }
      }
    }

    if (res == null) {
      res = getFilters();
    }
    else {
      res.addAll(getFilters());
    }

    // putCachedValue may return an already-cached list if another thread won.
    res = putCachedValue(FILTERS_CACHE_KEY, res);
  }

  return res;
}

/** Reads the pom from disk and applies the result (fresh read: stamps bumped, profiles reset). */
@NotNull public MavenProjectChanges read(@NotNull MavenGeneralSettings generalSettings,
                                         @NotNull MavenExplicitProfiles profiles,
                                         @NotNull MavenProjectReader reader,
                                         @NotNull MavenProjectReaderProjectLocator locator) {
  return set(reader.readProject(generalSettings, myFile, profiles, locator), generalSettings, true, false, true);
}

/**
 * Fully resolves this project through the Maven embedder and lets suitable
 * importers post-process the native project. Artifacts are reset only when the
 * resolve produced no reading problems.
 */
@NotNull public Pair<MavenProjectChanges, NativeMavenProjectHolder> resolve(@NotNull Project project,
                                                                            @NotNull MavenGeneralSettings generalSettings,
                                                                            @NotNull MavenEmbedderWrapper embedder,
                                                                            @NotNull MavenProjectReader reader,
                                                                            @NotNull MavenProjectReaderProjectLocator locator,
                                                                            @NotNull ResolveContext context)
  throws MavenProcessCanceledException {
  Collection<MavenProjectReaderResult> results = reader.resolveProject(generalSettings, embedder, Collections.singleton(getFile()), getActivatedProfilesIds(), locator);
  // Single file was submitted, so exactly one result is expected.
  final MavenProjectReaderResult result = results.iterator().next();

  MavenProjectChanges changes = set(result, generalSettings, false, result.readingProblems.isEmpty(), false);

  if (result.nativeMavenProject != null) {
    for (MavenImporter eachImporter : getSuitableImporters()) {
      eachImporter.resolve(project, this, result.nativeMavenProject, embedder, context);
    }
  }
  return Pair.create(changes, result.nativeMavenProject);
}

/**
 * Regenerates sources via the embedder and re-applies only folder information.
 * Returns (false, NONE) when generation failed or reported problems.
 */
@NotNull public Pair<Boolean, MavenProjectChanges> resolveFolders(@NotNull MavenEmbedderWrapper embedder,
                                                                  @NotNull MavenImportingSettings importingSettings,
                                                                  @NotNull MavenConsole
console) throws MavenProcessCanceledException {
  MavenProjectReaderResult result = MavenProjectReader.generateSources(embedder, importingSettings, getFile(), getActivatedProfilesIds(), console);
  if (result == null || !result.readingProblems.isEmpty()) return Pair.create(false, MavenProjectChanges.NONE);
  MavenProjectChanges changes = setFolders(result);
  return Pair.create(true, changes);
}

public void resetCache() {
  // todo a bit hacky
  synchronized (myState) {
    myState.resetCache();
  }
}

/** True for packaging "pom" or any project that declares modules. */
public boolean isAggregator() {
  return "pom".equals(getPackaging()) || !getModulePaths().isEmpty();
}

/** All problems for this project; computed once per state and cached on it. */
@NotNull public List<MavenProjectProblem> getProblems() {
  State state = myState;
  synchronized (state) {
    if (state.myProblemsCache == null) {
      state.myProblemsCache = collectProblems(myFile, state);
    }
    return state.myProblemsCache;
  }
}

/**
 * Aggregates problems: unresolved parent, raw reading problems, missing module
 * pom files, then unresolved dependencies, extensions and plugins.
 */
private static List<MavenProjectProblem> collectProblems(VirtualFile file, State state) {
  List<MavenProjectProblem> result = new ArrayList<>();

  validateParent(file, state, result);
  result.addAll(state.myReadingProblems);

  for (Map.Entry<String, String> each : state.myModulesPathsAndNames.entrySet()) {
    if (LocalFileSystem.getInstance().findFileByPath(each.getKey()) == null) {
      result.add(createDependencyProblem(file, ProjectBundle.message("maven.project.problem.moduleNotFound", each.getValue())));
    }
  }

  validateDependencies(file, state, result);
  validateExtensions(file, state, result);
  validatePlugins(file, state, result);

  return result;
}

private static void validateParent(VirtualFile file, State state, List<MavenProjectProblem> result) {
  if (!isParentResolved(state)) {
    result.add(createDependencyProblem(file, ProjectBundle.message("maven.project.problem.parentNotFound", state.myParentId)));
  }
}

private static void validateDependencies(VirtualFile file, State state, List<MavenProjectProblem> result) {
  for (MavenArtifact each : getUnresolvedDependencies(state)) {
    result.add(createDependencyProblem(file,
ProjectBundle.message("maven.project.problem.unresolvedDependency", each.getDisplayStringWithType())));
  }
}

private static void validateExtensions(VirtualFile file, State state, List<MavenProjectProblem> result) {
  for (MavenArtifact each : getUnresolvedExtensions(state)) {
    result.add(createDependencyProblem(file, ProjectBundle.message("maven.project.problem.unresolvedExtension", each.getDisplayStringSimple())));
  }
}

private static void validatePlugins(VirtualFile file, State state, List<MavenProjectProblem> result) {
  for (MavenPlugin each : getUnresolvedPlugins(state)) {
    result.add(createDependencyProblem(file, ProjectBundle.message("maven.project.problem.unresolvedPlugin", each)));
  }
}

private static MavenProjectProblem createDependencyProblem(VirtualFile file, String description) {
  return new MavenProjectProblem(file.getPath(), description, MavenProjectProblem.ProblemType.DEPENDENCY);
}

/** Parent is considered resolved when its id is not among the unresolved ids. */
private static boolean isParentResolved(State state) {
  return !state.myUnresolvedArtifactIds.contains(state.myParentId);
}

/** Unresolved dependencies, computed lazily and cached on the state snapshot. */
private static List<MavenArtifact> getUnresolvedDependencies(State state) {
  synchronized (state) {
    if (state.myUnresolvedDependenciesCache == null) {
      List<MavenArtifact> result = new ArrayList<>();
      for (MavenArtifact each : state.myDependencies) {
        if (!each.isResolved()) result.add(each);
      }
      state.myUnresolvedDependenciesCache = result;
    }
    return state.myUnresolvedDependenciesCache;
  }
}

private static List<MavenArtifact> getUnresolvedExtensions(State state) {
  synchronized (state) {
    if (state.myUnresolvedExtensionsCache == null) {
      List<MavenArtifact> result = new ArrayList<>();
      for (MavenArtifact each : state.myExtensions) {
        // Collect only extensions that were attempted to be resolved.
        // It is because embedder does not even try to resolve extensions that
        // are not necessary.
if (state.myUnresolvedArtifactIds.contains(each.getMavenId()) && !pomFileExists(state.myLocalRepository, each)) {
          result.add(each);
        }
      }
      state.myUnresolvedExtensionsCache = result;
    }
    return state.myUnresolvedExtensionsCache;
  }
}

/** Unresolved annotation processors, computed lazily and cached on the state. */
private static List<MavenArtifact> getUnresolvedAnnotationProcessors(State state) {
  synchronized (state) {
    if (state.myUnresolvedAnnotationProcessors == null) {
      List<MavenArtifact> result = new ArrayList<>();
      for (MavenArtifact each : state.myAnnotationProcessors) {
        if (!each.isResolved()) result.add(each);
      }
      state.myUnresolvedAnnotationProcessors = result;
    }
    return state.myUnresolvedAnnotationProcessors;
  }
}

/** True when the artifact's pom exists in the local repository. */
private static boolean pomFileExists(File localRepository, MavenArtifact artifact) {
  return MavenArtifactUtil.hasArtifactFile(localRepository, artifact.getMavenId(), "pom");
}

/** Declared plugins without an artifact file in the local repository; cached on state. */
private static List<MavenPlugin> getUnresolvedPlugins(State state) {
  synchronized (state) {
    if (state.myUnresolvedPluginsCache == null) {
      List<MavenPlugin> result = new ArrayList<>();
      for (MavenPlugin each : getDeclaredPlugins(state)) {
        if (!MavenArtifactUtil.hasArtifactFile(state.myLocalRepository, each.getMavenId())) {
          result.add(each);
        }
      }
      state.myUnresolvedPluginsCache = result;
    }
    return state.myUnresolvedPluginsCache;
  }
}

/** Module pom files that actually exist on disk. */
@NotNull public List<VirtualFile> getExistingModuleFiles() {
  LocalFileSystem fs = LocalFileSystem.getInstance();

  List<VirtualFile> result = new ArrayList<>();
  Set<String> pathsInStack = getModulePaths();
  for (String each : pathsInStack) {
    VirtualFile f = fs.findFileByPath(each);
    if (f != null) result.add(f);
  }
  return result;
}

@NotNull public Set<String> getModulePaths() { return getModulesPathsAndNames().keySet(); }

@NotNull public Map<String, String> getModulesPathsAndNames() { return myState.myModulesPathsAndNames; }

@NotNull public Collection<String> getProfilesIds() { return myState.myProfilesIds; }

@NotNull public MavenExplicitProfiles getActivatedProfilesIds() { return myState.myActivatedProfilesIds; }

@NotNull public
List<MavenArtifact> getDependencies() { return myState.myDependencies; }

@NotNull public List<MavenArtifact> getExternalAnnotationProcessors() { return myState.myAnnotationProcessors; }

/**
 * Builds a platform-dependent classpath ({@code File.pathSeparator}-joined) for
 * running annotation processors: external processor artifacts plus the runtime
 * classpath of every module registered as an annotation-processor module.
 */
@NotNull public String getAnnotationProcessorPath(Project project) {
  StringJoiner annotationProcessorPath = new StringJoiner(File.pathSeparator);

  Consumer<String> resultAppender = path -> annotationProcessorPath.add(FileUtil.toSystemDependentName(path));

  for (MavenArtifact artifact : getExternalAnnotationProcessors()) {
    resultAppender.consume(artifact.getPath());
  }

  MavenProjectsManager projectsManager = MavenProjectsManager.getInstance(project);

  Module module = projectsManager.findModule(this);
  if (module != null) {
    MavenAnnotationProcessorsModuleService apService = MavenAnnotationProcessorsModuleService.getInstance(module);
    for (String moduleName : apService.getAnnotationProcessorModules()) {
      Module annotationProcessorModule = ModuleManager.getInstance(project).findModuleByName(moduleName);
      if (annotationProcessorModule != null) {
        // Production runtime classpath of the processor module, transitively,
        // without the SDK.
        OrderEnumerator enumerator = orderEntries(annotationProcessorModule).withoutSdk().productionOnly().runtimeOnly().recursively();

        for (String url : enumerator.classes().getUrls()) {
          resultAppender.consume(JpsPathUtil.urlToPath(url));
        }
      }
    }
  }

  return annotationProcessorPath.toString();
}

@NotNull public List<MavenArtifactNode> getDependencyTree() { return myState.myDependencyTree; }

/** Built-in packagings plus anything contributed by suitable importers. */
@NotNull public Set<String> getSupportedPackagings() {
  Set<String> result = ContainerUtil.newHashSet(MavenConstants.TYPE_POM, MavenConstants.TYPE_JAR, "ejb", "ejb-client", "war", "ear", "bundle", "maven-plugin");
  for (MavenImporter each : getSuitableImporters()) {
    each.getSupportedPackagings(result);
  }
  return result;
}

/** Dependency types contributed by suitable importers for the given request type. */
public Set<String> getDependencyTypesFromImporters(@NotNull SupportedRequestType type) {
  THashSet<String> res = new THashSet<>();

  for (MavenImporter each : getSuitableImporters()) {
    each.getSupportedDependencyTypes(res, type);
  }

  return res;
}

@NotNull public
Set<String> getSupportedDependencyScopes() { Set<String> result = new THashSet<>(Arrays.asList(MavenConstants.SCOPE_COMPILE, MavenConstants.SCOPE_PROVIDED, MavenConstants.SCOPE_RUNTIME, MavenConstants.SCOPE_TEST, MavenConstants.SCOPE_SYSTEM)); for (MavenImporter each : getSuitableImporters()) { each.getSupportedDependencyScopes(result); } return result; } public void addDependency(@NotNull MavenArtifact dependency) { State state = myState; List<MavenArtifact> dependenciesCopy = new ArrayList<>(state.myDependencies); dependenciesCopy.add(dependency); state.myDependencies = dependenciesCopy; state.myCache.clear(); } public void addAnnotationProcessors(@NotNull Collection<MavenArtifact> annotationProcessors) { State state = myState; List<MavenArtifact> annotationProcessorsCopy = new ArrayList<>(state.myAnnotationProcessors); annotationProcessorsCopy.addAll(annotationProcessors); state.myAnnotationProcessors = annotationProcessorsCopy; state.myUnresolvedAnnotationProcessors = null; } @NotNull public List<MavenArtifact> findDependencies(@NotNull MavenProject depProject) { return findDependencies(depProject.getMavenId()); } public List<MavenArtifact> findDependencies(@NotNull MavenId id) { return getDependencyArtifactIndex().findArtifacts(id); } @NotNull public List<MavenArtifact> findDependencies(@Nullable String groupId, @Nullable String artifactId) { return getDependencyArtifactIndex().findArtifacts(groupId, artifactId); } public boolean hasUnresolvedArtifacts() { State state = myState; return !isParentResolved(state) || !getUnresolvedDependencies(state).isEmpty() || !getUnresolvedExtensions(state).isEmpty() || !getUnresolvedAnnotationProcessors(state).isEmpty(); } public boolean hasUnresolvedPlugins() { return !getUnresolvedPlugins(myState).isEmpty(); } @NotNull public List<MavenPlugin> getPlugins() { return myState.myPlugins; } @NotNull public List<MavenPlugin> getDeclaredPlugins() { return getDeclaredPlugins(myState); } private static List<MavenPlugin> 
getDeclaredPlugins(State state) { return ContainerUtil.findAll(state.myPlugins, mavenPlugin -> !mavenPlugin.isDefault()); } @Nullable public Element getPluginConfiguration(@Nullable String groupId, @Nullable String artifactId) { return getPluginGoalConfiguration(groupId, artifactId, null); } @Nullable public Element getPluginGoalConfiguration(@Nullable String groupId, @Nullable String artifactId, @Nullable String goal) { return getPluginGoalConfiguration(findPlugin(groupId, artifactId), goal); } @Nullable public Element getPluginGoalConfiguration(@Nullable MavenPlugin plugin, @Nullable String goal) { if (plugin == null) return null; return goal == null ? plugin.getConfigurationElement() : plugin.getGoalConfiguration(goal); } public Element getPluginExecutionConfiguration(@Nullable String groupId, @Nullable String artifactId, @NotNull String executionId) { MavenPlugin plugin = findPlugin(groupId, artifactId); if (plugin == null) return null; return plugin.getExecutionConfiguration(executionId); } @Nullable public MavenPlugin findPlugin(@Nullable String groupId, @Nullable String artifactId) { return findPlugin(groupId, artifactId, false); } @Nullable public MavenPlugin findPlugin(@Nullable String groupId, @Nullable String artifactId, final boolean explicitlyDeclaredOnly) { final List<MavenPlugin> plugins = explicitlyDeclaredOnly ? 
getDeclaredPlugins() : getPlugins(); for (MavenPlugin each : plugins) { if (each.getMavenId().equals(groupId, artifactId)) return each; } return null; } @Nullable public String getEncoding() { String encoding = myState.myProperties.getProperty("project.build.sourceEncoding"); if (encoding != null) return encoding; Element pluginConfiguration = getPluginConfiguration("org.apache.maven.plugins", "maven-resources-plugin"); if (pluginConfiguration != null) { return pluginConfiguration.getChildTextTrim("encoding"); } return null; } @Nullable public String getSourceLevel() { return getCompilerLevel("source"); } @Nullable public String getTargetLevel() { return getCompilerLevel("target"); } @Nullable public String getReleaseLevel() { return getCompilerLevel("release"); } @Nullable private String getCompilerLevel(String level) { String result = MavenJDOMUtil.findChildValueByPath(getCompilerConfig(), level); if (result == null) { result = myState.myProperties.getProperty("maven.compiler." + level); } return result; } @Nullable private Element getCompilerConfig() { Element executionConfiguration = getPluginExecutionConfiguration("org.apache.maven.plugins", "maven-compiler-plugin", "default-compile"); if(executionConfiguration != null) return executionConfiguration; return getPluginConfiguration("org.apache.maven.plugins", "maven-compiler-plugin"); } @NotNull public Properties getProperties() { return myState.myProperties; } @NotNull public Map<String, String> getMavenConfig() { return getPropertiesFromConfig(ConfigFileKind.MAVEN_CONFIG); } @NotNull private Map<String, String> getPropertiesFromConfig(ConfigFileKind kind) { Map<String, String> mavenConfig = getCachedValue(kind.CACHE_KEY); if (mavenConfig == null) { mavenConfig = readConfigFile(MavenUtil.getBaseDir(getDirectoryFile()), kind); putCachedValue(kind.CACHE_KEY, mavenConfig); } return mavenConfig; } @NotNull public Map<String, String> getJvmConfig() { return getPropertiesFromConfig(ConfigFileKind.JVM_CONFIG); } 
/**
 * Reads a maven.config/jvm.config style file under {@code baseDir} and parses it into key/value
 * properties. A missing or unreadable file yields an empty map.
 */
@NotNull
public static Map<String, String> readConfigFile(final File baseDir, ConfigFileKind kind) {
  // NOTE(review): plain string concatenation of baseDir and the relative path — this relies on
  // kind.myRelativeFilePath starting with a separator; verify for any new ConfigFileKind value.
  File configFile = new File(baseDir + FileUtil.toSystemDependentName(kind.myRelativeFilePath));
  ParametersList parametersList = new ParametersList();
  if (configFile.isFile()) {
    try {
      parametersList.addParametersString(FileUtil.loadFile(configFile, CharsetToolkit.UTF8));
    }
    catch (IOException ignore) {
      // best-effort: an unreadable config file is treated the same as a missing one
    }
  }
  Map<String, String> config = parametersList.getProperties(kind.myValueIfMissing);
  return config.isEmpty() ? Collections.emptyMap() : config;
}

/** Local Maven repository this project resolves against. */
@NotNull
public File getLocalRepository() {
  return myState.myLocalRepository;
}

/** Remote repositories declared/inherited by this project. */
@NotNull
public List<MavenRemoteRepository> getRemoteRepositories() {
  return myState.myRemoteRepositories;
}

/** Importers applicable to this project (delegates to MavenImporter's registry). */
@NotNull
public List<MavenImporter> getSuitableImporters() {
  return MavenImporter.getSuitableImporters(this);
}

/** Module type required by the suitable importers, defaulting to a plain Java module. */
@NotNull
public ModuleType getModuleType() {
  final List<MavenImporter> importers = getSuitableImporters();
  // getSuitableImporters() guarantees that all returned importers require the same module type
  return importers.size() > 0 ? importers.get(0).getModuleType() : StdModuleTypes.JAVA;
}

/**
 * Classifier/extension pair for an extra artifact (sources, javadoc, ...): the first importer
 * that knows wins; otherwise the type's defaults are used.
 */
@NotNull
public Pair<String, String> getClassifierAndExtension(@NotNull MavenArtifact artifact, @NotNull MavenExtraArtifactType type) {
  for (MavenImporter each : getSuitableImporters()) {
    Pair<String, String> result = each.getExtraArtifactClassifierAndExtension(artifact, type);
    if (result != null) return result;
  }
  return Pair.create(type.getDefaultClassifier(), type.getDefaultExtension());
}

/** Index over the dependency list, built lazily and stored in the generic cache. */
public MavenArtifactIndex getDependencyArtifactIndex() {
  MavenArtifactIndex res = getCachedValue(DEPENDENCIES_CACHE_KEY);
  if (res == null) {
    res = MavenArtifactIndex.build(getDependencies());
    res = putCachedValue(DEPENDENCIES_CACHE_KEY, res); // keep whichever value won the race
  }
  return res;
}

/** Fetches a value from the generic per-State cache; null if absent. */
@Nullable
public <V> V getCachedValue(Key<V> key) {
  //noinspection unchecked
  return (V)myState.myCache.get(key);
}

/** Atomically installs a cache value; returns the already-present value if another thread won. */
@NotNull
public <V> V putCachedValue(Key<V> key, @NotNull V value) {
  ConcurrentHashMap<Key, Object> map = myState.myCache;
  Object oldValue = map.putIfAbsent(key, value);
  if (oldValue != null) {
    return (V)oldValue;
  }
  return value;
}

@Override
public String toString() {
  return getMavenId().toString();
}

/**
 * Serializable snapshot of everything read from the pom plus derived caches.
 * Cache fields are volatile (lazy double-checked init) and excluded from change comparison.
 */
private static class State implements Cloneable, Serializable {
  // 0 means "never read" — getChanges() then reports everything as changed
  long myLastReadStamp = 0;

  // --- data read from the pom ---
  MavenId myMavenId;
  MavenId myParentId;
  String myPackaging;
  String myName;
  String myFinalName;
  String myDefaultGoal;
  String myBuildDirectory;
  String myOutputDirectory;
  String myTestOutputDirectory;
  List<String> mySources;
  List<String> myTestSources;
  List<MavenResource> myResources;
  List<MavenResource> myTestResources;
  List<String> myFilters;
  Properties myProperties;
  List<MavenPlugin> myPlugins;
  List<MavenArtifact> myExtensions;
  List<MavenArtifact> myDependencies;
  List<MavenArtifactNode> myDependencyTree;
  List<MavenRemoteRepository> myRemoteRepositories;
  List<MavenArtifact> myAnnotationProcessors;
  Map<String, String> myModulesPathsAndNames;
  Map<String, String> myModelMap;
  Collection<String> myProfilesIds;
  MavenExplicitProfiles myActivatedProfilesIds;
  Collection<MavenProjectProblem> myReadingProblems;
  Set<MavenId> myUnresolvedArtifactIds;
  File myLocalRepository;

  // --- derived caches, recomputed on demand ---
  volatile List<MavenProjectProblem> myProblemsCache;
  volatile List<MavenArtifact> myUnresolvedDependenciesCache;
  volatile List<MavenPlugin> myUnresolvedPluginsCache;
  volatile List<MavenArtifact> myUnresolvedExtensionsCache;
  volatile List<MavenArtifact> myUnresolvedAnnotationProcessors;

  // transient: rebuilt in readObject() after deserialization
  transient ConcurrentHashMap<Key, Object> myCache = new ConcurrentHashMap<>();

  @Override
  public State clone() {
    try {
      State result = (State)super.clone();
      // NOTE(review): this assigns a fresh map to THIS instance's cache; the clone keeps the old
      // (shared) map until result.resetCache() clears it below. Both caches end up empty, but
      // confirm whether "result.myCache = new ConcurrentHashMap<>()" was intended instead.
      myCache = new ConcurrentHashMap<>();
      result.resetCache();
      return result;
    }
    catch (CloneNotSupportedException e) {
      throw new RuntimeException(e);
    }
  }

  /** Drops all derived caches so they are recomputed lazily. */
  private void resetCache() {
    myProblemsCache = null;
    myUnresolvedDependenciesCache = null;
    myUnresolvedPluginsCache = null;
    myUnresolvedExtensionsCache = null;
    myUnresolvedAnnotationProcessors = null;
    myCache.clear();
  }

  /** Diffs this state against {@code other}; a never-read state reports everything as changed. */
  public MavenProjectChanges getChanges(State other) {
    if (myLastReadStamp == 0) return MavenProjectChanges.ALL;

    MavenProjectChanges result = new MavenProjectChanges();

    result.packaging = !Comparing.equal(myPackaging, other.myPackaging);

    result.output = !Comparing.equal(myFinalName, other.myFinalName)
                    || !Comparing.equal(myBuildDirectory, other.myBuildDirectory)
                    || !Comparing.equal(myOutputDirectory, other.myOutputDirectory)
                    || !Comparing.equal(myTestOutputDirectory, other.myTestOutputDirectory);

    result.sources = !Comparing.equal(mySources, other.mySources)
                     || !Comparing.equal(myTestSources, other.myTestSources)
                     || !Comparing.equal(myResources, other.myResources)
                     || !Comparing.equal(myTestResources, other.myTestResources);

    // a different local repository invalidates both dependency and plugin resolution
    boolean repositoryChanged = !Comparing.equal(myLocalRepository, other.myLocalRepository);

    result.dependencies = repositoryChanged || !Comparing.equal(myDependencies, other.myDependencies);
    result.plugins = repositoryChanged || !Comparing.equal(myPlugins, other.myPlugins);

    return result;
  }

  private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    in.defaultReadObject();
    myCache = new ConcurrentHashMap<>(); // transient field: must be re-created after deserialization
  }
}
}
package com.alibaba.otter.canal.client.adapter.config.bind;

import java.beans.PropertyDescriptor;
import java.util.*;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.PropertyValues;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.support.ResourceEditorRegistrar;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.MessageSource;
import org.springframework.context.MessageSourceAware;
import org.springframework.core.convert.ConversionService;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
import org.springframework.validation.*;

import com.alibaba.otter.canal.client.adapter.config.common.PropertySources;

/**
 * Validate some {@link Properties} (or optionally
 * {@link org.springframework.core.env.PropertySources}) by binding them to an
 * object of a specified type and then optionally running a {@link Validator}
 * over it.
 * <p>
 * Vendored copy of Spring Boot's PropertiesConfigurationFactory; keep diffs
 * against upstream minimal.
 *
 * @param <T> the target type
 * @author Dave Syer
 */
public class PropertiesConfigurationFactory<T> implements FactoryBean<T>, ApplicationContextAware,
                                               MessageSourceAware, InitializingBean {

    // delimiters that separate an exact property name from nested/indexed parts
    private static final char[] EXACT_DELIMITERS       = { '_', '.', '[' };

    private static final char[] TARGET_NAME_DELIMITERS = { '_', '.' };

    private static final Log    logger                 = LogFactory.getLog(PropertiesConfigurationFactory.class);

    private boolean             ignoreUnknownFields    = true;

    private boolean             ignoreInvalidFields;

    private boolean             exceptionIfInvalid     = true;

    private PropertySources     propertySources;

    private final T             target;

    private Validator           validator;

    private ApplicationContext  applicationContext;

    private MessageSource       messageSource;

    // set on first bind so getObject() does not bind twice
    private boolean             hasBeenBound           = false;

    private boolean             ignoreNestedProperties = false;

    private String              targetName;

    private ConversionService   conversionService;

    private boolean             resolvePlaceholders    = true;

    /**
     * Create a new {@link PropertiesConfigurationFactory} instance.
     *
     * @param target the target object to bind to
     * @see #PropertiesConfigurationFactory(Class)
     */
    public PropertiesConfigurationFactory(T target){
        Assert.notNull(target, "target must not be null");
        this.target = target;
    }

    /**
     * Create a new {@link PropertiesConfigurationFactory} instance, instantiating
     * the target from its default constructor.
     *
     * @param type the target type
     * @see #PropertiesConfigurationFactory(Object)
     */
    @SuppressWarnings("unchecked")
    public PropertiesConfigurationFactory(Class<?> type){
        Assert.notNull(type, "type must not be null");
        this.target = (T) BeanUtils.instantiate(type);
    }

    /**
     * Flag to disable binding of nested properties (i.e. those with period
     * separators in their paths). Can be useful to disable this if the name prefix
     * is empty and you don't want to ignore unknown fields.
     *
     * @param ignoreNestedProperties the flag to set (default false)
     */
    public void setIgnoreNestedProperties(boolean ignoreNestedProperties) {
        this.ignoreNestedProperties = ignoreNestedProperties;
    }

    /**
     * Set whether to ignore unknown fields, that is, whether to ignore bind
     * parameters that do not have corresponding fields in the target object.
     * <p>
     * Default is "true". Turn this off to enforce that all bind parameters must
     * have a matching field in the target object.
     *
     * @param ignoreUnknownFields if unknown fields should be ignored
     */
    public void setIgnoreUnknownFields(boolean ignoreUnknownFields) {
        this.ignoreUnknownFields = ignoreUnknownFields;
    }

    /**
     * Set whether to ignore invalid fields, that is, whether to ignore bind
     * parameters that have corresponding fields in the target object which are not
     * accessible (for example because of null values in the nested path).
     * <p>
     * Default is "false". Turn this on to ignore bind parameters for nested objects
     * in non-existing parts of the target object graph.
     *
     * @param ignoreInvalidFields if invalid fields should be ignored
     */
    public void setIgnoreInvalidFields(boolean ignoreInvalidFields) {
        this.ignoreInvalidFields = ignoreInvalidFields;
    }

    /**
     * Set the target name (the property-name prefix the target is bound under).
     *
     * @param targetName the target name
     */
    public void setTargetName(String targetName) {
        this.targetName = targetName;
    }

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) {
        this.applicationContext = applicationContext;
    }

    /**
     * Set the message source (used to resolve validation error messages).
     *
     * @param messageSource the message source
     */
    @Override
    public void setMessageSource(MessageSource messageSource) {
        this.messageSource = messageSource;
    }

    /**
     * Set the property sources.
     *
     * @param propertySources the property sources
     */
    public void setPropertySources(PropertySources propertySources) {
        this.propertySources = propertySources;
    }

    /**
     * Set the conversion service.
     *
     * @param conversionService the conversion service
     */
    public void setConversionService(ConversionService conversionService) {
        this.conversionService = conversionService;
    }

    /**
     * Set the validator.
     *
     * @param validator the validator
     */
    public void setValidator(Validator validator) {
        this.validator = validator;
    }

    /**
     * Set a flag to indicate that an exception should be raised if a Validator is
     * available and validation fails.
     *
     * @param exceptionIfInvalid the flag to set
     * @deprecated as of 1.5, do not specify a {@link Validator} if validation
     * should not occur
     */
    @Deprecated
    public void setExceptionIfInvalid(boolean exceptionIfInvalid) {
        this.exceptionIfInvalid = exceptionIfInvalid;
    }

    /**
     * Flag to indicate that placeholders should be replaced during binding. Default
     * is true.
     *
     * @param resolvePlaceholders flag value
     */
    public void setResolvePlaceholders(boolean resolvePlaceholders) {
        this.resolvePlaceholders = resolvePlaceholders;
    }

    @Override
    public void afterPropertiesSet() throws Exception {
        bindPropertiesToTarget();
    }

    @Override
    public Class<?> getObjectType() {
        if (this.target == null) {
            return Object.class;
        }
        return this.target.getClass();
    }

    @Override
    public boolean isSingleton() {
        return true;
    }

    @Override
    public T getObject() throws Exception {
        // lazily bind if afterPropertiesSet() was never invoked
        if (!this.hasBeenBound) {
            bindPropertiesToTarget();
        }
        return this.target;
    }

    /**
     * Bind the configured property sources onto the target, honoring
     * {@code exceptionIfInvalid} on failure.
     */
    public void bindPropertiesToTarget() throws BindException {
        Assert.state(this.propertySources != null, "PropertySources should not be null");
        try {
            if (logger.isTraceEnabled()) {
                logger.trace("Property Sources: " + this.propertySources);
            }
            this.hasBeenBound = true;
            doBindPropertiesToTarget();
        } catch (BindException ex) {
            if (this.exceptionIfInvalid) {
                throw ex;
            }
            // swallow on purpose when exceptions are disabled; log so failures are visible
            logger.error("Failed to load Properties validation bean. " + "Your Properties may be invalid.", ex);
        }
    }

    // Core binding pipeline; the setup order (validator, conversion service, limits,
    // customizeBinder hook, resource editors, then bind + validate) mirrors upstream.
    private void doBindPropertiesToTarget() throws BindException {
        RelaxedDataBinder dataBinder = (this.targetName != null ? new RelaxedDataBinder(this.target,
            this.targetName) : new RelaxedDataBinder(this.target));
        if (this.validator != null && this.validator.supports(dataBinder.getTarget().getClass())) {
            dataBinder.setValidator(this.validator);
        }
        if (this.conversionService != null) {
            dataBinder.setConversionService(this.conversionService);
        }
        dataBinder.setAutoGrowCollectionLimit(Integer.MAX_VALUE);
        dataBinder.setIgnoreNestedProperties(this.ignoreNestedProperties);
        dataBinder.setIgnoreInvalidFields(this.ignoreInvalidFields);
        dataBinder.setIgnoreUnknownFields(this.ignoreUnknownFields);
        customizeBinder(dataBinder);
        if (this.applicationContext != null) {
            ResourceEditorRegistrar resourceEditorRegistrar = new ResourceEditorRegistrar(this.applicationContext,
                this.applicationContext.getEnvironment());
            resourceEditorRegistrar.registerCustomEditors(dataBinder);
        }
        Iterable<String> relaxedTargetNames = getRelaxedTargetNames();
        Set<String> names = getNames(relaxedTargetNames);
        PropertyValues propertyValues = getPropertySourcesPropertyValues(names, relaxedTargetNames);
        dataBinder.bind(propertyValues);
        if (this.validator != null) {
            dataBinder.validate();
        }
        checkForBindingErrors(dataBinder);
    }

    // Relaxed variants of the target name, or null when no prefix is configured.
    private Iterable<String> getRelaxedTargetNames() {
        return (this.target != null && StringUtils.hasLength(this.targetName) ? new RelaxedNames(this.targetName) : null);
    }

    // Expands each bean property into its relaxed names, optionally combined with
    // each prefix via '.' and '_' separators.
    private Set<String> getNames(Iterable<String> prefixes) {
        Set<String> names = new LinkedHashSet<String>();
        if (this.target != null) {
            PropertyDescriptor[] descriptors = BeanUtils.getPropertyDescriptors(this.target.getClass());
            for (PropertyDescriptor descriptor : descriptors) {
                String name = descriptor.getName();
                if (!name.equals("class")) { // skip Object.getClass()
                    RelaxedNames relaxedNames = RelaxedNames.forCamelCase(name);
                    if (prefixes == null) {
                        for (String relaxedName : relaxedNames) {
                            names.add(relaxedName);
                        }
                    } else {
                        for (String prefix : prefixes) {
                            for (String relaxedName : relaxedNames) {
                                names.add(prefix + "." + relaxedName);
                                names.add(prefix + "_" + relaxedName);
                            }
                        }
                    }
                }
            }
        }
        return names;
    }

    private PropertyValues getPropertySourcesPropertyValues(Set<String> names, Iterable<String> relaxedTargetNames) {
        PropertyNamePatternsMatcher includes = getPropertyNamePatternsMatcher(names, relaxedTargetNames);
        return new PropertySourcesPropertyValues(this.propertySources, names, includes, this.resolvePlaceholders);
    }

    // Chooses how aggressively property names may be pre-filtered before binding.
    private PropertyNamePatternsMatcher getPropertyNamePatternsMatcher(Set<String> names,
                                                                       Iterable<String> relaxedTargetNames) {
        if (this.ignoreUnknownFields && !isMapTarget()) {
            // Since unknown fields are ignored we can filter them out early to save
            // unnecessary calls to the PropertySource.
            return new DefaultPropertyNamePatternsMatcher(EXACT_DELIMITERS, true, names);
        }
        if (relaxedTargetNames != null) {
            // We can filter properties to those starting with the target name, but
            // we can't do a complete filter since we need to trigger the
            // unknown fields check
            Set<String> relaxedNames = new HashSet<String>();
            for (String relaxedTargetName : relaxedTargetNames) {
                relaxedNames.add(relaxedTargetName);
            }
            return new DefaultPropertyNamePatternsMatcher(TARGET_NAME_DELIMITERS, true, relaxedNames);
        }
        // Not ideal, we basically can't filter anything
        return PropertyNamePatternsMatcher.ALL;
    }

    private boolean isMapTarget() {
        return this.target != null && Map.class.isAssignableFrom(this.target.getClass());
    }

    // Logs every binding error, then rethrows as BindException when configured to.
    private void checkForBindingErrors(RelaxedDataBinder dataBinder) throws BindException {
        BindingResult errors = dataBinder.getBindingResult();
        if (errors.hasErrors()) {
            logger.error("Properties configuration failed validation");
            for (ObjectError error : errors.getAllErrors()) {
                logger.error(this.messageSource != null ? this.messageSource.getMessage(error, Locale.getDefault())
                                                          + " (" + error + ")" : error);
            }
            if (this.exceptionIfInvalid) {
                throw new BindException(errors);
            }
        }
    }

    /**
     * Customize the data binder. Hook for subclasses; default is a no-op.
     *
     * @param dataBinder the data binder that will be used to bind and validate
     */
    protected void customizeBinder(DataBinder dataBinder) {
    }

}
/*
 * Copyright 2018 NAVER Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.navercorp.pinpoint.plugin.rabbitmq.spring;

import com.navercorp.pinpoint.bootstrap.plugin.test.Expectations;
import com.navercorp.pinpoint.bootstrap.plugin.test.ExpectedTrace;
import com.navercorp.pinpoint.bootstrap.plugin.test.PluginTestVerifier;
import com.navercorp.pinpoint.common.trace.ServiceType;
import com.navercorp.pinpoint.plugin.rabbitmq.util.RabbitMQTestConstants;
import com.navercorp.pinpoint.plugin.rabbitmq.util.TestBroker;
import com.navercorp.pinpoint.pluginit.utils.AgentPath;
import com.navercorp.pinpoint.test.plugin.Dependency;
import com.navercorp.pinpoint.test.plugin.ImportPlugin;
import com.navercorp.pinpoint.test.plugin.JvmArgument;
import com.navercorp.pinpoint.test.plugin.JvmVersion;
import com.navercorp.pinpoint.test.plugin.PinpointAgent;
import com.navercorp.pinpoint.test.plugin.PinpointConfig;
import com.navercorp.pinpoint.test.plugin.PinpointPluginTestSuite;
import com.navercorp.test.pinpoint.plugin.rabbitmq.PropagationMarker;
import com.navercorp.test.pinpoint.plugin.rabbitmq.spring.config.CommonConfig;
import com.navercorp.test.pinpoint.plugin.rabbitmq.spring.config.MessageListenerConfig_Post_1_4_0;
import com.navercorp.test.pinpoint.plugin.rabbitmq.spring.config.ReceiverConfig_Post_1_6_0;
import com.rabbitmq.client.AMQP;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Consumer;
import com.rabbitmq.client.Envelope;
import com.rabbitmq.client.impl.AMQCommand;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.amqp.core.Message;

import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;

/**
 * Spring-amqp rabbit 2.1.0 removed previously added <tt>BlockingQueueConsumer$ConsumerDecorator</tt>.
 * <p>
 * Skip 2.1.1.RELEASE as it has spring 5.1.2.BUILD-SNAPSHOT dependencies not in maven central.
 *
 * @author HyunGil Jeong
 */
@RunWith(PinpointPluginTestSuite.class)
@PinpointAgent(AgentPath.PATH)
@PinpointConfig("rabbitmq/client/pinpoint-rabbitmq.config")
@ImportPlugin({"com.navercorp.pinpoint:pinpoint-rabbitmq-plugin", "com.navercorp.pinpoint:pinpoint-jetty-plugin", "com.navercorp.pinpoint:pinpoint-user-plugin"})
@Dependency({"org.springframework.amqp:spring-rabbit:[2.1.0.RELEASE],(2.1.1.RELEASE,2.1.9.RELEASE),(2.1.9.RELEASE,)", "com.fasterxml.jackson.core:jackson-core:2.8.11", "org.apache.qpid:qpid-broker:6.1.1"})
@JvmVersion(8)
@JvmArgument("-DtestLoggerEnable=false")
public class SpringAmqpRabbit_2_1_x_to_2_x_IT {

    // embedded broker and Spring context shared by all tests in this class
    private static final TestBroker BROKER = new TestBroker();
    private static final TestApplicationContext CONTEXT = new TestApplicationContext();

    private final SpringAmqpRabbitTestRunner testRunner = new SpringAmqpRabbitTestRunner(CONTEXT);

    /** Starts the embedded broker and initializes the Spring context once for the class. */
    @BeforeClass
    public static void setUpBeforeClass() throws Exception {
        BROKER.start();
        CONTEXT.init(
                CommonConfig.class,
                MessageListenerConfig_Post_1_4_0.class,
                ReceiverConfig_Post_1_6_0.class);
    }

    @AfterClass
    public static void tearDownAfterClass() {
        CONTEXT.close();
        BROKER.shutdown();
    }

    /**
     * Listener-container (push) scenario: publish via RabbitTemplate and verify the full
     * producer and consumer trace chains, including the async hand-offs.
     */
    @Test
    public void testPush() throws Exception {
        final String remoteAddress = testRunner.getRemoteAddress();

        Class<?> rabbitTemplateClass = Class.forName("org.springframework.amqp.rabbit.core.RabbitTemplate");

        Method rabbitTemplateConvertAndSend = rabbitTemplateClass.getDeclaredMethod("convertAndSend", String.class, String.class, Object.class);
        ExpectedTrace rabbitTemplateConvertAndSendTrace = Expectations.event(
                RabbitMQTestConstants.RABBITMQ_CLIENT_INTERNAL, // serviceType
                rabbitTemplateConvertAndSend); // method

        // automatic recovery deliberately disabled as Spring has it's own recovery mechanism
        Class<?> channelNClass = Class.forName("com.rabbitmq.client.impl.ChannelN");
        Method channelNBasicPublish = channelNClass.getDeclaredMethod("basicPublish", String.class, String.class, boolean.class, boolean.class, AMQP.BasicProperties.class, byte[].class);
        ExpectedTrace channelNBasicPublishTrace = Expectations.event(
                RabbitMQTestConstants.RABBITMQ_CLIENT, // serviceType
                channelNBasicPublish, // method
                null, // rpc
                remoteAddress, // endPoint
                "exchange-" + RabbitMQTestConstants.EXCHANGE, // destinationId
                Expectations.annotation("rabbitmq.exchange", RabbitMQTestConstants.EXCHANGE),
                Expectations.annotation("rabbitmq.routingkey", RabbitMQTestConstants.ROUTING_KEY_PUSH));
        ExpectedTrace rabbitMqConsumerInvocationTrace = Expectations.root(
                RabbitMQTestConstants.RABBITMQ_CLIENT, // serviceType
                "RabbitMQ Consumer Invocation", // method
                "rabbitmq://exchange=" + RabbitMQTestConstants.EXCHANGE, // rpc
                null, // endPoint (collected but API to retrieve local address is not available in all versions, so skip)
                remoteAddress, // remoteAddress
                Expectations.annotation("rabbitmq.routingkey", RabbitMQTestConstants.ROUTING_KEY_PUSH));
        Class<?> consumerDispatcherClass = Class.forName("com.rabbitmq.client.impl.ConsumerDispatcher");
        Method consumerDispatcherHandleDelivery = consumerDispatcherClass.getDeclaredMethod("handleDelivery", Consumer.class, String.class, Envelope.class, AMQP.BasicProperties.class, byte[].class);
        ExpectedTrace consumerDispatcherHandleDeliveryTrace = Expectations.event(
                RabbitMQTestConstants.RABBITMQ_CLIENT_INTERNAL, // serviceType
                consumerDispatcherHandleDelivery); // method
        ExpectedTrace asynchronousInvocationTrace = Expectations.event(
                ServiceType.ASYNC.getName(),
                "Asynchronous Invocation");
        Class<?> blockingQueueConsumerInternalConsumerClass = Class.forName("org.springframework.amqp.rabbit.listener.BlockingQueueConsumer$InternalConsumer");
        Method blockingQueueConsumerInternalConsumerHandleDelivery = blockingQueueConsumerInternalConsumerClass.getDeclaredMethod("handleDelivery", String.class, Envelope.class, AMQP.BasicProperties.class, byte[].class);
        ExpectedTrace blockingQueueConsumerInternalConsumerHandleDeliveryTrace = Expectations.event(
                RabbitMQTestConstants.RABBITMQ_CLIENT_INTERNAL, // serviceType
                blockingQueueConsumerInternalConsumerHandleDelivery);
        Class<?> deliveryClass = Class.forName("org.springframework.amqp.rabbit.support.Delivery");
        Constructor<?> deliveryConstructor = deliveryClass.getDeclaredConstructor(String.class, Envelope.class, AMQP.BasicProperties.class, byte[].class, String.class);
        ExpectedTrace deliveryConstructorTrace = Expectations.event(
                RabbitMQTestConstants.RABBITMQ_CLIENT_INTERNAL, // serviceType
                deliveryConstructor);
        Class<?> abstractMessageListenerContainerClass = Class.forName("org.springframework.amqp.rabbit.listener.AbstractMessageListenerContainer");
        Method abstractMessageListenerContainerExecuteListener = getExecuteListenerMethod(abstractMessageListenerContainerClass);
        ExpectedTrace abstractMessageListenerContainerExecuteListenerTrace = Expectations.event(
                ServiceType.INTERNAL_METHOD.getName(),
                abstractMessageListenerContainerExecuteListener);
        Class<?> propagationMarkerClass = PropagationMarker.class;
        Method propagationMarkerMark = propagationMarkerClass.getDeclaredMethod("mark");
        ExpectedTrace markTrace = Expectations.event(
                ServiceType.INTERNAL_METHOD.getName(),
                propagationMarkerMark);

        ExpectedTrace[] producerTraces = {
                rabbitTemplateConvertAndSendTrace,
                channelNBasicPublishTrace
        };
        ExpectedTrace[] consumerTraces = {
                rabbitMqConsumerInvocationTrace,
                consumerDispatcherHandleDeliveryTrace,
                asynchronousInvocationTrace,
                blockingQueueConsumerInternalConsumerHandleDeliveryTrace,
                deliveryConstructorTrace,
                asynchronousInvocationTrace,
                abstractMessageListenerContainerExecuteListenerTrace,
                markTrace
        };

        final int expectedTraceCount = producerTraces.length + consumerTraces.length;
        final PluginTestVerifier verifier = testRunner.runPush(expectedTraceCount);

        verifier.verifyDiscreteTrace(producerTraces);
        verifier.verifyDiscreteTrace(consumerTraces);
        verifier.verifyTraceCount(0);
    }

    /**
     * Resolves AbstractMessageListenerContainer#executeListener reflectively and validates its
     * signature: (Channel, Message|Object) depending on the spring-rabbit version.
     */
    private Method getExecuteListenerMethod(Class<?> abstractMessageListenerContainerClass) throws NoSuchMethodException {
        Method abstractMessageListenerContainerExecuteListener = getExecuteListenerMethod0(abstractMessageListenerContainerClass);
        Class<?>[] parameterTypes = abstractMessageListenerContainerExecuteListener.getParameterTypes();
        if (parameterTypes.length == 2) {
            if (!parameterTypes[0].equals(Channel.class)) {
                throw new NoSuchMethodException("executeListener");
            }
            if (parameterTypes[1].equals(Message.class) || parameterTypes[1].equals(Object.class)) {
                return abstractMessageListenerContainerExecuteListener;
            }
        }
        throw new NoSuchMethodException("executeListener");
    }

    // Finds the single declared executeListener method; fails if absent or ambiguous.
    private Method getExecuteListenerMethod0(Class<?> abstractMessageListenerContainerClass) throws NoSuchMethodException {
        List<Method> availableMethodList = new ArrayList<>();
        Method[] declaredMethods = abstractMessageListenerContainerClass.getDeclaredMethods();
        for (Method declaredMethod : declaredMethods) {
            if (declaredMethod.getName().equals("executeListener")) {
                availableMethodList.add(declaredMethod);
            }
        }
        if (availableMethodList.size() == 0) {
            throw new NoSuchMethodException("executeListener");
        } else if (availableMethodList.size() == 1) {
            return availableMethodList.get(0);
        } else {
            // NOTE(review): getClass() on a Class yields java.lang.Class in this message,
            // not the inspected class's name — confirm abstractMessageListenerContainerClass
            // itself was meant.
            throw new IllegalArgumentException(abstractMessageListenerContainerClass.getClass() + " has multiple executeListener()");
        }
    }

    /**
     * Pull scenario: publish, then receive via RabbitTemplate#receive and verify both the
     * queue-initiated and the client-initiated trace chains.
     */
    @Test
    public void testPull() throws Exception {
        final String remoteAddress = testRunner.getRemoteAddress();

        Class<?> rabbitTemplateClass = Class.forName("org.springframework.amqp.rabbit.core.RabbitTemplate");

        // verify queue-initiated traces
        Method rabbitTemplateConvertAndSend = rabbitTemplateClass.getDeclaredMethod("convertAndSend", String.class, String.class, Object.class);
        ExpectedTrace rabbitTemplateConvertAndSendTrace = Expectations.event(
                RabbitMQTestConstants.RABBITMQ_CLIENT_INTERNAL, // serviceType
                rabbitTemplateConvertAndSend); // method

        // automatic recovery deliberately disabled as Spring has it's own recovery mechanism
        Class<?> channelNClass = Class.forName("com.rabbitmq.client.impl.ChannelN");
        Method channelNBasicPublish = channelNClass.getDeclaredMethod("basicPublish", String.class, String.class, boolean.class, boolean.class, AMQP.BasicProperties.class, byte[].class);
        ExpectedTrace channelNBasicPublishTrace = Expectations.event(
                RabbitMQTestConstants.RABBITMQ_CLIENT, // serviceType
                channelNBasicPublish, // method
                null, // rpc
                remoteAddress, // endPoint
                "exchange-" + RabbitMQTestConstants.EXCHANGE, // destinationId
                Expectations.annotation("rabbitmq.exchange", RabbitMQTestConstants.EXCHANGE),
                Expectations.annotation("rabbitmq.routingkey", RabbitMQTestConstants.ROUTING_KEY_PULL));
        ExpectedTrace rabbitMqConsumerInvocationTrace = Expectations.root(
                RabbitMQTestConstants.RABBITMQ_CLIENT, // serviceType
                "RabbitMQ Consumer Invocation", // method
                "rabbitmq://exchange=" + RabbitMQTestConstants.EXCHANGE, // rpc
                null, // endPoint (collected but API to retrieve local address is not available in all versions, so skip)
                remoteAddress, // remoteAddress
                Expectations.annotation("rabbitmq.routingkey", RabbitMQTestConstants.ROUTING_KEY_PULL));
        Class<?> amqChannelClass = Class.forName("com.rabbitmq.client.impl.AMQChannel");
        Method amqChannelHandleCompleteInboundCommand = amqChannelClass.getDeclaredMethod("handleCompleteInboundCommand", AMQCommand.class);
        ExpectedTrace amqChannelHandleCompleteInboundCommandTrace = Expectations.event(
                RabbitMQTestConstants.RABBITMQ_CLIENT_INTERNAL, // serviceType
                amqChannelHandleCompleteInboundCommand); // method

        ExpectedTrace[] producerTraces = {
                rabbitTemplateConvertAndSendTrace,
                channelNBasicPublishTrace
        };
        ExpectedTrace[] consumerTraces = {
                rabbitMqConsumerInvocationTrace,
                amqChannelHandleCompleteInboundCommandTrace
        };

        // verify client-initiated traces
        Method rabbitTemplateReceive = rabbitTemplateClass.getDeclaredMethod("receive", String.class);
        ExpectedTrace rabbitTemplateReceiveTrace = Expectations.event(
                RabbitMQTestConstants.RABBITMQ_CLIENT_INTERNAL, // serviceType
                rabbitTemplateReceive); // method
        Method channelNBasicGet = channelNClass.getDeclaredMethod("basicGet", String.class, boolean.class);
        ExpectedTrace channelNBasicGetTrace = Expectations.event(
                RabbitMQTestConstants.RABBITMQ_CLIENT_INTERNAL,
                channelNBasicGet);
        Class<?> propagationMarkerClass = PropagationMarker.class;
        Method propagationMarkerMark = propagationMarkerClass.getDeclaredMethod("mark");
        ExpectedTrace markTrace = Expectations.event(
                ServiceType.INTERNAL_METHOD.getName(),
                propagationMarkerMark);

        ExpectedTrace[] clientInitiatedTraces = {
                rabbitTemplateReceiveTrace,
                channelNBasicGetTrace,
                markTrace
        };

        final int expectedTraceCount = producerTraces.length + consumerTraces.length + clientInitiatedTraces.length;
        final PluginTestVerifier verifier = testRunner.runPull(expectedTraceCount);

        verifier.verifyDiscreteTrace(producerTraces);
        verifier.verifyDiscreteTrace(consumerTraces);
        verifier.verifyDiscreteTrace(clientInitiatedTraces);
        verifier.verifyTraceCount(0);
    }

    /**
     * Pull-with-timeout scenario (receive(timeout) goes through the listener machinery).
     * NOTE: this method is truncated in this chunk; the remainder continues past this view.
     */
    @Test
    public void testPullWithTimeout() throws Exception {
        final String remoteAddress = testRunner.getRemoteAddress();

        Class<?> rabbitTemplateClass = Class.forName("org.springframework.amqp.rabbit.core.RabbitTemplate");

        // verify queue-initiated traces
        Method rabbitTemplateConvertAndSend = rabbitTemplateClass.getDeclaredMethod("convertAndSend", String.class, String.class, Object.class);
        ExpectedTrace rabbitTemplateConvertAndSendTrace = Expectations.event(
                RabbitMQTestConstants.RABBITMQ_CLIENT_INTERNAL, // serviceType
                rabbitTemplateConvertAndSend); // method

        // automatic recovery deliberately disabled as Spring has it's own recovery mechanism
        Class<?> channelNClass = Class.forName("com.rabbitmq.client.impl.ChannelN");
        Method channelNBasicPublish = channelNClass.getDeclaredMethod("basicPublish", String.class, String.class, boolean.class, boolean.class, AMQP.BasicProperties.class, byte[].class);
        ExpectedTrace channelNBasicPublishTrace = Expectations.event(
                RabbitMQTestConstants.RABBITMQ_CLIENT, // serviceType
                channelNBasicPublish, // method
                null, // rpc
                remoteAddress, // endPoint
                "exchange-" + RabbitMQTestConstants.EXCHANGE, // destinationId
                Expectations.annotation("rabbitmq.exchange", RabbitMQTestConstants.EXCHANGE),
                Expectations.annotation("rabbitmq.routingkey", RabbitMQTestConstants.ROUTING_KEY_PULL));
        ExpectedTrace rabbitMqConsumerInvocationTrace = Expectations.root(
                RabbitMQTestConstants.RABBITMQ_CLIENT, // serviceType
                "RabbitMQ Consumer Invocation", // method
                "rabbitmq://exchange=" + RabbitMQTestConstants.EXCHANGE, // rpc
                null, // endPoint (collected but API to retrieve local address is not available in all versions, so skip)
                remoteAddress, // remoteAddress
                Expectations.annotation("rabbitmq.routingkey", RabbitMQTestConstants.ROUTING_KEY_PULL));
        Class<?> consumerDispatcherClass = Class.forName("com.rabbitmq.client.impl.ConsumerDispatcher");
        Method consumerDispatcherHandleDelivery = consumerDispatcherClass.getDeclaredMethod("handleDelivery", Consumer.class, String.class, Envelope.class, AMQP.BasicProperties.class, byte[].class);
        ExpectedTrace consumerDispatcherHandleDeliveryTrace = Expectations.event(
                RabbitMQTestConstants.RABBITMQ_CLIENT_INTERNAL, // serviceType
                consumerDispatcherHandleDelivery); // method
        ExpectedTrace asynchronousInvocationTrace = Expectations.event(
                ServiceType.ASYNC.getName(),
                "Asynchronous Invocation");
        // RabbitTemplate internal consumer implementation - may change in future versions which will cause tests to
        // fail, in which case the
integration test needs to be updated to match code changes Class<?> rabbitTemplateInternalConsumerClass = getRabbitTemplateClazz(); Method rabbitTemplateInternalConsumerHandleDelivery = rabbitTemplateInternalConsumerClass.getDeclaredMethod("handleDelivery", String.class, Envelope.class, AMQP.BasicProperties.class, byte[].class); ExpectedTrace rabbitTemplateInternalConsumerHandleDeliveryTrace = Expectations.event( RabbitMQTestConstants.RABBITMQ_CLIENT_INTERNAL, // serviceType rabbitTemplateInternalConsumerHandleDelivery); // method Class<?> deliveryClass = Class.forName("org.springframework.amqp.rabbit.support.Delivery"); Constructor<?> deliveryConstructor = deliveryClass.getDeclaredConstructor(String.class, Envelope.class, AMQP.BasicProperties.class, byte[].class, String.class); ExpectedTrace deliveryConstructorTrace = Expectations.event( RabbitMQTestConstants.RABBITMQ_CLIENT_INTERNAL, // serviceType deliveryConstructor); ExpectedTrace[] producerTraces = { rabbitTemplateConvertAndSendTrace, channelNBasicPublishTrace }; ExpectedTrace[] consumerTraces = { rabbitMqConsumerInvocationTrace, consumerDispatcherHandleDeliveryTrace, asynchronousInvocationTrace, rabbitTemplateInternalConsumerHandleDeliveryTrace, deliveryConstructorTrace }; // verify client-initiated traces Method rabbitTemplateReceive = rabbitTemplateClass.getDeclaredMethod("receive", String.class, long.class); ExpectedTrace rabbitTemplateReceiveTrace = Expectations.event( RabbitMQTestConstants.RABBITMQ_CLIENT_INTERNAL, // serviceType rabbitTemplateReceive); // method Class<?> propagationMarkerClass = PropagationMarker.class; Method propagationMarkerMark = propagationMarkerClass.getDeclaredMethod("mark"); ExpectedTrace markTrace = Expectations.event( ServiceType.INTERNAL_METHOD.getName(), propagationMarkerMark); ExpectedTrace[] clientInitiatedTraces = { rabbitTemplateReceiveTrace, markTrace }; final int expectedTraceCount = producerTraces.length + consumerTraces.length + clientInitiatedTraces.length; final 
PluginTestVerifier verifier = testRunner.runPull(expectedTraceCount, 5000L); verifier.verifyDiscreteTrace(producerTraces); verifier.verifyDiscreteTrace(consumerTraces); verifier.verifyDiscreteTrace(clientInitiatedTraces); verifier.verifyTraceCount(0); } private Class getRabbitTemplateClazz() { int[] indexes = {3, 2}; for (int index : indexes) { try { Class<?> rabbitTemplateInternalConsumerClass = Class.forName("org.springframework.amqp.rabbit.core.RabbitTemplate$" + index); if (rabbitTemplateInternalConsumerClass != null) { return rabbitTemplateInternalConsumerClass; } } catch (ClassNotFoundException e) { } } throw new IllegalArgumentException("Failed to find RabbitTemplate$ clazz"); } }
/**
 * EMF-generated implementation of the EIP DSL {@code Aggregator} model element.
 * <p>
 * NOTE(review): this file is code-generated (every member is tagged {@code @generated});
 * hand edits to logic will be lost on regeneration. Only documentation is touched here.
 */
package com.github.lbroudoux.dsl.eip.impl;

import com.github.lbroudoux.dsl.eip.Aggregator;
import com.github.lbroudoux.dsl.eip.Channel;
import com.github.lbroudoux.dsl.eip.EipPackage;
import com.github.lbroudoux.dsl.eip.MessagePart;

import java.util.Collection;

import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.util.EObjectWithInverseResolvingEList;
import org.eclipse.emf.ecore.util.InternalEList;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Aggregator</b></em>'.
 * <p>
 * Holds the aggregator's name, its incoming/outgoing {@link Channel} lists (both with
 * bidirectional inverse references maintained automatically), the {@link MessagePart} the
 * aggregation applies to, and the strategy/expression strings. The reflective
 * {@code eGet}/{@code eSet}/{@code eUnset}/{@code eIsSet} methods dispatch on the feature IDs
 * declared in {@link EipPackage}.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * <ul>
 *   <li>{@link com.github.lbroudoux.dsl.eip.impl.AggregatorImpl#getName <em>Name</em>}</li>
 *   <li>{@link com.github.lbroudoux.dsl.eip.impl.AggregatorImpl#getToChannels <em>To Channels</em>}</li>
 *   <li>{@link com.github.lbroudoux.dsl.eip.impl.AggregatorImpl#getFromChannels <em>From Channels</em>}</li>
 *   <li>{@link com.github.lbroudoux.dsl.eip.impl.AggregatorImpl#getPart <em>Part</em>}</li>
 *   <li>{@link com.github.lbroudoux.dsl.eip.impl.AggregatorImpl#getStrategy <em>Strategy</em>}</li>
 *   <li>{@link com.github.lbroudoux.dsl.eip.impl.AggregatorImpl#getExpression <em>Expression</em>}</li>
 * </ul>
 * </p>
 *
 * @generated
 */
public class AggregatorImpl extends MetadatableImpl implements Aggregator {
   /**
    * The default value of the '{@link #getName() <em>Name</em>}' attribute.
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    * @see #getName()
    * @generated
    * @ordered
    */
   protected static final String NAME_EDEFAULT = null;

   /**
    * The cached value of the '{@link #getName() <em>Name</em>}' attribute.
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    * @see #getName()
    * @generated
    * @ordered
    */
   protected String name = NAME_EDEFAULT;

   /**
    * The cached value of the '{@link #getToChannels() <em>To Channels</em>}' reference list.
    * <!-- begin-user-doc -->
    * Lazily instantiated by {@link #getToChannels()}; {@code null} until first access.
    * <!-- end-user-doc -->
    * @see #getToChannels()
    * @generated
    * @ordered
    */
   protected EList<Channel> toChannels;

   /**
    * The cached value of the '{@link #getFromChannels() <em>From Channels</em>}' reference list.
    * <!-- begin-user-doc -->
    * Lazily instantiated by {@link #getFromChannels()}; {@code null} until first access.
    * <!-- end-user-doc -->
    * @see #getFromChannels()
    * @generated
    * @ordered
    */
   protected EList<Channel> fromChannels;

   /**
    * The default value of the '{@link #getPart() <em>Part</em>}' attribute.
    * <!-- begin-user-doc -->
    * Defaults to {@link MessagePart#HEADER}.
    * <!-- end-user-doc -->
    * @see #getPart()
    * @generated
    * @ordered
    */
   protected static final MessagePart PART_EDEFAULT = MessagePart.HEADER;

   /**
    * The cached value of the '{@link #getPart() <em>Part</em>}' attribute.
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    * @see #getPart()
    * @generated
    * @ordered
    */
   protected MessagePart part = PART_EDEFAULT;

   /**
    * The default value of the '{@link #getStrategy() <em>Strategy</em>}' attribute.
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    * @see #getStrategy()
    * @generated
    * @ordered
    */
   protected static final String STRATEGY_EDEFAULT = null;

   /**
    * The cached value of the '{@link #getStrategy() <em>Strategy</em>}' attribute.
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    * @see #getStrategy()
    * @generated
    * @ordered
    */
   protected String strategy = STRATEGY_EDEFAULT;

   /**
    * The default value of the '{@link #getExpression() <em>Expression</em>}' attribute.
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    * @see #getExpression()
    * @generated
    * @ordered
    */
   protected static final String EXPRESSION_EDEFAULT = null;

   /**
    * The cached value of the '{@link #getExpression() <em>Expression</em>}' attribute.
    * <!-- begin-user-doc -->
    * <!-- end-user-doc -->
    * @see #getExpression()
    * @generated
    * @ordered
    */
   protected String expression = EXPRESSION_EDEFAULT;

   /**
    * <!-- begin-user-doc -->
    * Protected: instances are created via the EMF factory, not directly.
    * <!-- end-user-doc -->
    * @generated
    */
   protected AggregatorImpl() {
      super();
   }

   /**
    * <!-- begin-user-doc -->
    * Returns the static {@link EClass} describing this model element.
    * <!-- end-user-doc -->
    * @generated
    */
   @Override
   protected EClass eStaticClass() {
      return EipPackage.Literals.AGGREGATOR;
   }

   /**
    * <!-- begin-user-doc -->
    * @return the aggregator's name, or {@code null} if unset
    * <!-- end-user-doc -->
    * @generated
    */
   public String getName() {
      return name;
   }

   /**
    * <!-- begin-user-doc -->
    * Sets the name, firing a SET notification only when adapters are listening
    * (the {@code eNotificationRequired()} guard avoids allocating the notification otherwise).
    * <!-- end-user-doc -->
    * @generated
    */
   public void setName(String newName) {
      String oldName = name;
      name = newName;
      if (eNotificationRequired())
         eNotify(new ENotificationImpl(this, Notification.SET, EipPackage.AGGREGATOR__NAME, oldName, name));
   }

   /**
    * <!-- begin-user-doc -->
    * Lazily creates the outgoing-channel list. The list maintains the bidirectional
    * inverse: adding a channel here updates that channel's {@code fromEndpoint}
    * (feature {@code EipPackage.CHANNEL__FROM_ENDPOINT}).
    * <!-- end-user-doc -->
    * @generated
    */
   public EList<Channel> getToChannels() {
      if (toChannels == null) {
         toChannels = new EObjectWithInverseResolvingEList<Channel>(Channel.class, this, EipPackage.AGGREGATOR__TO_CHANNELS, EipPackage.CHANNEL__FROM_ENDPOINT);
      }
      return toChannels;
   }

   /**
    * <!-- begin-user-doc -->
    * Lazily creates the incoming-channel list. The list maintains the bidirectional
    * inverse: adding a channel here updates that channel's {@code toEndpoint}
    * (feature {@code EipPackage.CHANNEL__TO_ENDPOINT}).
    * <!-- end-user-doc -->
    * @generated
    */
   public EList<Channel> getFromChannels() {
      if (fromChannels == null) {
         fromChannels = new EObjectWithInverseResolvingEList<Channel>(Channel.class, this, EipPackage.AGGREGATOR__FROM_CHANNELS, EipPackage.CHANNEL__TO_ENDPOINT);
      }
      return fromChannels;
   }

   /**
    * <!-- begin-user-doc -->
    * @return the message part the aggregation applies to (never {@code null};
    *         see {@link #setPart(MessagePart)})
    * <!-- end-user-doc -->
    * @generated
    */
   public MessagePart getPart() {
      return part;
   }

   /**
    * <!-- begin-user-doc -->
    * Sets the part; a {@code null} argument is coerced to the default
    * ({@code PART_EDEFAULT}) so the attribute is never {@code null}.
    * <!-- end-user-doc -->
    * @generated
    */
   public void setPart(MessagePart newPart) {
      MessagePart oldPart = part;
      part = newPart == null ? PART_EDEFAULT : newPart;
      if (eNotificationRequired())
         eNotify(new ENotificationImpl(this, Notification.SET, EipPackage.AGGREGATOR__PART, oldPart, part));
   }

   /**
    * <!-- begin-user-doc -->
    * @return the aggregation strategy string, or {@code null} if unset
    * <!-- end-user-doc -->
    * @generated
    */
   public String getStrategy() {
      return strategy;
   }

   /**
    * <!-- begin-user-doc -->
    * Sets the strategy, firing a SET notification when adapters are listening.
    * <!-- end-user-doc -->
    * @generated
    */
   public void setStrategy(String newStrategy) {
      String oldStrategy = strategy;
      strategy = newStrategy;
      if (eNotificationRequired())
         eNotify(new ENotificationImpl(this, Notification.SET, EipPackage.AGGREGATOR__STRATEGY, oldStrategy, strategy));
   }

   /**
    * <!-- begin-user-doc -->
    * @return the aggregation expression string, or {@code null} if unset
    * <!-- end-user-doc -->
    * @generated
    */
   public String getExpression() {
      return expression;
   }

   /**
    * <!-- begin-user-doc -->
    * Sets the expression, firing a SET notification when adapters are listening.
    * <!-- end-user-doc -->
    * @generated
    */
   public void setExpression(String newExpression) {
      String oldExpression = expression;
      expression = newExpression;
      if (eNotificationRequired())
         eNotify(new ENotificationImpl(this, Notification.SET, EipPackage.AGGREGATOR__EXPRESSION, oldExpression, expression));
   }

   /**
    * <!-- begin-user-doc -->
    * Wires the inverse end of the bidirectional channel references when the
    * opposite object is added; delegates all other features to the superclass.
    * <!-- end-user-doc -->
    * @generated
    */
   @SuppressWarnings("unchecked")
   @Override
   public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
      switch (featureID) {
         case EipPackage.AGGREGATOR__TO_CHANNELS:
            return ((InternalEList<InternalEObject>)(InternalEList<?>)getToChannels()).basicAdd(otherEnd, msgs);
         case EipPackage.AGGREGATOR__FROM_CHANNELS:
            return ((InternalEList<InternalEObject>)(InternalEList<?>)getFromChannels()).basicAdd(otherEnd, msgs);
      }
      return super.eInverseAdd(otherEnd, featureID, msgs);
   }

   /**
    * <!-- begin-user-doc -->
    * Unwires the inverse end of the bidirectional channel references when the
    * opposite object is removed; delegates all other features to the superclass.
    * <!-- end-user-doc -->
    * @generated
    */
   @Override
   public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
      switch (featureID) {
         case EipPackage.AGGREGATOR__TO_CHANNELS:
            return ((InternalEList<?>)getToChannels()).basicRemove(otherEnd, msgs);
         case EipPackage.AGGREGATOR__FROM_CHANNELS:
            return ((InternalEList<?>)getFromChannels()).basicRemove(otherEnd, msgs);
      }
      return super.eInverseRemove(otherEnd, featureID, msgs);
   }

   /**
    * <!-- begin-user-doc -->
    * Reflective read access: maps a feature ID to the corresponding getter.
    * <!-- end-user-doc -->
    * @generated
    */
   @Override
   public Object eGet(int featureID, boolean resolve, boolean coreType) {
      switch (featureID) {
         case EipPackage.AGGREGATOR__NAME:
            return getName();
         case EipPackage.AGGREGATOR__TO_CHANNELS:
            return getToChannels();
         case EipPackage.AGGREGATOR__FROM_CHANNELS:
            return getFromChannels();
         case EipPackage.AGGREGATOR__PART:
            return getPart();
         case EipPackage.AGGREGATOR__STRATEGY:
            return getStrategy();
         case EipPackage.AGGREGATOR__EXPRESSION:
            return getExpression();
      }
      return super.eGet(featureID, resolve, coreType);
   }

   /**
    * <!-- begin-user-doc -->
    * Reflective write access: maps a feature ID to the corresponding setter.
    * List features are replaced wholesale (clear, then addAll).
    * <!-- end-user-doc -->
    * @generated
    */
   @SuppressWarnings("unchecked")
   @Override
   public void eSet(int featureID, Object newValue) {
      switch (featureID) {
         case EipPackage.AGGREGATOR__NAME:
            setName((String)newValue);
            return;
         case EipPackage.AGGREGATOR__TO_CHANNELS:
            getToChannels().clear();
            getToChannels().addAll((Collection<? extends Channel>)newValue);
            return;
         case EipPackage.AGGREGATOR__FROM_CHANNELS:
            getFromChannels().clear();
            getFromChannels().addAll((Collection<? extends Channel>)newValue);
            return;
         case EipPackage.AGGREGATOR__PART:
            setPart((MessagePart)newValue);
            return;
         case EipPackage.AGGREGATOR__STRATEGY:
            setStrategy((String)newValue);
            return;
         case EipPackage.AGGREGATOR__EXPRESSION:
            setExpression((String)newValue);
            return;
      }
      super.eSet(featureID, newValue);
   }

   /**
    * <!-- begin-user-doc -->
    * Reflective unset: restores every feature to its default (clears the lists).
    * <!-- end-user-doc -->
    * @generated
    */
   @Override
   public void eUnset(int featureID) {
      switch (featureID) {
         case EipPackage.AGGREGATOR__NAME:
            setName(NAME_EDEFAULT);
            return;
         case EipPackage.AGGREGATOR__TO_CHANNELS:
            getToChannels().clear();
            return;
         case EipPackage.AGGREGATOR__FROM_CHANNELS:
            getFromChannels().clear();
            return;
         case EipPackage.AGGREGATOR__PART:
            setPart(PART_EDEFAULT);
            return;
         case EipPackage.AGGREGATOR__STRATEGY:
            setStrategy(STRATEGY_EDEFAULT);
            return;
         case EipPackage.AGGREGATOR__EXPRESSION:
            setExpression(EXPRESSION_EDEFAULT);
            return;
      }
      super.eUnset(featureID);
   }

   /**
    * <!-- begin-user-doc -->
    * A feature counts as "set" when its cached value differs from the declared
    * default; list features count as set when non-null and non-empty.
    * <!-- end-user-doc -->
    * @generated
    */
   @Override
   public boolean eIsSet(int featureID) {
      switch (featureID) {
         case EipPackage.AGGREGATOR__NAME:
            return NAME_EDEFAULT == null ? name != null : !NAME_EDEFAULT.equals(name);
         case EipPackage.AGGREGATOR__TO_CHANNELS:
            return toChannels != null && !toChannels.isEmpty();
         case EipPackage.AGGREGATOR__FROM_CHANNELS:
            return fromChannels != null && !fromChannels.isEmpty();
         case EipPackage.AGGREGATOR__PART:
            return part != PART_EDEFAULT;
         case EipPackage.AGGREGATOR__STRATEGY:
            return STRATEGY_EDEFAULT == null ? strategy != null : !STRATEGY_EDEFAULT.equals(strategy);
         case EipPackage.AGGREGATOR__EXPRESSION:
            return EXPRESSION_EDEFAULT == null ? expression != null : !EXPRESSION_EDEFAULT.equals(expression);
      }
      return super.eIsSet(featureID);
   }

   /**
    * <!-- begin-user-doc -->
    * Debug representation: the superclass string plus the attribute values
    * (reference lists are deliberately omitted by the generator).
    * <!-- end-user-doc -->
    * @generated
    */
   @Override
   public String toString() {
      if (eIsProxy()) return super.toString();

      StringBuffer result = new StringBuffer(super.toString());
      result.append(" (name: ");
      result.append(name);
      result.append(", part: ");
      result.append(part);
      result.append(", strategy: ");
      result.append(strategy);
      result.append(", expression: ");
      result.append(expression);
      result.append(')');
      return result.toString();
   }

} //AggregatorImpl
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.test.operators; import org.apache.flink.api.common.InvalidProgramException; import org.apache.flink.api.common.distributions.DataDistribution; import org.apache.flink.api.common.functions.CoGroupFunction; import org.apache.flink.api.common.functions.RichCoGroupFunction; import org.apache.flink.api.common.typeinfo.BasicTypeInfo; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.java.DataSet; import org.apache.flink.api.java.ExecutionEnvironment; import org.apache.flink.api.java.functions.KeySelector; import org.apache.flink.api.java.tuple.Tuple1; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.api.java.tuple.Tuple3; import org.apache.flink.api.java.tuple.Tuple5; import org.apache.flink.api.java.tuple.Tuple7; import org.apache.flink.api.java.utils.DataSetUtils; import org.apache.flink.configuration.Configuration; import org.apache.flink.core.memory.DataInputView; import org.apache.flink.core.memory.DataOutputView; import org.apache.flink.test.operators.util.CollectionDataSets; import org.apache.flink.test.operators.util.CollectionDataSets.CustomType; import 
org.apache.flink.test.operators.util.CollectionDataSets.POJO; import org.apache.flink.test.util.MultipleProgramsTestBase; import org.apache.flink.util.Collector; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.List; /** * Integration tests for {@link CoGroupFunction} and {@link RichCoGroupFunction}. */ @RunWith(Parameterized.class) public class CoGroupITCase extends MultipleProgramsTestBase { public CoGroupITCase(TestExecutionMode mode){ super(mode); } /* * CoGroup on tuples with key field selector */ @Test public void testCoGroupTuplesWithKeyFieldSelector() throws Exception { final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment(); DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds = CollectionDataSets.get5TupleDataSet(env); DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds2 = CollectionDataSets.get5TupleDataSet(env); DataSet<Tuple2<Integer, Integer>> coGroupDs = ds.coGroup(ds2).where(0).equalTo(0).with(new Tuple5CoGroup()); List<Tuple2<Integer, Integer>> result = coGroupDs.collect(); String expected = "1,0\n" + "2,6\n" + "3,24\n" + "4,60\n" + "5,120\n"; compareResultAsTuples(result, expected); } @Test public void testCoGroupOnTwoCustomTypeInputsWithKeyExtractors() throws Exception { /* * CoGroup on two custom type inputs with key extractors */ final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment(); DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env); DataSet<CustomType> ds2 = CollectionDataSets.getCustomTypeDataSet(env); DataSet<CustomType> coGroupDs = ds.coGroup(ds2).where(new KeySelector4()).equalTo(new KeySelector5()).with(new CustomTypeCoGroup()); List<CustomType> result = coGroupDs.collect(); String expected = "1,0,test\n" + "2,6,test\n" + "3,24,test\n" + "4,60,test\n" + "5,120,test\n" + "6,210,test\n"; 
compareResultAsText(result, expected); } private static class KeySelector4 implements KeySelector<CustomType, Integer> { private static final long serialVersionUID = 1L; @Override public Integer getKey(CustomType in) { return in.myInt; } } private static class KeySelector5 implements KeySelector<CustomType, Integer> { private static final long serialVersionUID = 1L; @Override public Integer getKey(CustomType in) { return in.myInt; } } @Test public void testCorrectnessOfCoGroupIfUDFReturnsLeftInputObjects() throws Exception { /* * check correctness of cogroup if UDF returns left input objects */ final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment(); DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env); DataSet<Tuple3<Integer, Long, String>> ds2 = CollectionDataSets.get3TupleDataSet(env); DataSet<Tuple3<Integer, Long, String>> coGroupDs = ds.coGroup(ds2).where(0).equalTo(0).with(new Tuple3ReturnLeft()); List<Tuple3<Integer, Long, String>> result = coGroupDs.collect(); String expected = "1,1,Hi\n" + "2,2,Hello\n" + "3,2,Hello world\n" + "4,3,Hello world, how are you?\n" + "5,3,I am fine.\n"; compareResultAsTuples(result, expected); } @Test public void testCorrectnessOfCoGroupIfUDFReturnsRightInputObjects() throws Exception { /* * check correctness of cogroup if UDF returns right input objects */ final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment(); DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds = CollectionDataSets.get5TupleDataSet(env); DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds2 = CollectionDataSets.get5TupleDataSet(env); DataSet<Tuple5<Integer, Long, Integer, String, Long>> coGroupDs = ds.coGroup(ds2).where(0).equalTo(0).with(new Tuple5ReturnRight()); List<Tuple5<Integer, Long, Integer, String, Long>> result = coGroupDs.collect(); String expected = "1,1,0,Hallo,1\n" + "2,2,1,Hallo Welt,2\n" + "2,3,2,Hallo Welt wie,1\n" + "3,4,3,Hallo Welt wie gehts?,2\n" + 
"3,5,4,ABC,2\n" + "3,6,5,BCD,3\n"; compareResultAsTuples(result, expected); } @Test public void testCoGroupWithBroadcastSet() throws Exception { /* * Reduce with broadcast set */ final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment(); DataSet<Integer> intDs = CollectionDataSets.getIntegerDataSet(env); DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds = CollectionDataSets.get5TupleDataSet(env); DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds2 = CollectionDataSets.get5TupleDataSet(env); DataSet<Tuple3<Integer, Integer, Integer>> coGroupDs = ds.coGroup(ds2).where(0).equalTo(0).with(new Tuple5CoGroupBC()).withBroadcastSet(intDs, "ints"); List<Tuple3<Integer, Integer, Integer>> result = coGroupDs.collect(); String expected = "1,0,55\n" + "2,6,55\n" + "3,24,55\n" + "4,60,55\n" + "5,120,55\n"; compareResultAsTuples(result, expected); } @Test public void testCoGroupOnATupleInputWithKeyFieldSelectorAndACustomTypeInputWithKeyExtractor() throws Exception { /* * CoGroup on a tuple input with key field selector and a custom type input with key extractor */ final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment(); DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds = CollectionDataSets.get5TupleDataSet(env); DataSet<CustomType> ds2 = CollectionDataSets.getCustomTypeDataSet(env); DataSet<Tuple3<Integer, Long, String>> coGroupDs = ds.coGroup(ds2).where(2).equalTo(new KeySelector2()).with(new MixedCoGroup()); List<Tuple3<Integer, Long, String>> result = coGroupDs.collect(); String expected = "0,1,test\n" + "1,2,test\n" + "2,5,test\n" + "3,15,test\n" + "4,33,test\n" + "5,63,test\n" + "6,109,test\n" + "7,4,test\n" + "8,4,test\n" + "9,4,test\n" + "10,5,test\n" + "11,5,test\n" + "12,5,test\n" + "13,5,test\n" + "14,5,test\n"; compareResultAsTuples(result, expected); } private static class KeySelector2 implements KeySelector<CustomType, Integer> { private static final long serialVersionUID = 1L; @Override public Integer 
getKey(CustomType in) { return in.myInt; } } @Test public void testCoGroupOnACustomTypeWithKeyExtractorAndATupleInputWithKeyFieldSelector() throws Exception { /* * CoGroup on a tuple input with key field selector and a custom type input with key extractor */ final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment(); DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds = CollectionDataSets.get5TupleDataSet(env); DataSet<CustomType> ds2 = CollectionDataSets.getCustomTypeDataSet(env); DataSet<CustomType> coGroupDs = ds2.coGroup(ds).where(new KeySelector3()).equalTo(2).with (new MixedCoGroup2()); List<CustomType> result = coGroupDs.collect(); String expected = "0,1,test\n" + "1,2,test\n" + "2,5,test\n" + "3,15,test\n" + "4,33,test\n" + "5,63,test\n" + "6,109,test\n" + "7,4,test\n" + "8,4,test\n" + "9,4,test\n" + "10,5,test\n" + "11,5,test\n" + "12,5,test\n" + "13,5,test\n" + "14,5,test\n"; compareResultAsText(result, expected); } private static class KeySelector3 implements KeySelector<CustomType, Integer> { private static final long serialVersionUID = 1L; @Override public Integer getKey(CustomType in) { return in.myInt; } } @Test public void testCoGroupWithMultipleKeyFieldsWithFieldSelector() throws Exception { /* * CoGroup with multiple key fields */ final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment(); DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds1 = CollectionDataSets.get5TupleDataSet(env); DataSet<Tuple3<Integer, Long, String>> ds2 = CollectionDataSets.get3TupleDataSet(env); DataSet<Tuple3<Integer, Long, String>> coGrouped = ds1.coGroup(ds2). 
where(0, 4).equalTo(0, 1).with(new Tuple5Tuple3CoGroup()); List<Tuple3<Integer, Long, String>> result = coGrouped.collect(); String expected = "1,1,Hallo\n" + "2,2,Hallo Welt\n" + "3,2,Hallo Welt wie gehts?\n" + "3,2,ABC\n" + "5,3,HIJ\n" + "5,3,IJK\n"; compareResultAsTuples(result, expected); } @Test public void testCoGroupWithMultipleKeyFieldsWithStaticClassKeyExtractor() throws Exception { /* * CoGroup with multiple key fields */ final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment(); DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds1 = CollectionDataSets.get5TupleDataSet(env); DataSet<Tuple3<Integer, Long, String>> ds2 = CollectionDataSets.get3TupleDataSet(env); DataSet<Tuple3<Integer, Long, String>> coGrouped = ds1.coGroup(ds2). where(new KeySelector7()). equalTo(new KeySelector8()).with(new Tuple5Tuple3CoGroup()); List<Tuple3<Integer, Long, String>> result = coGrouped.collect(); String expected = "1,1,Hallo\n" + "2,2,Hallo Welt\n" + "3,2,Hallo Welt wie gehts?\n" + "3,2,ABC\n" + "5,3,HIJ\n" + "5,3,IJK\n"; compareResultAsTuples(result, expected); } @Test public void testCoGroupWithMultipleKeyFieldsWithInnerClassKeyExtractorWithClosureCleaner() throws Exception { /* * CoGroup with multiple key fields, test working closure cleaner for inner classes */ final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment(); DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds1 = CollectionDataSets.get5TupleDataSet(env); DataSet<Tuple3<Integer, Long, String>> ds2 = CollectionDataSets.get3TupleDataSet(env); DataSet<Tuple3<Integer, Long, String>> coGrouped = ds1.coGroup(ds2). where(new KeySelector<Tuple5<Integer, Long, Integer, String, Long>, Tuple2<Integer, Long>>() { @Override public Tuple2<Integer, Long> getKey(Tuple5<Integer, Long, Integer, String, Long> t) throws Exception { return new Tuple2<Integer, Long>(t.f0, t.f4); } }). 
equalTo(new KeySelector<Tuple3<Integer, Long, String>, Tuple2<Integer, Long>>() {
			// NOTE(review): this span starts mid-statement — it is the tail of a coGroup test whose
			// opening (the ds1.coGroup(ds2).where(...) part) appears above this view.
			@Override
			public Tuple2<Integer, Long> getKey(Tuple3<Integer, Long, String> t) {
				return new Tuple2<>(t.f0, t.f1);
			}
		}).
		with(new CoGroupFunction<Tuple5<Integer, Long, Integer, String, Long>, Tuple3<Integer, Long, String>, Tuple3<Integer, Long, String>>() {
			@Override
			public void coGroup(Iterable<Tuple5<Integer, Long, Integer, String, Long>> first,
					Iterable<Tuple3<Integer, Long, String>> second,
					Collector<Tuple3<Integer, Long, String>> out) {
				// Cross every left-side f3 string with every matching right-side tuple.
				List<String> strs = new ArrayList<>();
				for (Tuple5<Integer, Long, Integer, String, Long> t : first) {
					strs.add(t.f3);
				}
				for (Tuple3<Integer, Long, String> t : second) {
					for (String s : strs) {
						out.collect(new Tuple3<Integer, Long, String>(t.f0, t.f1, s));
					}
				}
			}
		});

	List<Tuple3<Integer, Long, String>> result = coGrouped.collect();

	String expected = "1,1,Hallo\n" +
			"2,2,Hallo Welt\n" +
			"3,2,Hallo Welt wie gehts?\n" +
			"3,2,ABC\n" +
			"5,3,HIJ\n" +
			"5,3,IJK\n";

	compareResultAsTuples(result, expected);
}

@Test
public void testCoGroupWithMultipleKeyFieldsWithInnerClassKeyExtractorWithoutClosureCleaner() throws Exception {
	/*
	 * CoGroup with multiple key fields, test that disabling closure cleaner leads to an exception when using inner
	 * classes.
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	env.getConfig().disableClosureCleaner();

	DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds1 = CollectionDataSets.get5TupleDataSet(env);
	DataSet<Tuple3<Integer, Long, String>> ds2 = CollectionDataSets.get3TupleDataSet(env);
	boolean correctExceptionTriggered = false;
	try {
		DataSet<Tuple3<Integer, Long, String>> coGrouped = ds1.coGroup(ds2).
			where(new KeySelector<Tuple5<Integer, Long, Integer, String, Long>, Tuple2<Integer, Long>>() {
				@Override
				public Tuple2<Integer, Long> getKey(Tuple5<Integer, Long, Integer, String, Long> t) throws Exception {
					return new Tuple2<Integer, Long>(t.f0, t.f4);
				}
			}).
			equalTo(new KeySelector<Tuple3<Integer, Long, String>, Tuple2<Integer, Long>>() {
				@Override
				public Tuple2<Integer, Long> getKey(Tuple3<Integer, Long, String> t) {
					return new Tuple2<Integer, Long>(t.f0, t.f1);
				}
			}).
			with(new CoGroupFunction<Tuple5<Integer, Long, Integer, String, Long>, Tuple3<Integer, Long, String>, Tuple3<Integer, Long, String>>() {
				@Override
				public void coGroup(Iterable<Tuple5<Integer, Long, Integer, String, Long>> first,
						Iterable<Tuple3<Integer, Long, String>> second,
						Collector<Tuple3<Integer, Long, String>> out) {
					List<String> strs = new ArrayList<String>();
					for (Tuple5<Integer, Long, Integer, String, Long> t : first) {
						strs.add(t.f3);
					}
					for (Tuple3<Integer, Long, String> t : second) {
						for (String s : strs) {
							out.collect(new Tuple3<Integer, Long, String>(t.f0, t.f1, s));
						}
					}
				}
			});
	} catch (InvalidProgramException ex) {
		// The anonymous inner classes capture the enclosing test instance; with the closure
		// cleaner disabled this must surface as a NotSerializableException cause.
		correctExceptionTriggered = (ex.getCause() instanceof java.io.NotSerializableException);
	}
	Assert.assertTrue(correctExceptionTriggered);
}

// Key selector extracting (f0, f4) from a 5-tuple.
private static class KeySelector7 implements KeySelector<Tuple5<Integer, Long, Integer, String, Long>, Tuple2<Integer, Long>> {
	private static final long serialVersionUID = 1L;

	@Override
	public Tuple2<Integer, Long> getKey(Tuple5<Integer, Long, Integer, String, Long> t) {
		return new Tuple2<Integer, Long>(t.f0, t.f4);
	}
}

// Key selector extracting (f0, f1) from a 3-tuple.
private static class KeySelector8 implements KeySelector<Tuple3<Integer, Long, String>, Tuple2<Integer, Long>> {
	private static final long serialVersionUID = 1L;

	@Override
	public Tuple2<Integer, Long> getKey(Tuple3<Integer, Long, String> t) {
		return new Tuple2<Integer, Long>(t.f0, t.f1);
	}
}

@Test
public void testCoGroupTwoCustomTypeInputsWithExpressionKeys() throws Exception {
	/*
	 * CoGroup on two custom type inputs using expression keys
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);
	DataSet<CustomType> ds2 = CollectionDataSets.getCustomTypeDataSet(env);
	DataSet<CustomType> coGroupDs = ds.coGroup(ds2).where("myInt").equalTo("myInt").with(new CustomTypeCoGroup());

	List<CustomType> result = coGroupDs.collect();

	String expected = "1,0,test\n" +
			"2,6,test\n" +
			"3,24,test\n" +
			"4,60,test\n" +
			"5,120,test\n" +
			"6,210,test\n";

	compareResultAsText(result, expected);
}

@Test
public void testCoGroupOnTwoCustomTypeInputsWithExpressionKeyAndFieldSelector() throws Exception {
	/*
	 * CoGroup on two custom type inputs using expression keys
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<POJO> ds = CollectionDataSets.getSmallPojoDataSet(env);
	DataSet<Tuple7<Integer, String, Integer, Integer, Long, String, Long>> ds2 = CollectionDataSets.getSmallTuplebasedDataSet(env);
	DataSet<CustomType> coGroupDs = ds.coGroup(ds2)
			.where("nestedPojo.longNumber").equalTo(6).with(new CoGroup1());

	List<CustomType> result = coGroupDs.collect();

	String expected = "-1,20000,Flink\n" +
			"-1,10000,Flink\n" +
			"-1,30000,Flink\n";

	compareResultAsText(result, expected);
}

// Joins POJOs with 7-tuples; asserts the join keys actually match before emitting.
private static class CoGroup1 implements CoGroupFunction<POJO, Tuple7<Integer, String, Integer, Integer, Long, String, Long>, CustomType> {
	private static final long serialVersionUID = 1L;

	@Override
	public void coGroup(
			Iterable<POJO> first,
			Iterable<Tuple7<Integer, String, Integer, Integer, Long, String, Long>> second,
			Collector<CustomType> out) throws Exception {
		for (POJO p : first) {
			for (Tuple7<Integer, String, Integer, Integer, Long, String, Long> t: second) {
				Assert.assertTrue(p.nestedPojo.longNumber == t.f6);
				out.collect(new CustomType(-1, p.nestedPojo.longNumber, "Flink"));
			}
		}
	}
}

@Test
public void testCoGroupFieldSelectorAndComplicatedKeySelector() throws Exception {
	/*
	 * CoGroup field-selector (expression keys) + key selector function
	 * The key selector is unnecessary complicated (Tuple1) ;)
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<POJO> ds = CollectionDataSets.getSmallPojoDataSet(env);
	DataSet<Tuple7<Integer, String, Integer, Integer, Long, String, Long>> ds2 = CollectionDataSets.getSmallTuplebasedDataSet(env);
	DataSet<CustomType> coGroupDs = ds.coGroup(ds2)
			.where(new KeySelector6()).equalTo(6).with(new CoGroup3());

	List<CustomType> result = coGroupDs.collect();

	String expected = "-1,20000,Flink\n" +
			"-1,10000,Flink\n" +
			"-1,30000,Flink\n";

	compareResultAsText(result, expected);
}

// Deliberately over-complicated key selector: wraps the long key in a Tuple1.
private static class KeySelector6 implements KeySelector<POJO, Tuple1<Long>> {
	private static final long serialVersionUID = 1L;

	@Override
	public Tuple1<Long> getKey(POJO value) throws Exception {
		return new Tuple1<Long>(value.nestedPojo.longNumber);
	}
}

private static class CoGroup3 implements CoGroupFunction<POJO, Tuple7<Integer, String, Integer, Integer, Long, String, Long>, CustomType> {
	private static final long serialVersionUID = 1L;

	@Override
	public void coGroup(
			Iterable<POJO> first,
			Iterable<Tuple7<Integer, String, Integer, Integer, Long, String, Long>> second,
			Collector<CustomType> out) throws Exception {
		for (POJO p : first) {
			for (Tuple7<Integer, String, Integer, Integer, Long, String, Long> t: second) {
				Assert.assertTrue(p.nestedPojo.longNumber == t.f6);
				out.collect(new CustomType(-1, p.nestedPojo.longNumber, "Flink"));
			}
		}
	}
}

@Test
public void testCoGroupFieldSelectorAndKeySelector() throws Exception {
	/*
	 * CoGroup field-selector (expression keys) + key selector function
	 * The key selector is simple here
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<POJO> ds = CollectionDataSets.getSmallPojoDataSet(env);
	DataSet<Tuple7<Integer, String, Integer, Integer, Long, String, Long>> ds2 = CollectionDataSets.getSmallTuplebasedDataSet(env);
	DataSet<CustomType> coGroupDs = ds.coGroup(ds2)
			.where(new KeySelector1()).equalTo(6).with(new CoGroup2());

	List<CustomType> result = coGroupDs.collect();

	String expected = "-1,20000,Flink\n" +
			"-1,10000,Flink\n" +
			"-1,30000,Flink\n";

	compareResultAsText(result, expected);
}

@Test
public void testCoGroupWithAtomicType1() throws Exception {
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<Tuple3<Integer, Long, String>> ds1 = CollectionDataSets.getSmall3TupleDataSet(env);
	DataSet<Integer> ds2 = env.fromElements(0, 1, 2);

	// "*" selects the atomic Integer value itself as the key.
	DataSet<Tuple3<Integer, Long, String>> coGroupDs = ds1.coGroup(ds2).where(0).equalTo("*").with(new CoGroupAtomic1());

	List<Tuple3<Integer, Long, String>> result = coGroupDs.collect();

	String expected = "(1,1,Hi)\n" +
		"(2,2,Hello)";

	compareResultAsText(result, expected);
}

@Test
public void testCoGroupWithAtomicType2() throws Exception {
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<Integer> ds1 = env.fromElements(0, 1, 2);
	DataSet<Tuple3<Integer, Long, String>> ds2 = CollectionDataSets.getSmall3TupleDataSet(env);

	DataSet<Tuple3<Integer, Long, String>> coGroupDs = ds1.coGroup(ds2).where("*").equalTo(0).with(new CoGroupAtomic2());

	List<Tuple3<Integer, Long, String>> result = coGroupDs.collect();

	String expected = "(1,1,Hi)\n" +
		"(2,2,Hello)";

	compareResultAsText(result, expected);
}

@Test
public void testCoGroupWithRangePartitioning() throws Exception {
	/*
	 * Test coGroup on tuples with multiple key field positions and same customized distribution
	 */
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds1 = CollectionDataSets.get5TupleDataSet(env);
	DataSet<Tuple3<Integer, Long, String>> ds2 = CollectionDataSets.get3TupleDataSet(env);

	env.setParallelism(4);
	TestDistribution testDis = new TestDistribution();
	DataSet<Tuple3<Integer, Long, String>> coGrouped =
			DataSetUtils.partitionByRange(ds1, testDis, 0, 4)
					.coGroup(DataSetUtils.partitionByRange(ds2, testDis, 0, 1))
					.where(0, 4)
					.equalTo(0, 1)
					.with(new Tuple5Tuple3CoGroup());

	List<Tuple3<Integer, Long, String>> result = coGrouped.collect();

	String expected = "1,1,Hallo\n" +
			"2,2,Hallo Welt\n" +
			"3,2,Hallo Welt wie gehts?\n" +
			"3,2,ABC\n" +
			"5,3,HIJ\n" +
			"5,3,IJK\n";

	compareResultAsTuples(result, expected);
}

@Test
public void testCoGroupLambda() throws Exception {
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	DataSet<Tuple2<Integer, String>> left = env.fromElements(
			new Tuple2<>(1, "hello"),
			new Tuple2<>(2, "what's"),
			new Tuple2<>(2, "up")
	);
	DataSet<Tuple2<Integer, String>> right = env.fromElements(
			new Tuple2<>(1, "not"),
			new Tuple2<>(1, "much"),
			new Tuple2<>(2, "really")
	);
	// Lambda co-group: sum the f0 keys of both sides per key group.
	// returns(...) is required because the lambda's output type is not inferable.
	DataSet<Integer> joined = left.coGroup(right).where(0).equalTo(0)
			.with((Iterable<Tuple2<Integer, String>> values1, Iterable<Tuple2<Integer, String>> values2, Collector<Integer> out) -> {
				int sum = 0;
				for (Tuple2<Integer, String> next : values1) {
					sum += next.f0;
				}
				for (Tuple2<Integer, String> next : values2) {
					sum += next.f0;
				}
				out.collect(sum);
			}).returns(Integer.class);
	List<Integer> result = joined.collect();

	String expected = "6\n3\n";

	compareResultAsText(result, expected);
}

// --------------------------------------------------------------------------------------------
//  UDF classes
// --------------------------------------------------------------------------------------------

private static class KeySelector1 implements KeySelector<POJO, Long> {
	private static final long serialVersionUID = 1L;

	@Override
	public Long getKey(POJO value) throws Exception {
		return value.nestedPojo.longNumber;
	}
}

private static class CoGroup2 implements CoGroupFunction<POJO, Tuple7<Integer, String, Integer, Integer, Long, String, Long>, CustomType> {
	private static final long serialVersionUID = 1L;

	@Override
	public void coGroup(
			Iterable<POJO> first,
			Iterable<Tuple7<Integer, String, Integer, Integer, Long, String, Long>> second,
			Collector<CustomType> out) throws Exception {
		for (POJO p : first) {
			for (Tuple7<Integer, String, Integer, Integer, Long, String, Long> t: second) {
				Assert.assertTrue(p.nestedPojo.longNumber == t.f6);
				out.collect(new CustomType(-1, p.nestedPojo.longNumber, "Flink"));
			}
		}
	}
}

// Emits (key, sum of f2 over both sides) per key group.
private static class Tuple5CoGroup implements CoGroupFunction<Tuple5<Integer, Long, Integer, String, Long>, Tuple5<Integer, Long, Integer, String, Long>, Tuple2<Integer, Integer>> {
	private static final long serialVersionUID = 1L;

	@Override
	public void coGroup(
			Iterable<Tuple5<Integer, Long, Integer, String, Long>> first,
			Iterable<Tuple5<Integer, Long, Integer, String, Long>> second,
			Collector<Tuple2<Integer, Integer>> out) {
		int sum = 0;
		int id = 0;

		for (Tuple5<Integer, Long, Integer, String, Long> element : first) {
			sum += element.f2;
			id = element.f0;
		}

		for (Tuple5<Integer, Long, Integer, String, Long> element : second) {
			sum += element.f2;
			id = element.f0;
		}

		out.collect(new Tuple2<Integer, Integer>(id, sum));
	}
}

// Accumulates myLong over both sides into a single output record per key group.
private static class CustomTypeCoGroup implements CoGroupFunction<CustomType, CustomType, CustomType> {
	private static final long serialVersionUID = 1L;

	@Override
	public void coGroup(Iterable<CustomType> first, Iterable<CustomType> second, Collector<CustomType> out) {
		CustomType o = new CustomType(0, 0, "test");

		for (CustomType element : first) {
			o.myInt = element.myInt;
			o.myLong += element.myLong;
		}

		for (CustomType element : second) {
			o.myInt = element.myInt;
			o.myLong += element.myLong;
		}

		out.collect(o);
	}
}

private static class MixedCoGroup implements CoGroupFunction<Tuple5<Integer, Long, Integer, String, Long>, CustomType, Tuple3<Integer, Long, String>> {
	private static final long serialVersionUID = 1L;

	@Override
	public void coGroup(
			Iterable<Tuple5<Integer, Long, Integer, String, Long>> first,
			Iterable<CustomType> second,
			Collector<Tuple3<Integer, Long, String>> out) throws Exception {
		long sum = 0;
		int id = 0;
		for (Tuple5<Integer, Long, Integer, String, Long> element : first) {
			sum += element.f0;
			id = element.f2;
		}
		for (CustomType element : second) {
			id = element.myInt;
			sum += element.myLong;
		}
		out.collect(new Tuple3<Integer, Long, String>(id, sum, "test"));
	}
}

private static class MixedCoGroup2 implements CoGroupFunction<CustomType, Tuple5<Integer, Long, Integer, String, Long>, CustomType> {
	private static final long serialVersionUID = 1L;

	@Override
	public void coGroup(Iterable<CustomType> first,
			Iterable<Tuple5<Integer, Long, Integer, String, Long>> second,
			Collector<CustomType> out) {
		CustomType o = new CustomType(0, 0, "test");

		for (CustomType element : first) {
			o.myInt = element.myInt;
			o.myLong += element.myLong;
		}

		for (Tuple5<Integer, Long, Integer, String, Long> element : second) {
			o.myInt = element.f2;
			o.myLong += element.f0;
		}

		out.collect(o);
	}
}

// Forwards only left-side elements with f0 < 6; ignores the right side.
private static class Tuple3ReturnLeft implements CoGroupFunction<Tuple3<Integer, Long, String>, Tuple3<Integer, Long, String>, Tuple3<Integer, Long, String>> {
	private static final long serialVersionUID = 1L;

	@Override
	public void coGroup(Iterable<Tuple3<Integer, Long, String>> first,
			Iterable<Tuple3<Integer, Long, String>> second,
			Collector<Tuple3<Integer, Long, String>> out) {
		for (Tuple3<Integer, Long, String> element : first) {
			if (element.f0 < 6) {
				out.collect(element);
			}
		}
	}
}

// Forwards only right-side elements with f0 < 4; ignores the left side.
private static class Tuple5ReturnRight implements CoGroupFunction<Tuple5<Integer, Long, Integer, String, Long>, Tuple5<Integer, Long, Integer, String, Long>, Tuple5<Integer, Long, Integer, String, Long>> {
	private static final long serialVersionUID = 1L;

	@Override
	public void coGroup(
			Iterable<Tuple5<Integer, Long, Integer, String, Long>> first,
			Iterable<Tuple5<Integer, Long, Integer, String, Long>> second,
			Collector<Tuple5<Integer, Long, Integer, String, Long>> out) {
		for (Tuple5<Integer, Long, Integer, String, Long> element : second) {
			if (element.f0 < 4) {
				out.collect(element);
			}
		}
	}
}

// Rich variant: reads the "ints" broadcast variable in open() and attaches its sum to every output.
private static class Tuple5CoGroupBC extends RichCoGroupFunction<Tuple5<Integer, Long, Integer, String, Long>, Tuple5<Integer, Long, Integer, String, Long>, Tuple3<Integer, Integer, Integer>> {
	private static final long serialVersionUID = 1L;

	private int broadcast = 42;

	@Override
	public void open(Configuration config) {
		Collection<Integer> ints = this.getRuntimeContext().getBroadcastVariable("ints");
		int sum = 0;
		for (Integer i : ints) {
			sum += i;
		}
		broadcast = sum;
	}

	@Override
	public void coGroup(
			Iterable<Tuple5<Integer, Long, Integer, String, Long>> first,
			Iterable<Tuple5<Integer, Long, Integer, String, Long>> second,
			Collector<Tuple3<Integer, Integer, Integer>> out) {
		int sum = 0;
		int id = 0;

		for (Tuple5<Integer, Long, Integer, String, Long> element : first) {
			sum += element.f2;
			id = element.f0;
		}

		for (Tuple5<Integer, Long, Integer, String, Long> element : second) {
			sum += element.f2;
			id = element.f0;
		}

		out.collect(new Tuple3<Integer, Integer, Integer>(id, sum, broadcast));
	}
}

private static class Tuple5Tuple3CoGroup implements CoGroupFunction<Tuple5<Integer, Long, Integer, String, Long>, Tuple3<Integer, Long, String>, Tuple3<Integer, Long, String>> {
	private static final long serialVersionUID = 1L;

	@Override
	public void coGroup(Iterable<Tuple5<Integer, Long, Integer, String, Long>> first,
			Iterable<Tuple3<Integer, Long, String>> second,
			Collector<Tuple3<Integer, Long, String>> out) {
		List<String> strs = new ArrayList<String>();

		for (Tuple5<Integer, Long, Integer, String, Long> t : first) {
			strs.add(t.f3);
		}

		for (Tuple3<Integer, Long, String> t : second) {
			for (String s : strs) {
				out.collect(new Tuple3<Integer, Long, String>(t.f0, t.f1, s));
			}
		}
	}
}

// Emits left tuples whose f0 equals one of the right-side atomic Integers.
private static class CoGroupAtomic1 implements CoGroupFunction<Tuple3<Integer, Long, String>, Integer, Tuple3<Integer, Long, String>> {
	private static final long serialVersionUID = 1L;

	@Override
	public void coGroup(Iterable<Tuple3<Integer, Long, String>> first, Iterable<Integer> second, Collector<Tuple3<Integer, Long, String>> out) throws Exception {
		List<Integer> ints = new ArrayList<Integer>();

		for (Integer i : second) {
			ints.add(i);
		}

		for (Tuple3<Integer, Long, String> t : first) {
			for (Integer i : ints) {
				if (t.f0.equals(i)) {
					out.collect(t);
				}
			}
		}
	}
}

// Mirror of CoGroupAtomic1 with the atomic side on the left.
private static class CoGroupAtomic2 implements CoGroupFunction<Integer, Tuple3<Integer, Long, String>, Tuple3<Integer, Long, String>> {
	private static final long serialVersionUID = 1L;

	@Override
	public void coGroup(Iterable<Integer> first, Iterable<Tuple3<Integer, Long, String>> second, Collector<Tuple3<Integer, Long, String>> out) throws Exception {
		List<Integer> ints = new ArrayList<Integer>();

		for (Integer i : first) {
			ints.add(i);
		}

		for (Tuple3<Integer, Long, String> t : second) {
			for (Integer i : ints) {
				if (t.f0.equals(i)) {
					out.collect(t);
				}
			}
		}
	}
}

/**
 * Test {@link DataDistribution}.
 */
public static class TestDistribution implements DataDistribution {
	// Fixed two-field (int, long) bucket boundaries for four buckets.
	public Object[][] boundaries = new Object[][]{
			new Object[]{2, 2L},
			new Object[]{5, 4L},
			new Object[]{10, 12L},
			new Object[]{21, 6L}
	};

	public TestDistribution() {}

	@Override
	public Object[] getBucketBoundary(int bucketNum, int totalNumBuckets) {
		return boundaries[bucketNum];
	}

	@Override
	public int getNumberOfFields() {
		return 2;
	}

	@Override
	public TypeInformation[] getKeyTypes() {
		return new TypeInformation[]{BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO};
	}

	@Override
	public void write(DataOutputView out) throws IOException {

	}

	@Override
	public void read(DataInputView in) throws IOException {

	}

	@Override
	public boolean equals(Object obj) {
		return obj instanceof TestDistribution;
	}
}
}
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.keycloak.protocol.oidc;

import java.util.HashMap;

import org.jboss.logging.Logger;
import org.jboss.resteasy.spi.HttpRequest;
import org.keycloak.OAuth2Constants;
import org.keycloak.OAuthErrorException;
import org.keycloak.TokenCategory;
import org.keycloak.TokenVerifier;
import org.keycloak.authentication.AuthenticatorUtil;
import org.keycloak.broker.oidc.OIDCIdentityProvider;
import org.keycloak.broker.provider.IdentityBrokerException;
import org.keycloak.cluster.ClusterProvider;
import org.keycloak.common.ClientConnection;
import org.keycloak.common.VerificationException;
import org.keycloak.common.util.Time;
import org.keycloak.crypto.HashProvider;
import org.keycloak.crypto.SignatureProvider;
import org.keycloak.events.Details;
import org.keycloak.events.Errors;
import org.keycloak.events.EventBuilder;
import org.keycloak.jose.jws.JWSInput;
import org.keycloak.jose.jws.JWSInputException;
import org.keycloak.jose.jws.crypto.HashUtils;
import org.keycloak.migration.migrators.MigrationUtils;
import org.keycloak.models.AuthenticatedClientSessionModel;
import org.keycloak.models.ClientModel;
import org.keycloak.models.ClientScopeModel;
import org.keycloak.models.ClientSessionContext;
import org.keycloak.models.Constants;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.RealmModel;
import org.keycloak.models.RoleModel;
import org.keycloak.models.TokenRevocationStoreProvider;
import org.keycloak.models.UserConsentModel;
import org.keycloak.models.UserModel;
import org.keycloak.models.UserSessionModel;
import org.keycloak.models.utils.KeycloakModelUtils;
import org.keycloak.models.utils.RoleUtils;
import org.keycloak.protocol.ProtocolMapperUtils;
import org.keycloak.protocol.oidc.mappers.OIDCAccessTokenMapper;
import org.keycloak.protocol.oidc.mappers.OIDCAccessTokenResponseMapper;
import org.keycloak.protocol.oidc.mappers.OIDCIDTokenMapper;
import org.keycloak.protocol.oidc.mappers.UserInfoTokenMapper;
import org.keycloak.protocol.oidc.utils.AcrUtils;
import org.keycloak.protocol.oidc.utils.OIDCResponseType;
import org.keycloak.representations.AccessToken;
import org.keycloak.representations.AccessTokenResponse;
import org.keycloak.representations.IDToken;
import org.keycloak.representations.JsonWebToken;
import org.keycloak.representations.LogoutToken;
import org.keycloak.representations.RefreshToken;
import org.keycloak.services.ErrorResponseException;
import org.keycloak.services.managers.AuthenticationManager;
import org.keycloak.services.managers.AuthenticationSessionManager;
import org.keycloak.services.managers.UserSessionCrossDCManager;
import org.keycloak.services.managers.UserSessionManager;
import org.keycloak.services.resources.IdentityBrokerService;
import org.keycloak.services.util.DefaultClientSessionContext;
import org.keycloak.services.util.MtlsHoKTokenUtil;
import org.keycloak.sessions.AuthenticationSessionModel;
import org.keycloak.util.TokenUtil;

import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;

import static org.keycloak.representations.IDToken.NONCE;

/**
 * Stateless object that creates tokens and manages oauth access codes
 *
 * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
 * @version $Revision: 1 $
 */
public class TokenManager {
    private static final Logger logger = Logger.getLogger(TokenManager.class);
    private static final String JWT = "JWT";

    // Immutable result bundle produced by validateToken(): the resolved user, the (possibly
    // re-looked-up) user session, the scope context, and the freshly built access token.
    public static class TokenValidation {
        public final UserModel user;
        public final UserSessionModel userSession;
        public final ClientSessionContext clientSessionCtx;
        public final AccessToken newToken;

        public TokenValidation(UserModel user, UserSessionModel userSession, ClientSessionContext clientSessionCtx, AccessToken newToken) {
            this.user = user;
            this.userSession = userSession;
            this.clientSessionCtx = clientSessionCtx;
            this.newToken = newToken;
        }
    }

    /**
     * Validates a refresh token against its (online or offline) user session, the user's state,
     * the requesting client, and not-before policies, then builds a new access token for it.
     *
     * @throws OAuthErrorException with INVALID_GRANT on any validation failure
     *         (inactive session, disabled/unknown user, stale token, client mismatch, ...)
     */
    public TokenValidation validateToken(KeycloakSession session, UriInfo uriInfo, ClientConnection connection, RealmModel realm,
                                         RefreshToken oldToken, HttpHeaders headers) throws OAuthErrorException {
        UserSessionModel userSession = null;
        boolean offline = TokenUtil.TOKEN_TYPE_OFFLINE.equals(oldToken.getType());

        if (offline) {

            UserSessionManager sessionManager = new UserSessionManager(session);
            userSession = sessionManager.findOfflineUserSession(realm, oldToken.getSessionState());
            if (userSession != null) {

                // Revoke timeouted offline userSession
                if (!AuthenticationManager.isOfflineSessionValid(realm, userSession)) {
                    sessionManager.revokeOfflineUserSession(userSession);
                    throw new OAuthErrorException(OAuthErrorException.INVALID_GRANT, "Offline session not active", "Offline session not active");
                }

            } else {
                throw new OAuthErrorException(OAuthErrorException.INVALID_GRANT, "Offline user session not found", "Offline user session not found");
            }
        } else {
            // Find userSession regularly for online tokens
            userSession = session.sessions().getUserSession(realm, oldToken.getSessionState());
            if (!AuthenticationManager.isSessionValid(realm, userSession)) {
                AuthenticationManager.backchannelLogout(session, realm, userSession, uriInfo, connection, headers, true);
                throw new OAuthErrorException(OAuthErrorException.INVALID_GRANT, "Session not active", "Session not active");
            }
        }

        UserModel user = userSession.getUser();
        if (user == null) {
            throw new OAuthErrorException(OAuthErrorException.INVALID_GRANT, "Invalid refresh token", "Unknown user");
        }

        if (!user.isEnabled()) {
            throw new OAuthErrorException(OAuthErrorException.INVALID_GRANT, "User disabled", "User disabled");
        }

        // NOTE(review): "toked" is a typo for "token" in the messages below; left as-is because
        // these are runtime strings (log + error payload).
        if (oldToken.isIssuedBeforeSessionStart(userSession.getStarted())) {
            logger.debug("Refresh toked issued before the user session started");
            throw new OAuthErrorException(OAuthErrorException.INVALID_GRANT, "Refresh toked issued before the user session started");
        }

        ClientModel client = session.getContext().getClient();
        AuthenticatedClientSessionModel clientSession = userSession.getAuthenticatedClientSessionByClient(client.getId());

        // Can theoretically happen in cross-dc environment. Try to see if userSession with our client is available in remoteCache
        if (clientSession == null) {
            userSession = new UserSessionCrossDCManager(session).getUserSessionWithClient(realm, userSession.getId(), offline, client.getId());
            if (userSession != null) {
                clientSession = userSession.getAuthenticatedClientSessionByClient(client.getId());
            } else {
                throw new OAuthErrorException(OAuthErrorException.INVALID_GRANT, "Session doesn't have required client", "Session doesn't have required client");
            }
        }

        if (oldToken.isIssuedBeforeSessionStart(clientSession.getStarted())) {
            logger.debug("Refresh toked issued before the client session started");
            throw new OAuthErrorException(OAuthErrorException.INVALID_GRANT, "Refresh toked issued before the client session started");
        }

        if (!client.getClientId().equals(oldToken.getIssuedFor())) {
            throw new OAuthErrorException(OAuthErrorException.INVALID_GRANT, "Unmatching clients", "Unmatching clients");
        }

        try {
            // Not-before checks against both the client and the user/realm policies.
            TokenVerifier.createWithoutSignature(oldToken)
                    .withChecks(NotBeforeCheck.forModel(client), NotBeforeCheck.forModel(session, realm, user))
                    .verify();
        } catch (VerificationException e) {
            throw new OAuthErrorException(OAuthErrorException.INVALID_GRANT, "Stale token");
        }

        // Setup clientScopes from refresh token to the context
        String oldTokenScope = oldToken.getScope();

        // Case when offline token is migrated from previous version
        if (oldTokenScope == null && userSession.isOffline()) {
            logger.debugf("Migrating offline token of user '%s' for client '%s' of realm '%s'", user.getUsername(), client.getClientId(), realm.getName());
            MigrationUtils.migrateOldOfflineToken(session, realm, client, user);
            oldTokenScope = OAuth2Constants.OFFLINE_ACCESS;
        }

        ClientSessionContext clientSessionCtx = DefaultClientSessionContext.fromClientSessionAndScopeParameter(clientSession, oldTokenScope, session);

        // Check user didn't revoke granted consent
        if (!verifyConsentStillAvailable(session, user, client, clientSessionCtx.getClientScopesStream())) {
            throw new OAuthErrorException(OAuthErrorException.INVALID_SCOPE, "Client no longer has requested consent from user");
        }

        clientSessionCtx.setAttribute(OIDCLoginProtocol.NONCE_PARAM, oldToken.getNonce());

        // recreate token.
        AccessToken newToken = createClientAccessToken(session, realm, client, user, userSession, clientSessionCtx);

        return new TokenValidation(user, userSession, clientSessionCtx, newToken);
    }

    /**
     * Checks if the token is valid. Optionally the session last refresh and client session timestamp
     * are updated if the token was valid. This is used to keep the session alive when long lived tokens are used.
     *
     * @param session
     * @param realm
     * @param token
     * @param updateTimestamps whether to bump session/client-session timestamps on success
     * @return true when the token passes client, signature-less verifier, revocation and session checks
     */
    public boolean checkTokenValidForIntrospection(KeycloakSession session, RealmModel realm, AccessToken token, boolean updateTimestamps) {
        ClientModel client = realm.getClientByClientId(token.getIssuedFor());
        if (client == null || !client.isEnabled()) {
            return false;
        }

        try {
            TokenVerifier.createWithoutSignature(token)
                    .withChecks(NotBeforeCheck.forModel(client), TokenVerifier.IS_ACTIVE)
                    .verify();
        } catch (VerificationException e) {
            logger.debugf("JWT check failed: %s", e.getMessage());
            return false;
        }

        TokenRevocationStoreProvider revocationStore = session.getProvider(TokenRevocationStoreProvider.class);
        if (revocationStore.isRevoked(token.getId())) {
            return false;
        }

        boolean valid = false;

        // Tokens without sessions are considered valid. Signature check and revocation check are sufficient checks for them
        if (token.getSessionState() == null) {
            UserModel user = lookupUserFromStatelessToken(session, realm, token);
            valid = isUserValid(session, realm, token, user);
        } else {
            // First try the online session; fall back to the offline session.
            UserSessionModel userSession = new UserSessionCrossDCManager(session).getUserSessionWithClient(realm, token.getSessionState(), false, client.getId());

            if (AuthenticationManager.isSessionValid(realm, userSession)) {
                valid = isUserValid(session, realm, token, userSession.getUser());
            } else {
                userSession = new UserSessionCrossDCManager(session).getUserSessionWithClient(realm, token.getSessionState(), true, client.getId());
                if (AuthenticationManager.isOfflineSessionValid(realm, userSession)) {
                    valid = isUserValid(session, realm, token, userSession.getUser());
                }
            }

            if (valid && (token.isIssuedBeforeSessionStart(userSession.getStarted()))) {
                valid = false;
            }

            AuthenticatedClientSessionModel clientSession = userSession == null ? null : userSession.getAuthenticatedClientSessionByClient(client.getId());
            if (clientSession != null) {
                if (valid && (token.isIssuedBeforeSessionStart(clientSession.getStarted()))) {
                    valid = false;
                }
            }

            String tokenType = token.getType();
            if (realm.isRevokeRefreshToken()
                && (tokenType.equals(TokenUtil.TOKEN_TYPE_REFRESH) || tokenType.equals(TokenUtil.TOKEN_TYPE_OFFLINE))
                && !validateTokenReuseForIntrospection(session, realm, token)) {
                return false;
            }

            if (updateTimestamps && valid) {
                int currentTime = Time.currentTime();
                userSession.setLastSessionRefresh(currentTime);
                if (clientSession != null) {
                    clientSession.setTimestamp(currentTime);
                }
            }
        }

        return valid;
    }

    // Reuse check for introspection: delegates to validateTokenReuse without advancing the
    // use counter (refreshFlag == false) and maps the exception to a boolean.
    private boolean validateTokenReuseForIntrospection(KeycloakSession session, RealmModel realm, AccessToken token) {
        UserSessionModel userSession = null;
        if (token.getType().equals(TokenUtil.TOKEN_TYPE_REFRESH)) {
            userSession = session.sessions().getUserSession(realm, token.getSessionState());
        } else {
            UserSessionManager sessionManager = new UserSessionManager(session);
            userSession = sessionManager.findOfflineUserSession(realm, token.getSessionState());
        }

        ClientModel client = realm.getClientByClientId(token.getIssuedFor());
        AuthenticatedClientSessionModel clientSession = userSession.getAuthenticatedClientSessionByClient(client.getId());

        try {
            validateTokenReuse(session, realm, token, clientSession, false);
            return true;
        } catch (OAuthErrorException e) {
            return false;
        }
    }

    // True when the user exists, is enabled, and passes the user-level not-before check.
    private boolean isUserValid(KeycloakSession session, RealmModel realm, AccessToken token, UserModel user) {
        if (user == null) {
            return false;
        }
        if (!user.isEnabled()) {
            return false;
        }
        try {
            TokenVerifier.createWithoutSignature(token)
                    .withChecks(NotBeforeCheck.forModel(session ,realm, user))
                    .verify();
        } catch (VerificationException e) {
            logger.debugf("JWT check failed: %s", e.getMessage());
            return false;
        }
        return true;
    }

    /**
     * Lookup user from the "stateless" token. Stateless token is the token without sessionState filled (token doesn't belong to any userSession)
     */
    public static UserModel lookupUserFromStatelessToken(KeycloakSession session, RealmModel realm, AccessToken token) {
        // Try to lookup user based on "sub" claim. It should work for most cases with some rare exceptions (EG. OIDC "pairwise" subjects)
        UserModel user = session.users().getUserById(realm, token.getSubject());
        if (user != null) {
            return user;
        }

        // Fallback to lookup user based on username (preferred_username claim)
        if (token.getPreferredUsername() != null) {
            user = session.users().getUserByUsername(realm, token.getPreferredUsername());
            if (user != null) {
                return user;
            }
        }

        return user;
    }

    /**
     * OAuth2 refresh-token grant: verifies the incoming refresh token, revalidates its session,
     * enforces the reuse policy, and builds a fresh token response (optionally with a new
     * refresh token, certificate binding, and an ID token for OIDC requests).
     */
    public RefreshResult refreshAccessToken(KeycloakSession session, UriInfo uriInfo, ClientConnection connection, RealmModel realm, ClientModel authorizedClient,
                                            String encodedRefreshToken, EventBuilder event, HttpHeaders headers, HttpRequest request) throws OAuthErrorException {
        RefreshToken refreshToken = verifyRefreshToken(session, realm, authorizedClient, request, encodedRefreshToken, true);

        event.user(refreshToken.getSubject()).session(refreshToken.getSessionState())
                .detail(Details.REFRESH_TOKEN_ID, refreshToken.getId())
                .detail(Details.REFRESH_TOKEN_TYPE, refreshToken.getType());

        TokenValidation validation = validateToken(session, uriInfo, connection, realm, refreshToken, headers);
        AuthenticatedClientSessionModel clientSession = validation.clientSessionCtx.getClientSession();

        // validate authorizedClient is same as validated client
        if (!clientSession.getClient().getId().equals(authorizedClient.getId())) {
            throw new OAuthErrorException(OAuthErrorException.INVALID_GRANT, "Invalid refresh token. Token client and authorized client don't match");
        }

        validateTokenReuseForRefresh(session, realm, refreshToken, validation);

        int currentTime = Time.currentTime();
        clientSession.setTimestamp(currentTime);
        validation.userSession.setLastSessionRefresh(currentTime);

        if (refreshToken.getAuthorization() != null) {
            validation.newToken.setAuthorization(refreshToken.getAuthorization());
        }

        AccessTokenResponseBuilder responseBuilder = responseBuilder(realm, authorizedClient, event, session, validation.userSession, validation.clientSessionCtx)
                .accessToken(validation.newToken);
        if (OIDCAdvancedConfigWrapper.fromClientModel(authorizedClient).isUseRefreshToken()) {
            responseBuilder.generateRefreshToken();
        }

        if (validation.newToken.getAuthorization() != null
            && OIDCAdvancedConfigWrapper.fromClientModel(authorizedClient).isUseRefreshToken()) {
            responseBuilder.getRefreshToken().setAuthorization(validation.newToken.getAuthorization());
        }

        // KEYCLOAK-6771 Certificate Bound Token
        // https://tools.ietf.org/html/draft-ietf-oauth-mtls-08#section-3.1
        // bind refreshed access and refresh token with Client Certificate
        AccessToken.CertConf certConf = refreshToken.getCertConf();
        if (certConf != null) {
            responseBuilder.getAccessToken().setCertConf(certConf);
            if (OIDCAdvancedConfigWrapper.fromClientModel(authorizedClient).isUseRefreshToken()) {
                responseBuilder.getRefreshToken().setCertConf(certConf);
            }
        }

        String scopeParam = clientSession.getNote(OAuth2Constants.SCOPE);
        if (TokenUtil.isOIDCRequest(scopeParam)) {
            responseBuilder.generateIDToken().generateAccessTokenHash();
        }

        AccessTokenResponse res = responseBuilder.build();

        return new RefreshResult(res, TokenUtil.TOKEN_TYPE_OFFLINE.equals(refreshToken.getType()));
    }

    // Refresh-flow reuse enforcement: increments the use counter on success; on reuse violation
    // detaches the client session from the user session and rethrows.
    private void validateTokenReuseForRefresh(KeycloakSession session, RealmModel realm, RefreshToken refreshToken,
        TokenValidation validation) throws OAuthErrorException {
        if (realm.isRevokeRefreshToken()) {
            AuthenticatedClientSessionModel clientSession = validation.clientSessionCtx.getClientSession();
            try {
                validateTokenReuse(session, realm, refreshToken, clientSession, true);
                int currentCount = clientSession.getCurrentRefreshTokenUseCount();
                clientSession.setCurrentRefreshTokenUseCount(currentCount + 1);
            } catch (OAuthErrorException oee) {
                if (logger.isDebugEnabled()) {
                    logger.debugf(
                        "Failed validation of refresh token %s due it was used before. Realm: %s, client: %s, user: %s, user session: %s. Will detach client session from user session",
                        refreshToken.getId(), realm.getName(), clientSession.getClient().getClientId(),
                        clientSession.getUserSession().getUser().getUsername(), clientSession.getUserSession().getId());
                }
                clientSession.detachFromUserSession();
                throw oee;
            }
        }
    }

    // Will throw OAuthErrorException if validation fails
    private void validateTokenReuse(KeycloakSession session, RealmModel realm, AccessToken refreshToken,
        AuthenticatedClientSessionModel clientSession, boolean refreshFlag) throws OAuthErrorException {
        int clusterStartupTime = session.getProvider(ClusterProvider.class).getClusterStartupTime();

        // A token older than the tracked current refresh token is stale — unless the cluster
        // restarted after the session timestamp was written.
        if (clientSession.getCurrentRefreshToken() != null
            && !refreshToken.getId().equals(clientSession.getCurrentRefreshToken())
            && refreshToken.getIssuedAt() < clientSession.getTimestamp()
            && clusterStartupTime <= clientSession.getTimestamp()) {
            throw new OAuthErrorException(OAuthErrorException.INVALID_GRANT, "Stale token");
        }

        if (!refreshToken.getId().equals(clientSession.getCurrentRefreshToken())) {
            if (refreshFlag) {
                // Start tracking this token as the current one (refresh flow only).
                clientSession.setCurrentRefreshToken(refreshToken.getId());
                clientSession.setCurrentRefreshTokenUseCount(0);
            } else {
                return;
            }
        }

        int currentCount = clientSession.getCurrentRefreshTokenUseCount();
        if (currentCount > realm.getRefreshTokenMaxReuse()) {
            throw new OAuthErrorException(OAuthErrorException.INVALID_GRANT, "Maximum allowed refresh token reuse exceeded",
                "Maximum allowed refresh token reuse exceeded");
        }
        return;
    }

    /**
     * Decodes and verifies an encoded refresh token: type, optional expiry/not-before check,
     * issued-for client match, and (when enabled) mTLS holder-of-key binding.
     *
     * @throws OAuthErrorException with INVALID_GRANT (or UNAUTHORIZED_CLIENT for HoK failures)
     */
    public RefreshToken verifyRefreshToken(KeycloakSession session, RealmModel realm, ClientModel client, HttpRequest request, String encodedRefreshToken, boolean checkExpiration) throws OAuthErrorException {
        try {
            RefreshToken refreshToken = toRefreshToken(session, encodedRefreshToken);

            if (!(TokenUtil.TOKEN_TYPE_REFRESH.equals(refreshToken.getType()) || TokenUtil.TOKEN_TYPE_OFFLINE.equals(refreshToken.getType()))) {
                throw new OAuthErrorException(OAuthErrorException.INVALID_GRANT, "Invalid refresh token");
            }

            if (checkExpiration) {
                try {
                    TokenVerifier.createWithoutSignature(refreshToken)
                            .withChecks(NotBeforeCheck.forModel(realm), TokenVerifier.IS_ACTIVE)
                            .verify();
                } catch (VerificationException e) {
                    throw new OAuthErrorException(OAuthErrorException.INVALID_GRANT, e.getMessage());
                }
            }

            if (!client.getClientId().equals(refreshToken.getIssuedFor())) {
                throw new OAuthErrorException(OAuthErrorException.INVALID_GRANT, "Invalid refresh token. Token client and authorized client don't match");
            }

            // KEYCLOAK-6771 Certificate Bound Token
            if (OIDCAdvancedConfigWrapper.fromClientModel(client).isUseMtlsHokToken()) {
                if (!MtlsHoKTokenUtil.verifyTokenBindingWithClientCertificate(refreshToken, request, session)) {
                    throw new OAuthErrorException(OAuthErrorException.UNAUTHORIZED_CLIENT, MtlsHoKTokenUtil.CERT_VERIFY_ERROR_DESC);
                }
            }

            return refreshToken;
        } catch (JWSInputException e) {
            throw new OAuthErrorException(OAuthErrorException.INVALID_GRANT, "Invalid refresh token", e);
        }
    }

    // Decodes the encoded token; a null decode result is mapped to INVALID_GRANT.
    public RefreshToken toRefreshToken(KeycloakSession session, String encodedRefreshToken) throws JWSInputException, OAuthErrorException {
        RefreshToken refreshToken = session.tokens().decode(encodedRefreshToken, RefreshToken.class);
        if (refreshToken == null) {
            throw new OAuthErrorException(OAuthErrorException.INVALID_GRANT, "Invalid refresh token");
        }
        return refreshToken;
    }

    // NOTE(review): this method is cut off at the end of the visible chunk — its body
    // continues past this view.
    public IDToken verifyIDToken(KeycloakSession session, RealmModel realm, String encodedIDToken) throws OAuthErrorException {
        IDToken idToken = session.tokens().decode(encodedIDToken, IDToken.class);
        try {
TokenVerifier.createWithoutSignature(idToken) .withChecks(NotBeforeCheck.forModel(realm), TokenVerifier.IS_ACTIVE) .verify(); } catch (VerificationException e) { throw new OAuthErrorException(OAuthErrorException.INVALID_GRANT, e.getMessage()); } return idToken; } public IDToken verifyIDTokenSignature(KeycloakSession session, String encodedIDToken) throws OAuthErrorException { IDToken idToken = session.tokens().decode(encodedIDToken, IDToken.class); if (idToken == null) { throw new OAuthErrorException(OAuthErrorException.INVALID_GRANT, "Invalid IDToken"); } return idToken; } public AccessToken createClientAccessToken(KeycloakSession session, RealmModel realm, ClientModel client, UserModel user, UserSessionModel userSession, ClientSessionContext clientSessionCtx) { AccessToken token = initToken(realm, client, user, userSession, clientSessionCtx, session.getContext().getUri()); token = transformAccessToken(session, token, userSession, clientSessionCtx); return token; } public static ClientSessionContext attachAuthenticationSession(KeycloakSession session, UserSessionModel userSession, AuthenticationSessionModel authSession) { ClientModel client = authSession.getClient(); AuthenticatedClientSessionModel clientSession = userSession.getAuthenticatedClientSessionByClient(client.getId()); if (clientSession == null) { clientSession = session.sessions().createClientSession(userSession.getRealm(), client, userSession); } clientSession.setRedirectUri(authSession.getRedirectUri()); clientSession.setProtocol(authSession.getProtocol()); Set<String> clientScopeIds = authSession.getClientScopes(); Map<String, String> transferredNotes = authSession.getClientNotes(); for (Map.Entry<String, String> entry : transferredNotes.entrySet()) { clientSession.setNote(entry.getKey(), entry.getValue()); } Map<String, String> transferredUserSessionNotes = authSession.getUserSessionNotes(); for (Map.Entry<String, String> entry : transferredUserSessionNotes.entrySet()) { 
userSession.setNote(entry.getKey(), entry.getValue()); } clientSession.setNote(Constants.LEVEL_OF_AUTHENTICATION, String.valueOf(AuthenticatorUtil.getCurrentLevelOfAuthentication(authSession))); clientSession.setTimestamp(Time.currentTime()); // Remove authentication session now new AuthenticationSessionManager(session).removeAuthenticationSession(userSession.getRealm(), authSession, true); ClientSessionContext clientSessionCtx = DefaultClientSessionContext.fromClientSessionAndClientScopeIds(clientSession, clientScopeIds, session); return clientSessionCtx; } public static void dettachClientSession(AuthenticatedClientSessionModel clientSession) { UserSessionModel userSession = clientSession.getUserSession(); if (userSession == null) { return; } clientSession.detachFromUserSession(); } public static Set<RoleModel> getAccess(UserModel user, ClientModel client, Stream<ClientScopeModel> clientScopes) { Set<RoleModel> roleMappings = RoleUtils.getDeepUserRoleMappings(user); if (client.isFullScopeAllowed()) { if (logger.isTraceEnabled()) { logger.tracef("Using full scope for client %s", client.getClientId()); } return roleMappings; } else { // 1 - Client roles of this client itself Stream<RoleModel> scopeMappings = client.getRolesStream(); // 2 - Role mappings of client itself + default client scopes + optional client scopes requested by scope parameter (if applyScopeParam is true) Stream<RoleModel> clientScopesMappings; if (!logger.isTraceEnabled()) { clientScopesMappings = clientScopes.flatMap(clientScope -> clientScope.getScopeMappingsStream()); } else { clientScopesMappings = clientScopes.flatMap(clientScope -> { logger.tracef("Adding client scope role mappings of client scope '%s' to client '%s'", clientScope.getName(), client.getClientId()); return clientScope.getScopeMappingsStream(); }); } scopeMappings = Stream.concat(scopeMappings, clientScopesMappings); // 3 - Expand scope mappings scopeMappings = RoleUtils.expandCompositeRolesStream(scopeMappings); // 
Intersection of expanded user roles and expanded scopeMappings roleMappings.retainAll(scopeMappings.collect(Collectors.toSet())); return roleMappings; } } /** Return client itself + all default client scopes of client + optional client scopes requested by scope parameter **/ public static Stream<ClientScopeModel> getRequestedClientScopes(String scopeParam, ClientModel client) { // Add all default client scopes automatically and client itself Stream<ClientScopeModel> clientScopes = Stream.concat( client.getClientScopes(true).values().stream(), Stream.of(client)).distinct(); if (scopeParam == null) { return clientScopes; } Map<String, ClientScopeModel> allOptionalScopes = client.getClientScopes(false); // Add optional client scopes requested by scope parameter return Stream.concat(parseScopeParameter(scopeParam).map(allOptionalScopes::get).filter(Objects::nonNull), clientScopes).distinct(); } public static boolean isValidScope(String scopes, ClientModel client) { if (scopes == null) { return true; } Set<String> clientScopes = getRequestedClientScopes(scopes, client) .filter(((Predicate<ClientScopeModel>) ClientModel.class::isInstance).negate()) .map(ClientScopeModel::getName) .collect(Collectors.toSet()); Collection<String> requestedScopes = TokenManager.parseScopeParameter(scopes).collect(Collectors.toSet()); if (TokenUtil.isOIDCRequest(scopes)) { requestedScopes.remove(OAuth2Constants.SCOPE_OPENID); } if (!requestedScopes.isEmpty() && clientScopes.isEmpty()) { return false; } for (String requestedScope : requestedScopes) { // we also check dynamic scopes in case the client is from a provider that dynamically provides scopes to their clients if (!clientScopes.contains(requestedScope) && client.getDynamicClientScope(requestedScope) == null) { return false; } } return true; } public static Stream<String> parseScopeParameter(String scopeParam) { return Arrays.stream(scopeParam.split(" ")).distinct(); } // Check if user still has granted consents to all requested client 
scopes public static boolean verifyConsentStillAvailable(KeycloakSession session, UserModel user, ClientModel client, Stream<ClientScopeModel> requestedClientScopes) { if (!client.isConsentRequired()) { return true; } UserConsentModel grantedConsent = session.users().getConsentByClient(client.getRealm(), user.getId(), client.getId()); return requestedClientScopes .filter(ClientScopeModel::isDisplayOnConsentScreen) .noneMatch(requestedScope -> { if (grantedConsent == null || !grantedConsent.getGrantedClientScopes().contains(requestedScope)) { logger.debugf("Client '%s' no longer has requested consent from user '%s' for client scope '%s'", client.getClientId(), user.getUsername(), requestedScope.getName()); return true; } return false; }); } public AccessToken transformAccessToken(KeycloakSession session, AccessToken token, UserSessionModel userSession, ClientSessionContext clientSessionCtx) { AtomicReference<AccessToken> finalToken = new AtomicReference<>(token); ProtocolMapperUtils.getSortedProtocolMappers(session, clientSessionCtx) .filter(mapper -> mapper.getValue() instanceof OIDCAccessTokenMapper) .forEach(mapper -> finalToken.set(((OIDCAccessTokenMapper) mapper.getValue()) .transformAccessToken(finalToken.get(), mapper.getKey(), session, userSession, clientSessionCtx))); return finalToken.get(); } public AccessTokenResponse transformAccessTokenResponse(KeycloakSession session, AccessTokenResponse accessTokenResponse, UserSessionModel userSession, ClientSessionContext clientSessionCtx) { AtomicReference<AccessTokenResponse> finalResponseToken = new AtomicReference<>(accessTokenResponse); ProtocolMapperUtils.getSortedProtocolMappers(session, clientSessionCtx) .filter(mapper -> mapper.getValue() instanceof OIDCAccessTokenResponseMapper) .forEach(mapper -> finalResponseToken.set(((OIDCAccessTokenResponseMapper) mapper.getValue()) .transformAccessTokenResponse(finalResponseToken.get(), mapper.getKey(), session, userSession, clientSessionCtx))); return 
finalResponseToken.get(); } public AccessToken transformUserInfoAccessToken(KeycloakSession session, AccessToken token, UserSessionModel userSession, ClientSessionContext clientSessionCtx) { AtomicReference<AccessToken> finalToken = new AtomicReference<>(token); ProtocolMapperUtils.getSortedProtocolMappers(session, clientSessionCtx) .filter(mapper -> mapper.getValue() instanceof UserInfoTokenMapper) .forEach(mapper -> finalToken.set(((UserInfoTokenMapper) mapper.getValue()) .transformUserInfoToken(finalToken.get(), mapper.getKey(), session, userSession, clientSessionCtx))); return finalToken.get(); } public Map<String, Object> generateUserInfoClaims(AccessToken userInfo, UserModel userModel) { Map<String, Object> claims = new HashMap<>(); claims.put("sub", userInfo.getSubject() == null? userModel.getId() : userInfo.getSubject()); if (userInfo.getIssuer() != null) { claims.put("iss", userInfo.getIssuer()); } if (userInfo.getAudience()!= null) { claims.put("aud", userInfo.getAudience()); } if (userInfo.getName() != null) { claims.put("name", userInfo.getName()); } if (userInfo.getGivenName() != null) { claims.put("given_name", userInfo.getGivenName()); } if (userInfo.getFamilyName() != null) { claims.put("family_name", userInfo.getFamilyName()); } if (userInfo.getMiddleName() != null) { claims.put("middle_name", userInfo.getMiddleName()); } if (userInfo.getNickName() != null) { claims.put("nickname", userInfo.getNickName()); } if (userInfo.getPreferredUsername() != null) { claims.put("preferred_username", userInfo.getPreferredUsername()); } if (userInfo.getProfile() != null) { claims.put("profile", userInfo.getProfile()); } if (userInfo.getPicture() != null) { claims.put("picture", userInfo.getPicture()); } if (userInfo.getWebsite() != null) { claims.put("website", userInfo.getWebsite()); } if (userInfo.getEmail() != null) { claims.put("email", userInfo.getEmail()); } if (userInfo.getEmailVerified() != null) { claims.put("email_verified", 
userInfo.getEmailVerified()); } if (userInfo.getGender() != null) { claims.put("gender", userInfo.getGender()); } if (userInfo.getBirthdate() != null) { claims.put("birthdate", userInfo.getBirthdate()); } if (userInfo.getZoneinfo() != null) { claims.put("zoneinfo", userInfo.getZoneinfo()); } if (userInfo.getLocale() != null) { claims.put("locale", userInfo.getLocale()); } if (userInfo.getPhoneNumber() != null) { claims.put("phone_number", userInfo.getPhoneNumber()); } if (userInfo.getPhoneNumberVerified() != null) { claims.put("phone_number_verified", userInfo.getPhoneNumberVerified()); } if (userInfo.getAddress() != null) { claims.put("address", userInfo.getAddress()); } if (userInfo.getUpdatedAt() != null) { claims.put("updated_at", userInfo.getUpdatedAt()); } if (userInfo.getClaimsLocales() != null) { claims.put("claims_locales", userInfo.getClaimsLocales()); } claims.putAll(userInfo.getOtherClaims()); if (userInfo.getRealmAccess() != null) { Map<String, Set<String>> realmAccess = new HashMap<>(); realmAccess.put("roles", userInfo.getRealmAccess().getRoles()); claims.put("realm_access", realmAccess); } if (userInfo.getResourceAccess() != null && !userInfo.getResourceAccess().isEmpty()) { Map<String, Map<String, Set<String>>> resourceAccessMap = new HashMap<>(); for (Map.Entry<String, AccessToken.Access> resourceAccessMapEntry : userInfo.getResourceAccess() .entrySet()) { Map<String, Set<String>> resourceAccess = new HashMap<>(); resourceAccess.put("roles", resourceAccessMapEntry.getValue().getRoles()); resourceAccessMap.put(resourceAccessMapEntry.getKey(), resourceAccess); } claims.put("resource_access", resourceAccessMap); } return claims; } public void transformIDToken(KeycloakSession session, IDToken token, UserSessionModel userSession, ClientSessionContext clientSessionCtx) { AtomicReference<IDToken> finalToken = new AtomicReference<>(token); ProtocolMapperUtils.getSortedProtocolMappers(session, clientSessionCtx) .filter(mapper -> mapper.getValue() 
instanceof OIDCIDTokenMapper) .forEach(mapper -> finalToken.set(((OIDCIDTokenMapper) mapper.getValue()) .transformIDToken(finalToken.get(), mapper.getKey(), session, userSession, clientSessionCtx))); } protected AccessToken initToken(RealmModel realm, ClientModel client, UserModel user, UserSessionModel session, ClientSessionContext clientSessionCtx, UriInfo uriInfo) { AccessToken token = new AccessToken(); token.id(KeycloakModelUtils.generateId()); token.type(TokenUtil.TOKEN_TYPE_BEARER); token.subject(user.getId()); token.issuedNow(); token.issuedFor(client.getClientId()); AuthenticatedClientSessionModel clientSession = clientSessionCtx.getClientSession(); token.issuer(clientSession.getNote(OIDCLoginProtocol.ISSUER)); token.setNonce(clientSessionCtx.getAttribute(OIDCLoginProtocol.NONCE_PARAM, String.class)); token.setScope(clientSessionCtx.getScopeString()); token.setAcr(getAcr(clientSession)); String authTime = session.getNote(AuthenticationManager.AUTH_TIME); if (authTime != null) { token.setAuthTime(Integer.parseInt(authTime)); } token.setSessionState(session.getId()); ClientScopeModel offlineAccessScope = KeycloakModelUtils.getClientScopeByName(realm, OAuth2Constants.OFFLINE_ACCESS); boolean offlineTokenRequested = offlineAccessScope == null ? false : clientSessionCtx.getClientScopeIds().contains(offlineAccessScope.getId()); token.expiration(getTokenExpiration(realm, client, session, clientSession, offlineTokenRequested)); return token; } private String getAcr(AuthenticatedClientSessionModel clientSession) { int loa = Integer.parseInt(clientSession.getNote(Constants.LEVEL_OF_AUTHENTICATION)); if (loa < Constants.MINIMUM_LOA) { loa = AuthenticationManager.isSSOAuthentication(clientSession) ? 
0 : 1; } Map<String, Integer> acrLoaMap = AcrUtils.getAcrLoaMap(clientSession.getClient()); String acr = AcrUtils.mapLoaToAcr(loa, acrLoaMap, AcrUtils.getRequiredAcrValues( clientSession.getNote(OIDCLoginProtocol.CLAIMS_PARAM))); if (acr == null) { acr = AcrUtils.mapLoaToAcr(loa, acrLoaMap, AcrUtils.getAcrValues( clientSession.getNote(OIDCLoginProtocol.CLAIMS_PARAM), clientSession.getNote(OIDCLoginProtocol.ACR_PARAM))); if (acr == null) { acr = AcrUtils.mapLoaToAcr(loa, acrLoaMap, acrLoaMap.keySet()); if (acr == null) { acr = String.valueOf(loa); } } } return acr; } private int getTokenExpiration(RealmModel realm, ClientModel client, UserSessionModel userSession, AuthenticatedClientSessionModel clientSession, boolean offlineTokenRequested) { boolean implicitFlow = false; String responseType = clientSession.getNote(OIDCLoginProtocol.RESPONSE_TYPE_PARAM); if (responseType != null) { implicitFlow = OIDCResponseType.parse(responseType).isImplicitFlow(); } int tokenLifespan; if (implicitFlow) { tokenLifespan = realm.getAccessTokenLifespanForImplicitFlow(); } else { String clientLifespan = client.getAttribute(OIDCConfigAttributes.ACCESS_TOKEN_LIFESPAN); if (clientLifespan != null && !clientLifespan.trim().isEmpty()) { tokenLifespan = Integer.parseInt(clientLifespan); } else { tokenLifespan = realm.getAccessTokenLifespan(); } } int expiration; if (tokenLifespan == -1) { expiration = userSession.getStarted() + (userSession.isRememberMe() && realm.getSsoSessionMaxLifespanRememberMe() > 0 ? realm.getSsoSessionMaxLifespanRememberMe() : realm.getSsoSessionMaxLifespan()); } else { expiration = Time.currentTime() + tokenLifespan; } if (userSession.isOffline() || offlineTokenRequested) { if (realm.isOfflineSessionMaxLifespanEnabled()) { int sessionExpires = userSession.getStarted() + realm.getOfflineSessionMaxLifespan(); expiration = expiration <= sessionExpires ? 
expiration : sessionExpires; int clientOfflineSessionMaxLifespan; String clientOfflineSessionMaxLifespanPerClient = client .getAttribute(OIDCConfigAttributes.CLIENT_OFFLINE_SESSION_MAX_LIFESPAN); if (clientOfflineSessionMaxLifespanPerClient != null && !clientOfflineSessionMaxLifespanPerClient.trim().isEmpty()) { clientOfflineSessionMaxLifespan = Integer.parseInt(clientOfflineSessionMaxLifespanPerClient); } else { clientOfflineSessionMaxLifespan = realm.getClientOfflineSessionMaxLifespan(); } if (clientOfflineSessionMaxLifespan > 0) { int clientOfflineSessionExpiration = userSession.getStarted() + clientOfflineSessionMaxLifespan; return expiration < clientOfflineSessionExpiration ? expiration : clientOfflineSessionExpiration; } } } else { int sessionExpires = userSession.getStarted() + (userSession.isRememberMe() && realm.getSsoSessionMaxLifespanRememberMe() > 0 ? realm.getSsoSessionMaxLifespanRememberMe() : realm.getSsoSessionMaxLifespan()); expiration = expiration <= sessionExpires ? expiration : sessionExpires; int clientSessionMaxLifespan; String clientSessionMaxLifespanPerClient = client.getAttribute(OIDCConfigAttributes.CLIENT_SESSION_MAX_LIFESPAN); if (clientSessionMaxLifespanPerClient != null && !clientSessionMaxLifespanPerClient.trim().isEmpty()) { clientSessionMaxLifespan = Integer.parseInt(clientSessionMaxLifespanPerClient); } else { clientSessionMaxLifespan = realm.getClientSessionMaxLifespan(); } if (clientSessionMaxLifespan > 0) { int clientSessionExpiration = clientSession.getTimestamp() + clientSessionMaxLifespan; return expiration < clientSessionExpiration ? 
expiration : clientSessionExpiration; } } return expiration; } public AccessTokenResponseBuilder responseBuilder(RealmModel realm, ClientModel client, EventBuilder event, KeycloakSession session, UserSessionModel userSession, ClientSessionContext clientSessionCtx) { return new AccessTokenResponseBuilder(realm, client, event, session, userSession, clientSessionCtx); } public class AccessTokenResponseBuilder { RealmModel realm; ClientModel client; EventBuilder event; KeycloakSession session; UserSessionModel userSession; ClientSessionContext clientSessionCtx; AccessToken accessToken; RefreshToken refreshToken; IDToken idToken; boolean generateAccessTokenHash = false; String codeHash; String stateHash; public AccessTokenResponseBuilder(RealmModel realm, ClientModel client, EventBuilder event, KeycloakSession session, UserSessionModel userSession, ClientSessionContext clientSessionCtx) { this.realm = realm; this.client = client; this.event = event; this.session = session; this.userSession = userSession; this.clientSessionCtx = clientSessionCtx; } public AccessToken getAccessToken() { return accessToken; } public RefreshToken getRefreshToken() { return refreshToken; } public IDToken getIdToken() { return idToken; } public AccessTokenResponseBuilder accessToken(AccessToken accessToken) { this.accessToken = accessToken; return this; } public AccessTokenResponseBuilder refreshToken(RefreshToken refreshToken) { this.refreshToken = refreshToken; return this; } public AccessTokenResponseBuilder generateAccessToken() { UserModel user = userSession.getUser(); accessToken = createClientAccessToken(session, realm, client, user, userSession, clientSessionCtx); return this; } public AccessTokenResponseBuilder generateRefreshToken() { if (accessToken == null) { throw new IllegalStateException("accessToken not set"); } ClientScopeModel offlineAccessScope = KeycloakModelUtils.getClientScopeByName(realm, OAuth2Constants.OFFLINE_ACCESS); boolean offlineTokenRequested = 
offlineAccessScope==null ? false : clientSessionCtx.getClientScopeIds().contains(offlineAccessScope.getId()); if (offlineTokenRequested) { UserSessionManager sessionManager = new UserSessionManager(session); if (!sessionManager.isOfflineTokenAllowed(clientSessionCtx)) { event.error(Errors.NOT_ALLOWED); throw new ErrorResponseException("not_allowed", "Offline tokens not allowed for the user or client", Response.Status.BAD_REQUEST); } refreshToken = new RefreshToken(accessToken); refreshToken.type(TokenUtil.TOKEN_TYPE_OFFLINE); if (realm.isOfflineSessionMaxLifespanEnabled()) refreshToken.expiration(getOfflineExpiration()); sessionManager.createOrUpdateOfflineSession(clientSessionCtx.getClientSession(), userSession); } else { refreshToken = new RefreshToken(accessToken); refreshToken.expiration(getRefreshExpiration()); } refreshToken.id(KeycloakModelUtils.generateId()); refreshToken.issuedNow(); return this; } private int getRefreshExpiration() { int sessionExpires = userSession.getStarted() + (userSession.isRememberMe() && realm.getSsoSessionMaxLifespanRememberMe() > 0 ? realm.getSsoSessionMaxLifespanRememberMe() : realm.getSsoSessionMaxLifespan()); int clientSessionMaxLifespan; String clientSessionMaxLifespanPerClient = client.getAttribute(OIDCConfigAttributes.CLIENT_SESSION_MAX_LIFESPAN); if (clientSessionMaxLifespanPerClient != null && !clientSessionMaxLifespanPerClient.trim().isEmpty()) { clientSessionMaxLifespan = Integer.parseInt(clientSessionMaxLifespanPerClient); } else { clientSessionMaxLifespan = realm.getClientSessionMaxLifespan(); } if (clientSessionMaxLifespan > 0) { int clientSessionMaxExpiration = userSession.getStarted() + clientSessionMaxLifespan; sessionExpires = sessionExpires < clientSessionMaxExpiration ? sessionExpires : clientSessionMaxExpiration; } int expiration = Time.currentTime() + (userSession.isRememberMe() && realm.getSsoSessionIdleTimeoutRememberMe() > 0 ? 
realm.getSsoSessionIdleTimeoutRememberMe() : realm.getSsoSessionIdleTimeout()); int clientSessionIdleTimeout; String clientSessionIdleTimeoutPerClient = client.getAttribute(OIDCConfigAttributes.CLIENT_SESSION_IDLE_TIMEOUT); if (clientSessionIdleTimeoutPerClient != null && !clientSessionIdleTimeoutPerClient.trim().isEmpty()) { clientSessionIdleTimeout = Integer.parseInt(clientSessionIdleTimeoutPerClient); } else { clientSessionIdleTimeout = realm.getClientSessionIdleTimeout(); } if (clientSessionIdleTimeout > 0) { int clientSessionIdleExpiration = Time.currentTime() + clientSessionIdleTimeout; expiration = expiration < clientSessionIdleExpiration ? expiration : clientSessionIdleExpiration; } return expiration <= sessionExpires ? expiration : sessionExpires; } private int getOfflineExpiration() { int sessionExpires = userSession.getStarted() + realm.getOfflineSessionMaxLifespan(); int clientOfflineSessionMaxLifespan; String clientOfflineSessionMaxLifespanPerClient = client .getAttribute(OIDCConfigAttributes.CLIENT_OFFLINE_SESSION_MAX_LIFESPAN); if (clientOfflineSessionMaxLifespanPerClient != null && !clientOfflineSessionMaxLifespanPerClient.trim().isEmpty()) { clientOfflineSessionMaxLifespan = Integer.parseInt(clientOfflineSessionMaxLifespanPerClient); } else { clientOfflineSessionMaxLifespan = realm.getClientOfflineSessionMaxLifespan(); } if (clientOfflineSessionMaxLifespan > 0) { int clientOfflineSessionMaxExpiration = userSession.getStarted() + clientOfflineSessionMaxLifespan; sessionExpires = sessionExpires < clientOfflineSessionMaxExpiration ? 
sessionExpires : clientOfflineSessionMaxExpiration; } int expiration = Time.currentTime() + realm.getOfflineSessionIdleTimeout(); int clientOfflineSessionIdleTimeout; String clientOfflineSessionIdleTimeoutPerClient = client .getAttribute(OIDCConfigAttributes.CLIENT_OFFLINE_SESSION_IDLE_TIMEOUT); if (clientOfflineSessionIdleTimeoutPerClient != null && !clientOfflineSessionIdleTimeoutPerClient.trim().isEmpty()) { clientOfflineSessionIdleTimeout = Integer.parseInt(clientOfflineSessionIdleTimeoutPerClient); } else { clientOfflineSessionIdleTimeout = realm.getClientOfflineSessionIdleTimeout(); } if (clientOfflineSessionIdleTimeout > 0) { int clientOfflineSessionIdleExpiration = Time.currentTime() + clientOfflineSessionIdleTimeout; expiration = expiration < clientOfflineSessionIdleExpiration ? expiration : clientOfflineSessionIdleExpiration; } return expiration <= sessionExpires ? expiration : sessionExpires; } public AccessTokenResponseBuilder generateIDToken() { return generateIDToken(false); } public AccessTokenResponseBuilder generateIDToken(boolean isIdTokenAsDetachedSignature) { if (accessToken == null) { throw new IllegalStateException("accessToken not set"); } idToken = new IDToken(); idToken.id(KeycloakModelUtils.generateId()); idToken.type(TokenUtil.TOKEN_TYPE_ID); idToken.subject(accessToken.getSubject()); idToken.audience(client.getClientId()); idToken.issuedNow(); idToken.issuedFor(accessToken.getIssuedFor()); idToken.issuer(accessToken.getIssuer()); idToken.setNonce(accessToken.getNonce()); idToken.setAuthTime(accessToken.getAuthTime()); idToken.setSessionState(accessToken.getSessionState()); idToken.expiration(accessToken.getExpiration()); idToken.setAcr(accessToken.getAcr()); if (isIdTokenAsDetachedSignature == false) { transformIDToken(session, idToken, userSession, clientSessionCtx); } return this; } public AccessTokenResponseBuilder generateAccessTokenHash() { generateAccessTokenHash = true; return this; } public AccessTokenResponseBuilder 
generateCodeHash(String code) { codeHash = generateOIDCHash(code); return this; } // Financial API - Part 2: Read and Write API Security Profile // http://openid.net/specs/openid-financial-api-part-2.html#authorization-server public AccessTokenResponseBuilder generateStateHash(String state) { stateHash = generateOIDCHash(state); return this; } public AccessTokenResponse build() { if (accessToken != null) { event.detail(Details.TOKEN_ID, accessToken.getId()); } if (refreshToken != null) { if (event.getEvent().getDetails().containsKey(Details.REFRESH_TOKEN_ID)) { event.detail(Details.UPDATED_REFRESH_TOKEN_ID, refreshToken.getId()); } else { event.detail(Details.REFRESH_TOKEN_ID, refreshToken.getId()); } event.detail(Details.REFRESH_TOKEN_TYPE, refreshToken.getType()); } AccessTokenResponse res = new AccessTokenResponse(); if (accessToken != null) { String encodedToken = session.tokens().encode(accessToken); res.setToken(encodedToken); res.setTokenType(TokenUtil.TOKEN_TYPE_BEARER); res.setSessionState(accessToken.getSessionState()); if (accessToken.getExpiration() != 0) { res.setExpiresIn(accessToken.getExpiration() - Time.currentTime()); } } if (generateAccessTokenHash) { String atHash = generateOIDCHash(res.getToken()); idToken.setAccessTokenHash(atHash); } if (codeHash != null) { idToken.setCodeHash(codeHash); } // Financial API - Part 2: Read and Write API Security Profile // http://openid.net/specs/openid-financial-api-part-2.html#authorization-server if (stateHash != null) { idToken.setStateHash(stateHash); } if (idToken != null) { String encodedToken = session.tokens().encodeAndEncrypt(idToken); res.setIdToken(encodedToken); } if (refreshToken != null) { String encodedToken = session.tokens().encode(refreshToken); res.setRefreshToken(encodedToken); if (refreshToken.getExpiration() != 0) { res.setRefreshExpiresIn(refreshToken.getExpiration() - Time.currentTime()); } } int notBefore = realm.getNotBefore(); if (client.getNotBefore() > notBefore) notBefore = 
client.getNotBefore(); int userNotBefore = session.users().getNotBeforeOfUser(realm, userSession.getUser()); if (userNotBefore > notBefore) notBefore = userNotBefore; res.setNotBeforePolicy(notBefore); transformAccessTokenResponse(session, res, userSession, clientSessionCtx); // OIDC Financial API Read Only Profile : scope MUST be returned in the response from Token Endpoint String responseScope = clientSessionCtx.getScopeString(); res.setScope(responseScope); event.detail(Details.SCOPE, responseScope); return res; } private String generateOIDCHash(String input) { String signatureAlgorithm = session.tokens().signatureAlgorithm(TokenCategory.ID); SignatureProvider signatureProvider = session.getProvider(SignatureProvider.class, signatureAlgorithm); String hashAlgorithm = signatureProvider.signer().getHashAlgorithm(); HashProvider hashProvider = session.getProvider(HashProvider.class, hashAlgorithm); byte[] hash = hashProvider.hash(input); return HashUtils.encodeHashToOIDC(hash); } } public static class RefreshResult { private final AccessTokenResponse response; private final boolean offlineToken; private RefreshResult(AccessTokenResponse response, boolean offlineToken) { this.response = response; this.offlineToken = offlineToken; } public AccessTokenResponse getResponse() { return response; } public boolean isOfflineToken() { return offlineToken; } } public static class NotBeforeCheck implements TokenVerifier.Predicate<JsonWebToken> { private final int notBefore; public NotBeforeCheck(int notBefore) { this.notBefore = notBefore; } @Override public boolean test(JsonWebToken t) throws VerificationException { if (t.getIssuedAt() < notBefore) { throw new VerificationException("Stale token"); } return true; } public static NotBeforeCheck forModel(ClientModel clientModel) { if (clientModel != null) { int notBeforeClient = clientModel.getNotBefore(); int notBeforeRealm = clientModel.getRealm().getNotBefore(); int notBefore = (notBeforeClient == 0 ? 
notBeforeRealm : (notBeforeRealm == 0 ? notBeforeClient : Math.min(notBeforeClient, notBeforeRealm))); return new NotBeforeCheck(notBefore); } return new NotBeforeCheck(0); } public static NotBeforeCheck forModel(RealmModel realmModel) { return new NotBeforeCheck(realmModel == null ? 0 : realmModel.getNotBefore()); } public static NotBeforeCheck forModel(KeycloakSession session, RealmModel realmModel, UserModel userModel) { return new NotBeforeCheck(session.users().getNotBeforeOfUser(realmModel, userModel)); } } public LogoutTokenValidationCode verifyLogoutToken(KeycloakSession session, RealmModel realm, String encodedLogoutToken) { Optional<LogoutToken> logoutTokenOptional = toLogoutToken(encodedLogoutToken); if (!logoutTokenOptional.isPresent()) { return LogoutTokenValidationCode.DECODE_TOKEN_FAILED; } LogoutToken logoutToken = logoutTokenOptional.get(); List<OIDCIdentityProvider> identityProviders = getOIDCIdentityProviders(realm, session).collect(Collectors.toList()); if (identityProviders.isEmpty()) { return LogoutTokenValidationCode.COULD_NOT_FIND_IDP; } Stream<OIDCIdentityProvider> validOidcIdentityProviders = validateLogoutTokenAgainstIdpProvider(identityProviders.stream(), encodedLogoutToken, logoutToken); if (validOidcIdentityProviders.count() == 0) { return LogoutTokenValidationCode.TOKEN_VERIFICATION_WITH_IDP_FAILED; } if (logoutToken.getSubject() == null && logoutToken.getSid() == null) { return LogoutTokenValidationCode.MISSING_SID_OR_SUBJECT; } if (!checkLogoutTokenForEvents(logoutToken)) { return LogoutTokenValidationCode.BACKCHANNEL_LOGOUT_EVENT_MISSING; } if (logoutToken.getOtherClaims().get(NONCE) != null) { return LogoutTokenValidationCode.NONCE_CLAIM_IN_TOKEN; } if (logoutToken.getId() == null) { return LogoutTokenValidationCode.LOGOUT_TOKEN_ID_MISSING; } if (logoutToken.getIat() == null) { return LogoutTokenValidationCode.MISSING_IAT_CLAIM; } return LogoutTokenValidationCode.VALIDATION_SUCCESS; } public Optional<LogoutToken> 
toLogoutToken(String encodedLogoutToken) { try { JWSInput jws = new JWSInput(encodedLogoutToken); return Optional.of(jws.readJsonContent(LogoutToken.class)); } catch (JWSInputException e) { return Optional.empty(); } } public Stream<OIDCIdentityProvider> getValidOIDCIdentityProvidersForBackchannelLogout(RealmModel realm, KeycloakSession session, String encodedLogoutToken, LogoutToken logoutToken) { return validateLogoutTokenAgainstIdpProvider(getOIDCIdentityProviders(realm, session), encodedLogoutToken, logoutToken); } public Stream<OIDCIdentityProvider> validateLogoutTokenAgainstIdpProvider(Stream<OIDCIdentityProvider> oidcIdps, String encodedLogoutToken, LogoutToken logoutToken) { return oidcIdps .filter(oidcIdp -> oidcIdp.getConfig().getIssuer() != null) .filter(oidcIdp -> oidcIdp.isIssuer(logoutToken.getIssuer(), null)) .filter(oidcIdp -> { try { oidcIdp.validateToken(encodedLogoutToken); return true; } catch (IdentityBrokerException e) { logger.debugf("LogoutToken verification with identity provider failed", e.getMessage()); return false; } }); } private Stream<OIDCIdentityProvider> getOIDCIdentityProviders(RealmModel realm, KeycloakSession session) { try { return realm.getIdentityProvidersStream() .map(idpModel -> IdentityBrokerService.getIdentityProviderFactory(session, idpModel).create(session, idpModel)) .filter(OIDCIdentityProvider.class::isInstance) .map(OIDCIdentityProvider.class::cast); } catch (IdentityBrokerException e) { logger.warnf("LogoutToken verification with identity provider failed", e.getMessage()); } return Stream.empty(); } private boolean checkLogoutTokenForEvents(LogoutToken logoutToken) { for (String eventKey : logoutToken.getEvents().keySet()) { if (TokenUtil.TOKEN_BACKCHANNEL_LOGOUT_EVENT.equals(eventKey)) { return true; } } return false; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.catalina.util;

import java.io.UnsupportedEncodingException;
import java.nio.charset.Charset;
import java.util.Map;

import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
import org.apache.tomcat.util.buf.B2CConverter;
import org.apache.tomcat.util.res.StringManager;

/**
 * General purpose request parsing and encoding utility methods.
 *
 * @author Craig R. McClanahan
 * @author Tim Tye
 * @version $Id: RequestUtil.java 1239053 2012-02-01 10:52:00Z markt $
 */
public final class RequestUtil {

    private static final Log log = LogFactory.getLog(RequestUtil.class);

    /**
     * The string resources for this package.
     */
    private static final StringManager sm =
        StringManager.getManager("org.apache.catalina.util");


    /**
     * Filter the specified message string for characters that are sensitive
     * in HTML. This avoids potential attacks caused by including JavaScript
     * codes in the request URL that is often reported in error messages.
     *
     * @param message The message string to be filtered
     *
     * @return the filtered message, or {@code null} if the input was
     *         {@code null}
     */
    public static String filter(String message) {

        if (message == null)
            return (null);

        int len = message.length();
        // Reserve a little head-room for entity expansion.
        StringBuilder result = new StringBuilder(len + 50);
        for (int i = 0; i < len; i++) {
            char c = message.charAt(i);
            switch (c) {
            case '<':
                result.append("&lt;");
                break;
            case '>':
                result.append("&gt;");
                break;
            case '&':
                result.append("&amp;");
                break;
            case '"':
                result.append("&quot;");
                break;
            default:
                result.append(c);
            }
        }
        return (result.toString());
    }


    /**
     * Normalize a relative URI path that may have relative values ("/./",
     * "/../", and so on ) it it. <strong>WARNING</strong> - This method is
     * useful only for normalizing application-generated paths. It does not
     * try to perform security checks for malicious input.
     *
     * @param path Relative path to be normalized
     *
     * @deprecated Deprecated to resolve a circular package dependency and will
     *             be removed in Tomcat 8.0.x. Use {@link
     *             org.apache.tomcat.util.http.RequestUtil#normalize(String)} as
     *             a replacement.
     */
    @Deprecated
    public static String normalize(String path) {
        return org.apache.tomcat.util.http.RequestUtil.normalize(path);
    }


    /**
     * Normalize a relative URI path that may have relative values ("/./",
     * "/../", and so on ) it it. <strong>WARNING</strong> - This method is
     * useful only for normalizing application-generated paths. It does not
     * try to perform security checks for malicious input.
     *
     * @param path Relative path to be normalized
     * @param replaceBackSlash Should '\\' be replaced with '/'
     *
     * @deprecated Deprecated to resolve a circular package dependency and will
     *             be removed in Tomcat 8.0.x. Use {@link
     *             org.apache.tomcat.util.http.RequestUtil#normalize(String,
     *             boolean)} as a replacement.
     */
    @Deprecated
    public static String normalize(String path, boolean replaceBackSlash) {
        return org.apache.tomcat.util.http.RequestUtil.normalize(path,
                replaceBackSlash);
    }


    /**
     * Append request parameters from the specified String to the specified
     * Map. It is presumed that the specified Map is not accessed from any
     * other thread, so no synchronization is performed.
     * <p>
     * <strong>IMPLEMENTATION NOTE</strong>: URL decoding is performed
     * individually on the parsed name and value elements, rather than on
     * the entire query string ahead of time, to properly deal with the case
     * where the name or value includes an encoded "=" or "&amp;" character
     * that would otherwise be interpreted as a delimiter.
     *
     * @param map Map that accumulates the resulting parameters
     * @param data Input string containing request parameters
     * @param encoding The encoding to use; encoding must not be null.
     *     If an unsupported encoding is specified the parameters will not be
     *     parsed and the map will not be modified
     */
    public static void parseParameters(Map<String,String[]> map, String data,
            String encoding) {

        if ((data != null) && (data.length() > 0)) {

            // use the specified encoding to extract bytes out of the
            // given string so that the encoding is not lost.
            byte[] bytes = null;
            try {
                bytes = data.getBytes(B2CConverter.getCharset(encoding));
                parseParameters(map, bytes, encoding);
            } catch (UnsupportedEncodingException uee) {
                // Unsupported encoding: log and leave the map untouched as
                // documented above.
                if (log.isDebugEnabled()) {
                    log.debug(sm.getString("requestUtil.parseParameters.uee",
                            encoding), uee);
                }
            }
        }
    }


    /**
     * Decode and return the specified URL-encoded String.
     * When the byte array is converted to a string, the system default
     * character encoding is used...  This may be different than some other
     * servers. It is assumed the string is not a query string.
     *
     * @param str The url-encoded string
     *
     * @exception IllegalArgumentException if a '%' character is not followed
     *     by a valid 2-digit hexadecimal number
     */
    public static String URLDecode(String str) {
        return URLDecode(str, null);
    }


    /**
     * Decode and return the specified URL-encoded String. It is assumed the
     * string is not a query string.
     *
     * @param str The url-encoded string
     * @param enc The encoding to use; if null, the default encoding is used. If
     *     an unsupported encoding is specified null will be returned
     * @exception IllegalArgumentException if a '%' character is not followed
     *     by a valid 2-digit hexadecimal number
     */
    public static String URLDecode(String str, String enc) {
        return URLDecode(str, enc, false);
    }


    /**
     * Decode and return the specified URL-encoded String.
     *
     * @param str The url-encoded string
     * @param enc The encoding to use; if null, the default encoding is used. If
     *     an unsupported encoding is specified null will be returned
     * @param isQuery Is this a query string being processed
     * @exception IllegalArgumentException if a '%' character is not followed
     *     by a valid 2-digit hexadecimal number
     */
    public static String URLDecode(String str, String enc, boolean isQuery) {
        if (str == null)
            return (null);

        // use the specified encoding to extract bytes out of the
        // given string so that the encoding is not lost. If an
        // encoding is not specified, let it use platform default
        byte[] bytes = null;
        try {
            if (enc == null) {
                bytes = str.getBytes(Charset.defaultCharset());
            } else {
                bytes = str.getBytes(B2CConverter.getCharset(enc));
            }
        } catch (UnsupportedEncodingException uee) {
            if (log.isDebugEnabled()) {
                log.debug(sm.getString("requestUtil.urlDecode.uee", enc), uee);
            }
        }

        return URLDecode(bytes, enc, isQuery);
    }


    /**
     * Decode and return the specified URL-encoded byte array. It is assumed
     * the string is not a query string.
     *
     * @param bytes The url-encoded byte array
     * @exception IllegalArgumentException if a '%' character is not followed
     *     by a valid 2-digit hexadecimal number
     */
    public static String URLDecode(byte[] bytes) {
        return URLDecode(bytes, null);
    }


    /**
     * Decode and return the specified URL-encoded byte array. It is assumed
     * the string is not a query string.
     *
     * @param bytes The url-encoded byte array
     * @param enc The encoding to use; if null, the default encoding is used
     * @exception IllegalArgumentException if a '%' character is not followed
     *     by a valid 2-digit hexadecimal number
     */
    public static String URLDecode(byte[] bytes, String enc) {
        return URLDecode(bytes, enc, false);
    }


    /**
     * Decode and return the specified URL-encoded byte array.
     * <p>
     * NOTE: the input array is decoded in place and is therefore modified by
     * this method.
     *
     * @param bytes The url-encoded byte array
     * @param enc The encoding to use; if null, the default encoding is used. If
     *     an unsupported encoding is specified null will be returned
     * @param isQuery Is this a query string being processed
     * @exception IllegalArgumentException if a '%' character is not followed
     *     by a valid 2-digit hexadecimal number
     */
    public static String URLDecode(byte[] bytes, String enc, boolean isQuery) {
        if (bytes == null)
            return null;

        int len = bytes.length;
        int ix = 0;     // read position
        int ox = 0;     // write position (ox <= ix, so in-place is safe)
        while (ix < len) {
            byte b = bytes[ix++];     // Get byte to test
            if (b == '+' && isQuery) {
                // '+' means space only inside a query string
                b = (byte)' ';
            } else if (b == '%') {
                if (ix + 2 > len) {
                    throw new IllegalArgumentException(
                            sm.getString("requestUtil.urlDecode.missingDigit"));
                }
                b = (byte) ((convertHexDigit(bytes[ix++]) << 4)
                        + convertHexDigit(bytes[ix++]));
            }
            bytes[ox++] = b;
        }
        if (enc != null) {
            try {
                return new String(bytes, 0, ox, B2CConverter.getCharset(enc));
            } catch (UnsupportedEncodingException uee) {
                if (log.isDebugEnabled()) {
                    log.debug(sm.getString("requestUtil.urlDecode.uee", enc),
                            uee);
                }
                return null;
            }
        }
        return new String(bytes, 0, ox);
    }


    /**
     * Convert a byte character value to hexadecimal digit value.
     *
     * @param b the character value byte
     *
     * @exception IllegalArgumentException if {@code b} is not a hexadecimal
     *     digit
     */
    private static byte convertHexDigit( byte b ) {
        if ((b >= '0') && (b <= '9')) return (byte)(b - '0');
        if ((b >= 'a') && (b <= 'f')) return (byte)(b - 'a' + 10);
        if ((b >= 'A') && (b <= 'F')) return (byte)(b - 'A' + 10);
        throw new IllegalArgumentException(
                sm.getString("requestUtil.convertHexDigit.notHex",
                        Character.valueOf((char)b)));
    }


    /**
     * Put name and value pair in map. When name already exist, add value
     * to array of values.
     *
     * @param map The map to populate
     * @param name The parameter name
     * @param value The parameter value
     */
    private static void putMapEntry( Map<String,String[]> map, String name,
            String value) {
        String[] newValues = null;
        String[] oldValues = map.get(name);
        if (oldValues == null) {
            newValues = new String[1];
            newValues[0] = value;
        } else {
            // Append the new value to a copy of the existing array
            newValues = new String[oldValues.length + 1];
            System.arraycopy(oldValues, 0, newValues, 0, oldValues.length);
            newValues[oldValues.length] = value;
        }
        map.put(name, newValues);
    }


    /**
     * Append request parameters from the specified String to the specified
     * Map. It is presumed that the specified Map is not accessed from any
     * other thread, so no synchronization is performed.
     * <p>
     * <strong>IMPLEMENTATION NOTE</strong>: URL decoding is performed
     * individually on the parsed name and value elements, rather than on
     * the entire query string ahead of time, to properly deal with the case
     * where the name or value includes an encoded "=" or "&amp;" character
     * that would otherwise be interpreted as a delimiter.
     *
     * NOTE: byte array data is modified by this method. Caller beware.
     *
     * @param map Map that accumulates the resulting parameters
     * @param data Input string containing request parameters
     * @param encoding The encoding to use; if null, the default encoding is
     *     used
     *
     * @exception UnsupportedEncodingException if the requested encoding is not
     *     supported.
     * @exception IllegalArgumentException if a '%' character is not followed
     *     by a valid 2-digit hexadecimal number
     */
    public static void parseParameters(Map<String,String[]> map, byte[] data,
            String encoding) throws UnsupportedEncodingException {

        Charset charset = B2CConverter.getCharset(encoding);

        if (data != null && data.length > 0) {
            int ix = 0;     // read position in data
            int ox = 0;     // write position for the decoded current token
            String key = null;
            String value = null;
            while (ix < data.length) {
                byte c = data[ix++];
                switch ((char) c) {
                case '&':
                    value = new String(data, 0, ox, charset);
                    if (key != null) {
                        putMapEntry(map, key, value);
                        key = null;
                    }
                    ox = 0;
                    break;
                case '=':
                    if (key == null) {
                        // First '=' terminates the key
                        key = new String(data, 0, ox, charset);
                        ox = 0;
                    } else {
                        // Subsequent '=' belongs to the value
                        data[ox++] = c;
                    }
                    break;
                case '+':
                    data[ox++] = (byte)' ';
                    break;
                case '%':
                    // Bounds check keeps a trailing "%" or "%X" from raising a
                    // raw ArrayIndexOutOfBoundsException; mirrors URLDecode().
                    if (ix + 2 > data.length) {
                        throw new IllegalArgumentException(
                                sm.getString(
                                        "requestUtil.urlDecode.missingDigit"));
                    }
                    data[ox++] = (byte)((convertHexDigit(data[ix++]) << 4)
                                    + convertHexDigit(data[ix++]));
                    break;
                default:
                    data[ox++] = c;
                }
            }
            //The last value does not end in '&'.  So save it now.
            if (key != null) {
                value = new String(data, 0, ox, charset);
                putMapEntry(map, key, value);
            }
        }
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.metrics.stats.extended;

import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.common.inject.internal.Nullable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.DoubleArray;
import org.elasticsearch.common.util.LongArray;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;

import java.io.IOException;
import java.util.List;
import java.util.Map;

/**
 * Metrics aggregator that accumulates, per bucket, the count, sum, min, max
 * and sum of squares of a numeric values source, from which the avg, variance,
 * std_deviation and the sigma-scaled std_upper/std_lower bounds are derived on
 * demand.
 */
public class ExtendedStatsAggregator extends NumericMetricsAggregator.MultiValue {

    final ValuesSource.Numeric valuesSource;
    final ValueFormatter formatter;
    // Number of standard deviations used for the std_upper/std_lower bounds.
    final double sigma;

    // Per-bucket accumulators, indexed by bucket ordinal and grown lazily.
    LongArray counts;
    DoubleArray sums;
    DoubleArray mins;
    DoubleArray maxes;
    DoubleArray sumOfSqrs;

    /**
     * @param valuesSource the numeric source, or {@code null} for an unmapped
     *                     field (all accumulators stay unallocated in that case)
     * @param sigma        multiplier for the standard-deviation bounds
     */
    public ExtendedStatsAggregator(String name, ValuesSource.Numeric valuesSource,
            @Nullable ValueFormatter formatter, AggregationContext context,
            Aggregator parent, double sigma, List<Reducer> reducers,
            Map<String, Object> metaData) throws IOException {
        super(name, context, parent, reducers, metaData);
        this.valuesSource = valuesSource;
        this.formatter = formatter;
        this.sigma = sigma;
        if (valuesSource != null) {
            final BigArrays bigArrays = context.bigArrays();
            counts = bigArrays.newLongArray(1, true);
            sums = bigArrays.newDoubleArray(1, true);
            // min/max start at +/-infinity so any real value replaces them
            mins = bigArrays.newDoubleArray(1, false);
            mins.fill(0, mins.size(), Double.POSITIVE_INFINITY);
            maxes = bigArrays.newDoubleArray(1, false);
            maxes.fill(0, maxes.size(), Double.NEGATIVE_INFINITY);
            sumOfSqrs = bigArrays.newDoubleArray(1, true);
        }
    }

    @Override
    public boolean needsScores() {
        return valuesSource != null && valuesSource.needsScores();
    }

    @Override
    public LeafBucketCollector getLeafCollector(LeafReaderContext ctx,
            final LeafBucketCollector sub) throws IOException {
        if (valuesSource == null) {
            return LeafBucketCollector.NO_OP_COLLECTOR;
        }
        final BigArrays bigArrays = context.bigArrays();
        final SortedNumericDoubleValues values = valuesSource.doubleValues(ctx);
        return new LeafBucketCollectorBase(sub, values) {

            @Override
            public void collect(int doc, long bucket) throws IOException {
                if (bucket >= counts.size()) {
                    // Grow all accumulators together; newly exposed min/max
                    // slots must be re-seeded with the identity values.
                    final long from = counts.size();
                    final long overSize = BigArrays.overSize(bucket + 1);
                    counts = bigArrays.resize(counts, overSize);
                    sums = bigArrays.resize(sums, overSize);
                    mins = bigArrays.resize(mins, overSize);
                    maxes = bigArrays.resize(maxes, overSize);
                    sumOfSqrs = bigArrays.resize(sumOfSqrs, overSize);
                    mins.fill(from, overSize, Double.POSITIVE_INFINITY);
                    maxes.fill(from, overSize, Double.NEGATIVE_INFINITY);
                }

                values.setDocument(doc);
                final int valuesCount = values.count();
                counts.increment(bucket, valuesCount);
                double sum = 0;
                double sumOfSqr = 0;
                double min = mins.get(bucket);
                double max = maxes.get(bucket);
                for (int i = 0; i < valuesCount; i++) {
                    double value = values.valueAt(i);
                    sum += value;
                    sumOfSqr += value * value;
                    min = Math.min(min, value);
                    max = Math.max(max, value);
                }
                sums.increment(bucket, sum);
                sumOfSqrs.increment(bucket, sumOfSqr);
                mins.set(bucket, min);
                maxes.set(bucket, max);
            }
        };
    }

    @Override
    public boolean hasMetric(String name) {
        try {
            InternalExtendedStats.Metrics.resolve(name);
            return true;
        } catch (IllegalArgumentException iae) {
            return false;
        }
    }

    @Override
    public double metric(String name, long owningBucketOrd) {
        switch(InternalExtendedStats.Metrics.resolve(name)) {
            case count: return valuesSource == null ? 0 : counts.get(owningBucketOrd);
            case sum: return valuesSource == null ? 0 : sums.get(owningBucketOrd);
            case min: return valuesSource == null ? Double.POSITIVE_INFINITY : mins.get(owningBucketOrd);
            case max: return valuesSource == null ? Double.NEGATIVE_INFINITY : maxes.get(owningBucketOrd);
            case avg: return valuesSource == null ? Double.NaN : sums.get(owningBucketOrd) / counts.get(owningBucketOrd);
            case sum_of_squares: return valuesSource == null ? 0 : sumOfSqrs.get(owningBucketOrd);
            case variance: return valuesSource == null ? Double.NaN : variance(owningBucketOrd);
            case std_deviation: return valuesSource == null ? Double.NaN : Math.sqrt(variance(owningBucketOrd));
            case std_upper:
                if (valuesSource == null) { return Double.NaN; }
                return (sums.get(owningBucketOrd) / counts.get(owningBucketOrd))
                        + (Math.sqrt(variance(owningBucketOrd)) * this.sigma);
            case std_lower:
                if (valuesSource == null) { return Double.NaN; }
                return (sums.get(owningBucketOrd) / counts.get(owningBucketOrd))
                        - (Math.sqrt(variance(owningBucketOrd)) * this.sigma);
            default:
                throw new IllegalArgumentException("Unknown value [" + name + "] in common stats aggregation");
        }
    }

    // Population variance: E[x^2] - E[x]^2. NOTE(review): yields NaN for an
    // empty bucket (0/0), which metric() relies on for unmapped sources only.
    private double variance(long owningBucketOrd) {
        double sum = sums.get(owningBucketOrd);
        long count = counts.get(owningBucketOrd);
        return (sumOfSqrs.get(owningBucketOrd) - ((sum * sum) / count)) / count;
    }

    @Override
    public InternalAggregation buildAggregation(long owningBucketOrdinal) {
        if (valuesSource == null) {
            // Propagate sigma even for unmapped sources so std bounds stay
            // consistent with the mapped branch below (was hard-coded 0d).
            return new InternalExtendedStats(name, 0, 0d, Double.POSITIVE_INFINITY,
                    Double.NEGATIVE_INFINITY, 0d, sigma, formatter, reducers(),
                    metaData());
        }
        assert owningBucketOrdinal < counts.size();
        return new InternalExtendedStats(name, counts.get(owningBucketOrdinal),
                sums.get(owningBucketOrdinal), mins.get(owningBucketOrdinal),
                maxes.get(owningBucketOrdinal), sumOfSqrs.get(owningBucketOrdinal),
                sigma, formatter, reducers(), metaData());
    }

    @Override
    public InternalAggregation buildEmptyAggregation() {
        // Empty buckets also carry the configured sigma (was hard-coded 0d).
        return new InternalExtendedStats(name, 0, 0d, Double.POSITIVE_INFINITY,
                Double.NEGATIVE_INFINITY, 0d, sigma, formatter, reducers(),
                metaData());
    }

    @Override
    public void doClose() {
        Releasables.close(counts, maxes, mins, sumOfSqrs, sums);
    }

    public static class Factory extends ValuesSourceAggregatorFactory.LeafOnly<ValuesSource.Numeric> {

        private final double sigma;

        public Factory(String name, ValuesSourceConfig<ValuesSource.Numeric> valuesSourceConfig,
                double sigma) {
            super(name, InternalExtendedStats.TYPE.name(), valuesSourceConfig);
            this.sigma = sigma;
        }

        @Override
        protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent,
                List<Reducer> reducers, Map<String, Object> metaData) throws IOException {
            return new ExtendedStatsAggregator(name, null, config.formatter(),
                    aggregationContext, parent, sigma, reducers, metaData);
        }

        @Override
        protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource,
                AggregationContext aggregationContext, Aggregator parent,
                boolean collectsFromSingleBucket, List<Reducer> reducers,
                Map<String, Object> metaData) throws IOException {
            return new ExtendedStatsAggregator(name, valuesSource, config.formatter(),
                    aggregationContext, parent, sigma, reducers, metaData);
        }
    }
}
package fi.nls.oskari.search.channel;

import fi.mml.portti.service.search.SearchCriteria;
import fi.nls.oskari.control.metadata.MetadataField;
import fi.nls.oskari.log.LogFactory;
import fi.nls.oskari.log.Logger;
import fi.nls.oskari.map.geometry.ProjectionHelper;
import fi.nls.oskari.search.util.GeonetworkSpatialOperation;
import fi.nls.oskari.util.IOHelper;
import fi.nls.oskari.domain.geo.Point;
import org.deegree.datatypes.QualifiedName;
import org.deegree.model.crs.CRSFactory;
import org.deegree.model.crs.CoordinateSystem;
import org.deegree.model.filterencoding.*;
import org.deegree.model.spatialschema.Geometry;
import org.deegree.model.spatialschema.GeometryException;
import org.deegree.model.spatialschema.WKTAdapter;
import org.deegree.ogcbase.PropertyPath;
import org.deegree.ogcbase.SortProperty;
import org.deegree.ogcwebservices.csw.discovery.*;
import org.geotools.feature.FeatureCollection;
import org.geotools.geojson.feature.FeatureJSON;
import org.geotools.geometry.jts.ReferencedEnvelope;

import java.io.ByteArrayInputStream;
import java.io.StringWriter;
import java.net.URI;
import java.util.*;

/**
 * Helper class for creating search queries for MetadataCatalogue.
 * <p>
 * Builds deegree CSW 2.0.2 GetRecords requests (ISO metadata profile) out of
 * an Oskari {@link SearchCriteria}: each configured {@link MetadataField} is
 * turned into an OGC filter operation and the combined filter is serialized
 * to the XML payload POSTed to the catalogue service.
 */
public class MetadataCatalogueQueryHelper {

    // XML namespace prefixes used in the CSW query
    private static final String GCO_NAMESPACE = "gco";
    private static final String GMD_NAMESPACE = "gmd";
    // CRS the catalogue expects coverage geometries in
    public final static String TARGET_SRS = "EPSG:4326";
    public final static String SPATIAL_OPERATOR = "INTERSECTS";
    // When true, lon/lat are swapped to axis order expected by the service
    public final static Boolean FORCE_TARGET_CRS_4326_XY = true;

    private static final Logger log = LogFactory.getLogger(MetadataCatalogueQueryHelper.class);
    private final static char WILDCARD_CHARACTER = '*';
    // Maps field filter-op names to deegree comparison operation ids
    private static final Map<String, Integer> opMap = new HashMap<String, Integer>();
    static {
        // only one needed at the moment
        opMap.put("COMP_EQUAL", OperationDefines.PROPERTYISEQUALTO);
    }

    /**
     * Builds the XML payload for the given search criteria.
     *
     * @return CSW GetRecords request as XML string, or null if there is
     *         nothing to query or serialization fails
     */
    public String getQueryPayload(SearchCriteria searchCriteria) {
        final GetRecords getRecs = getRecordsQuery(searchCriteria);
        if(getRecs == null) {
            // no point in making the query without GetRecords
            return null;
        }
        return getQueryPayload(getRecs);
    }

    /**
     * Creates a deegree GetRecords request object from the search criteria.
     * All field operations are AND:ed together at the top level.
     *
     * @return the request, or null when the criteria produce no operations
     *         or the request cannot be constructed
     */
    public GetRecords getRecordsQuery(SearchCriteria searchCriteria) {
        final List<Operation> operations = getOperations(searchCriteria);
        Operation operation;
        if (operations.isEmpty()) {
            return null;
        } else if (operations.size() == 1) {
            operation = operations.get(0);
        } else {
            operation = new LogicalOperation(OperationDefines.AND, operations);
        }
        final ComplexFilter filter = new ComplexFilter(operation);
        try {
            final Map<String, URI> nsmap = new HashMap<String, URI>();
            nsmap.put(GMD_NAMESPACE, new URI("http://www.isotc211.org/2005/gmd"));
            nsmap.put(GCO_NAMESPACE, new URI("http://www.isotc211.org/2005/gco"));
            nsmap.put("csw", new URI("http://www.opengis.net/cat/csw/2.0.2"));
            final List<QualifiedName> typeNames = new ArrayList<QualifiedName>();
            typeNames.add(new QualifiedName("gmd:MD_Metadata"));
            final List<PropertyPath> elementNamesAsPropertyPaths = new ArrayList<PropertyPath>();
            final SortProperty[] sortProperties = SortProperty.create(null, nsmap);
            // we need "full" query to get locale mapping like #SW -> swe -> sv
            // to optimize we could try to do an "init" query to get mappings and use
            // "summary" query to get the data. Note! Since locale mappings are at "result item" level
            // this might lead to complications. Just using "full" for now, it's more XML to transfer and
            // parse but it's safe.
            final Query query = new Query("full", new ArrayList<QualifiedName>(),
                    new HashMap<String, QualifiedName>(), elementNamesAsPropertyPaths,
                    filter, sortProperties, typeNames,
                    new HashMap<String, QualifiedName>());
            // start position 1, max 10000 records, hopCount 0
            final GetRecords getRecs = new GetRecords("0", "2.0.2", null, nsmap,
                    GetRecords.RESULT_TYPE.RESULTS, "application/xml", "csw:IsoRecord",
                    1, 10000, 0, null, query);
            return getRecs;
        } catch (Exception ex) {
            log.error(ex, "Error generating GetRecords document for CSW Query");
        }
        return null;
    }

    /**
     * Serializes a GetRecords request to the XML string that is POSTed to
     * the catalogue.
     *
     * @return XML payload, or null on serialization failure
     */
    public String getQueryPayload(final GetRecords getRecs) {
        if(getRecs == null) {
            return null;
        }
        final StringWriter xml = new StringWriter();
        try {
            final GetRecordsDocument getRecsDoc = org.deegree.ogcwebservices.csw.discovery.XMLFactory.exportWithVersion(getRecs);
            final Properties p = new Properties();
            p.put("indent", "yes");
            // write the post data to postable string
            getRecsDoc.write(xml, p);
            xml.flush();
            return xml.toString();
        } catch (Exception ex) {
            log.error(ex, "Error generating payload for CSW Query");
        } finally {
            IOHelper.close(xml);
        }
        return null;
    }

    // Convenience overload: top-level call, dependency-linked fields skipped.
    private Operation getOperationForField(SearchCriteria searchCriteria, MetadataField field) {
        return getOperationForField(searchCriteria, field, false);
    }

    /**
     * Builds the filter operation for one metadata field, AND:ing in any
     * operations from fields this field depends on.
     *
     * @param recursion true when called while resolving a dependency; fields
     *                  with a shownIf-link are only processed via recursion
     * @return the operation, or null if the field has no values to filter on
     */
    private Operation getOperationForField(SearchCriteria searchCriteria, MetadataField field, boolean recursion) {
        final String[] values = getValuesForField(searchCriteria, field);
        if(values == null || (!recursion && field.getShownIf() != null)) {
            // FIXME: not too proud of the shownIf handling
            // shownIf is meant to link fields for frontend but it also means we need special handling for it in here
            // another field should have this one linked as dependency so we skip the actual field handling by default
            return null;
        }
        final Map<String, String> deps = field.getDependencies();
        log.debug("Field dependencies:", deps);
        final List<Operation> multiOp = new ArrayList<Operation>();
        for(String value: values) {
            Operation op = getOperation(field, value);
            final String dep = deps.get(value);
            if(dep != null) {
                // value has a dependent field -> AND its operation with ours
                final MetadataField depField = MetadataCatalogueChannelSearchService.getField(dep);
                Operation depOp = getOperationForField(searchCriteria, depField, true);
                if(depOp != null) {
                    List<Operation> combination = new ArrayList<Operation>(2);
                    combination.add(op);
                    combination.add(depOp);
                    op = new LogicalOperation(OperationDefines.AND, combination);
                }
            }
            addOperation(multiOp, op);
        }
        if(multiOp.isEmpty()) {
            return null;
        }
        if(field.isMulti() && multiOp.size() > 1) {
            // combine to one OR-statement if we have a multivalue field with more than one selection
            Operation op = new LogicalOperation(OperationDefines.OR, multiOp);
            return op;
        }
        return multiOp.get(0);
    }

    /**
     * Extracts the value(s) for a field from the search criteria.
     *
     * @return values as an array (single-element for non-multi fields), or
     *         null when the criteria hold no value for the field
     */
    private String[] getValuesForField(SearchCriteria searchCriteria, MetadataField field) {
        if(searchCriteria == null || field == null) {
            return null;
        }
        final Object param = searchCriteria.getParam(field.getProperty());
        if(param == null) {
            return null;
        }
        log.debug("Got value for metadata field:", field.getProperty(), "=", param);
        if(field.isMulti()) {
            // multi-valued fields are stored as String[] in the criteria
            return (String[]) param;
        } else {
            return new String[]{(String) param};
        }
    }

    /**
     * Collects the top-level operation list: the free-text search plus one
     * operation per configured field. Must-match fields go directly to the
     * top-level (AND) list, the rest are OR:ed together.
     */
    private List<Operation> getOperations(SearchCriteria searchCriteria) {
        final List<Operation> list = new ArrayList<Operation>();
        // user input
        addOperation(list, getLikeOperation(searchCriteria.getSearchString(), "csw:anyText"));
        final List<Operation> theOrList = new ArrayList<Operation>();
        for(MetadataField field : MetadataCatalogueChannelSearchService.getFields()) {
            final Operation operation = getOperationForField(searchCriteria, field);
            if(operation == null) {
                continue;
            }
            // add must matches to toplevel list
            if(field.isMustMatch()) {
                addOperation(list, operation);
            }
            // others to OR-list
            else {
                addOperation(theOrList, operation);
            }
        }
        if(theOrList.size() == 1) {
            addOperation(list, theOrList.get(0));
        }
        else if (theOrList.size() > 1) {
            addOperation(list, new LogicalOperation(OperationDefines.OR, theOrList));
        }
        return list;
    }

    /**
     * Selects the operation type for a field based on its configured
     * filter-op: like (default), spatial intersect, or comparison.
     */
    private Operation getOperation(MetadataField field, String value) {
        if(field.getFilterOp() == null) {
            return getLikeOperation(value, field.getFilter());
        }
        else if(field.getFilterOp().equals(SPATIAL_OPERATOR)) {
            return getSpatialOperation(value, field.getFilter(), field.getFilterOp());
        }
        else {
            return getCompOperation(value, field.getFilter(), opMap.get(field.getFilterOp()));
        }
    }

    /**
     * Creates a PropertyIsLike operation ('*' wildcard, '?' single char,
     * '/' escape). Returns null for an empty criterion.
     */
    private Operation getLikeOperation(final String searchCriterion,
                                       final String searchElementName) {
        if (searchCriterion == null || searchCriterion.isEmpty()) {
            return null;
        }
        PropertyIsLikeOperation op = new PropertyIsLikeOperation(
                new PropertyName(new QualifiedName(searchElementName)),
                new Literal(searchCriterion), WILDCARD_CHARACTER, '?', '/');
        return op;
    }

    /**
     * Creates a comparison operation (e.g. PropertyIsEqualTo) for the given
     * deegree operation id. Returns null for an empty criterion.
     */
    private Operation getCompOperation(final String searchCriterion,
                                       final String searchElementName,
                                       final int operationId) {
        if (searchCriterion == null || searchCriterion.isEmpty()) {
            return null;
        }
        PropertyIsCOMPOperation op = new PropertyIsCOMPOperation(
                operationId,
                new PropertyName(new QualifiedName(searchElementName)),
                new Literal(searchCriterion), false);
        return op;
    }

    /**
     * Creates an INTERSECTS operation from a GeoJSON FeatureCollection
     * criterion (see parseWKTPolygon). Returns null for an empty criterion
     * or if geometry parsing fails.
     */
    private Operation getSpatialOperation(final String searchCriterion,
                                          final String searchElementName,
                                          final String operation ) {
        if (searchCriterion == null || searchCriterion.isEmpty()) {
            return null;
        }
        final CoordinateSystem crs = CRSFactory.createDummyCRS(TARGET_SRS);
        try {
            String polygon = parseWKTPolygon(searchCriterion);
            Geometry geom = WKTAdapter.wrap(polygon, crs);
            GeonetworkSpatialOperation op = new GeonetworkSpatialOperation(
                    OperationDefines.INTERSECTS,
                    new PropertyName(new QualifiedName(searchElementName)),
                    geom, polygon);
            return op;
        } catch (GeometryException e) {
            log.error(e, "Error creating spatial operation!");
        }
        return null;
    }

    // Null-safe add: skips null operations so callers don't have to check.
    private void addOperation(final List<Operation> list, final Operation op) {
        if(op != null) {
            list.add(op);
        }
    }

    /*
     "{"type":"FeatureCollection","features":[{"type":"Feature","properties":{},"geometry":{"type":"Polygon","coordinates":[[[382186.81433571,6677985.8855768],[382186.81433571,6682065.8855768],[391446.81433571,6682065.8855768],[391446.81433571,6677985.8855768],[382186.81433571,6677985.8855768]]]}}],"crs":{"type":"name","properties":{"name":"EPSG:3067"}}}"
     */
    /**
     * Parses a GeoJSON FeatureCollection (example above) into a WKT POLYGON
     * of its bounding box, transformed to TARGET_SRS. Lon/lat are swapped
     * when FORCE_TARGET_CRS_4326_XY is set. Returns null on parse failure.
     */
    private String parseWKTPolygon(final String searchCriterion) {
        try {
            FeatureCollection fc = null;
            StringBuilder sb = new StringBuilder("POLYGON((");
            FeatureJSON fjs = new FeatureJSON();
            fc = fjs.readFeatureCollection(new ByteArrayInputStream(
                    searchCriterion.getBytes("utf-8")));
            ReferencedEnvelope env = fc.getBounds();
            //Transform to target crs
            Point minb = ProjectionHelper.transformPoint(env.getMinX(), env.getMinY(), env.getCoordinateReferenceSystem(), TARGET_SRS);
            Point maxb = ProjectionHelper.transformPoint(env.getMaxX(), env.getMaxY(), env.getCoordinateReferenceSystem(), TARGET_SRS);
            if(FORCE_TARGET_CRS_4326_XY){
                minb.switchLonLat();
                maxb.switchLonLat();
            }
            // Ring: min/min -> min/max -> max/max -> max/min -> min/min (closed)
            sb.append(minb.getLonToString()+" ");
            sb.append(minb.getLatToString());
            sb.append(",");
            sb.append(minb.getLonToString()+" ");
            sb.append(maxb.getLatToString());
            sb.append(",");
            sb.append(maxb.getLonToString()+" ");
            sb.append(maxb.getLatToString());
            sb.append(",");
            sb.append(maxb.getLonToString()+" ");
            sb.append(minb.getLatToString());
            sb.append(",");
            sb.append(minb.getLonToString()+" ");
            sb.append(minb.getLatToString());
            sb.append("))");
            return sb.toString();
        } catch (Exception e){
            log.error(e, "Error parsing coverage geometry");
        }
        return null;
    }
}
/*
 * Copyright 2014 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.errorprone.dataflow;

import com.google.common.base.Preconditions;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.sun.source.tree.BlockTree;
import com.sun.source.tree.ClassTree;
import com.sun.source.tree.CompilationUnitTree;
import com.sun.source.tree.ExpressionTree;
import com.sun.source.tree.MethodTree;
import com.sun.source.tree.Tree;
import com.sun.source.util.TreePath;
import com.sun.tools.javac.processing.JavacProcessingEnvironment;
import com.sun.tools.javac.util.Context;

import org.checkerframework.dataflow.analysis.AbstractValue;
import org.checkerframework.dataflow.analysis.Analysis;
import org.checkerframework.dataflow.analysis.Store;
import org.checkerframework.dataflow.analysis.TransferFunction;
import org.checkerframework.dataflow.cfg.CFGBuilder;
import org.checkerframework.dataflow.cfg.ControlFlowGraph;
import org.checkerframework.dataflow.cfg.UnderlyingAST;

import java.util.Objects;

import javax.annotation.processing.ProcessingEnvironment;

/**
 * Provides a wrapper around {@link org.checkerframework.dataflow.analysis.Analysis}.
 *
 * <p>Static entry points are {@link #methodDataflow} (analyze a whole method body) and
 * {@link #expressionDataflow} (compute the abstract value of a single expression by
 * analyzing its enclosing method). Control-flow graphs and analysis results are cached;
 * see the comments on the cache fields for the assumptions that tuning relies on.
 *
 * @author konne@google.com (Konstantin Weitz)
 */
public final class DataFlow {

  /**
   * A pair of Analysis and ControlFlowGraph.
   *
   * <p>Returned by {@link #methodDataflow} so callers can inspect both the computed
   * dataflow facts and the CFG they were computed over.
   */
  public static interface Result<A extends AbstractValue<A>, S extends Store<S>,
      T extends TransferFunction<A, S>> {
    Analysis<A, S, T> getAnalysis();
    ControlFlowGraph getControlFlowGraph();
  }

  /*
   * We cache both the control flow graph and the analyses that are run on it.
   * We tuned performance to the following assumptions (which are currently true for error-prone):
   *
   * <ul>
   * <li> all dataflow analyses for a method are finished before another method is analyzed
   * <li> multiple dataflow analyses for the same method are executed in arbitrary order
   * </ul>
   *
   * TODO(user): Write a test that checks these assumptions
   */

  // Maps (transfer function, CFG, env) -> finished Analysis. Unbounded, but emptied
  // explicitly by cfgCache's loader whenever a new method's CFG is built (see below).
  private static LoadingCache<AnalysisParams, Analysis<?, ?, ?>> analysisCache =
      CacheBuilder.newBuilder().build(
          new CacheLoader<AnalysisParams, Analysis<?, ?, ?>>() {
            @Override
            public Analysis<?, ?, ?> load(AnalysisParams key) {
              final ProcessingEnvironment env = key.getEnvironment();
              final ControlFlowGraph cfg = key.getCFG();
              final TransferFunction<?, ?> transfer = key.getTransferFunction();

              // The raw-type construction is unavoidable here because the cache erases
              // the A/S/T parameters; methodDataflow() casts the result back.
              @SuppressWarnings({"unchecked", "rawtypes"})
              final Analysis<?, ?, ?> analysis = new Analysis(env, transfer);
              analysis.performAnalysis(cfg);
              return analysis;
            }
          });

  // Holds the CFG of at most ONE method (maximumSize(1)): per the assumptions above,
  // only one method is under analysis at a time, so keeping more would waste memory.
  private static LoadingCache<CFGParams, ControlFlowGraph> cfgCache =
      CacheBuilder.newBuilder().maximumSize(1).build(
          new CacheLoader<CFGParams, ControlFlowGraph>() {
            @Override
            public ControlFlowGraph load(CFGParams key) {
              final TreePath methodPath = key.getMethodPath();
              final MethodTree method = (MethodTree) methodPath.getLeaf();
              final BlockTree body = method.getBody();
              final TreePath bodyPath = new TreePath(methodPath, body);
              // The enclosing class is not needed to build a method CFG here.
              final ClassTree classTree = null;
              final UnderlyingAST ast = new UnderlyingAST.CFGMethod(method, classTree);
              final ProcessingEnvironment env = key.getEnvironment();

              // Starting a new method: analyses cached for the previous method's CFG
              // can never be requested again (see assumptions above), so drop them.
              analysisCache.invalidateAll();
              CompilationUnitTree root = bodyPath.getCompilationUnit();
              // TODO(user), replace with faster build(bodyPath, env, ast, false, false);
              return CFGBuilder.build(root, env, ast, false, false);
            }
          });

  // TODO(user), remove once we merge jdk8 specific's with core
  // Walks up from {@code path} until the leaf is an instance of {@code klass};
  // returns null if no such enclosing node exists.
  public static <T> TreePath findPathFromEnclosingNodeToTopLevel(TreePath path, Class<T> klass) {
    while (path != null && !(klass.isInstance(path.getLeaf()))) {
      path = path.getParentPath();
    }
    return path;
  }

  /**
   * Run the {@code transfer} dataflow analysis over the method which is the leaf of the
   * {@code methodPath}.
   *
   * <p>For caching, we make the following assumptions:
   * - if two paths to methods are {@code equal}, their control flow graph is the same.
   * - if two transfer functions are {@code equal}, and are run over the same control flow graph,
   *   the analysis result is the same.
   * - for all contexts, the analysis result is the same.
   *
   * @throws IllegalArgumentException if the leaf of {@code methodPath} is not a MethodTree
   * @throws NullPointerException if the method has no body (e.g. is abstract)
   */
  public static <A extends AbstractValue<A>, S extends Store<S>,
      T extends TransferFunction<A, S>> Result<A, S, T> methodDataflow(
          TreePath methodPath, Context context, T transfer) {
    final Tree leaf = methodPath.getLeaf();
    Preconditions.checkArgument(leaf instanceof MethodTree,
        "Leaf of methodPath must be of type MethodTree, but was %s", leaf.getClass().getName());

    final MethodTree method = (MethodTree) leaf;
    Preconditions.checkNotNull(method.getBody(),
        "Method to analyze must have a body. Method passed in: %s() in file %s",
        method.getName(), methodPath.getCompilationUnit().getSourceFile().getName());

    final ProcessingEnvironment env = JavacProcessingEnvironment.instance(context);
    final ControlFlowGraph cfg = cfgCache.getUnchecked(new CFGParams(methodPath, env));
    final AnalysisParams aparams = new AnalysisParams(transfer, cfg, env);
    // Safe: the analysis cached under aparams was built from this exact transfer function,
    // whose type parameters are A, S, T.
    @SuppressWarnings("unchecked")
    final Analysis<A, S, T> analysis = (Analysis<A, S, T>) analysisCache.getUnchecked(aparams);
    return new Result<A, S, T>() {
      @Override
      public Analysis<A, S, T> getAnalysis() {
        return analysis;
      }

      @Override
      public ControlFlowGraph getControlFlowGraph() {
        return cfg;
      }
    };
  }

  /**
   * Run the {@code transfer} dataflow analysis to compute the abstract value of the expression
   * which is the leaf of {@code exprPath}.
   *
   * <p>Returns {@code null} when no enclosing method can be found (e.g. field initializers)
   * or when the enclosing method has no body (abstract methods).
   */
  public static <A extends AbstractValue<A>, S extends Store<S>,
      T extends TransferFunction<A, S>> A expressionDataflow(
          TreePath exprPath, Context context, T transfer) {
    final Tree leaf = exprPath.getLeaf();
    Preconditions.checkArgument(leaf instanceof ExpressionTree,
        "Leaf of exprPath must be of type ExpressionTree, but was %s", leaf.getClass().getName());

    final ExpressionTree expr = (ExpressionTree) leaf;
    final TreePath enclosingMethodPath =
        findPathFromEnclosingNodeToTopLevel(exprPath, MethodTree.class);

    if (enclosingMethodPath == null) {
      // TODO(user) this can happen in field initialization.
      // Currently not supported because it only happens in ~2% of cases.
      return null;
    }

    final MethodTree method = (MethodTree) enclosingMethodPath.getLeaf();
    if (method.getBody() == null) {
      // expressions can occur in abstract methods, for example {@code Map.Entry} in:
      //
      //   abstract Set<Map.Entry<K, V>> entries();
      return null;
    }

    // Analyze the whole enclosing method, then read off this expression's value.
    return methodDataflow(enclosingMethodPath, context, transfer).getAnalysis().getValue(expr);
  }

  /**
   * Cache key for {@link #cfgCache}: identity is the method's TreePath only.
   * The ProcessingEnvironment is carried along for the loader but deliberately
   * excluded from equals/hashCode (per the "same result for all contexts" assumption).
   */
  private static final class CFGParams {
    final ProcessingEnvironment env;
    final TreePath methodPath;

    public ProcessingEnvironment getEnvironment() {
      return env;
    }

    public TreePath getMethodPath() {
      return methodPath;
    }

    public CFGParams(TreePath methodPath, ProcessingEnvironment env) {
      this.env = env;
      this.methodPath = methodPath;
    }

    @Override
    public int hashCode() {
      return Objects.hash(methodPath);
    }

    @Override
    public boolean equals(Object obj) {
      if (this == obj) {
        return true;
      }
      if (obj == null) {
        return false;
      }
      if (getClass() != obj.getClass()) {
        return false;
      }
      CFGParams other = (CFGParams) obj;
      return Objects.equals(methodPath, other.methodPath);
    }
  }

  /**
   * Cache key for {@link #analysisCache}: identity is (cfg, transfer).
   * As with CFGParams, the environment is excluded from equals/hashCode.
   */
  private static final class AnalysisParams {
    private final ProcessingEnvironment env;
    private final ControlFlowGraph cfg;
    private final TransferFunction<?, ?> transfer;

    public AnalysisParams(TransferFunction<?, ?> trans, ControlFlowGraph cfg,
        ProcessingEnvironment env) {
      this.env = env;
      this.cfg = cfg;
      this.transfer = trans;
    }

    public ProcessingEnvironment getEnvironment() {
      return env;
    }

    public ControlFlowGraph getCFG() {
      return cfg;
    }

    @SuppressWarnings("rawtypes")
    public TransferFunction getTransferFunction() {
      return transfer;
    }

    @Override
    public int hashCode() {
      return Objects.hash(cfg, transfer);
    }

    @Override
    public boolean equals(Object obj) {
      if (this == obj) {
        return true;
      }
      if (obj == null) {
        return false;
      }
      if (getClass() != obj.getClass()) {
        return false;
      }
      AnalysisParams other = (AnalysisParams) obj;
      return Objects.equals(cfg, other.cfg) && Objects.equals(transfer, other.transfer);
    }
  }
}
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package com.lifeweb.enitity;

import java.io.Serializable;
import java.util.Date;
import javax.persistence.Basic;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.xml.bind.annotation.XmlRootElement;

/**
 * JPA entity mapped to table {@code lifev1.masraf_fisleri} (expense vouchers).
 *
 * <p>Exposes one generated finder {@code @NamedQuery} per persistent field, plus
 * {@code findAll}. Equality and hash code are based solely on the generated
 * primary key {@code fisId}, so instances are only reliably comparable after
 * they have been persisted (see the warning on {@link #equals(Object)}).
 *
 * @author Life
 */
@Entity
@Table(name = "masraf_fisleri", catalog = "lifev1", schema = "")
@XmlRootElement
@NamedQueries({
    @NamedQuery(name = "MasrafFisleri.findAll", query = "SELECT m FROM MasrafFisleri m"),
    @NamedQuery(name = "MasrafFisleri.findByFisId", query = "SELECT m FROM MasrafFisleri m WHERE m.fisId = :fisId"),
    @NamedQuery(name = "MasrafFisleri.findByFisTarih", query = "SELECT m FROM MasrafFisleri m WHERE m.fisTarih = :fisTarih"),
    @NamedQuery(name = "MasrafFisleri.findByMasrafVade", query = "SELECT m FROM MasrafFisleri m WHERE m.masrafVade = :masrafVade"),
    @NamedQuery(name = "MasrafFisleri.findByFisOdemeTarih", query = "SELECT m FROM MasrafFisleri m WHERE m.fisOdemeTarih = :fisOdemeTarih"),
    @NamedQuery(name = "MasrafFisleri.findByFisToplam", query = "SELECT m FROM MasrafFisleri m WHERE m.fisToplam = :fisToplam"),
    @NamedQuery(name = "MasrafFisleri.findByFisFaturano", query = "SELECT m FROM MasrafFisleri m WHERE m.fisFaturano = :fisFaturano"),
    @NamedQuery(name = "MasrafFisleri.findByFisAciklama", query = "SELECT m FROM MasrafFisleri m WHERE m.fisAciklama = :fisAciklama"),
    @NamedQuery(name = "MasrafFisleri.findByFisOzelkod", query = "SELECT m FROM MasrafFisleri m WHERE m.fisOzelkod = :fisOzelkod"),
    @NamedQuery(name = "MasrafFisleri.findByFisDurum", query = "SELECT m FROM MasrafFisleri m WHERE m.fisDurum = :fisDurum"),
    @NamedQuery(name = "MasrafFisleri.findBySysEkleyen", query = "SELECT m FROM MasrafFisleri m WHERE m.sysEkleyen = :sysEkleyen"),
    @NamedQuery(name = "MasrafFisleri.findBySysEtarih", query = "SELECT m FROM MasrafFisleri m WHERE m.sysEtarih = :sysEtarih"),
    @NamedQuery(name = "MasrafFisleri.findBySysDuzelten", query = "SELECT m FROM MasrafFisleri m WHERE m.sysDuzelten = :sysDuzelten"),
    @NamedQuery(name = "MasrafFisleri.findBySysDtarih", query = "SELECT m FROM MasrafFisleri m WHERE m.sysDtarih = :sysDtarih")})
public class MasrafFisleri implements Serializable {

    private static final long serialVersionUID = 1L;

    // Auto-increment primary key (MySQL IDENTITY); null until the row is persisted.
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Basic(optional = false)
    @Column(name = "FIS_ID", nullable = false)
    private Integer fisId;

    // Date-only columns (time portion is not persisted).
    @Column(name = "FIS_TARIH")
    @Temporal(TemporalType.DATE)
    private Date fisTarih;
    @Column(name = "MASRAF_VADE")
    @Temporal(TemporalType.DATE)
    private Date masrafVade;
    @Column(name = "FIS_ODEME_TARIH")
    @Temporal(TemporalType.DATE)
    private Date fisOdemeTarih;

    // NOTE (generated hint): if the valid range of this decimal field is known,
    // consider adding @Max/@Min bean-validation annotations to enforce it.
    @Column(name = "FIS_TOPLAM", precision = 15, scale = 2)
    private Double fisToplam;

    @Column(name = "FIS_FATURANO", length = 20)
    private String fisFaturano;
    @Column(name = "FIS_ACIKLAMA", length = 80)
    private String fisAciklama;
    @Column(name = "FIS_OZELKOD", length = 20)
    private String fisOzelkod;
    @Column(name = "FIS_DURUM", length = 10)
    private String fisDurum;

    // Audit columns: creator/modifier user and their timestamps.
    // NOTE(review): the SYS_*TARIH timestamps are stored as Integer — presumably an
    // application-specific date encoding; confirm against the writers before changing.
    @Column(name = "SYS_EKLEYEN", length = 15)
    private String sysEkleyen;
    @Column(name = "SYS_ETARIH")
    private Integer sysEtarih;
    @Column(name = "SYS_DUZELTEN", length = 15)
    private String sysDuzelten;
    @Column(name = "SYS_DTARIH")
    private Integer sysDtarih;

    // Owning side of the many-to-one link to the expense category; joined on the
    // category NAME column (MASRAF_ADI), not a numeric id. Eagerly fetched.
    @JoinColumn(name = "MASRAF_REF", referencedColumnName = "MASRAF_ADI")
    @ManyToOne(fetch = FetchType.EAGER)
    private Masraflar masrafRef;

    /** No-arg constructor required by JPA. */
    public MasrafFisleri() {
    }

    /** Convenience constructor setting only the primary key. */
    public MasrafFisleri(Integer fisId) {
        this.fisId = fisId;
    }

    public Integer getFisId() {
        return fisId;
    }

    public void setFisId(Integer fisId) {
        this.fisId = fisId;
    }

    public Date getFisTarih() {
        return fisTarih;
    }

    public void setFisTarih(Date fisTarih) {
        this.fisTarih = fisTarih;
    }

    public Date getMasrafVade() {
        return masrafVade;
    }

    public void setMasrafVade(Date masrafVade) {
        this.masrafVade = masrafVade;
    }

    public Date getFisOdemeTarih() {
        return fisOdemeTarih;
    }

    public void setFisOdemeTarih(Date fisOdemeTarih) {
        this.fisOdemeTarih = fisOdemeTarih;
    }

    public Double getFisToplam() {
        return fisToplam;
    }

    public void setFisToplam(Double fisToplam) {
        this.fisToplam = fisToplam;
    }

    public String getFisFaturano() {
        return fisFaturano;
    }

    public void setFisFaturano(String fisFaturano) {
        this.fisFaturano = fisFaturano;
    }

    public String getFisAciklama() {
        return fisAciklama;
    }

    public void setFisAciklama(String fisAciklama) {
        this.fisAciklama = fisAciklama;
    }

    public String getFisOzelkod() {
        return fisOzelkod;
    }

    public void setFisOzelkod(String fisOzelkod) {
        this.fisOzelkod = fisOzelkod;
    }

    public String getFisDurum() {
        return fisDurum;
    }

    public void setFisDurum(String fisDurum) {
        this.fisDurum = fisDurum;
    }

    public String getSysEkleyen() {
        return sysEkleyen;
    }

    public void setSysEkleyen(String sysEkleyen) {
        this.sysEkleyen = sysEkleyen;
    }

    public Integer getSysEtarih() {
        return sysEtarih;
    }

    public void setSysEtarih(Integer sysEtarih) {
        this.sysEtarih = sysEtarih;
    }

    public String getSysDuzelten() {
        return sysDuzelten;
    }

    public void setSysDuzelten(String sysDuzelten) {
        this.sysDuzelten = sysDuzelten;
    }

    public Integer getSysDtarih() {
        return sysDtarih;
    }

    public void setSysDtarih(Integer sysDtarih) {
        this.sysDtarih = sysDtarih;
    }

    public Masraflar getMasrafRef() {
        return masrafRef;
    }

    public void setMasrafRef(Masraflar masrafRef) {
        this.masrafRef = masrafRef;
    }

    /** Hash code derived only from the primary key (0 while unpersisted). */
    @Override
    public int hashCode() {
        int hash = 0;
        hash += (fisId != null ? fisId.hashCode() : 0);
        return hash;
    }

    /** Identity comparison on {@code fisId} only. */
    @Override
    public boolean equals(Object object) {
        // TODO: Warning - this method won't work in the case the id fields are not set
        if (!(object instanceof MasrafFisleri)) {
            return false;
        }
        MasrafFisleri other = (MasrafFisleri) object;
        if ((this.fisId == null && other.fisId != null) || (this.fisId != null && !this.fisId.equals(other.fisId))) {
            return false;
        }
        return true;
    }

    @Override
    public String toString() {
        return "com.lifeweb.enitity.MasrafFisleri[ fisId=" + fisId + " ]";
    }
}
/*
 * Copyright 2005 Joe Walker
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.directwebremoting.jsonrpc;

import java.io.IOException;
import java.io.Reader;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.directwebremoting.extend.AccessControl;
import org.directwebremoting.extend.Call;
import org.directwebremoting.extend.ConverterManager;
import org.directwebremoting.extend.Handler;
import org.directwebremoting.extend.ModuleManager;
import org.directwebremoting.extend.Remoter;
import org.directwebremoting.extend.Replies;
import org.directwebremoting.extend.Reply;
import org.directwebremoting.json.JsonUtil;
import org.directwebremoting.json.parse.JsonParseException;
import org.directwebremoting.json.parse.JsonParser;
import org.directwebremoting.json.parse.JsonParserFactory;
import org.directwebremoting.jsonrpc.io.JsonRpcCallException;
import org.directwebremoting.jsonrpc.io.JsonRpcCalls;
import org.directwebremoting.jsonrpc.io.JsonRpcCallsJsonDecoder;
import org.directwebremoting.jsonrpc.io.JsonRpcError;
import org.directwebremoting.jsonrpc.io.JsonRpcResponse;
import org.directwebremoting.util.MimeConstants;

import static javax.servlet.http.HttpServletResponse.*;
import static org.directwebremoting.jsonrpc.JsonRpcConstants.*;

/**
 * A Handler for JSON-RPC calls.
 *
 * <p>Parses a single JSON-RPC call from the request body, checks access control,
 * executes it via the configured {@link Remoter}, and writes either a
 * {@link JsonRpcResponse} or a {@link JsonRpcError} back as JSON. All
 * collaborators are injected through the setter methods below.
 *
 * @author Joe Walker [joe at getahead dot ltd dot uk]
 */
public class JsonRpcCallHandler implements Handler
{
    /* (non-Javadoc)
     * @see org.directwebremoting.extend.Handler#handle(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse)
     */
    public void handle(HttpServletRequest request, HttpServletResponse response) throws IOException
    {
        // Hard gate: JSON-RPC must be explicitly enabled in web.xml.
        if (!jsonRpcEnabled)
        {
            log.warn("JSON-RPC request denied. To enable JSON mode add an init-param of jsonRpcEnabled=true to web.xml");
            throw new SecurityException("JSON interface disabled");
        }

        // Declared outside the try so the catch blocks can include call context
        // in error replies; stays null if parsing itself fails.
        JsonRpcCalls calls = null;
        try
        {
            // TODO: We do not support JSON-RPC-GET. Is this legal?
            // I'm of the opinion that allow any kind of RPC over GET without an
            // explicit @idempotent marker is probably against the HTTP spec
            // Plus there are additional security issues with GET requests
            // So I'm not rushing to fix this error
            Reader in = request.getReader();
            JsonParser parser = JsonParserFactory.get();
            calls = (JsonRpcCalls) parser.parse(in, new JsonRpcCallsJsonDecoder(converterManager, moduleManager));

            // This handler only supports exactly one call per request (no batching).
            if (calls.getCallCount() != 1)
            {
                JsonRpcError error = new JsonRpcError(calls, "Non unique call", ERROR_CODE_INTERNAL, null);
                writeResponse(error, response, SC_INTERNAL_SERVER_ERROR);
                return;
            }

            if (!calls.isParseErrorClean())
            {
                JsonRpcError error = new JsonRpcError(calls, calls.getParseErrors(), ERROR_CODE_PARSE, null);
                writeResponse(error, response, SC_INTERNAL_SERVER_ERROR);
                return;
            }

            // Check the methods are accessible
            for (Call c : calls)
            {
                accessControl.assertGeneralExecutionIsPossible(c.getScriptName(), c.getMethodDeclaration());
            }

            Replies replies = remoter.execute(calls);
            // Safe: call count was verified to be exactly 1 above.
            Reply reply = replies.getReply(0);

            // The existence of a throwable indicates that something went wrong
            if (reply.getThrowable() != null)
            {
                Throwable ex = reply.getThrowable();
                JsonRpcError error = new JsonRpcError(calls, ex.getMessage(), ERROR_CODE_SERVER, null);
                writeResponse(error, response, SC_INTERNAL_SERVER_ERROR);
                return;
            }

            JsonRpcResponse answer = new JsonRpcResponse(calls.getVersion(), calls.getId(), reply.getReply());
            writeResponse(answer, response, HttpServletResponse.SC_OK);
        }
        catch (JsonRpcCallException ex)
        {
            // The exception carries its own JSON-RPC error payload and HTTP status.
            writeResponse(new JsonRpcError(ex), response, ex.getHttpStatusCode());
            return;
        }
        catch (JsonParseException ex)
        {
            // Request body was not valid JSON; calls is still null here, so the
            // error is built from literal protocol values instead.
            JsonRpcError error = new JsonRpcError("2.0", null, ex.getMessage(), ERROR_CODE_PARSE, null);
            writeResponse(error, response, SC_INTERNAL_SERVER_ERROR);
            return;
        }
        catch (SecurityException ex)
        {
            // Access control rejected the call; reported as method-not-found / 404.
            JsonRpcError error = new JsonRpcError(calls, ex.getMessage(), ERROR_CODE_NO_METHOD, null);
            writeResponse(error, response, SC_NOT_FOUND);
        }
        catch (IOException ex)
        {
            // Re-thrown deliberately so it is not swallowed by the Exception
            // catch-all below; the container handles transport-level failures.
            throw ex;
        }
        catch (Exception ex)
        {
            log.warn("Unexpected error:", ex);
            JsonRpcError error = new JsonRpcError(calls, ex.getMessage(), ERROR_CODE_SERVER, null);
            writeResponse(error, response, SC_INTERNAL_SERVER_ERROR);
        }
    }

    /**
     * Create an output data packet that a JSON-RPC client can understand
     *
     * @param data the {@link JsonRpcResponse} or {@link JsonRpcError} to serialize
     * @param response the servlet response to write to
     * @param httpStatus the HTTP status code to set before writing the body
     */
    protected void writeResponse(Object data, HttpServletResponse response, int httpStatus) throws IOException
    {
        // Get the output stream and setup the mime type
        response.setContentType(MimeConstants.MIME_JSON);
        response.setStatus(httpStatus);
        JsonUtil.toJson(data, response.getWriter());
    }

    /**
     * Accessor for the ConverterManager that we configure
     * @param converterManager
     */
    public void setConverterManager(ConverterManager converterManager)
    {
        this.converterManager = converterManager;
    }

    /**
     * How we convert parameters
     */
    protected ConverterManager converterManager = null;

    /**
     * Are we allowing remote hosts to contact us using JSON?
     */
    public void setJsonRpcEnabled(boolean jsonRpcEnabled)
    {
        this.jsonRpcEnabled = jsonRpcEnabled;
    }

    /**
     * Are we allowing remote hosts to contact us using JSON?
     * Defaults to false: the endpoint is opt-in (see handle()).
     */
    protected boolean jsonRpcEnabled = false;

    /**
     * Setter for the remoter
     * @param remoter The new remoter
     */
    public void setRemoter(Remoter remoter)
    {
        this.remoter = remoter;
    }

    /**
     * The bean to execute remote requests and generate interfaces
     */
    protected Remoter remoter = null;

    /**
     * Accessor for the security manager
     * @param accessControl The accessControl to set.
     */
    public void setAccessControl(AccessControl accessControl)
    {
        this.accessControl = accessControl;
    }

    /**
     * The security manager
     */
    protected AccessControl accessControl = null;

    /**
     * Accessor for the ModuleManager that we configure
     * @param moduleManager
     */
    public void setModuleManager(ModuleManager moduleManager)
    {
        this.moduleManager = moduleManager;
    }

    /**
     * How we create new beans
     */
    protected ModuleManager moduleManager = null;

    /**
     * The log stream
     */
    private static final Log log = LogFactory.getLog(JsonRpcCallHandler.class);
}
/*------------------------------------------------------------------------
 * (The MIT License)
 *
 * Copyright (c) 2008-2011 Rhomobile, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 * http://rhomobile.com
 *------------------------------------------------------------------------*/

package com.rhomobile.rhodes.util;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.StringWriter;
import java.io.PrintWriter;

import android.content.res.AssetManager;

/**
 * Miscellaneous file/stream helpers shared by the Rhodes Android runtime:
 * recursive copy/delete of directory trees, content comparison between a
 * filesystem path and an APK asset, and small logging/formatting utilities.
 */
public class Utils {

    /** Abstraction over "something that can list a directory and open a file". */
    public static class FileSource {
        /** Lists the entries of {@code dir}; may return null if it is not a directory. */
        String[] list(String dir) throws IOException {
            return new File(dir).list();
        }

        /** Opens {@code file} for reading from the plain filesystem. */
        InputStream open(String file) throws FileNotFoundException, IOException {
            return new FileInputStream(file);
        }
    }

    /** A {@link FileSource} backed by the APK's bundled assets. */
    public static class AssetsSource extends FileSource {
        private AssetManager am;

        public AssetsSource(AssetManager a) {
            am = a;
        }

        String[] list(String dir) throws IOException {
            return am.list(dir);
        }

        InputStream open(String file) throws IOException {
            return am.open(file);
        }
    }

    /**
     * Reads the stream to exhaustion and returns its content as a String.
     *
     * <p>FIX: the previous implementation appended {@code new String(buf)} for
     * every read, ignoring the actual read count — so the result contained up
     * to 511 stale bytes per chunk and could also split multi-byte characters
     * at buffer boundaries. We now accumulate raw bytes and decode once.
     * The platform default charset is kept to preserve existing behavior
     * (NOTE(review): presumably UTF-8 on Android — confirm before pinning).
     *
     * <p>The caller retains ownership of {@code in} and must close it.
     */
    public static String getContent(InputStream in) throws IOException {
        ByteArrayOutputStream content = new ByteArrayOutputStream();
        byte[] buf = new byte[512];
        int n;
        while ((n = in.read(buf)) > 0) {
            content.write(buf, 0, n);
        }
        return content.toString();
    }

    /**
     * Best-effort comparison of the contents of two files from two sources.
     * Returns false on any error (missing file, I/O failure) by design.
     */
    public static boolean isContentsEquals(FileSource source1, String file1,
            FileSource source2, String file2) throws IOException {
        InputStream stream1 = null;
        InputStream stream2 = null;
        try {
            stream1 = source1.open(file1);
            stream2 = source2.open(file2);
            String newName = Utils.getContent(stream1);
            String oldName = Utils.getContent(stream2);
            return newName.equals(oldName);
        } catch (Exception e) {
            // Deliberate best-effort: any failure counts as "not equal".
            return false;
        } finally {
            if (stream1 != null) {
                stream1.close();
            }
            if (stream2 != null) {
                stream2.close();
            }
        }
    }

    /**
     * Recursively deletes {@code target} (file or directory tree).
     * No-op if it does not exist.
     *
     * @throws IOException if any entry cannot be deleted
     */
    public static void deleteRecursively(File target) throws IOException {
        if (!target.exists()) {
            return;
        }
        if (target.isDirectory()) {
            String[] children = target.list();
            // list() returns null on I/O error / permission denial; guard against NPE.
            if (children != null) {
                for (int i = 0; i != children.length; ++i) {
                    deleteRecursively(new File(target, children[i]));
                }
            }
        }
        //platformLog("delete", target.getPath());
        if (!target.delete()) {
            throw new IOException("Can not delete " + target.getAbsolutePath());
        }
    }

    /**
     * Deletes the children of {@code target}: subdirectories are removed
     * recursively, while top-level FILES whose names start with
     * {@code strIgnore} are kept. {@code target} itself is preserved.
     */
    public static void deleteChildrenIgnoreFirstLevel(File target, String strIgnore) throws IOException {
        if (!target.exists()) {
            return;
        }
        if (target.isDirectory()) {
            String[] children = target.list();
            // Guard against list() returning null (see deleteRecursively).
            if (children != null) {
                for (int i = 0; i != children.length; ++i) {
                    File f = new File(target, children[i]);
                    if (f.isDirectory()) {
                        deleteRecursively(f);
                    } else if (!f.getName().startsWith(strIgnore)) {
                        if (!f.delete()) {
                            throw new IOException("Can not delete " + f.getAbsolutePath());
                        }
                    }
                }
            }
        }
    }

    /**
     * Recursively copies {@code source} (read through {@code fs}) to
     * {@code target} on the plain filesystem.
     *
     * @param deleteTarget when true, an existing target tree is removed first
     */
    public static void copyRecursively(FileSource fs, File source, File target, boolean deleteTarget)
            throws IOException {
        if (deleteTarget && target.exists()) {
            deleteRecursively(target);
        }

        if (source.isDirectory()) {
            String[] children = fs.list(source.getAbsolutePath());
            if (children != null && children.length > 0) {
                if (!target.exists()) {
                    target.mkdirs();
                }
                for (String child : children) {
                    copyRecursively(fs, new File(source, child), new File(target, child), false);
                }
            }
        } else if (source.isFile()) {
            InputStream in = null;
            OutputStream out = null;
            try {
                in = fs.open(source.getAbsolutePath());
                target.getParentFile().mkdirs();
                out = new FileOutputStream(target);
                byte[] buf = new byte[1024];
                int len;
                while ((len = in.read(buf)) > 0) {
                    out.write(buf, 0, len);
                }
            } catch (FileNotFoundException e) {
                if (in != null) {
                    // Target side failed to open; propagate.
                    throw e;
                }
                // Source missing: preserve historical behavior of creating an
                // empty target file instead of failing.
                target.createNewFile();
            } finally {
                if (in != null) {
                    in.close();
                }
                if (out != null) {
                    out.close();
                }
            }
        }
    }

    /** Copies the file at path {@code src} to path {@code dst}, flushing on success. */
    public static void copy(String src, String dst) throws IOException {
        InputStream is = null;
        OutputStream os = null;
        try {
            is = new FileInputStream(src);
            os = new FileOutputStream(dst);
            byte[] buf = new byte[1024];
            for (;;) {
                int n = is.read(buf);
                if (n <= 0) {
                    break;
                }
                os.write(buf, 0, n);
            }
            os.flush();
        } finally {
            if (is != null) {
                is.close();
            }
            if (os != null) {
                os.close();
            }
        }
    }

    /** Returns the parent-directory portion of {@code filePath}, or null. */
    public static String getDirName(String filePath) {
        if (filePath == null) {
            return null;
        }
        return new File(filePath).getParent();
    }

    /** Returns the last path segment of {@code filePath}, or null. */
    public static String getBaseName(String filePath) {
        if (filePath == null) {
            return null;
        }
        return new File(filePath).getName();
    }

    /** Verbose-logs {@code message} prefixed with the current epoch millis. */
    public static void platformLog(String tag, String message) {
        StringBuilder s = new StringBuilder();
        s.append("ms[");
        s.append(System.currentTimeMillis());
        s.append("] ");
        s.append(message);
        android.util.Log.v(tag, s.toString());
    }

    /** Returns the exception's toString plus its full stack trace as one string. */
    public static String getExceptionDetails(Exception e) {
        StringWriter sw = new StringWriter();
        PrintWriter pw = new PrintWriter(sw);
        e.printStackTrace(pw);
        return (e.toString() + "\n" + sw.toString());
    }
}
/* * Copyright 2009 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import static com.google.common.truth.Truth.assertThat; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Multiset; import com.google.common.collect.TreeMultiset; import com.google.javascript.jscomp.DefinitionsRemover.Definition; import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback; import com.google.javascript.rhino.IR; import com.google.javascript.rhino.Node; import java.util.Collection; import java.util.Set; import java.util.TreeSet; /** * Tests for {@link DefinitionUseSiteFinder} * */ public final class DefinitionUseSiteFinderTest extends CompilerTestCase { Set<String> found = new TreeSet<>(); @Override protected int getNumRepetitions() { // run pass once. return 1; } @Override protected void tearDown() throws Exception { super.tearDown(); found.clear(); } public void testDefineNumber() { checkDefinitionsInJs( "var a = 1", ImmutableSet.of("DEF NAME a -> NUMBER")); checkDefinitionsInJs( "a = 1", ImmutableSet.of("DEF NAME a -> NUMBER")); checkDefinitionsInJs( "a.b = 1", ImmutableSet.of("DEF GETPROP a.b -> NUMBER")); // getelem expressions are invisible to the definition gatherer. 
checkDefinitionsInJs( "a[\"b\"] = 1", ImmutableSet.<String>of()); checkDefinitionsInJs( "f().b = 1", ImmutableSet.of("DEF GETPROP null -> NUMBER")); checkDefinitionsInJs( "({a : 1}); o.a", ImmutableSet.of("DEF STRING_KEY null -> NUMBER", "USE GETPROP o.a -> [NUMBER]")); // TODO(johnlenz): Fix this. checkDefinitionsInJs( "({'a' : 1}); o['a']", ImmutableSet.of("DEF STRING_KEY null -> NUMBER")); checkDefinitionsInJs( "({1 : 1}); o[1]", ImmutableSet.of("DEF STRING_KEY null -> NUMBER")); checkDefinitionsInJs( "var a = {b : 1}; a.b", ImmutableSet.of("DEF NAME a -> <null>", "DEF STRING_KEY null -> NUMBER", "USE NAME a -> [<null>]", "USE GETPROP a.b -> [NUMBER]")); } public void testDefineGet() { // TODO(johnlenz): Add support for quoted properties checkDefinitionsInJs( "({get a() {}}); o.a", ImmutableSet.of("DEF GETTER_DEF null -> FUNCTION", "USE GETPROP o.a -> [FUNCTION]")); } public void testDefineSet() { // TODO(johnlenz): Add support for quoted properties checkDefinitionsInJs( "({set a(b) {}}); o.a", ImmutableSet.of("DEF NAME b -> <null>", "DEF SETTER_DEF null -> FUNCTION", "USE GETPROP o.a -> [FUNCTION]")); } public void testDefineFunction() { checkDefinitionsInJs( "var a = function(){}", ImmutableSet.of("DEF NAME a -> FUNCTION")); checkDefinitionsInJs( "var a = function f(){}", ImmutableSet.of("DEF NAME f -> FUNCTION", "DEF NAME a -> FUNCTION")); checkDefinitionsInJs( "function a(){}", ImmutableSet.of("DEF NAME a -> FUNCTION")); checkDefinitionsInJs( "a = function(){}", ImmutableSet.of("DEF NAME a -> FUNCTION")); checkDefinitionsInJs( "a.b = function(){}", ImmutableSet.of("DEF GETPROP a.b -> FUNCTION")); // getelem expressions are invisible to the definition gatherer. 
// Tail of a test method begun above this window: computed-property and
// call-result assignments. (TODO confirm against the method header, which is
// outside this chunk.)
    checkDefinitionsInJs(
        "a[\"b\"] = function(){}", ImmutableSet.<String>of());
    checkDefinitionsInJs(
        "f().b = function(){}",
        ImmutableSet.of("DEF GETPROP null -> FUNCTION"));
  }

  // Function parameters in source code count as definitions and uses.
  public void testFunctionArgumentsBasic() {
    checkDefinitionsInJs(
        "function f(a){return a}",
        ImmutableSet.of("DEF NAME a -> <null>",
            "USE NAME a -> [<null>]",
            "DEF NAME f -> FUNCTION"));
    checkDefinitionsInJs(
        "var a = 1; function f(a){return a}",
        ImmutableSet.of("DEF NAME a -> NUMBER",
            "DEF NAME a -> <null>",
            "USE NAME a -> [<null>, NUMBER]",
            "DEF NAME f -> FUNCTION"));
  }

  // Shared fixture: a two-argument function definition and a matching call.
  private static final String DEF = "var f = function(arg1, arg2){}";
  private static final String USE = "f(1, 2)";

  public void testFunctionArgumentsInExterns() {
    // function arguments are definitions when they appear in source.
    checkDefinitionsInJs(
        DEF + ";" + USE,
        ImmutableSet.of("DEF NAME f -> FUNCTION",
            "DEF NAME arg1 -> <null>",
            "DEF NAME arg2 -> <null>",
            "USE NAME f -> [FUNCTION]"));
    // function arguments are NOT definitions when they appear in externs.
    checkDefinitions(
        DEF, USE,
        ImmutableSet.of("DEF NAME f -> EXTERN FUNCTION",
            "USE NAME f -> [EXTERN FUNCTION]"));
  }

  // A name assigned more than once yields one DEF per distinct r-value type,
  // and uses report every reaching definition (with "x N" multiplicities).
  public void testMultipleDefinition() {
    checkDefinitionsInJs(
        "a = 1; a = 2; a",
        ImmutableSet.of("DEF NAME a -> NUMBER",
            "USE NAME a -> [NUMBER x 2]"));
    checkDefinitionsInJs(
        "a = 1; a = 'a'; a",
        ImmutableSet.of("DEF NAME a -> NUMBER",
            "DEF NAME a -> STRING",
            "USE NAME a -> [NUMBER, STRING]"));
    checkDefinitionsInJs(
        "a = 1; b = 2; a = b; a",
        ImmutableSet.of("DEF NAME a -> <null>",
            "DEF NAME a -> NUMBER",
            "DEF NAME b -> NUMBER",
            "USE NAME a -> [<null>, NUMBER]",
            "USE NAME b -> [NUMBER]"));
    checkDefinitionsInJs(
        "a = 1; b = 2; c = b; c = a; c",
        ImmutableSet.of("DEF NAME a -> NUMBER",
            "DEF NAME b -> NUMBER",
            "DEF NAME c -> <null>",
            "USE NAME a -> [NUMBER]",
            "USE NAME b -> [NUMBER]",
            "USE NAME c -> [<null> x 2]"));
    checkDefinitionsInJs(
        "function f(){} f()",
        ImmutableSet.of("DEF NAME f -> FUNCTION",
            "USE NAME f -> [FUNCTION]"));
    // .call / .apply on a function count as uses of the function itself.
    checkDefinitionsInJs(
        "function f(){} f.call(null)",
        ImmutableSet.of("DEF NAME f -> FUNCTION",
            "USE NAME f -> [FUNCTION]",
            "USE GETPROP f.call -> [FUNCTION]"));
    checkDefinitionsInJs(
        "function f(){} f.apply(null, [])",
        ImmutableSet.of("DEF NAME f -> FUNCTION",
            "USE NAME f -> [FUNCTION]",
            "USE GETPROP f.apply -> [FUNCTION]"));
    checkDefinitionsInJs(
        "function f(){} f.foobar()",
        ImmutableSet.of("DEF NAME f -> FUNCTION",
            "USE NAME f -> [FUNCTION]"));
    checkDefinitionsInJs(
        "function f(){} f(); f.call(null)",
        ImmutableSet.of("DEF NAME f -> FUNCTION",
            "USE NAME f -> [FUNCTION]",
            "USE GETPROP f.call -> [FUNCTION]"));
  }

  // An extern stub (bare "obj.prototype.stub;") followed by a real definition
  // is dropped in favor of the real definition.
  public void testDropStubDefinitions() {
    String externs = LINE_JOINER.join(
        "obj.prototype.stub;",
        "/**",
        " * @param {string} s id.",
        " * @return {string}",
        " * @nosideeffects",
        " */",
        "obj.prototype.stub = function(s) {};");
    checkDefinitionsInExterns(
        externs,
        ImmutableSet.of("DEF GETPROP obj.prototype.stub -> EXTERN FUNCTION"));
  }

  // A bare "var name;" is NOT a stub and is kept alongside the function def.
  public void testNoDropStub1() {
    String externs = LINE_JOINER.join(
        "var name;",
        "/**",
        " * @param {string} s id.",
        " * @return {string}",
        " * @nosideeffects",
        " */",
        "var name = function(s) {};");
    checkDefinitionsInExterns(
        externs,
        ImmutableSet.of("DEF NAME name -> EXTERN <null>",
            "DEF NAME name -> EXTERN FUNCTION"));
  }

  public void testNoDropStub2() {
    String externs = LINE_JOINER.join(
        "f().name;", // These are not recognized as stub definitions
        "/**",
        " * @param {string} s id.",
        " * @return {string}",
        " * @nosideeffects",
        " */",
        "f().name;");
    checkDefinitionsInExterns(externs, ImmutableSet.<String>of());
  }

  // Extern definitions are tagged EXTERN and interact with same-named
  // source-side definitions.
  public void testDefinitionInExterns() {
    String externs = "var a = 1";
    checkDefinitionsInExterns(
        externs,
        ImmutableSet.of("DEF NAME a -> EXTERN NUMBER"));
    checkDefinitions(
        externs,
        "var b = 1",
        ImmutableSet.of("DEF NAME a -> EXTERN NUMBER",
            "DEF NAME b -> NUMBER"));
    checkDefinitions(
        externs,
        "a = \"foo\"; a",
        ImmutableSet.of("DEF NAME a -> EXTERN NUMBER",
            "DEF NAME a -> STRING",
            "USE NAME a -> [EXTERN NUMBER, STRING]"));
    checkDefinitionsInExterns(
        "var a = {}; a.b = 10",
        ImmutableSet.of("DEF GETPROP a.b -> EXTERN NUMBER",
            "DEF NAME a -> EXTERN <null>"));
    checkDefinitionsInExterns(
        "var a = {}; a.b",
        ImmutableSet.of("DEF GETPROP a.b -> EXTERN <null>",
            "DEF NAME a -> EXTERN <null>"));
    checkDefinitions(
        "var a = {}",
        "a.b = 1",
        ImmutableSet.of("DEF GETPROP a.b -> NUMBER",
            "DEF NAME a -> EXTERN <null>",
            "USE NAME a -> [EXTERN <null>]"));
    checkDefinitions(
        "var a = {}",
        "a.b",
        ImmutableSet.of("DEF NAME a -> EXTERN <null>",
            "USE NAME a -> [EXTERN <null>]"));
    checkDefinitionsInExterns(
        externs,
        ImmutableSet.of("DEF NAME a -> EXTERN NUMBER"));
  }

  // Type-annotated (but uninitialized) extern properties, including record
  // and typedef literals, produce <null>-valued definitions.
  public void testRecordDefinitionInExterns() {
    checkDefinitionsInExterns(
        "var ns = {};" +
        "/** @type {number} */ ns.NUM;",
        ImmutableSet.of("DEF NAME ns -> EXTERN <null>",
            "DEF GETPROP ns.NUM -> EXTERN <null>"));
    checkDefinitionsInExterns(
        "var ns = {};" +
        "/** @type {function(T,T):number} @template T */ ns.COMPARATOR;",
        ImmutableSet.of("DEF NAME ns -> EXTERN <null>",
            "DEF GETPROP ns.COMPARATOR -> EXTERN <null>"));
    checkDefinitionsInExterns(
        "/** @type {{ prop1 : number, prop2 : string}} */" + "var ns;",
        ImmutableSet.of("DEF NAME ns -> EXTERN <null>",
            "DEF STRING_KEY null -> EXTERN <null>",
            "DEF STRING_KEY null -> EXTERN <null>"));
    checkDefinitionsInExterns(
        "/** @typedef {{ prop1 : number, prop2 : string}} */" + "var ns;",
        ImmutableSet.of("DEF NAME ns -> EXTERN <null>",
            "DEF STRING_KEY null -> EXTERN <null>",
            "DEF STRING_KEY null -> EXTERN <null>"));
  }

  public void testUnitializedDefinitionInExterns() {
    checkDefinitionsInExterns(
        "/** @type {number} */ var HYBRID;",
        ImmutableSet.of("DEF NAME HYBRID -> EXTERN <null>"));
  }

  // Enum object-literal keys in externs define STRING_KEY entries, and
  // source-side reads report every reaching extern definition.
  public void testObjectLitInExterns() {
    checkDefinitions(
        "var goog = {};" +
        "/** @type {number} */ goog.HYBRID;" +
        "/** @enum */ goog.Enum = {HYBRID: 0, ROADMAP: 1};",
        "goog.HYBRID; goog.Enum.ROADMAP;",
        ImmutableSet.of(
            "DEF GETPROP goog.Enum -> EXTERN <null>",
            "DEF GETPROP goog.HYBRID -> EXTERN <null>",
            "DEF NAME goog -> EXTERN <null>",
            "DEF STRING_KEY null -> EXTERN NUMBER",
            "USE GETPROP goog.Enum -> [EXTERN <null>]",
            "USE GETPROP goog.Enum.ROADMAP -> [EXTERN NUMBER]",
            "USE GETPROP goog.HYBRID -> [EXTERN <null>, EXTERN NUMBER]",
            "USE NAME goog -> [EXTERN <null>]"));
  }

  // A call expression in an extern property chain ("get().get") does not
  // create an additional definition.
  public void testCallInExterns() {
    String externs = LINE_JOINER.join(
        "var goog = {};",
        "/** @constructor */",
        "goog.Response = function() {};",
        "goog.Response.prototype.get;",
        "goog.Response.prototype.get().get;");
    checkDefinitionsInExterns(
        externs,
        ImmutableSet.of(
            "DEF NAME goog -> EXTERN <null>",
            "DEF GETPROP goog.Response -> EXTERN FUNCTION",
            "DEF GETPROP goog.Response.prototype.get -> EXTERN <null>"));
  }

  // A named function expression defines both the assigned property and the
  // inner function name.
  public void testDoubleNamedFunction() {
    String source = LINE_JOINER.join(
        "A.f = function f_d() { f_d(); };",
        "A.f();");
    checkDefinitionsInJs(
        source,
        ImmutableSet.of(
            "DEF GETPROP A.f -> FUNCTION",
            "DEF NAME f_d -> FUNCTION",
            "USE GETPROP A.f -> [FUNCTION]",
            "USE NAME f_d -> [FUNCTION]"));
  }

  // Incremental rebuild: verifies rebuildScopeRoots() honors both the
  // changed-scope and deleted-scope lists from the compiler.
  public void testGetChangesAndDeletions_changeDoesntOverrideDelete() {
    Compiler compiler = new Compiler();
    DefinitionUseSiteFinder definitionsFinder = new DefinitionUseSiteFinder(compiler);
    definitionsFinder.process(IR.root(), IR.root());
    Node script = compiler.parseSyntheticCode(
        LINE_JOINER.join(
            "function foo() {",
            " foo.propOfFoo = 'asdf';",
            "}",
            "function bar() {",
            " bar.propOfBar = 'asdf';",
            "}"));
    Node root = IR.root(script);
    Node externs = IR.root(IR.script());
    IR.root(externs, root); // Create global root.
    Node functionFoo = script.getFirstChild();
    Node functionBar = script.getSecondChild();
    // Verify original baseline.
    buildFound(definitionsFinder, found);
    assertThat(found).isEmpty();
    // Verify the fully processed state.
    // These calls drain the pending change/delete lists so the next
    // rebuildScopeRoots() starts from a clean slate.
    compiler.getChangedScopeNodesForPass("definitionsFinder");
    compiler.getDeletedScopeNodesForPass("definitionsFinder");
    definitionsFinder = new DefinitionUseSiteFinder(compiler);
    definitionsFinder.process(externs, root);
    buildFound(definitionsFinder, found);
    assertThat(found)
        .containsExactly(
            "DEF NAME foo -> FUNCTION",
            "DEF GETPROP foo.propOfFoo -> STRING",
            "USE NAME foo -> [FUNCTION]",
            "DEF NAME bar -> FUNCTION",
            "DEF GETPROP bar.propOfBar -> STRING",
            "USE NAME bar -> [FUNCTION]");
    // Change nothing and re-verify state.
    definitionsFinder.rebuildScopeRoots(
        compiler.getChangedScopeNodesForPass("definitionsFinder"),
        compiler.getDeletedScopeNodesForPass("definitionsFinder"));
    buildFound(definitionsFinder, found);
    assertThat(found)
        .containsExactly(
            "DEF NAME foo -> FUNCTION",
            "DEF GETPROP foo.propOfFoo -> STRING",
            "USE NAME foo -> [FUNCTION]",
            "DEF NAME bar -> FUNCTION",
            "DEF GETPROP bar.propOfBar -> STRING",
            "USE NAME bar -> [FUNCTION]");
    // Verify state after deleting function "foo".
    compiler.reportFunctionDeleted(functionFoo);
    definitionsFinder.rebuildScopeRoots(
        compiler.getChangedScopeNodesForPass("definitionsFinder"),
        compiler.getDeletedScopeNodesForPass("definitionsFinder"));
    buildFound(definitionsFinder, found);
    assertThat(found)
        .containsExactly(
            "DEF NAME bar -> FUNCTION",
            "DEF GETPROP bar.propOfBar -> STRING",
            "USE NAME bar -> [FUNCTION]");
    // Verify state after changing the contents of function "bar"
    functionBar.getLastChild().removeFirstChild();
    compiler.reportChangeToChangeScope(functionBar);
    definitionsFinder.rebuildScopeRoots(
        compiler.getChangedScopeNodesForPass("definitionsFinder"),
        compiler.getDeletedScopeNodesForPass("definitionsFinder"));
    buildFound(definitionsFinder, found);
    assertThat(found).containsExactly("DEF NAME bar -> FUNCTION");
  }

  // Convenience wrappers: run the checker with only externs / only source.
  void checkDefinitionsInExterns(String externs, Set<String> expected) {
    checkDefinitions(externs, "", expected);
  }

  void checkDefinitionsInJs(String js, Set<String> expected) {
    checkDefinitions("", js, expected);
  }

  // Compiles externs+source, then compares the gathered DEF/USE strings
  // (accumulated into "found" by DefinitionEnumerator, presumably a field
  // declared above this window — verify) against the expectation.
  void checkDefinitions(String externs, String source, Set<String> expected) {
    testSame(externs, source);
    assertEquals(expected, found);
    found.clear();
  }

  @Override
  protected CompilerPass getProcessor(Compiler compiler) {
    return new DefinitionEnumerator(compiler);
  }

  /**
   * Clears {@code found} and repopulates it with one "DEF ..." string per
   * definition site and one "USE ..." string per use site reported by the
   * finder. R-values render as their token name (or {@code <null>}), with an
   * "EXTERN " prefix for extern definitions; use strings carry a sorted
   * multiset of the reaching definitions.
   */
  private static void buildFound(DefinitionUseSiteFinder definitionFinder, Set<String> found) {
    found.clear();
    for (DefinitionSite defSite : definitionFinder.getDefinitionSites()) {
      Node node = defSite.node;
      Definition definition = defSite.definition;
      StringBuilder sb = new StringBuilder();
      sb.append("DEF ");
      sb.append(node.getToken());
      sb.append(" ");
      sb.append(node.getQualifiedName());
      sb.append(" -> ");
      if (definition.isExtern()) {
        sb.append("EXTERN ");
      }
      Node rValue = definition.getRValue();
      if (rValue != null) {
        sb.append(rValue.getToken());
      } else {
        sb.append("<null>");
      }
      found.add(sb.toString());
    }
    for (UseSite useSite : definitionFinder.getUseSitesByName().values()) {
      Node node = useSite.node;
      Collection<Definition> defs = definitionFinder.getDefinitionsReferencedAt(node);
      if (defs != null) {
        StringBuilder sb = new StringBuilder();
        sb.append("USE ");
        sb.append(node.getToken());
        sb.append(" ");
        sb.append(node.getQualifiedName());
        sb.append(" -> ");
        // TreeMultiset gives a deterministic, sorted rendering with
        // "[X x N]"-style multiplicities.
        Multiset<String> defstrs = TreeMultiset.create();
        for (Definition def : defs) {
          String defstr;
          Node rValue = def.getRValue();
          if (rValue != null) {
            defstr = rValue.getToken().toString();
          } else {
            defstr = "<null>";
          }
          if (def.isExtern()) {
            defstr = "EXTERN " + defstr;
          }
          defstrs.add(defstr);
        }
        sb.append(defstrs);
        found.add(sb.toString());
      }
    }
  }

  /**
   * Run DefinitionUseSiteFinder, then gather a set of what's found.
   */
  private class DefinitionEnumerator
      extends AbstractPostOrderCallback implements CompilerPass {
    private final DefinitionUseSiteFinder passUnderTest;
    private final Compiler compiler;

    DefinitionEnumerator(Compiler compiler) {
      this.passUnderTest = new DefinitionUseSiteFinder(compiler);
      this.compiler = compiler;
    }

    @Override
    public void process(Node externs, Node root) {
      passUnderTest.process(externs, root);
      NodeTraversal.traverseEs6(compiler, externs, this);
      NodeTraversal.traverseEs6(compiler, root, this);
      buildFound(passUnderTest, found);
    }

    // Post-order visit is a no-op: only process() gathers results.
    @Override
    public void visit(NodeTraversal traversal, Node node, Node parent) {}
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.util;

import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.CoreDescriptor;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.handler.UpdateRequestHandler;
import org.apache.solr.logging.ListenerConfig;
import org.apache.solr.logging.LogWatcher;
import org.apache.solr.logging.jul.JulWatcher;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrRequestHandler;
import org.apache.solr.request.SolrRequestInfo;
import org.apache.solr.response.QueryResponseWriter;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.servlet.DirectSolrConnection;
import org.apache.solr.common.util.NamedList.NamedListEntry;

import java.io.File;
import java.io.IOException;
import java.io.StringWriter;
import java.util.HashMap;
import java.util.Map;

/**
 * This class provides a simple harness that may be useful when
 * writing testcases.
 *
 * <p>
 * This class lives in the tests-framework source tree (and not in the test source
 * tree), so that it will be included with even the most minimal solr
 * distribution, in order to encourage plugin writers to create unit
 * tests for their plugins.
 *
 *
 */
public class TestHarness extends BaseTestHarness {
  String coreName;
  protected volatile CoreContainer container;
  public UpdateRequestHandler updater;

  /**
   * Creates a SolrConfig object for the specified coreName assuming it
   * follows the basic conventions of being a relative path in the solrHome
   * dir. (ie: <code>${solrHome}/${coreName}/conf/${confFile}</code>
   */
  public static SolrConfig createConfig(String solrHome, String coreName, String confFile) {
    // set some system properties for use by tests
    System.setProperty("solr.test.sys.prop1", "propone");
    System.setProperty("solr.test.sys.prop2", "proptwo");
    try {
      return new SolrConfig(solrHome + File.separator + coreName, confFile, null);
    } catch (Exception xany) {
      throw new RuntimeException(xany);
    }
  }

  /**
   * Creates a SolrConfig object for the
   * {@link CoreContainer#DEFAULT_DEFAULT_CORE_NAME} core using {@link #createConfig(String,String,String)}
   */
  public static SolrConfig createConfig(String solrHome, String confFile) {
    return createConfig(solrHome, CoreContainer.DEFAULT_DEFAULT_CORE_NAME, confFile);
  }

  /**
   * @param dataDirectory path for index data, will not be cleaned up
   * @param solrConfig solrconfig instance
   * @param schemaFile schema filename
   */
  public TestHarness(String dataDirectory, SolrConfig solrConfig, String schemaFile) {
    this(dataDirectory, solrConfig, new IndexSchema(solrConfig, schemaFile, null));
  }

  /**
   * @param dataDirectory path for index data, will not be cleaned up
   * @param solrConfig solrconfig instance
   * @param indexSchema schema instance
   */
  public TestHarness(String dataDirectory, SolrConfig solrConfig, IndexSchema indexSchema) {
    this(null, new Initializer(null, dataDirectory, solrConfig, indexSchema));
  }

  /**
   * Builds the CoreContainer via the given initializer and wires up a
   * fallback update handler.
   *
   * @param coreName name of the core to address; defaults to
   *        {@link CoreContainer#DEFAULT_DEFAULT_CORE_NAME} when null
   * @param init initializer that creates the container
   */
  public TestHarness(String coreName, CoreContainer.Initializer init) {
    try {
      container = init.initialize();
      if (coreName == null) {
        coreName = CoreContainer.DEFAULT_DEFAULT_CORE_NAME;
      }
      this.coreName = coreName;
      updater = new UpdateRequestHandler();
      updater.init(null);
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  // Creates a container based on infos needed to create one core
  static class Initializer extends CoreContainer.Initializer {
    String coreName;
    String dataDirectory;
    SolrConfig solrConfig;
    IndexSchema indexSchema;

    public Initializer(String coreName,
                       String dataDirectory,
                       SolrConfig solrConfig,
                       IndexSchema indexSchema) {
      if (coreName == null) {
        coreName = CoreContainer.DEFAULT_DEFAULT_CORE_NAME;
      }
      this.coreName = coreName;
      this.dataDirectory = dataDirectory;
      this.solrConfig = solrConfig;
      this.indexSchema = indexSchema;
    }

    public String getCoreName() {
      return coreName;
    }

    @Override
    public CoreContainer initialize() {
      // Anonymous subclass uses the instance-initializer block to set
      // protected container state before registration.
      CoreContainer container =
          new CoreContainer(new SolrResourceLoader(SolrResourceLoader.locateSolrHome())) {
        {
          hostPort = System.getProperty("hostPort");
          hostContext = "solr";
          defaultCoreName = CoreContainer.DEFAULT_DEFAULT_CORE_NAME;
          initShardHandler(null);
          initZooKeeper(System.getProperty("zkHost"), 10000);
        }
      };

      LogWatcher<?> logging = new JulWatcher("test");
      logging.registerListener(new ListenerConfig(), container);
      container.setLogging(logging);

      CoreDescriptor dcore = new CoreDescriptor(container, coreName,
          solrConfig.getResourceLoader().getInstanceDir());
      dcore.setConfigName(solrConfig.getResourceName());
      dcore.setSchemaName(indexSchema.getResourceName());
      SolrCore core = new SolrCore(coreName, dataDirectory, solrConfig, indexSchema, dcore);
      container.register(coreName, core, false);

      // TODO: we should be exercising the *same* core container initialization code, not equivalent code!
      if (container.getZkController() == null
          && core.getUpdateHandler().getUpdateLog() != null) {
        // always kick off recovery if we are in standalone mode.
        core.getUpdateHandler().getUpdateLog().recoverFromLog();
      }
      return container;
    }
  }

  public CoreContainer getCoreContainer() {
    return container;
  }

  /** Gets a core that does not have its refcount incremented (i.e. there is no need to
   * close when done).  This is not MT safe in conjunction with reloads!
   */
  public SolrCore getCore() {
    // get the core & decrease its refcount:
    // the container holds the core for the harness lifetime
    SolrCore core = container.getCore(coreName);
    if (core != null) {
      core.close();
    }
    return core;
  }

  /** Gets the core with its reference count incremented.
   * You must call core.close() when done!
   */
  public SolrCore getCoreInc() {
    return container.getCore(coreName);
  }

  public void reload() throws Exception {
    container.reload(coreName);
  }

  /**
   * Processes an "update" (add, commit or optimize) and
   * returns the response as a String.
   *
   * @param xml The XML of the update
   * @return The XML response to the update
   */
  public String update(String xml) {
    SolrCore core = getCoreInc();
    DirectSolrConnection connection = new DirectSolrConnection(core);
    // prefer the handler mapped to /update, but use our generic backup handler
    // if that lookup fails
    SolrRequestHandler handler = core.getRequestHandler("/update");
    if (handler == null) {
      handler = updater;
    }
    try {
      return connection.request(handler, null, xml);
    } catch (SolrException e) {
      throw e; // already the right type; no cast/rewrap needed
    } catch (Exception e) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
    } finally {
      core.close();
    }
  }

  /**
   * Validates a "query" response against an array of XPath test strings
   *
   * @param req the Query to process
   * @return null if all good, otherwise the first test that fails.
   * @exception Exception any exception in the response.
   * @exception IOException if there is a problem writing the XML
   * @see LocalSolrQueryRequest
   */
  public String validateQuery(SolrQueryRequest req, String... tests) throws Exception {
    String res = query(req);
    return validateXPath(res, tests);
  }

  /**
   * Processes a "query" using a user constructed SolrQueryRequest
   *
   * @param req the Query to process, will be closed.
   * @return The XML response to the query
   * @exception Exception any exception in the response.
   * @exception IOException if there is a problem writing the XML
   * @see LocalSolrQueryRequest
   */
  public String query(SolrQueryRequest req) throws Exception {
    return query(req.getParams().get(CommonParams.QT), req);
  }

  /**
   * Processes a "query" using a user constructed SolrQueryRequest, and closes the request at the end.
   *
   * @param handler the name of the request handler to process the request
   * @param req the Query to process, will be closed.
   * @return The XML response to the query
   * @exception Exception any exception in the response.
   * @exception IOException if there is a problem writing the XML
   * @see LocalSolrQueryRequest
   */
  public String query(String handler, SolrQueryRequest req) throws Exception {
    SolrCore core = getCoreInc();
    try {
      SolrQueryResponse rsp = new SolrQueryResponse();
      SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req, rsp));
      core.execute(core.getRequestHandler(handler), req, rsp);
      if (rsp.getException() != null) {
        throw rsp.getException();
      }
      StringWriter sw = new StringWriter(32000);
      QueryResponseWriter responseWriter = core.getQueryResponseWriter(req);
      responseWriter.write(sw, req, rsp);
      // NOTE: the request is closed exactly once, in the finally block below.
      // (Previously it was also closed here, double-closing a ref-counted
      // request on the success path.)
      return sw.toString();
    } finally {
      req.close();
      SolrRequestInfo.clearRequestInfo();
      core.close();
    }
  }

  /** It is the users responsibility to close the request object when done with it.
   * This method does not set/clear SolrRequestInfo
   */
  public SolrQueryResponse queryAndResponse(String handler, SolrQueryRequest req) throws Exception {
    SolrCore core = getCoreInc();
    try {
      SolrQueryResponse rsp = new SolrQueryResponse();
      core.execute(core.getRequestHandler(handler), req, rsp);
      if (rsp.getException() != null) {
        throw rsp.getException();
      }
      return rsp;
    } finally {
      core.close();
    }
  }

  /**
   * Shuts down and frees any resources
   */
  public void close() {
    if (container != null) {
      for (SolrCore c : container.getCores()) {
        if (c.getOpenCount() > 1) {
          throw new RuntimeException("SolrCore.getOpenCount()==" + c.getOpenCount());
        }
      }
    }
    if (container != null) {
      container.shutdown();
      container = null;
    }
  }

  public LocalRequestFactory getRequestFactory(String qtype, int start, int limit) {
    LocalRequestFactory f = new LocalRequestFactory();
    f.qtype = qtype;
    f.start = start;
    f.limit = limit;
    return f;
  }

  /**
   * 0 and Even numbered args are keys, Odd numbered args are values.
   */
  public LocalRequestFactory getRequestFactory(String qtype, int start, int limit, String... args) {
    LocalRequestFactory f = getRequestFactory(qtype, start, limit);
    for (int i = 0; i < args.length; i += 2) {
      f.args.put(args[i], args[i + 1]);
    }
    return f;
  }

  public LocalRequestFactory getRequestFactory(String qtype, int start, int limit,
                                               Map<String, String> args) {
    LocalRequestFactory f = getRequestFactory(qtype, start, limit);
    f.args.putAll(args);
    return f;
  }

  /**
   * A Factory that generates LocalSolrQueryRequest objects using a
   * specified set of default options.
   */
  public class LocalRequestFactory {
    public String qtype = null;
    public int start = 0;
    public int limit = 1000;
    public Map<String, String> args = new HashMap<String, String>();

    public LocalRequestFactory() {
    }

    /**
     * Creates a LocalSolrQueryRequest based on variable args; for
     * historical reasons, this method has some peculiar behavior:
     * <ul>
     * <li>If there is a single arg, then it is treated as the "q"
     * param, and the LocalSolrQueryRequest consists of that query
     * string along with "qt", "start", and "rows" params (based
     * on the qtype, start, and limit properties of this factory)
     * along with any other default "args" set on this factory.
     * </li>
     * <li>If there are multiple args, then there must be an even number
     * of them, and each pair of args is used as a key=value param in
     * the LocalSolrQueryRequest.  <b>NOTE: In this usage, the "qtype",
     * "start", "limit", and "args" properties of this factory are
     * ignored.</b>
     * </li>
     * </ul>
     *
     * TODO: this isn't really safe in the presence of core reloads!
     * Perhaps the best we could do is increment the core reference count
     * and decrement it in the request close() method?
     */
    public LocalSolrQueryRequest makeRequest(String... q) {
      if (q.length == 1) {
        return new LocalSolrQueryRequest(TestHarness.this.getCore(),
            q[0], qtype, start, limit, args);
      }
      if (q.length % 2 != 0) {
        throw new RuntimeException(
            "The length of the string array (query arguments) needs to be even");
      }
      Map.Entry<String, String>[] entries = new NamedListEntry[q.length / 2];
      for (int i = 0; i < q.length; i += 2) {
        entries[i / 2] = new NamedListEntry<String>(q[i], q[i + 1]);
      }
      return new LocalSolrQueryRequest(TestHarness.this.getCore(), new NamedList(entries));
    }
  }
}
package com.vaadin.tests.components.treetable;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;

import com.vaadin.tests.components.table.Tables;
import com.vaadin.v7.data.Container;
import com.vaadin.v7.data.Container.Hierarchical;
import com.vaadin.v7.data.util.HierarchicalContainer;
import com.vaadin.v7.ui.Table;
import com.vaadin.v7.ui.Table.CellStyleGenerator;
import com.vaadin.v7.ui.Tree.CollapseEvent;
import com.vaadin.v7.ui.Tree.CollapseListener;
import com.vaadin.v7.ui.Tree.ExpandEvent;
import com.vaadin.v7.ui.Tree.ExpandListener;
import com.vaadin.v7.ui.TreeTable;

/**
 * Interactive test UI for {@link TreeTable}: extends the generic table test
 * harness with hierarchy-specific actions (expand/collapse, children-allowed,
 * root-item count) and logs expand/collapse events.
 */
public class TreeTableTest extends Tables<TreeTable>
        implements CollapseListener, ExpandListener {

    @Override
    protected Class<TreeTable> getTestClass() {
        return TreeTable.class;
    }

    // Number of root items used when (re)building the container.
    private int rootItemIds = 3;

    // Styles root rows "green" and second-level rows "red"; only row-level
    // calls (propertyId == null) are styled.
    private CellStyleGenerator rootGreenSecondLevelRed = new com.vaadin.v7.ui.Table.CellStyleGenerator() {

        @Override
        public String getStyle(Table source, Object itemId,
                Object propertyId) {
            if (propertyId != null) {
                // Only row styles
                return null;
            }

            Hierarchical c = getComponent().getContainerDataSource();
            if (c.isRoot(itemId)) {
                return "green";
            }

            Object parent = c.getParent(itemId);
            if (!c.isRoot(parent)) {
                return "red";
            }

            return null;
        }

        @Override
        public String toString() {
            return "Root green, second level red";
        }
    };

    // Styles every other item "bold", based on the item's position in the
    // container's id iteration order (linear scan per call).
    private CellStyleGenerator evenItemsBold = new CellStyleGenerator() {

        @Override
        public String getStyle(Table source, Object itemId,
                Object propertyId) {
            if (propertyId != null) {
                // Only row styles
                return null;
            }

            Hierarchical c = getComponent().getContainerDataSource();
            int idx = 0;
            for (Iterator<?> i = c.getItemIds().iterator(); i.hasNext();) {
                Object id = i.next();
                if (id == itemId) {
                    if (idx % 2 == 1) {
                        return "bold";
                    } else {
                        return null;
                    }
                }

                idx++;
            }

            return null;
        }

        @Override
        public String toString() {
            return "Even items bold";
        }
    };

    @Override
    protected void createActions() {
        super.createActions();
        // Causes container changes so doing this first..
        createRootItemSelectAction(CATEGORY_DATA_SOURCE);

        createExpandCollapseActions(CATEGORY_FEATURES);
        createChildrenAllowedAction(CATEGORY_DATA_SOURCE);

        createListeners(CATEGORY_LISTENERS);
        // createItemStyleGenerator(CATEGORY_FEATURES);

        createBooleanAction("Animate collapse/expand", CATEGORY_STATE, false,
                animationCommand);
        // TODO: DropHandler
        // TODO: DragMode
        // TODO: ActionHandler
    }

    @Override
    protected Container createContainer(int properties, int items) {
        return createHierarchicalContainer(properties, items, rootItemIds);
    }

    private void createListeners(String category) {
        createBooleanAction("Item click listener", category, false,
                itemClickListenerCommand);
        createBooleanAction("Expand listener", category, false,
                expandListenerCommand);
        createBooleanAction("Collapse listener", category, false,
                collapseListenerCommand);
    }

    /**
     * Builds a three-level hierarchical container: "roots" roots, each with
     * "firstLevel" children; the first two children of each root are leaves
     * and the rest each get "secondLevel" children. Sizes are derived from
     * the requested total item count (see the math in the comments below).
     */
    private Container.Hierarchical createHierarchicalContainer(int properties,
            int items, int roots) {
        Container.Hierarchical c = new HierarchicalContainer();
        populateContainer(c, properties, items);

        if (items <= roots) {
            return c;
        }

        // "roots" roots, each with
        // "firstLevel" children, two with no children (one with childAllowed,
        // one without)
        // ("firstLevel"-2)*"secondLevel" children ("secondLevel"/2 with
        // childAllowed, "secondLevel"/2 without)

        // N*M+N*(M-2)*C = items
        // items=N(M+MC-2C)

        // Using secondLevel=firstLevel/2 =>
        // items = roots*(firstLevel+firstLevel*firstLevel/2-2*firstLevel/2)
        // =roots*(firstLevel+firstLevel^2/2-firstLevel)
        // = roots*firstLevel^2/2
        // => firstLevel = sqrt(items/roots*2)

        int firstLevel = (int) Math.ceil(Math.sqrt(items / roots * 2.0));
        int secondLevel = firstLevel / 2;

        while (roots * (1 + 2 + (firstLevel - 2) * secondLevel) < items) {
            // Increase something so we get enough items
            secondLevel++;
        }

        List<Object> itemIds = new ArrayList<>(c.getItemIds());

        int nextItemId = roots;
        for (int rootIndex = 0; rootIndex < roots; rootIndex++) {
            // roots use items 0..roots-1
            Object rootItemId = itemIds.get(rootIndex);

            // force roots to be roots even though they automatically should be
            c.setParent(rootItemId, null);

            for (int firstLevelIndex = 0; firstLevelIndex < firstLevel; firstLevelIndex++) {
                if (nextItemId >= items) {
                    break;
                }

                Object firstLevelItemId = itemIds.get(nextItemId++);
                c.setParent(firstLevelItemId, rootItemId);

                if (firstLevelIndex < 2) {
                    continue;
                }

                // firstLevelChildren 2.. have child nodes
                for (int secondLevelIndex = 0; secondLevelIndex < secondLevel; secondLevelIndex++) {
                    if (nextItemId >= items) {
                        break;
                    }

                    Object secondLevelItemId = itemIds.get(nextItemId++);
                    c.setParent(secondLevelItemId, firstLevelItemId);
                }
            }
        }

        return c;
    }

    // Select action offering 1..10, 20, 50, 100 roots (default "3").
    private void createRootItemSelectAction(String category) {
        LinkedHashMap<String, Integer> options = new LinkedHashMap<>();
        for (int i = 1; i <= 10; i++) {
            options.put(String.valueOf(i), i);
        }
        options.put("20", 20);
        options.put("50", 50);
        options.put("100", 100);

        createSelectAction("Number of root items", category, options, "3",
                rootItemIdsCommand);
    }

    // One expand and one collapse click-action per current item id.
    private void createExpandCollapseActions(String category) {
        LinkedHashMap<String, Object> options = new LinkedHashMap<>();
        for (Object id : getComponent().getItemIds()) {
            options.put(id.toString(), id);
        }

        createMultiClickAction("Expand", category, options, expandItemCommand,
                null);
        // createMultiClickAction("Expand recursively", category, options,
        // expandItemRecursivelyCommand, null);
        createMultiClickAction("Collapse", category, options,
                collapseItemCommand, null);
    }

    // Per-item toggle for Hierarchical.setChildrenAllowed (default true).
    private void createChildrenAllowedAction(String category) {
        LinkedHashMap<String, Object> options = new LinkedHashMap<>();
        for (Object id : getComponent().getItemIds()) {
            options.put(id.toString(), id);
        }

        createMultiToggleAction("Children allowed", category, options,
                setChildrenAllowedCommand, true);
    }

    /*
     * COMMANDS
     */
    private Command<TreeTable, Integer> rootItemIdsCommand = new Command<TreeTable, Integer>() {

        @Override
        public void execute(TreeTable c, Integer value, Object data) {
            rootItemIds = value;
            updateContainer();
        }
    };

    private Command<TreeTable, Object> expandItemCommand = new Command<TreeTable, Object>() {

        @Override
        public void execute(TreeTable c, Object itemId, Object data) {
            c.setCollapsed(itemId, false);
        }
    };

    private Command<TreeTable, Object> collapseItemCommand = new Command<TreeTable, Object>() {

        @Override
        public void execute(TreeTable c, Object itemId, Object data) {
            c.setCollapsed(itemId, true);
        }
    };

    // NOTE: here "data" carries the target itemId (unlike the commands above).
    private Command<TreeTable, Boolean> setChildrenAllowedCommand = new Command<TreeTable, Boolean>() {

        @Override
        public void execute(TreeTable c, Boolean areChildrenAllowed,
                Object itemId) {
            c.setChildrenAllowed(itemId, areChildrenAllowed);
        }
    };

    private Command<TreeTable, Boolean> expandListenerCommand = new Command<TreeTable, Boolean>() {

        @Override
        public void execute(TreeTable c, Boolean value, Object data) {
            if (value) {
                c.addListener((ExpandListener) TreeTableTest.this);
            } else {
                c.removeListener((ExpandListener) TreeTableTest.this);
            }
        }
    };

    private Command<TreeTable, Boolean> collapseListenerCommand = new Command<TreeTable, Boolean>() {

        @Override
        public void execute(TreeTable c, Boolean value, Object data) {
            if (value) {
                c.addListener((CollapseListener) TreeTableTest.this);
            } else {
                c.removeListener((CollapseListener) TreeTableTest.this);
            }
        }
    };

    protected Command<TreeTable, Boolean> animationCommand = new Command<TreeTable, Boolean>() {

        @Override
        public void execute(TreeTable c, Boolean enabled, Object data) {
            c.setAnimationsEnabled(enabled);
        }
    };

    @Override
    public void nodeCollapse(CollapseEvent event) {
        log(event.getClass().getSimpleName() + ": " + event.getItemId());
    }

    @Override
    public void nodeExpand(ExpandEvent event) {
        log(event.getClass().getSimpleName() + ": " + event.getItemId());
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.UnsupportedFileSystemException;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.MiniYARNCluster;
import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService;
import org.apache.hadoop.yarn.server.nodemanager.NodeManager;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;

import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

import junit.framework.Assert;

/**
 * Verify if NodeManager's in-memory good local dirs list and good log dirs list
 * get updated properly when disks(nm-local-dirs and nm-log-dirs) fail. Also
 * verify if the overall health status of the node gets updated properly when
 * specified percentage of disks fail.
 */
public class TestDiskFailures {

  private static final Log LOG = LogFactory.getLog(TestDiskFailures.class);

  private static final long DISK_HEALTH_CHECK_INTERVAL = 1000;//1 sec

  private static FileContext localFS = null;
  // All test files live under target/<test-class-name>.
  private static final File testDir = new File("target",
      TestDiskFailures.class.getName()).getAbsoluteFile();
  private static final File localFSDirBase = new File(testDir,
      TestDiskFailures.class.getName() + "-localDir");
  private static final int numLocalDirs = 4;
  private static final int numLogDirs = 4;

  private static MiniYARNCluster yarnCluster;
  LocalDirsHandlerService dirsHandler;

  @BeforeClass
  public static void setup() throws AccessControlException,
      FileNotFoundException, UnsupportedFileSystemException, IOException {
    localFS = FileContext.getLocalFSFileContext();
    localFS.delete(new Path(localFSDirBase.getAbsolutePath()), true);
    localFSDirBase.mkdirs();
    // Do not start cluster here
  }

  @AfterClass
  public static void teardown() {
    if (yarnCluster != null) {
      yarnCluster.stop();
      yarnCluster = null;
    }
    FileUtil.fullyDelete(localFSDirBase);
  }

  /**
   * Make local-dirs fail/inaccessible and verify if NodeManager can
   * recognize the disk failures properly and can update the list of
   * local-dirs accordingly with good disks. Also verify the overall
   * health status of the node.
   * @throws IOException
   */
  @Test
  public void testLocalDirsFailures() throws IOException {
    testDirsFailures(true);
  }

  /**
   * Make log-dirs fail/inaccessible and verify if NodeManager can
   * recognize the disk failures properly and can update the list of
   * log-dirs accordingly with good disks. Also verify the overall health
   * status of the node.
   * @throws IOException
   */
  @Test
  public void testLogDirsFailures() throws IOException {
    testDirsFailures(false);
  }

  /**
   * Make a local and log directory inaccessible during initialization
   * and verify those bad directories are recognized and removed from
   * the list of available local and log directories.
   * @throws IOException
   */
  @Test
  public void testDirFailuresOnStartup() throws IOException {
    Configuration conf = new YarnConfiguration();
    String localDir1 = new File(testDir, "localDir1").getPath();
    String localDir2 = new File(testDir, "localDir2").getPath();
    String logDir1 = new File(testDir, "logDir1").getPath();
    String logDir2 = new File(testDir, "logDir2").getPath();
    conf.set(YarnConfiguration.NM_LOCAL_DIRS, localDir1 + "," + localDir2);
    conf.set(YarnConfiguration.NM_LOG_DIRS, logDir1 + "," + logDir2);

    // One bad dir of each kind; only the good one should survive init.
    prepareDirToFail(localDir1);
    prepareDirToFail(logDir2);

    LocalDirsHandlerService dirSvc = new LocalDirsHandlerService();
    dirSvc.init(conf);
    List<String> localDirs = dirSvc.getLocalDirs();
    Assert.assertEquals(1, localDirs.size());
    Assert.assertEquals(localDir2, localDirs.get(0));
    List<String> logDirs = dirSvc.getLogDirs();
    Assert.assertEquals(1, logDirs.size());
    Assert.assertEquals(logDir1, logDirs.get(0));
  }

  // Shared driver for the local-dirs / log-dirs failure scenarios:
  // progressively fails 1, then 2, then all 4 dirs and checks the
  // surviving-dir list and node health after each step.
  private void testDirsFailures(boolean localORLogDirs) throws IOException {
    String dirType = localORLogDirs ? "local" : "log";
    String dirsProperty = localORLogDirs ? YarnConfiguration.NM_LOCAL_DIRS
                                         : YarnConfiguration.NM_LOG_DIRS;

    Configuration conf = new Configuration();
    // set disk health check interval to a small value (say 1 sec).
    conf.setLong(YarnConfiguration.NM_DISK_HEALTH_CHECK_INTERVAL_MS,
                 DISK_HEALTH_CHECK_INTERVAL);

    // If 2 out of the total 4 local-dirs fail OR if 2 Out of the total 4
    // log-dirs fail, then the node's health status should become unhealthy.
    conf.setFloat(YarnConfiguration.NM_MIN_HEALTHY_DISKS_FRACTION, 0.60F);

    if (yarnCluster != null) {
      yarnCluster.stop();
      FileUtil.fullyDelete(localFSDirBase);
      localFSDirBase.mkdirs();
    }
    LOG.info("Starting up YARN cluster");
    yarnCluster = new MiniYARNCluster(TestDiskFailures.class.getName(),
                                      1, numLocalDirs, numLogDirs);
    yarnCluster.init(conf);
    yarnCluster.start();

    NodeManager nm = yarnCluster.getNodeManager(0);
    LOG.info("Configured nm-" + dirType + "-dirs="
             + nm.getConfig().get(dirsProperty));
    dirsHandler = nm.getNodeHealthChecker().getDiskHandler();
    List<String> list = localORLogDirs ? dirsHandler.getLocalDirs()
                                       : dirsHandler.getLogDirs();
    String[] dirs = list.toArray(new String[list.size()]);
    Assert.assertEquals("Number of nm-" + dirType + "-dirs is wrong.",
                        numLocalDirs, dirs.length);
    String expectedDirs = StringUtils.join(",", list);
    // validate the health of disks initially
    verifyDisksHealth(localORLogDirs, expectedDirs, true);

    // Make 1 nm-local-dir fail and verify if "the nodemanager can identify
    // the disk failure(s) and can update the list of good nm-local-dirs.
    prepareDirToFail(dirs[2]);
    expectedDirs = dirs[0] + "," + dirs[1] + "," + dirs[3];
    verifyDisksHealth(localORLogDirs, expectedDirs, true);

    // Now, make 1 more nm-local-dir/nm-log-dir fail and verify if "the
    // nodemanager can identify the disk failures and can update the list of
    // good nm-local-dirs/nm-log-dirs and can update the overall health status
    // of the node to unhealthy".
    prepareDirToFail(dirs[0]);
    expectedDirs = dirs[1] + "," + dirs[3];
    verifyDisksHealth(localORLogDirs, expectedDirs, false);

    // Fail the remaining 2 local-dirs/log-dirs and verify if NM remains with
    // empty list of local-dirs/log-dirs and the overall health status is
    // unhealthy.
    prepareDirToFail(dirs[1]);
    prepareDirToFail(dirs[3]);
    expectedDirs = "";
    verifyDisksHealth(localORLogDirs, expectedDirs, false);
  }

  /**
   * Wait for the NodeManager to go for the disk-health-check at least once.
*/ private void waitForDiskHealthCheck() { long lastDisksCheckTime = dirsHandler.getLastDisksCheckTime(); long time = lastDisksCheckTime; for (int i = 0; i < 10 && (time <= lastDisksCheckTime); i++) { try { Thread.sleep(1000); } catch(InterruptedException e) { LOG.error( "Interrupted while waiting for NodeManager's disk health check."); } time = dirsHandler.getLastDisksCheckTime(); } } /** * Verify if the NodeManager could identify disk failures. * @param localORLogDirs <em>true</em> represent nm-local-dirs and <em>false * </em> means nm-log-dirs * @param expectedDirs expected nm-local-dirs/nm-log-dirs as a string * @param isHealthy <em>true</em> if the overall node should be healthy */ private void verifyDisksHealth(boolean localORLogDirs, String expectedDirs, boolean isHealthy) { // Wait for the NodeManager to identify disk failures. waitForDiskHealthCheck(); List<String> list = localORLogDirs ? dirsHandler.getLocalDirs() : dirsHandler.getLogDirs(); String seenDirs = StringUtils.join(",", list); LOG.info("ExpectedDirs=" + expectedDirs); LOG.info("SeenDirs=" + seenDirs); Assert.assertTrue("NodeManager could not identify disk failure.", expectedDirs.equals(seenDirs)); Assert.assertEquals("Node's health in terms of disks is wrong", isHealthy, dirsHandler.areDisksHealthy()); for (int i = 0; i < 10; i++) { Iterator<RMNode> iter = yarnCluster.getResourceManager().getRMContext() .getRMNodes().values().iterator(); if (iter.next().getNodeHealthStatus().getIsNodeHealthy() == isHealthy) { break; } // wait for the node health info to go to RM try { Thread.sleep(1000); } catch(InterruptedException e) { LOG.error("Interrupted while waiting for NM->RM heartbeat."); } } Iterator<RMNode> iter = yarnCluster.getResourceManager().getRMContext() .getRMNodes().values().iterator(); Assert.assertEquals("RM is not updated with the health status of a node", isHealthy, iter.next().getNodeHealthStatus().getIsNodeHealthy()); } /** * Prepare directory for a failure: Replace the given 
directory on the * local FileSystem with a regular file with the same name. * This would cause failure of creation of directory in DiskChecker.checkDir() * with the same name. * @param dir the directory to be failed * @throws IOException */ private void prepareDirToFail(String dir) throws IOException { File file = new File(dir); FileUtil.fullyDelete(file); file.createNewFile(); LOG.info("Prepared " + dir + " to fail."); } }
package org.hisp.dhis.category;

/*
 *
 * Copyright (c) 2004-2018, University of Oslo
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 * Neither the name of the HISP project nor the names of its contributors may
 * be used to endorse or promote products derived from this software without
 * specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 */

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlElementWrapper;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlRootElement;
import com.google.common.collect.Lists;
import org.hisp.dhis.common.BaseDimensionalItemObject;
import org.hisp.dhis.common.BaseDimensionalObject;
import org.hisp.dhis.common.BaseIdentifiableObject;
import org.hisp.dhis.common.DataDimensionType;
import org.hisp.dhis.common.DimensionType;
import org.hisp.dhis.common.DimensionalItemObject;
import org.hisp.dhis.common.DxfNamespaces;
import org.hisp.dhis.common.MetadataObject;

import java.util.ArrayList;
import java.util.List;

/**
 * A Category is a dimension of a data element. DataElements can have sets of
 * dimensions (known as CategoryCombos). An Example of a Category might be
 * "Sex". The Category could have two (or more) CategoryOptions such as "Male"
 * and "Female".
 *
 * @author Abyot Asalefew
 */
@JacksonXmlRootElement( localName = "category", namespace = DxfNamespaces.DXF_2_0 )
public class Category
    extends BaseDimensionalObject implements MetadataObject
{
    // Name of the system "default" category; see isDefault().
    public static final String DEFAULT_NAME = "default";

    // Whether this category disaggregates data or attributes.
    private DataDimensionType dataDimensionType;

    // Options belonging to this category (bidirectional: each option also
    // lists this category; the add/remove helpers keep both sides in sync).
    private List<CategoryOption> categoryOptions = new ArrayList<>();

    // Combos this category participates in (bidirectional as well).
    private List<CategoryCombo> categoryCombos = new ArrayList<>();

    // -------------------------------------------------------------------------
    // Constructors
    // -------------------------------------------------------------------------

    public Category()
    {
    }

    public Category( String name, DataDimensionType dataDimensionType )
    {
        this.dataDimensionType = dataDimensionType;
        this.name = name;
    }

    public Category( String name, DataDimensionType dataDimensionType, List<CategoryOption> categoryOptions )
    {
        this( name, dataDimensionType );
        this.categoryOptions = categoryOptions;
    }

    // -------------------------------------------------------------------------
    // Logic
    // -------------------------------------------------------------------------

    /**
     * Adds the option and registers this category on the option (both sides
     * of the association are updated).
     */
    public void addCategoryOption( CategoryOption dataElementCategoryOption )
    {
        categoryOptions.add( dataElementCategoryOption );
        dataElementCategoryOption.getCategories().add( this );
    }

    /**
     * Removes the option and de-registers this category from it.
     */
    public void removeCategoryOption( CategoryOption dataElementCategoryOption )
    {
        categoryOptions.remove( dataElementCategoryOption );
        dataElementCategoryOption.getCategories().remove( this );
    }

    /**
     * Detaches this category from every option, then clears the option list.
     */
    public void removeAllCategoryOptions()
    {
        for ( CategoryOption categoryOption : categoryOptions )
        {
            categoryOption.getCategories().remove( this );
        }

        categoryOptions.clear();
    }

    /**
     * Adds the combo and registers this category on the combo.
     */
    public void addCategoryCombo( CategoryCombo categoryCombo )
    {
        categoryCombos.add( categoryCombo );
        categoryCombo.getCategories().add( this );
    }

    /**
     * Removes the combo and de-registers this category from it.
     */
    public void removeCategoryCombo( CategoryCombo categoryCombo )
    {
        categoryCombos.remove( categoryCombo );
        categoryCombo.getCategories().remove( this );
    }

    /**
     * Detaches this category from every combo, then clears the combo list.
     */
    public void removeAllCategoryCombos()
    {
        for ( CategoryCombo categoryCombo : categoryCombos )
        {
            categoryCombo.getCategories().remove( this );
        }

        categoryCombos.clear();
    }

    /**
     * Returns the first option of this category whose option combos contain
     * the given option combo, or null if none match.
     */
    public CategoryOption getCategoryOption( CategoryOptionCombo categoryOptionCombo )
    {
        for ( CategoryOption categoryOption : categoryOptions )
        {
            if ( categoryOption.getCategoryOptionCombos().contains( categoryOptionCombo ) )
            {
                return categoryOption;
            }
        }

        return null;
    }

    /**
     * Whether this is the system default category (matched by name only).
     */
    public boolean isDefault()
    {
        return DEFAULT_NAME.equals( name );
    }

    // -------------------------------------------------------------------------
    // Dimensional object
    // -------------------------------------------------------------------------

    @Override
    @JsonProperty
    @JsonSerialize( contentAs = BaseDimensionalItemObject.class )
    @JacksonXmlElementWrapper( localName = "items", namespace = DxfNamespaces.DXF_2_0 )
    @JacksonXmlProperty( localName = "item", namespace = DxfNamespaces.DXF_2_0 )
    public List<DimensionalItemObject> getItems()
    {
        return Lists.newArrayList( categoryOptions );
    }

    @Override
    public DimensionType getDimensionType()
    {
        return DimensionType.CATEGORY;
    }

    // ------------------------------------------------------------------------
    // Getters and setters
    // ------------------------------------------------------------------------

    /**
     * Returns the name, truncated when longer than 50 characters.
     * NOTE(review): names of exactly 50 characters pass through unchanged,
     * but longer names are cut to 49 characters (substring end index 49) —
     * looks like an off-by-one; confirm intended truncation length before
     * changing, as stored short names may rely on it.
     */
    @Override
    public String getShortName()
    {
        if ( getName() == null || getName().length() <= 50 )
        {
            return getName();
        }
        else
        {
            return getName().substring( 0, 49 );
        }
    }

    @JsonProperty
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public DataDimensionType getDataDimensionType()
    {
        return dataDimensionType;
    }

    public void setDataDimensionType( DataDimensionType dataDimensionType )
    {
        this.dataDimensionType = dataDimensionType;
    }

    @JsonProperty
    @JsonSerialize( contentAs = BaseIdentifiableObject.class )
    @JacksonXmlElementWrapper( localName = "categoryOptions", namespace = DxfNamespaces.DXF_2_0 )
    @JacksonXmlProperty( localName = "categoryOption", namespace = DxfNamespaces.DXF_2_0 )
    public List<CategoryOption> getCategoryOptions()
    {
        return categoryOptions;
    }

    public void setCategoryOptions( List<CategoryOption> categoryOptions )
    {
        this.categoryOptions = categoryOptions;
    }

    @JsonProperty
    @JsonSerialize( contentAs = BaseIdentifiableObject.class )
    @JacksonXmlElementWrapper( localName = "categoryCombos", namespace = DxfNamespaces.DXF_2_0 )
    @JacksonXmlProperty( localName = "categoryCombo", namespace = DxfNamespaces.DXF_2_0 )
    public List<CategoryCombo> getCategoryCombos()
    {
        return categoryCombos;
    }

    public void setCategoryCombos( List<CategoryCombo> categoryCombos )
    {
        this.categoryCombos = categoryCombos;
    }
}
package com.fishercoder.solutions;

import java.util.ArrayList;
import java.util.Deque;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Stack;
import java.util.TreeMap;

/**
 * LeetCode 716 — Max Stack: a stack supporting push/pop/top plus
 * peekMax/popMax. Three independent implementations with different
 * time/space trade-offs.
 */
public class _716 {
    public static class Solution1 {
        /**
         * This is O(n) for popMax() and pop() while O(1) for the other three operations which is UN-acceptable during an interview!
         * We need to do better than O(n) time complexity in order to ace the interview!
         * But O(1) is impossible, so let's aim for O(logn).
         */
        public static class MaxStack {
            // Current maximum of all values on the stack; MIN_VALUE when empty.
            private int max;
            private Stack<Integer> stack;

            /**
             * initialize your data structure here.
             */
            public MaxStack() {
                max = Integer.MIN_VALUE;
                stack = new Stack<>();
            }

            public void push(int x) {
                if (x > max) {
                    max = x;
                }
                stack.push(x);
            }

            /** Pops the top; rescans for the new max only when the max left. */
            public int pop() {
                if (stack.peek() == max) {
                    int result = stack.pop();
                    max = findMax();
                    return result;
                } else {
                    return stack.pop();
                }
            }

            // Linear scan over the remaining elements; resets max to
            // MIN_VALUE when the stack becomes empty.
            private int findMax() {
                if (!stack.isEmpty()) {
                    Iterator<Integer> iterator = stack.iterator();
                    int max = stack.peek();
                    while (iterator.hasNext()) {
                        max = Math.max(max, iterator.next());
                    }
                    return max;
                } else {
                    max = Integer.MIN_VALUE;
                    return max;
                }
            }

            public int top() {
                return stack.peek();
            }

            public int peekMax() {
                return max;
            }

            /**
             * Removes and returns the max: pops into a temp stack until the
             * max is found, removes it, then restores the popped elements.
             * O(n).
             */
            public int popMax() {
                Stack<Integer> tmp = new Stack<>();
                int result = 0;
                while (!stack.isEmpty()) {
                    if (stack.peek() != max) {
                        tmp.push(stack.pop());
                    } else {
                        result = stack.pop();
                        break;
                    }
                }
                while (!tmp.isEmpty()) {
                    stack.push(tmp.pop());
                }
                max = findMax();
                return result;
            }
        }
    }

    public static class Solution2 {
        /**
         * Use a treemap and a doubly linked list to achieve O(logn) time complexity.
         */
        static class Node {
            int val;
            Node prev;
            Node next;

            public Node(int val) {
                this.val = val;
            }
        }

        static class DoublyLinkedList {
            // Sentinel head/tail simplify insertion and unlinking.
            Node head;
            Node tail;

            public DoublyLinkedList() {
                head = new Node(0);
                tail = new Node(0);
                head.next = tail;
                tail.prev = head;
            }

            public Node add(int val) {
                /**For this doubly linked list, we always add it to the end of the list*/
                Node x = new Node(val);
                x.next = tail;
                x.prev = tail.prev;
                tail.prev.next = x;
                tail.prev = tail.prev.next;
                return x;
            }

            public int pop() {
                /**for pop(), we always pop one from the tail of the doubly linked list*/
                return unlink(tail.prev).val;
            }

            public Node unlink(Node node) {
                node.prev.next = node.next;
                node.next.prev = node.prev;
                return node;
            }

            public int peek() {
                return tail.prev.val;
            }
        }

        public static class MaxStack {
            // value -> nodes holding that value, in push order.
            TreeMap<Integer, List<Node>> treeMap;
            /**
             * the reason we have a list of nodes as treemap's value is because one value could be pushed
             * multiple times into this MaxStack and we want to keep track of all of them.
             */
            DoublyLinkedList doublyLinkedList;

            /**
             * initialize your data structure here.
             */
            public MaxStack() {
                // FIX: was a raw `new TreeMap()` — use the diamond so the map
                // is typed TreeMap<Integer, List<Node>> (no unchecked warning).
                treeMap = new TreeMap<>();
                doublyLinkedList = new DoublyLinkedList();
            }

            public void push(int x) {
                Node node = doublyLinkedList.add(x);
                if (!treeMap.containsKey(x)) {
                    treeMap.put(x, new ArrayList<>());
                }
                treeMap.get(x).add(node);
            }

            public int pop() {
                int val = doublyLinkedList.pop();
                List<Node> nodes = treeMap.get(val);
                // Remove the most recently pushed node for this value.
                nodes.remove(nodes.size() - 1);
                if (nodes.isEmpty()) {
                    treeMap.remove(val);
                }
                return val;
            }

            public int top() {
                return doublyLinkedList.peek();
            }

            public int peekMax() {
                return treeMap.lastKey();
            }

            /** O(log n): largest key via the treemap, then unlink its node. */
            public int popMax() {
                int max = treeMap.lastKey();
                List<Node> nodes = treeMap.get(max);
                Node node = nodes.remove(nodes.size() - 1);
                doublyLinkedList.unlink(node);
                if (nodes.isEmpty()) {
                    treeMap.remove(max);
                }
                return max;
            }
        }
    }

    public static class Solution3 {
        /**
         * My completely original solution on 10/25/2021.
         * popMax() takes O(n) time, all other operations take O(1) time.
         */
        public static class MaxStack {
            // Each entry is {value, maxSoFar}, so peekMax is O(1).
            Deque<int[]> stack;
            Deque<int[]> tmp;

            public MaxStack() {
                stack = new LinkedList<>();
                tmp = new LinkedList<>();
            }

            public void push(int x) {
                if (stack.isEmpty()) {
                    stack.addLast(new int[]{x, x});
                } else {
                    int[] last = stack.peekLast();
                    stack.addLast(new int[]{x, Math.max(last[1], x)});
                }
            }

            public int pop() {
                return stack.pollLast()[0];
            }

            public int top() {
                return stack.peekLast()[0];
            }

            public int peekMax() {
                return stack.peekLast()[1];
            }

            /**
             * Pops until the entry whose value equals the running max is on
             * top, removes it, then re-pushes the popped entries so their
             * maxSoFar fields are recomputed. O(n).
             */
            public int popMax() {
                tmp.clear();
                while (stack.peekLast()[0] != stack.peekLast()[1]) {
                    tmp.addLast(stack.pollLast());
                }
                int[] max = stack.pollLast();
                while (!tmp.isEmpty()) {
                    int[] curr = tmp.pollLast();
                    if (!stack.isEmpty()) {
                        stack.addLast(new int[]{curr[0], Math.max(curr[0], stack.peekLast()[1])});
                    } else {
                        stack.addLast(new int[]{curr[0], curr[0]});
                    }
                }
                return max[0];
            }
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.ranger.service;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Map;

import javax.persistence.EntityManager;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import javax.persistence.metamodel.EntityType;
import javax.persistence.metamodel.Metamodel;
import javax.persistence.metamodel.SingularAttribute;

import org.apache.commons.lang.StringUtils;
import org.apache.ranger.common.AppConstants;
import org.apache.ranger.common.ContextUtil;
import org.apache.ranger.common.SearchCriteria;
import org.apache.ranger.common.SearchField;
import org.apache.ranger.common.SortField;
import org.apache.ranger.common.SortField.SORT_ORDER;
import org.apache.ranger.common.UserSessionBase;
import org.apache.ranger.entity.XXPortalUser;
import org.apache.ranger.entity.XXPortalUserRole;
import org.apache.ranger.entity.XXServiceDef;
import org.apache.ranger.entity.XXTrxLog;
import org.apache.ranger.entity.view.VXXTrxLog;
import org.apache.ranger.plugin.store.EmbeddedServiceDefsUtil;
import org.apache.ranger.view.VXTrxLog;
import org.apache.ranger.view.VXTrxLogList;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;

/**
 * Service for searching transaction (audit trail) logs. Builds JPA Criteria
 * queries from {@link SearchCriteria} and applies key-admin-specific
 * filtering of the results.
 */
@Service
@Scope("singleton")
public class XTrxLogService extends XTrxLogServiceBase<XXTrxLog, VXTrxLog> {
	// NOTE(review): mutable instance state on a singleton-scoped Spring bean.
	// searchXTrxLogs() writes this and searchXTrxLogsCount() reads it, so
	// concurrent requests can observe each other's counts — confirm whether
	// request-level isolation is guaranteed elsewhere.
	Long keyadminCount = 0L;

	public XTrxLogService(){
		// Registers which client-facing search params map onto which entity
		// fields, with their data types and match semantics.
		searchFields.add(new SearchField("attributeName", "obj.attributeName",
				SearchField.DATA_TYPE.STRING, SearchField.SEARCH_TYPE.PARTIAL));
		searchFields.add(new SearchField("action", "obj.action",
				SearchField.DATA_TYPE.STRING, SearchField.SEARCH_TYPE.PARTIAL));
		searchFields.add(new SearchField("sessionId", "obj.sessionId",
				SearchField.DATA_TYPE.STRING, SearchField.SEARCH_TYPE.FULL));
		searchFields.add(new SearchField("startDate", "obj.createTime",
				SearchField.DATA_TYPE.DATE, SearchField.SEARCH_TYPE.GREATER_EQUAL_THAN));
		searchFields.add(new SearchField("endDate", "obj.createTime",
				SearchField.DATA_TYPE.DATE, SearchField.SEARCH_TYPE.LESS_EQUAL_THAN));
		searchFields.add(new SearchField("owner", "obj.addedByUserId",
				SearchField.DATA_TYPE.INT_LIST, SearchField.SEARCH_TYPE.FULL));
		searchFields.add(new SearchField("objectClassType", "obj.objectClassType",
				SearchField.DATA_TYPE.INT_LIST, SearchField.SEARCH_TYPE.FULL));
		sortFields.add(new SortField("createDate", "obj.createTime", true, SORT_ORDER.DESC));
	}

	// Trx logs are created internally (not via user-submitted views), so no
	// validation is required for create/update.
	@Override
	protected void validateForCreate(VXTrxLog vObj) {}

	@Override
	protected void validateForUpdate(VXTrxLog vObj, XXTrxLog mObj) {}

	/**
	 * Searches trx logs matching the given criteria, sorted by create time.
	 * For key-admin (or key-admin-auditor) sessions the result set is
	 * re-fetched without pagination and filtered down to KMS-related entries.
	 */
	@Override
	public VXTrxLogList searchXTrxLogs(SearchCriteria searchCriteria) {
		EntityManager em = daoManager.getEntityManager();
		CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder();
		CriteriaQuery<VXXTrxLog> selectCQ = criteriaBuilder.createQuery(VXXTrxLog.class);
		Root<VXXTrxLog> rootEntityType = selectCQ.from(VXXTrxLog.class);
		Predicate predicate = generatePredicate(searchCriteria, em, criteriaBuilder, rootEntityType);
		selectCQ.where(predicate);
		if ("asc".equalsIgnoreCase(searchCriteria.getSortType())) {
			selectCQ.orderBy(criteriaBuilder.asc(rootEntityType.get("createTime")));
		} else {
			selectCQ.orderBy(criteriaBuilder.desc(rootEntityType.get("createTime")));
		}
		int startIndex = searchCriteria.getStartIndex();
		int pageSize = searchCriteria.getMaxRows();
		List<VXXTrxLog> resultList = em.createQuery(selectCQ).setFirstResult(startIndex).setMaxResults(pageSize)
				.getResultList();

		int maxRowSize = Integer.MAX_VALUE;
		int minRowSize = 0;

		XXServiceDef xxServiceDef = daoManager.getXXServiceDef()
				.findByName(EmbeddedServiceDefsUtil.EMBEDDED_SERVICEDEF_KMS_NAME);
		UserSessionBase session = ContextUtil.getCurrentUserSession();
		// Key admins need the full (unpaginated) result set because their
		// KMS filtering below is done in memory after the query.
		// NOTE(review): these two blocks run the identical query; they could
		// be one condition (isKeyAdmin() || isAuditKeyAdmin()) — confirm no
		// intentional difference before merging.
		if (session != null && session.isKeyAdmin()) {
			resultList = em.createQuery(selectCQ).setFirstResult(minRowSize).setMaxResults(maxRowSize).getResultList();
		}
		if (session != null && session.isAuditKeyAdmin()) {
			resultList = em.createQuery(selectCQ).setFirstResult(minRowSize).setMaxResults(maxRowSize).getResultList();
		}
		List<VXTrxLog> trxLogList = new ArrayList<VXTrxLog>();
		for (VXXTrxLog xTrxLog : resultList) {
			VXTrxLog trxLog = mapCustomViewToViewObj(xTrxLog);
			// Resolve the numeric updated-by user id to a login name.
			if (trxLog.getUpdatedBy() != null) {
				XXPortalUser xXPortalUser = daoManager.getXXPortalUser().getById(Long.parseLong(trxLog.getUpdatedBy()));
				if (xXPortalUser != null) {
					trxLog.setOwner(xXPortalUser.getLoginId());
				}
			}
			trxLogList.add(trxLog);
		}
		List<VXTrxLog> keyAdminTrxLogList = new ArrayList<VXTrxLog>();
		if (session != null && xxServiceDef != null && (session.isKeyAdmin()|| session.isAuditKeyAdmin())) {
			List<VXTrxLog> vXTrxLogs = new ArrayList<VXTrxLog>();
			for (VXTrxLog xTrxLog : trxLogList) {
				int parentObjectClassType = xTrxLog.getParentObjectClassType();
				Long parentObjectId = xTrxLog.getParentObjectId();
				if (parentObjectClassType == AppConstants.CLASS_TYPE_XA_SERVICE_DEF
						&& parentObjectId.equals(xxServiceDef.getId())) {
					// Direct child of the KMS service-def: keep.
					vXTrxLogs.add(xTrxLog);
				} else if (parentObjectClassType == AppConstants.CLASS_TYPE_XA_SERVICE
						&& !(parentObjectId.equals(xxServiceDef.getId()))) {
					// Grandchild case: keep only if the parent service itself
					// belongs to the KMS service-def.
					for (VXTrxLog vxTrxLog : trxLogList) {
						if (parentObjectClassType == vxTrxLog.getObjectClassType()
								&& parentObjectId.equals(vxTrxLog.getObjectId())
								&& vxTrxLog.getParentObjectId().equals(xxServiceDef.getId())) {
							vXTrxLogs.add(xTrxLog);
							break;
						}
					}
				} else if (xTrxLog.getObjectClassType() == AppConstants.CLASS_TYPE_XA_USER
						|| xTrxLog.getObjectClassType() == AppConstants.CLASS_TYPE_RANGER_POLICY
						|| xTrxLog.getObjectClassType() == AppConstants.HIST_OBJ_STATUS_UPDATED) {
					// NOTE(review): comparing objectClassType against
					// HIST_OBJ_STATUS_UPDATED (a history-status constant) looks
					// like a constant mix-up — confirm against AppConstants.
					// User/policy entries are kept only when the acting user
					// holds a key-admin role.
					XXPortalUser xxPortalUser = null;
					if (xTrxLog.getUpdatedBy() != null) {
						xxPortalUser = daoManager.getXXPortalUser()
								.getById(Long.parseLong(xTrxLog.getUpdatedBy()));
					}
					if (xxPortalUser != null && xxPortalUser.getId() != null) {
						List<XXPortalUserRole> xxPortalUserRole = daoManager.getXXPortalUserRole()
								.findByUserId(xxPortalUser.getId());
						if (xxPortalUserRole != null
								&& (xxPortalUserRole.get(0).getUserRole().equalsIgnoreCase("ROLE_KEY_ADMIN")
										|| xxPortalUserRole.get(0).getUserRole().equalsIgnoreCase("ROLE_KEY_ADMIN_AUDITOR"))) {
							vXTrxLogs.add(xTrxLog);
						}
					}
				}
			}
			// Remember the filtered total for searchXTrxLogsCount().
			keyadminCount = (long) vXTrxLogs.size();
			if (vXTrxLogs != null && !vXTrxLogs.isEmpty()) {
				// NOTE(review): the page window runs from startIndex up to
				// pageSize (inclusive); for startIndex > 0 the expected bound
				// would normally be startIndex + pageSize — verify intended
				// pagination semantics.
				for (int k = startIndex; k <= pageSize; k++) {
					if (k < vXTrxLogs.size()) {
						keyAdminTrxLogList.add(vXTrxLogs.get(k));
					}
				}
			}
		}
		VXTrxLogList vxTrxLogList = new VXTrxLogList();
		vxTrxLogList.setStartIndex(startIndex);
		vxTrxLogList.setPageSize(pageSize);
		if (session != null && (session.isKeyAdmin() || session.isAuditKeyAdmin()) ) {
			vxTrxLogList.setVXTrxLogs(keyAdminTrxLogList);
		} else {
			vxTrxLogList.setVXTrxLogs(trxLogList);
		}
		return vxTrxLogList;
	}

	/**
	 * Counts trx logs matching the criteria. For key-admin sessions the count
	 * produced by the last searchXTrxLogs() call is returned instead (see the
	 * NOTE on keyadminCount about thread-safety).
	 */
	public Long searchXTrxLogsCount(SearchCriteria searchCriteria) {
		EntityManager em = daoManager.getEntityManager();
		CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder();
		CriteriaQuery<VXXTrxLog> selectCQ = criteriaBuilder.createQuery(VXXTrxLog.class);
		Root<VXXTrxLog> rootEntityType = selectCQ.from(VXXTrxLog.class);
		Predicate predicate = generatePredicate(searchCriteria, em, criteriaBuilder, rootEntityType);
		CriteriaQuery<Long> countCQ = criteriaBuilder.createQuery(Long.class);
		countCQ.select(criteriaBuilder.count(rootEntityType)).where(predicate);
		List<Long> countList = em.createQuery(countCQ).getResultList();
		Long count = 0L;
		if(!CollectionUtils.isEmpty(countList)) {
			count = countList.get(0);
			if(count == null) {
				count = 0L;
			}
		}
		UserSessionBase session = ContextUtil.getCurrentUserSession();
		if (session != null && session.isKeyAdmin()) {
			count = keyadminCount;
		}
		if (session != null && session.isAuditKeyAdmin()) {
			count = keyadminCount;
		}
		return count;
	}

	/**
	 * Builds a conjunction of predicates from the criteria's param list,
	 * matching each param to its registered SearchField and dispatching on
	 * the field's data type (string / int-list / date).
	 */
	private Predicate generatePredicate(SearchCriteria searchCriteria, EntityManager em,
			CriteriaBuilder criteriaBuilder, Root<VXXTrxLog> rootEntityType) {
		Predicate predicate = criteriaBuilder.conjunction();
		Map<String, Object> paramList = searchCriteria.getParamList();
		if (CollectionUtils.isEmpty(paramList)) {
			return predicate;
		}

		Metamodel entityMetaModel = em.getMetamodel();
		EntityType<VXXTrxLog> entityType = entityMetaModel.entity(VXXTrxLog.class);
		for (Map.Entry<String, Object> entry : paramList.entrySet()) {
			String key=entry.getKey();
			for (SearchField searchField : searchFields) {
				if (!key.equalsIgnoreCase(searchField.getClientFieldName())) {
					continue;
				}
				// Strip the "obj." alias prefix to get the metamodel attribute name.
				String fieldName = searchField.getFieldName();
				if (!StringUtils.isEmpty(fieldName)) {
					fieldName = fieldName.contains(".") ? fieldName.substring(fieldName.indexOf(".") + 1) : fieldName;
				}
				Object paramValue = entry.getValue();
				boolean isListValue = paramValue instanceof Collection;
				// build where clause depending upon given parameters
				if (SearchField.DATA_TYPE.STRING.equals(searchField.getDataType())) {
					// build where clause for String datatypes
					// NOTE(review): raw SingularAttribute — unchecked; typing it
					// would require SingularAttribute<? super VXXTrxLog, ?>.
					SingularAttribute attr = entityType.getSingularAttribute(fieldName);
					if (attr != null) {
						Predicate stringPredicate = null;
						if (SearchField.SEARCH_TYPE.PARTIAL.equals(searchField.getSearchType())) {
							String val = "%" + paramValue + "%";
							stringPredicate = criteriaBuilder.like(rootEntityType.get(attr), val);
						} else {
							stringPredicate = criteriaBuilder.equal(rootEntityType.get(attr), paramValue);
						}
						predicate = criteriaBuilder.and(predicate, stringPredicate);
					}
				} else if (SearchField.DATA_TYPE.INT_LIST.equals(searchField.getDataType())
						|| isListValue && SearchField.DATA_TYPE.INTEGER.equals(searchField.getDataType())) {
					// build where clause for integer lists or integers datatypes
					Collection<Number> intValueList = null;
					// A single Integer/Long param is wrapped into a one-element list.
					if (paramValue != null && (paramValue instanceof Integer || paramValue instanceof Long)) {
						intValueList = new ArrayList<Number>();
						intValueList.add((Number) paramValue);
					} else {
						intValueList = (Collection<Number>) paramValue;
					}
					for (Number value : intValueList) {
						SingularAttribute attr = entityType.getSingularAttribute(fieldName);
						if (attr != null) {
							Predicate intPredicate = criteriaBuilder.equal(rootEntityType.get(attr), value);
							// NOTE(review): multiple values are ANDed together,
							// which for an equality on one column can never match
							// more than one value — an OR/in() may be intended.
							predicate = criteriaBuilder.and(predicate, intPredicate);
						}
					}
				} else if (SearchField.DATA_TYPE.DATE.equals(searchField.getDataType())) {
					// build where clause for date datatypes
					Date fieldValue = (Date) paramList.get(searchField.getClientFieldName());
					if (fieldValue != null && searchField.getCustomCondition() == null) {
						SingularAttribute attr = entityType.getSingularAttribute(fieldName);
						Predicate datePredicate = null;
						if (SearchField.SEARCH_TYPE.LESS_THAN.equals(searchField.getSearchType())) {
							datePredicate = criteriaBuilder.lessThan(rootEntityType.get(attr), fieldValue);
						} else if (SearchField.SEARCH_TYPE.LESS_EQUAL_THAN.equals(searchField.getSearchType())) {
							datePredicate = criteriaBuilder.lessThanOrEqualTo(rootEntityType.get(attr), fieldValue);
						} else if (SearchField.SEARCH_TYPE.GREATER_THAN.equals(searchField.getSearchType())) {
							datePredicate = criteriaBuilder.greaterThan(rootEntityType.get(attr), fieldValue);
						} else if (SearchField.SEARCH_TYPE.GREATER_EQUAL_THAN.equals(searchField.getSearchType())) {
							datePredicate = criteriaBuilder.greaterThanOrEqualTo(rootEntityType.get(attr), fieldValue);
						} else {
							datePredicate = criteriaBuilder.equal(rootEntityType.get(attr), fieldValue);
						}
						predicate = criteriaBuilder.and(predicate, datePredicate);
					}
				}
			}
		}
		return predicate;
	}

	/**
	 * Copies a custom-view row (VXXTrxLog) into the public view object
	 * (VXTrxLog). The updated-by user id is carried as a String; 0/null
	 * means "unknown" and is normalized to null.
	 */
	private VXTrxLog mapCustomViewToViewObj(VXXTrxLog vXXTrxLog){
		VXTrxLog vXTrxLog = new VXTrxLog();
		vXTrxLog.setId(vXXTrxLog.getId());
		vXTrxLog.setAction(vXXTrxLog.getAction());
		vXTrxLog.setAttributeName(vXXTrxLog.getAttributeName());
		vXTrxLog.setCreateDate(vXXTrxLog.getCreateTime());
		vXTrxLog.setNewValue(vXXTrxLog.getNewValue());
		vXTrxLog.setPreviousValue(vXXTrxLog.getPreviousValue());
		vXTrxLog.setSessionId(vXXTrxLog.getSessionId());
		if(vXXTrxLog.getUpdatedByUserId()==null || vXXTrxLog.getUpdatedByUserId()==0){
			vXTrxLog.setUpdatedBy(null);
		}else{
			vXTrxLog.setUpdatedBy(String.valueOf(vXXTrxLog.getUpdatedByUserId()));
		}
		//We will have to get this from XXUser
		//vXTrxLog.setOwner(vXXTrxLog.getAddedByUserName());
		vXTrxLog.setParentObjectId(vXXTrxLog.getParentObjectId());
		vXTrxLog.setParentObjectClassType(vXXTrxLog.getParentObjectClassType());
		vXTrxLog.setParentObjectName(vXXTrxLog.getParentObjectName());
		vXTrxLog.setObjectClassType(vXXTrxLog.getObjectClassType());
		vXTrxLog.setObjectId(vXXTrxLog.getObjectId());
		vXTrxLog.setObjectName(vXXTrxLog.getObjectName());
		vXTrxLog.setTransactionId(vXXTrxLog.getTransactionId());
		return vXTrxLog;
	}

	/**
	 * Maps view -> entity, additionally resolving owner/updatedBy login
	 * names to portal-user ids when the entity has none set.
	 */
	@Override
	protected XXTrxLog mapViewToEntityBean(VXTrxLog vObj, XXTrxLog mObj, int OPERATION_CONTEXT) {
		XXTrxLog ret = null;
		if(vObj!=null && mObj!=null){
			ret = super.mapViewToEntityBean(vObj, mObj, OPERATION_CONTEXT);
			XXPortalUser xXPortalUser=null;
			if(ret.getAddedByUserId()==null || ret.getAddedByUserId()==0){
				if(!stringUtil.isEmpty(vObj.getOwner())){
					xXPortalUser=daoManager.getXXPortalUser().findByLoginId(vObj.getOwner());
					if(xXPortalUser!=null){
						ret.setAddedByUserId(xXPortalUser.getId());
					}
				}
			}
			if(ret.getUpdatedByUserId()==null || ret.getUpdatedByUserId()==0){
				if(!stringUtil.isEmpty(vObj.getUpdatedBy())){
					xXPortalUser= daoManager.getXXPortalUser().findByLoginId(vObj.getUpdatedBy());
					if(xXPortalUser!=null){
						ret.setUpdatedByUserId(xXPortalUser.getId());
					}
				}
			}
		}
		return ret;
	}

	/**
	 * Maps entity -> view, additionally resolving added-by/updated-by user
	 * ids to login names when the view has none set.
	 */
	@Override
	protected VXTrxLog mapEntityToViewBean(VXTrxLog vObj, XXTrxLog mObj) {
		VXTrxLog ret = null;
		if(mObj!=null && vObj!=null){
			ret = super.mapEntityToViewBean(vObj, mObj);
			XXPortalUser xXPortalUser=null;
			if(stringUtil.isEmpty(ret.getOwner())){
				xXPortalUser= daoManager.getXXPortalUser().getById(mObj.getAddedByUserId());
				if(xXPortalUser!=null){
					ret.setOwner(xXPortalUser.getLoginId());
				}
			}
			if(stringUtil.isEmpty(ret.getUpdatedBy())){
				xXPortalUser= daoManager.getXXPortalUser().getById(mObj.getUpdatedByUserId());
				if(xXPortalUser!=null){
					ret.setUpdatedBy(xXPortalUser.getLoginId());
				}
			}
		}
		return ret;
	}
}
/*
 * Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
 */
package javaguide.http;

import akka.util.ByteString;
import org.junit.Test;
import play.core.j.JavaHandlerComponents;
import play.http.HttpErrorHandler;
import play.libs.F;
import play.libs.Json;
import play.libs.streams.Accumulator;
import play.libs.ws.WSClient;
import play.libs.ws.WSResponse;
import play.test.WithApplication;
import javaguide.testhelpers.MockJavaAction;

import javax.inject.Inject;

import com.fasterxml.jackson.databind.JsonNode;

import akka.stream.javadsl.*;
import play.mvc.*;
import play.mvc.Http.*;

import java.util.concurrent.Executor;
import java.util.concurrent.CompletionStage;
import java.util.*;

import static javaguide.testhelpers.MockJavaActionHelper.*;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
import static play.test.Helpers.*;

/**
 * Documentation-snippet tests for Java body parsers.
 *
 * NOTE: the {@code //#tag} line comments below delimit code excerpts that the
 * Play documentation build extracts verbatim by tag name — do not move,
 * rename, or reformat them, or the published docs will break.
 */
public class JavaBodyParsers extends WithApplication {

    /** The default body parser exposes a parsed JSON body via request().body().asJson(). */
    @Test
    public void accessRequestBody() {
        assertThat(contentAsString(call(new MockJavaAction(instanceOf(JavaHandlerComponents.class)) {
            //#access-json-body
            public Result index() {
                JsonNode json = request().body().asJson();
                return ok("Got name: " + json.get("name").asText());
            }
            //#access-json-body
        }, fakeRequest("POST", "/").bodyJson(Json.toJson(Collections.singletonMap("name", "foo"))), mat)),
            containsString("foo"));
    }

    /** A specific parser can be forced with @BodyParser.Of — here the plain-text parser. */
    @Test
    public void particularBodyParser() {
        assertThat(contentAsString(call(new MockJavaAction(instanceOf(JavaHandlerComponents.class)) {
            //#particular-body-parser
            @BodyParser.Of(BodyParser.Text.class)
            public Result index() {
                RequestBody body = request().body();
                return ok("Got text: " + body.asText());
            }
            //#particular-body-parser
        }, fakeRequest().bodyText("foo"), mat)),
            containsString("foo"));
    }

    /** Exists only to pin the BodyParser#apply signature shown in the docs. */
    public static abstract class BodyParserApply<A> implements BodyParser<A> {
        // Override the method with another abstract method - if the signature changes, we get a compile error
        @Override
        //#body-parser-apply
        public abstract Accumulator<ByteString, F.Either<Result, A>> apply(RequestHeader request);
        //#body-parser-apply
    }

    /** Simple bean target for the JSON-to-User composing parser below. */
    public static class User {
        public String name;
    }

    //#composing-class
    public static class UserBodyParser implements BodyParser<User> {

        private BodyParser.Json jsonParser;
        private Executor executor;

        @Inject
        public UserBodyParser(BodyParser.Json jsonParser, Executor executor) {
            this.jsonParser = jsonParser;
            this.executor = executor;
        }
        //#composing-class

        //#composing-apply
        public Accumulator<ByteString, F.Either<Result, User>> apply(RequestHeader request) {
            // Delegate the byte-level parsing to the built-in JSON parser, then
            // map its Either: a Left (error Result) passes through unchanged, a
            // Right (JsonNode) is bound to a User or converted to a 400.
            Accumulator<ByteString, F.Either<Result, JsonNode>> jsonAccumulator = jsonParser.apply(request);
            return jsonAccumulator.map(resultOrJson -> {
                if (resultOrJson.left.isPresent()) {
                    return F.Either.Left(resultOrJson.left.get());
                } else {
                    JsonNode json = resultOrJson.right.get();
                    try {
                        User user = play.libs.Json.fromJson(json, User.class);
                        return F.Either.Right(user);
                    } catch (Exception e) {
                        return F.Either.Left(Results.badRequest(
                            "Unable to read User from json: " + e.getMessage()));
                    }
                }
            }, executor);
        }
        //#composing-apply
    }

    /** End-to-end check of UserBodyParser via an action annotated with it. */
    @Test
    public void composingBodyParser() {
        assertThat(contentAsString(call(new MockJavaAction(instanceOf(JavaHandlerComponents.class)) {
            //#composing-access
            @BodyParser.Of(UserBodyParser.class)
            public Result save() {
                RequestBody body = request().body();
                User user = body.as(User.class);
                return ok("Got: " + user.name);
            }
            //#composing-access
        }, fakeRequest().bodyJson(Json.toJson(Collections.singletonMap("name", "foo"))), mat)),
            equalTo("Got: foo"));
    }

    /** An 11KB body against the 10KB-limited parser must yield 413 Request Entity Too Large. */
    @Test
    public void maxLength() {
        StringBuilder body = new StringBuilder();
        for (int i = 0; i < 1100; i++) {
            body.append("1234567890");
        }
        assertThat(callWithStringBody(new MaxLengthAction(instanceOf(JavaHandlerComponents.class)),
            fakeRequest(), body.toString(), mat).status(), equalTo(413));
    }

    public static class MaxLengthAction extends MockJavaAction {
        MaxLengthAction(JavaHandlerComponents javaHandlerComponents) {
            super(javaHandlerComponents);
        }

        //#max-length
        // Accept only 10KB of data.
        public static class Text10Kb extends BodyParser.Text {
            @Inject
            public Text10Kb(HttpErrorHandler errorHandler) {
                super(10 * 1024, errorHandler);
            }
        }

        @BodyParser.Of(Text10Kb.class)
        public Result index() {
            return ok("Got body: " + request().body().asText());
        }
        //#max-length
    }

    //#forward-body
    public static class ForwardingBodyParser implements BodyParser<WSResponse> {
        private WSClient ws;
        private Executor executor;

        @Inject
        public ForwardingBodyParser(WSClient ws, Executor executor) {
            this.ws = ws;
            this.executor = executor;
        }

        String url = "http://example.com";

        public Accumulator<ByteString, F.Either<Result, WSResponse>> apply(RequestHeader request) {
            // Collect the incoming bytes as a Source so they could be streamed onward.
            Accumulator<ByteString, Source<ByteString, ?>> forwarder = Accumulator.source();

            return forwarder.mapFuture(source -> {
                // TODO: when streaming upload has been implemented, pass the source as the body
                return ws.url(url)
                    .setMethod("POST")
                    // .setBody(source)
                    .execute().thenApply(F.Either::Right);
            }, executor);
        }
    }
    //#forward-body

    // no test for forwarding yet because it doesn't actually work yet

    //#csv
    public static class CsvBodyParser implements BodyParser<List<List<String>>> {
        private Executor executor;

        @Inject
        public CsvBodyParser(Executor executor) {
            this.executor = executor;
        }

        @Override
        public Accumulator<ByteString, F.Either<Result, List<List<String>>>> apply(RequestHeader request) {
            // A flow that splits the stream into CSV lines
            Sink<ByteString, CompletionStage<List<List<String>>>> sink = Flow.<ByteString>create()
                // We split by the new line character, allowing a maximum of 1000 characters per line
                .via(Framing.delimiter(ByteString.fromString("\n"), 1000, FramingTruncation.ALLOW))
                // Turn each line to a String and split it by commas
                .map(bytes -> {
                    String[] values = bytes.utf8String().trim().split(",");
                    return Arrays.asList(values);
                })
                // Now we fold it into a list
                .toMat(Sink.<List<List<String>>, List<String>>fold(
                    new ArrayList<>(), (list, values) -> {
                        list.add(values);
                        return list;
                    }), Keep.right());

            // Convert the body to a Right either
            return Accumulator.fromSink(sink).map(F.Either::Right, executor);
        }
    }
    //#csv

    /** Parses "1,2\n3,4,foo\n5,6" and reads row 1, column 2 ("foo"). */
    @Test
    public void testCsv() {
        assertThat(contentAsString(call(new MockJavaAction(instanceOf(JavaHandlerComponents.class)) {
            @BodyParser.Of(CsvBodyParser.class)
            public Result uploadCsv() {
                String value = ((List<List<String>>) request().body().as(List.class)).get(1).get(2);
                return ok("Got: " + value);
            }
        }, fakeRequest().bodyText("1,2\n3,4,foo\n5,6"), mat)),
            equalTo("Got: foo"));
    }
}