gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.isis.core.metamodel.specloader.specimpl; import java.util.Arrays; import java.util.List; import com.google.common.collect.Lists; import org.apache.isis.applib.Identifier; import org.apache.isis.applib.annotation.Where; import org.apache.isis.applib.filter.Filter; import org.apache.isis.core.metamodel.adapter.ObjectAdapter; import org.apache.isis.core.metamodel.consent.Consent; import org.apache.isis.core.metamodel.consent.InteractionInitiatedBy; import org.apache.isis.core.metamodel.facetapi.Facet; import org.apache.isis.core.metamodel.facetapi.FacetHolder; import org.apache.isis.core.metamodel.facetapi.FacetHolderImpl; import org.apache.isis.core.metamodel.facetapi.FacetUtil; import org.apache.isis.core.metamodel.facetapi.FeatureType; import org.apache.isis.core.metamodel.facetapi.MultiTypedFacet; import org.apache.isis.core.metamodel.facets.FacetedMethodParameter; import org.apache.isis.core.metamodel.interactions.InteractionUtils; import org.apache.isis.core.metamodel.interactions.UsabilityContext; import org.apache.isis.core.metamodel.interactions.VisibilityContext; import org.apache.isis.core.metamodel.services.ServicesInjector; import 
org.apache.isis.core.metamodel.spec.ObjectSpecification;
import org.apache.isis.core.metamodel.spec.feature.ObjectAction;
import org.apache.isis.core.metamodel.spec.feature.ObjectActionParameter;

/**
 * Presents an action declared on a domain service as though it were an action of the
 * contributee (domain object) type: the service-action parameter at
 * {@code contributeeParam} is supplied implicitly by the contributee object and is
 * hidden from callers, so the visible parameter list is one shorter than the
 * underlying service action's.
 */
public class ObjectActionContributee extends ObjectActionDefault implements ContributeeMember2 {

    // The domain-service instance whose action is being contributed.
    private final Object servicePojo;

    // The underlying service action that every member operation delegates to.
    private final ObjectActionDefault serviceAction;

    // Index, within the service action's parameter list, of the parameter that is
    // filled in by the contributee object itself (and therefore hidden here).
    private final int contributeeParam;

    // The specification of the type this action appears to belong to.
    private final ObjectSpecification contributeeType;

    /**
     * Hold facets rather than delegate to the contributed action (different types might
     * use layout metadata to position the contributee in different ways)
     */
    private final FacetHolder facetHolder = new FacetHolderImpl();

    // Identifier rewritten so the member appears to belong to the contributee type,
    // not the contributing service; computed once in the constructor.
    private final Identifier identifier;

    public ObjectActionContributee(
            final Object servicePojo,
            final ObjectActionDefault serviceAction,
            final int contributeeParam,
            final ObjectSpecification contributeeType,
            final ServicesInjector servicesInjector) {
        super(serviceAction.getFacetedMethod(), servicesInjector);

        this.servicePojo = servicePojo;
        this.serviceAction = serviceAction;
        this.contributeeType = contributeeType;
        this.contributeeParam = contributeeParam;

        // copy over facets from contributed to own.
        FacetUtil.copyFacets(serviceAction.getFacetedMethod(), facetHolder);

        // calculate the identifier
        final Identifier contributorIdentifier = serviceAction.getFacetedMethod().getIdentifier();
        final String memberName = contributorIdentifier.getMemberName();
        List<String> memberParameterNames = contributorIdentifier.getMemberParameterNames();
        identifier = Identifier.actionIdentifier(getOnType().getCorrespondingClass().getName(), memberName, memberParameterNames);
    }

    @Override
    public ObjectSpecification getOnType() {
        return contributeeType;
    }

    // One fewer than the service action: the contributee parameter is implicit.
    public int getParameterCount() {
        return serviceAction.getParameterCount() - 1;
    }

    public int getContributeeParam() {
        return contributeeParam;
    }

    @Override
    public boolean isContributedBy(final ObjectAction serviceAction) {
        // Identity (==) rather than equals(): tests whether it is the very same
        // service-action instance this member wraps.
        return serviceAction == this.serviceAction;
    }

    @Override
    public int getContributeeParamPosition() {
        return contributeeParam;
    }

    /**
     * Builds the visible parameter list by wrapping each of the service action's
     * parameters except the one at {@link #contributeeParam}, which is omitted.
     * Synchronized because the unsynchronized caller may race on first access.
     */
    @Override
    protected synchronized List<ObjectActionParameter> determineParameters() {
        if (parameters != null) {
            // because possible race condition (caller isn't synchronized)
            return parameters;
        }
        final List<ObjectActionParameter> serviceParameters = serviceAction.getParameters();
        // NOTE(review): paramPeers is never read below — looks vestigial; confirm before removing.
        final List<FacetedMethodParameter> paramPeers = getFacetedMethod().getParameters();

        final List<ObjectActionParameter> contributeeParameters = Lists.newArrayList();

        // contributeeParamNum tracks the position in the *visible* (shortened) list.
        int contributeeParamNum = 0;
        for (int serviceParamNum = 0; serviceParamNum < serviceParameters.size(); serviceParamNum++ ) {
            if(serviceParamNum == contributeeParam) {
                // skip so is omitted from the Contributed action
                continue;
            }
            final ObjectActionParameterAbstract serviceParameter =
                    (ObjectActionParameterAbstract) serviceParameters.get(serviceParamNum);
            // Scalar parameters get a one-to-one wrapper; everything else one-to-many.
            final ObjectActionParameterContributee contributedParam =
                    serviceParameter.getPeer().getFeatureType() == FeatureType.ACTION_PARAMETER_SCALAR
                        ? new OneToOneActionParameterContributee( servicePojo, serviceParameter, contributeeParamNum, this)
                        : new OneToManyActionParameterContributee( servicePojo, serviceParameter, contributeeParamNum, this);
            contributeeParameters.add(contributedParam);
            contributeeParamNum++;
        }
        return contributeeParameters;
    }

    @Override
    public Consent isVisible(
            final ObjectAdapter contributee,
            final InteractionInitiatedBy interactionInitiatedBy,
            Where where) {
        // Visibility is evaluated against the backing *service* adapter, with the
        // contributee injected as the hidden argument.
        final VisibilityContext<?> ic = serviceAction.createVisibleInteractionContext(getServiceAdapter(), interactionInitiatedBy, where);
        ic.putContributee(this.contributeeParam, contributee);
        return InteractionUtils.isVisibleResult(this, ic).createConsent();
    }

    @Override
    public Consent isUsable(
            final ObjectAdapter contributee,
            final InteractionInitiatedBy interactionInitiatedBy,
            final Where where) {
        // Same pattern as isVisible(): delegate to the service, injecting the contributee.
        final UsabilityContext<?> ic = serviceAction.createUsableInteractionContext(getServiceAdapter(), interactionInitiatedBy, where);
        ic.putContributee(this.contributeeParam, contributee);
        return InteractionUtils.isUsableResult(this, ic).createConsent();
    }

    @Override
    public ObjectAdapter[] getDefaults(final ObjectAdapter target) {
        // Delegate, then drop the entry for the (hidden) contributee parameter.
        final ObjectAdapter[] contributorDefaults = serviceAction.getDefaults(getServiceAdapter());
        return removeElementFromArray(contributorDefaults, contributeeParam, new ObjectAdapter[]{});
    }

    @Override
    public ObjectAdapter[][] getChoices(
            final ObjectAdapter target,
            final InteractionInitiatedBy interactionInitiatedBy) {
        // Delegate, then drop the choices row for the (hidden) contributee parameter.
        final ObjectAdapter[][] serviceChoices = serviceAction.getChoices(getServiceAdapter(), interactionInitiatedBy);
        return removeElementFromArray(serviceChoices, contributeeParam, new ObjectAdapter[][]{});
    }

    @Override
    public Consent isProposedArgumentSetValid(
            final ObjectAdapter contributee,
            final ObjectAdapter[] proposedArguments,
            final InteractionInitiatedBy interactionInitiatedBy) {
        // Re-insert the contributee into the argument list before delegating.
        final ObjectAdapter[] serviceArguments = argsPlusContributee(contributee, proposedArguments);
        return serviceAction.isProposedArgumentSetValid(getServiceAdapter(), serviceArguments, interactionInitiatedBy);
    }

    @Override
    public Consent isEachIndividualArgumentValid(
            final ObjectAdapter contributee,
            final ObjectAdapter[] proposedArguments,
            final InteractionInitiatedBy interactionInitiatedBy) {
        final ObjectAdapter[] serviceArguments = argsPlusContributee(contributee, proposedArguments);
        return serviceAction.isEachIndividualArgumentValid(getServiceAdapter(), serviceArguments, interactionInitiatedBy);
    }

    @Override
    public Consent isArgumentSetValid(
            final ObjectAdapter contributee,
            final ObjectAdapter[] proposedArguments,
            final InteractionInitiatedBy interactionInitiatedBy) {
        final ObjectAdapter[] serviceArguments = argsPlusContributee(contributee, proposedArguments);
        return serviceAction.isArgumentSetValid(getServiceAdapter(), serviceArguments, interactionInitiatedBy);
    }

    @Override
    public ObjectAdapter execute(
            final ObjectAdapter targetAdapter,
            final ObjectAdapter mixedInAdapter,
            final ObjectAdapter[] argumentAdapters,
            final InteractionInitiatedBy interactionInitiatedBy) {

        setupCommand(targetAdapter, argumentAdapters);

        // The target becomes the hidden contributee argument; execution happens
        // on the backing service action.
        final ObjectAdapter[] serviceArguments = argsPlusContributee(targetAdapter, argumentAdapters);
        return serviceAction.executeInternal( getServiceAdapter(), mixedInAdapter, serviceArguments, interactionInitiatedBy);
    }

    // Rebuilds the full service-action argument array by inserting the contributee
    // back at its original position.
    private ObjectAdapter[] argsPlusContributee(final ObjectAdapter contributee, final ObjectAdapter[] arguments) {
        return addElementToArray(arguments, contributeeParam, contributee, new ObjectAdapter[]{});
    }

    // //////////////////////////////////////
    // FacetHolder
    // //////////////////////////////////////

    // All FacetHolder operations are served from this member's own facetHolder
    // (populated in the constructor), not from the contributed action — see the
    // comment on the facetHolder field.

    @Override
    public Class<? extends Facet>[] getFacetTypes() {
        return facetHolder.getFacetTypes();
    }

    @Override
    public <T extends Facet> T getFacet(Class<T> cls) {
        return facetHolder.getFacet(cls);
    }

    @Override
    public boolean containsFacet(Class<? extends Facet> facetType) {
        return facetHolder.containsFacet(facetType);
    }

    @Override
    public boolean containsDoOpFacet(java.lang.Class<? extends Facet> facetType) {
        return facetHolder.containsDoOpFacet(facetType);
    }

    @Override
    public List<Facet> getFacets(Filter<Facet> filter) {
        return facetHolder.getFacets(filter);
    }

    @Override
    public void addFacet(Facet facet) {
        facetHolder.addFacet(facet);
    }

    @Override
    public void addFacet(MultiTypedFacet facet) {
        facetHolder.addFacet(facet);
    }

    @Override
    public void removeFacet(Facet facet) {
        facetHolder.removeFacet(facet);
    }

    @Override
    public void removeFacet(Class<? extends Facet> facetType) {
        facetHolder.removeFacet(facetType);
    }

    // //////////////////////////////////////

    /* (non-Javadoc)
     * @see org.apache.isis.core.metamodel.specloader.specimpl.ObjectMemberAbstract#getIdentifier()
     */
    @Override
    public Identifier getIdentifier() {
        return identifier;
    }

    // //////////////////////////////////////

    // Returns a new array with element inserted at index n; the input array is untouched.
    static <T> T[] addElementToArray(T[] array, final int n, final T element, final T[] type) {
        List<T> list = Lists.newArrayList(Arrays.asList(array));
        list.add(n, element);
        return list.toArray(type);
    }

    // Returns a new array with the element at index n removed; the input array is untouched.
    static <T> T[] removeElementFromArray(T[] array, int n, T[] t) {
        List<T> list = Lists.newArrayList(Arrays.asList(array));
        list.remove(n);
        return list.toArray(t);
    }

    // Adapter for the backing service pojo; resolved on each call rather than cached.
    public ObjectAdapter getServiceAdapter() {
        return getPersistenceSessionService().adapterFor(servicePojo);
    }

    @Override
    public ObjectSpecification getServiceContributedBy() {
        return getServiceAdapter().getSpecification();
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapreduce.lib.output; import java.io.IOException; import java.text.NumberFormat; import com.google.common.base.Preconditions; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.compress.CompressionCodec; import org.apache.hadoop.mapred.FileAlreadyExistsException; import org.apache.hadoop.mapred.InvalidJobConfException; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.OutputCommitter; import org.apache.hadoop.mapreduce.OutputFormat; import org.apache.hadoop.mapreduce.RecordWriter; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.TaskID; import org.apache.hadoop.mapreduce.TaskInputOutputContext; import org.apache.hadoop.mapreduce.security.TokenCache; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** A base class for {@link OutputFormat}s that read from {@link FileSystem}s.*/ @InterfaceAudience.Public @InterfaceStability.Stable public 
abstract class FileOutputFormat<K, V> extends OutputFormat<K, V> {

    private static final Logger LOG = LoggerFactory.getLogger(FileOutputFormat.class);

    /** Construct output file names so that, when an output directory listing is
     * sorted lexicographically, positions correspond to output partitions.*/
    private static final NumberFormat NUMBER_FORMAT = NumberFormat.getInstance();

    protected static final String BASE_OUTPUT_NAME = "mapreduce.output.basename";
    protected static final String PART = "part";

    static {
        // Zero-pad partition numbers to five digits so lexicographic order matches
        // numeric order, and disable grouping separators (e.g. "00,001").
        NUMBER_FORMAT.setMinimumIntegerDigits(5);
        NUMBER_FORMAT.setGroupingUsed(false);
    }

    // Lazily created by getOutputCommitter(); one committer per format instance.
    private PathOutputCommitter committer = null;

    /** Configuration option: should output be compressed? {@value}. */
    public static final String COMPRESS = "mapreduce.output.fileoutputformat.compress";

    /** If compression is enabled, name of codec: {@value}. */
    public static final String COMPRESS_CODEC = "mapreduce.output.fileoutputformat.compress.codec";

    /**
     * Type of compression {@value}: NONE, RECORD, BLOCK.
     * Generally only used in {@code SequenceFileOutputFormat}.
     */
    public static final String COMPRESS_TYPE = "mapreduce.output.fileoutputformat.compress.type";

    /** Destination directory of work: {@value}. */
    public static final String OUTDIR = "mapreduce.output.fileoutputformat.outputdir";

    @Deprecated
    public enum Counter {
        BYTES_WRITTEN
    }

    /**
     * Set whether the output of the job is compressed.
     * @param job the job to modify
     * @param compress should the output of the job be compressed?
     */
    public static void setCompressOutput(Job job, boolean compress) {
        job.getConfiguration().setBoolean(FileOutputFormat.COMPRESS, compress);
    }

    /**
     * Is the job output compressed?
     * @param job the Job to look in
     * @return <code>true</code> if the job output should be compressed,
     *         <code>false</code> otherwise
     */
    public static boolean getCompressOutput(JobContext job) {
        return job.getConfiguration().getBoolean(FileOutputFormat.COMPRESS, false);
    }

    /**
     * Set the {@link CompressionCodec} to be used to compress job outputs.
     * @param job the job to modify
     * @param codecClass the {@link CompressionCodec} to be used to
     *                   compress the job outputs
     */
    public static void setOutputCompressorClass(Job job, Class<? extends CompressionCodec> codecClass) {
        // Specifying a codec implies compression should be enabled.
        setCompressOutput(job, true);
        job.getConfiguration().setClass(FileOutputFormat.COMPRESS_CODEC, codecClass, CompressionCodec.class);
    }

    /**
     * Get the {@link CompressionCodec} for compressing the job outputs.
     * @param job the {@link Job} to look in
     * @param defaultValue the {@link CompressionCodec} to return if not set
     * @return the {@link CompressionCodec} to be used to compress the
     *         job outputs
     * @throws IllegalArgumentException if the class was specified, but not found
     */
    public static Class<? extends CompressionCodec> getOutputCompressorClass(JobContext job,
        Class<? extends CompressionCodec> defaultValue) {
        Class<? extends CompressionCodec> codecClass = defaultValue;
        Configuration conf = job.getConfiguration();
        String name = conf.get(FileOutputFormat.COMPRESS_CODEC);
        if (name != null) {
            try {
                codecClass = conf.getClassByName(name).asSubclass(CompressionCodec.class);
            } catch (ClassNotFoundException e) {
                throw new IllegalArgumentException("Compression codec " + name + " was not found.", e);
            }
        }
        return codecClass;
    }

    public abstract RecordWriter<K, V> getRecordWriter(TaskAttemptContext job
                                                       ) throws IOException, InterruptedException;

    public void checkOutputSpecs(JobContext job
                                 ) throws FileAlreadyExistsException, IOException{
        // Ensure that the output directory is set and not already there
        Path outDir = getOutputPath(job);
        if (outDir == null) {
            throw new InvalidJobConfException("Output directory not set.");
        }

        // get delegation token for outDir's file system
        TokenCache.obtainTokensForNamenodes(job.getCredentials(),
            new Path[] { outDir }, job.getConfiguration());

        if (outDir.getFileSystem(job.getConfiguration()).exists(outDir)) {
            throw new FileAlreadyExistsException("Output directory " + outDir + " already exists");
        }
    }

    /**
     * Set the {@link Path} of the output directory for the map-reduce job.
     *
     * @param job The job to modify
     * @param outputDir the {@link Path} of the output directory for
     *                  the map-reduce job.
     */
    public static void setOutputPath(Job job, Path outputDir) {
        try {
            // Qualify against the directory's own file system so the stored string
            // is unambiguous regardless of the default FS.
            outputDir = outputDir.getFileSystem(job.getConfiguration()).makeQualified(outputDir);
        } catch (IOException e) {
            // Throw the IOException as a RuntimeException to be compatible with MR1
            throw new RuntimeException(e);
        }
        job.getConfiguration().set(FileOutputFormat.OUTDIR, outputDir.toString());
    }

    /**
     * Get the {@link Path} to the output directory for the map-reduce job.
     *
     * @return the {@link Path} to the output directory for the map-reduce job.
     * @see FileOutputFormat#getWorkOutputPath(TaskInputOutputContext)
     */
    public static Path getOutputPath(JobContext job) {
        String name = job.getConfiguration().get(FileOutputFormat.OUTDIR);
        return name == null ? null: new Path(name);
    }

    /**
     * Get the {@link Path} to the task's temporary output directory
     * for the map-reduce job
     *
     * <b id="SideEffectFiles">Tasks' Side-Effect Files</b>
     *
     * <p>Some applications need to create/write-to side-files, which differ from
     * the actual job-outputs.
     *
     * <p>In such cases there could be issues with 2 instances of the same TIP
     * (running simultaneously e.g. speculative tasks) trying to open/write-to the
     * same file (path) on HDFS. Hence the application-writer will have to pick
     * unique names per task-attempt (e.g. using the attemptid, say
     * <tt>attempt_200709221812_0001_m_000000_0</tt>), not just per TIP.</p>
     *
     * <p>To get around this the Map-Reduce framework helps the application-writer
     * out by maintaining a special
     * <tt>${mapreduce.output.fileoutputformat.outputdir}/_temporary/_${taskid}</tt>
     * sub-directory for each task-attempt on HDFS where the output of the
     * task-attempt goes. On successful completion of the task-attempt the files
     * in the <tt>${mapreduce.output.fileoutputformat.outputdir}/_temporary/_${taskid}</tt> (only)
     * are <i>promoted</i> to <tt>${mapreduce.output.fileoutputformat.outputdir}</tt>. Of course, the
     * framework discards the sub-directory of unsuccessful task-attempts. This
     * is completely transparent to the application.</p>
     *
     * <p>The application-writer can take advantage of this by creating any
     * side-files required in a work directory during execution
     * of his task i.e. via
     * {@link #getWorkOutputPath(TaskInputOutputContext)}, and
     * the framework will move them out similarly - thus she doesn't have to pick
     * unique paths per task-attempt.</p>
     *
     * <p>The entire discussion holds true for maps of jobs with
     * reducer=NONE (i.e. 0 reduces) since output of the map, in that case,
     * goes directly to HDFS.</p>
     *
     * @return the {@link Path} to the task's temporary output directory
     * for the map-reduce job.
     */
    public static Path getWorkOutputPath(TaskInputOutputContext<?,?,?,?> context
                                         ) throws IOException, InterruptedException {
        PathOutputCommitter committer = (PathOutputCommitter) context.getOutputCommitter();
        Path workPath = committer.getWorkPath();
        LOG.debug("Work path is {}", workPath);
        return workPath;
    }

    /**
     * Helper function to generate a {@link Path} for a file that is unique for
     * the task within the job output directory.
     *
     * <p>The path can be used to create custom files from within the map and
     * reduce tasks. The path name will be unique for each task. The path parent
     * will be the job output directory.</p>
     *
     * <p>This method uses the {@link #getUniqueFile} method to make the file name
     * unique for the task.</p>
     *
     * @param context the context for the task.
     * @param name the name for the file.
     * @param extension the extension for the file
     * @return a unique path across all tasks of the job.
     */
    public static Path getPathForWorkFile(TaskInputOutputContext<?,?,?,?> context,
                                          String name,
                                          String extension
                                          ) throws IOException, InterruptedException {
        return new Path(getWorkOutputPath(context), getUniqueFile(context, name, extension));
    }

    /**
     * Generate a unique filename, based on the task id, name, and extension
     * @param context the task that is calling this
     * @param name the base filename
     * @param extension the filename extension
     * @return a string like $name-[mrsct]-$id$extension
     */
    // synchronized: guards use of the shared static NUMBER_FORMAT below, which is
    // mutated/read here (presumably not thread-safe — standard NumberFormat caveat).
    public synchronized static String getUniqueFile(TaskAttemptContext context,
                                                    String name,
                                                    String extension) {
        TaskID taskId = context.getTaskAttemptID().getTaskID();
        int partition = taskId.getId();
        StringBuilder result = new StringBuilder();
        result.append(name);
        result.append('-');
        // Single character encoding the task type (e.g. map/reduce).
        result.append(TaskID.getRepresentingCharacter(taskId.getTaskType()));
        result.append('-');
        result.append(NUMBER_FORMAT.format(partition));
        result.append(extension);
        return result.toString();
    }

    /**
     * Get the default path and filename for the output format.
     * @param context the task context
     * @param extension an extension to add to the filename
     * @return a full path $output/_temporary/$taskid/part-[mr]-$id
     * @throws IOException
     */
    public Path getDefaultWorkFile(TaskAttemptContext context,
                                   String extension) throws IOException{
        OutputCommitter c = getOutputCommitter(context);
        // The default work-file scheme only makes sense for path-based committers.
        Preconditions.checkState(c instanceof PathOutputCommitter,
            "Committer %s is not a PathOutputCommitter", c);
        Path workPath = ((PathOutputCommitter) c).getWorkPath();
        Preconditions.checkNotNull(workPath,
            "Null workPath returned by committer %s", c);
        Path workFile = new Path(workPath,
            getUniqueFile(context, getOutputName(context), extension));
        LOG.debug("Work file for {} extension '{}' is {}",
            context, extension, workFile);
        return workFile;
    }

    /**
     * Get the base output name for the output file.
     */
    protected static String getOutputName(JobContext job) {
        // Falls back to "part" when no base name has been configured.
        return job.getConfiguration().get(BASE_OUTPUT_NAME, PART);
    }

    /**
     * Set the base output name for output file to be created.
     */
    protected static void setOutputName(JobContext job, String name) {
        job.getConfiguration().set(BASE_OUTPUT_NAME, name);
    }

    // Lazily creates and caches the committer; synchronized so concurrent callers
    // observe a single instance.
    public synchronized OutputCommitter getOutputCommitter(TaskAttemptContext context
                                                           ) throws IOException {
        if (committer == null) {
            Path output = getOutputPath(context);
            committer = new FileOutputCommitter(output, context);
        }
        return committer;
    }
}
package org.apache.hawq.pxf.plugins.ignite; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import org.apache.hawq.pxf.api.OneRow; import org.apache.hawq.pxf.api.ReadAccessor; import org.apache.hawq.pxf.api.WriteAccessor; import org.apache.hawq.pxf.api.UserDataException; import org.apache.hawq.pxf.api.utilities.ColumnDescriptor; import org.apache.hawq.pxf.api.utilities.InputData; import org.apache.hawq.pxf.plugins.ignite.IgnitePlugin; import java.util.ArrayList; import java.util.List; import java.util.Iterator; import java.util.LinkedList; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.io.InputStreamReader; import java.io.BufferedReader; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URL; import java.net.URLEncoder; import java.net.MalformedURLException; import java.net.ProtocolException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import com.google.gson.JsonParser; import com.google.gson.JsonElement; import com.google.gson.JsonArray; /** * PXF-Ignite accessor class */ public class IgniteAccessor extends IgnitePlugin implements ReadAccessor, WriteAccessor { /** * Class constructor */ public IgniteAccessor(InputData inputData) throws 
UserDataException { super(inputData); } /** * openForRead() implementation */ @Override public boolean openForRead() throws Exception { if (bufferSize == 0) { bufferSize = 1; } StringBuilder sb = new StringBuilder(); // Insert a list of fields to be selected ArrayList<ColumnDescriptor> columns = inputData.getTupleDescription(); if (columns == null) { throw new UserDataException("Tuple description must be present."); } sb.append("SELECT "); for (int i = 0; i < columns.size(); i++) { ColumnDescriptor column = columns.get(i); if (i > 0) { sb.append(", "); } sb.append(column.columnName()); } // Insert the name of the table to select values from sb.append(" FROM "); String tableName = inputData.getDataSource(); if (tableName == null) { throw new UserDataException("Table name must be set as DataSource."); } sb.append(tableName); // Insert query constraints // Note: Filter constants may be passed to Ignite separately from the WHERE expression, primarily for the safety of the SQL queries. However, at the moment they are passed in the query. ArrayList<String> filterConstants = null; if (inputData.hasFilter()) { WhereSQLBuilder filterBuilder = new WhereSQLBuilder(inputData); String whereSql = filterBuilder.buildWhereSQL(); if (whereSql != null) { sb.append(" WHERE ").append(whereSql); } } // Insert partition constraints IgnitePartitionFragmenter.buildFragmenterSql(inputData, sb); // Format URL urlReadStart = buildQueryFldexe(sb.toString(), filterConstants); // Send the first REST request that opens the connection JsonElement response = sendRestRequest(urlReadStart); // Build 'urlReadFetch' and 'urlReadClose' isLastReadFinished = response.getAsJsonObject().get("last").getAsBoolean(); urlReadFetch = buildQueryFetch(response.getAsJsonObject().get("queryId").getAsInt()); urlReadClose = buildQueryCls(response.getAsJsonObject().get("queryId").getAsInt()); if (LOG.isDebugEnabled()) { LOG.debug("Ignite read request. 
URL: '" + urlReadStart + "'"); } return true; } /** * readNextObject() implementation */ @Override public OneRow readNextObject() throws Exception { if (urlReadFetch == null) { LOG.error("readNextObject(): urlReadFetch is null. This means the Ignite qryfldexe query was not executed properly"); throw new ProtocolException("readNextObject(): urlReadFetch is null. This means the Ignite qryfldexe query was not executed properly"); } if (bufferRead.isEmpty()) { // Refill buffer if (isLastReadFinished) { if (LOG.isDebugEnabled()) { LOG.debug("readNextObject(): All the data received from Ignite"); } return null; } JsonElement response = sendRestRequest(urlReadFetch); isLastReadFinished = response.getAsJsonObject().get("last").getAsBoolean(); // Parse 'items' Iterator<JsonElement> itemsIterator = response.getAsJsonObject().get("items").getAsJsonArray().iterator(); while (itemsIterator.hasNext()) { if (!bufferRead.add(itemsIterator.next().getAsJsonArray())) { throw new IOException("readNextObject(): not enough memory in 'bufferRead'"); } } // Check again in case "response" contains no elements if (bufferRead.isEmpty()) { if (LOG.isDebugEnabled()) { LOG.debug("readNextObject(): Buffer refill failed"); LOG.debug("readNextObject(): All the data received from Ignite"); } return null; } } return new OneRow(bufferRead.pollFirst()); } /** * closeForRead() implementation */ @Override public void closeForRead() { if (urlReadClose != null) { try { sendRestRequest(urlReadClose); } catch (Exception e) { if (LOG.isDebugEnabled()) { LOG.debug("closeForRead() Exception: " + e.getClass().getSimpleName()); } } } isLastReadFinished = false; if (LOG.isDebugEnabled()) { LOG.debug("Ignite read request finished. URL: '" + urlReadClose + "'"); } } /** * openForWrite() implementation. 
* No queries are sent to Ignite by this procedure, so if there are some problems (for example, with connection), they will be revealed only during the execution of 'writeNextObject()' */ @Override public boolean openForWrite() throws UserDataException { // This is a temporary solution. At the moment there is no other way (except for the usage of user-defined parameters) to get the correct name of Ignite table: GPDB inserts extra data into the address, as required by Hadoop. // Note that if no extra data is present, the 'definedSource' will be left unchanged String definedSource = inputData.getDataSource(); Matcher matcher = writeAddressPattern.matcher(definedSource); if (matcher.find()) { inputData.setDataSource(matcher.group(1)); } StringBuilder sb = new StringBuilder(); sb.append("INSERT INTO "); // Insert the table name String tableName = inputData.getDataSource(); if (tableName == null) { throw new UserDataException("Table name must be set as DataSource."); } sb.append(tableName); // Insert the column names sb.append("("); ArrayList<ColumnDescriptor> columns = inputData.getTupleDescription(); if (columns == null) { throw new UserDataException("Tuple description must be present."); } String fieldDivisor = ""; for (int i = 0; i < columns.size(); i++) { sb.append(fieldDivisor); fieldDivisor = ", "; sb.append(columns.get(i).columnName()); } sb.append(")"); sb.append(" VALUES "); queryWrite = sb.toString(); return true; } /** * writeNextObject() implementation */ @Override public boolean writeNextObject(OneRow currentRow) throws Exception { boolean currentRowInBuffer = bufferWrite.add(currentRow); if (!isWriteActive) { if (!currentRowInBuffer) { LOG.error("writeNextObject(): Failed (not enough memory in 'bufferWrite')"); throw new IOException("writeNextObject(): not enough memory in 'bufferWrite'"); } LOG.info("Ignite write request. 
Query: '" + queryWrite + "'"); sendInsertRestRequest(queryWrite); isWriteActive = true; return true; } if ((bufferWrite.size() >= bufferSize) || (!currentRowInBuffer)) { sendInsertRestRequest(queryWrite); } if (!currentRowInBuffer) { if (!bufferWrite.add(currentRow)) { LOG.error("writeNextObject(): Failed (not enough memory in 'bufferSend')"); throw new IOException("writeNextObject(): not enough memory in 'bufferSend'"); } } return true; } /** * closeForWrite() implementation */ @Override public void closeForWrite() throws Exception { isWriteActive = false; if (!bufferWrite.isEmpty()) { sendInsertRestRequest(queryWrite); } if (isWriteActive) { // At this point, the request must have finished successfully LOG.info("Ignite write request finished successfully. Query: '" + queryWrite + "'"); } } private static final Log LOG = LogFactory.getLog(IgniteAccessor.class); // A pattern to cut extra parameters from 'InputData.dataSource' when write operation is performed. See {@link openForWrite()} for the details private static final Pattern writeAddressPattern = Pattern.compile("/(.*)/[0-9]*-[0-9]*_[0-9]*"); // Prepared URLs to send to Ignite when reading data private String urlReadStart = null; private String urlReadFetch = null; private String urlReadClose = null; // Set to true when Ignite reported all the data for the SELECT query was retreived private boolean isLastReadFinished = false; // A buffer to store the SELECT query results (without Ignite metadata) private LinkedList<JsonArray> bufferRead = new LinkedList<JsonArray>(); // A template for the INSERT private String queryWrite = null; // Set to true when the INSERT operation is in progress private boolean isWriteActive = false; // A buffer to store prepared values for the INSERT query private LinkedList<OneRow> bufferWrite = new LinkedList<OneRow>(); /** * Build HTTP GET query for Ignite REST API with command 'qryfldexe' * * @param querySql SQL query * @param filterConstants A list of Constraints' constants. 
Must be null in this version. * * @return Prepared HTTP query. The query will be properly encoded with {@link java.net.URLEncoder} * * @throws UnsupportedEncodingException from {@link java.net.URLEncoder.encode()} */ private String buildQueryFldexe(String querySql, List<String> filterConstants) throws UnsupportedEncodingException { StringBuilder sb = new StringBuilder(); sb.append("http://"); sb.append(igniteHost); sb.append("/ignite"); sb.append("?"); sb.append("cmd=qryfldexe"); sb.append("&"); sb.append("pageSize=0"); sb.append("&"); if (cacheName != null) { sb.append("cacheName="); // Note that Ignite supports only "good" cache names (those that will be left unchanged by the URLEncoder.encode()) sb.append(URLEncoder.encode(cacheName, "UTF-8")); sb.append("&"); } /* 'filterConstants' must always be null in the current version. This code allows to pass filters' constants separately from the filters' expressions. This feature is supported by Ignite database; however, it is not implemented in PXF Ignite plugin at the moment. To implement this, changes should be made in {@link WhereSQLBuilder} (form SQL query without filter constants) and {@link IgnitePartitionFragmenter} (form partition constraints the similar way). 
*/ int counter = 1; if (filterConstants != null) { for (String constant : filterConstants) { sb.append("arg"); sb.append(counter); sb.append("="); sb.append(URLEncoder.encode(constant, "UTF-8")); sb.append("&"); counter += 1; } } sb.append("qry="); sb.append(URLEncoder.encode(querySql, "UTF-8")); return sb.toString(); } /** * Build HTTP GET query for Ignite REST API with command 'qryfetch' * This query is used to retrieve data after the 'qryfldexe' command started * * @param queryId ID of the query assigned by Ignite when the query started * * @return Prepared HTTP query */ private String buildQueryFetch(int queryId) { StringBuilder sb = new StringBuilder(); sb.append("http://"); sb.append(igniteHost); sb.append("/ignite"); sb.append("?"); sb.append("cmd=qryfetch"); sb.append("&"); sb.append("pageSize="); sb.append(bufferSize); sb.append("&"); sb.append("qryId="); sb.append(queryId); return sb.toString(); } /** * Build HTTP GET query for Ignite REST API with command 'qrycls' * This query is used to close query resources on Ignite side * * @param queryId ID of the query assigned by Ignite when the query started * * @return Prepared HTTP query */ private String buildQueryCls(int queryId) { StringBuilder sb = new StringBuilder(); sb.append("http://"); sb.append(igniteHost); sb.append("/ignite"); sb.append("?"); sb.append("cmd=qrycls"); sb.append("&"); sb.append("qryId="); sb.append(queryId); return sb.toString(); } /** * Send a REST request to the Ignite server * * @param query A prepared and properly encoded HTTP GET request * * @return "response" field from the received JSON object * (See Ignite REST API documentation for details) * * @throws ProtocolException if Ignite reports error in it's JSON response * @throws MalformedURLException if URL is malformed * @throws IOException in case of connection failure */ private JsonElement sendRestRequest(String query) throws ProtocolException, MalformedURLException, IOException { // Create URL object. 
This operation may throw 'MalformedURLException' URL url = new URL(query); // Connect to the Ignite server, send query and get raw response BufferedReader reader = null; String responseRaw = null; try { StringBuilder sb = new StringBuilder(); reader = new BufferedReader(new InputStreamReader(url.openStream())); String responseLine; while ((responseLine = reader.readLine()) != null) { sb.append(responseLine); } responseRaw = sb.toString(); if (LOG.isDebugEnabled()) { LOG.debug("sendRestRequest(): URL: '" + query + "'; Result: '" + responseRaw + "'"); } } catch (Exception e) { LOG.error("sendRestRequest(): Failed (connection failure). URL is '" + query + "'"); throw e; } finally { if (reader != null) { reader.close(); } // if 'reader' is null, an exception must have been thrown } // Parse raw Ignite server response JsonElement response = null; String error = null; int successStatus; try { response = new JsonParser().parse(responseRaw); if (!response.getAsJsonObject().get("error").isJsonNull()) { error = response.getAsJsonObject().get("error").getAsString(); } successStatus = response.getAsJsonObject().get("successStatus").getAsInt(); } catch (Exception e) { LOG.error("sendRestRequest(): Failed (JSON parsing failure). URL is '" + query + "'"); throw e; } // Check errors reported by Ignite if ((error != null) || (successStatus != 0)) { LOG.error("sendRestRequest(): Failed (failure on Ignite side: '" + error + "'). URL is '" + query + "'"); throw new ProtocolException("Ignite failure: status " + successStatus + ", '" + error + "'"); } // Return response without metadata try { return response.getAsJsonObject().get("response"); } catch (Exception e) { LOG.error("sendRestRequest(): Failed (JSON parsing failure). URL is '" + query + "'"); throw e; } } /** * Send an INSERT REST request to the Ignite server. * * Note that * * The {@link sendRestRequest()} is used to handle network operations, thus all its exceptions may be thrown. 
They are: * @throws ProtocolException if Ignite reports error in it's JSON response * @throws MalformedURLException if URL is malformed * @throws IOException in case of connection failure */ private void sendInsertRestRequest(String query) throws ProtocolException, MalformedURLException, IOException { if (query == null) { LOG.error("sendInsertRestRequest(): Failed (malformed URL). URL is null"); throw new MalformedURLException("sendInsertRestRequest(): query is null"); } if (bufferWrite.isEmpty()) { return; } StringBuilder sb = new StringBuilder(query); String fieldDivisor = ""; for (OneRow row : bufferWrite) { sb.append(fieldDivisor); fieldDivisor = ", "; sb.append((String)row.getData()); } bufferWrite.clear(); // Send REST request 'qryfldexe' to Ignite JsonElement response = sendRestRequest(buildQueryFldexe(sb.toString(), null)); // Close the request immediately sendRestRequest(buildQueryCls(response.getAsJsonObject().get("queryId").getAsInt())); } }
package inpro.irmrsc.simplepcfg;

import inpro.irmrsc.simplepcfg.Production;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;

import java.io.*;
import java.net.URL;

import org.jdom.*;
import org.jdom.input.*;

// TODO:
// - it is possible to load a grammar into an already existing grammar.
//   i am not sure whether this is actually useful or rather harmful.
// - rule probabilities can still be arbitrary. at some time this should be
//   consistently and mathematically grounded? .. but why should the grammar check?
// - compute leftcorner set to reduce search space

/**
 * A simple probabilistic context-free grammar.
 * @author Andreas Peldszus
 */
public class Grammar {

	// symbol sets and the expand-relation below are (re)derived from mProductions by update()
	private Set<Symbol> mTerminals;
	private Set<Symbol> mNonTerminals;
	private Symbol mStart;
	private Symbol mEnd;
	private Map<String, Production> mProductions;
	// maps a nonterminal to the IDs of all productions whose LHS it is
	private Map<Symbol, ArrayList<String>> mExpandsRelation;
	// symbols that can derive the empty string (plus the end symbol)
	private Set<Symbol> mEliminable;

	public Grammar() {
		mProductions = new HashMap<String, Production>();
		this.update();
	}

	/**
	 * builds a new production and adds it to the grammar. Make sure to {@link #update()} before using an altered grammar.
	 */
	public void addProduction(String id, Symbol LHS, List<Symbol> RHS, double Prob) {
		Production p = new Production(id, LHS, RHS, Prob);
		this.addProduction(id, p);
	}

	/**
	 * adds a new production to the grammar. Make sure to {@link #update()} before using an altered grammar.
	 */
	public void addProduction(String id, Production p) {
		mProductions.put(id, p);
		//TODO: warn if id != p.id
		//this.update();
	}

	public Symbol getStart() {
		return mStart;
	}

	public void setStart(Symbol sym) {
		if (sym != null) {
			mStart = sym;
			this.update();
		}
	}

	public Symbol getEnd() {
		return mEnd;
	}

	public boolean isTerminalSymbol(Symbol sym) {
		return mTerminals.contains(sym);
	}

	public boolean isEliminable(Symbol sym) {
		// the end symbol is always eliminable
		if (sym.equals(mEnd)) {
			return true;
		}
		return mEliminable.contains(sym);
	}

	/**
	 * @return a list of IDs of {@link Production}s expanding the specified symbol,
	 *         or null if no production expands it
	 */
	public List<String> getProductionsExpandingSymbol(Symbol sym) {
		return mExpandsRelation.get(sym);
	}

	public Production getProduction(String id) {
		return mProductions.get(id);
	}

	/**
	 * recomputes the symbols sets (terminal, nonterminal, and eliminable symbols) and
	 * the expand-relation. This is necessary after the grammar was altered, as e.g. by
	 * adding a new production. Using an altered but not yet updated grammar will result
	 * in unexpected behaviour.
	 */
	public void update() {
		TreeSet<Symbol> symbols = new TreeSet<Symbol>();
		mTerminals = new TreeSet<Symbol>();
		mNonTerminals = new TreeSet<Symbol>();
		mEliminable = new TreeSet<Symbol>();
		mExpandsRelation = new HashMap<Symbol, ArrayList<String>>();

		// add start symbol
		if (mStart != null) {
			mNonTerminals.add(mStart);
		}

		// add end symbol
		if (mEnd != null) {
			mTerminals.add(mEnd);
			mEliminable.add(mEnd);
		}

		// for each production, add LHSs to NonTerminals and RHSs to symbol
		for (Map.Entry<String,Production> e : mProductions.entrySet()) {
			String id = e.getKey();
			Production p = e.getValue();
			Symbol LHS = p.getLHS();
			if (mExpandsRelation.containsKey(LHS)) {
				mExpandsRelation.get(LHS).add(id);
			} else {
				ArrayList<String> l = new ArrayList<String>();
				l.add(id);
				mExpandsRelation.put(LHS, l);
			}
			mNonTerminals.add(LHS);
			// a production with an empty RHS makes its LHS eliminable
			if (p.getRHS().isEmpty()) {
				mEliminable.add(LHS);
			} else {
				for (Symbol sym : p.getRHS()) symbols.add(sym);
			}
		}

		// compute terminals: every RHS symbol that is never expanded
		for (Symbol sym : symbols) {
			if (! (mNonTerminals.contains(sym))) {
				mTerminals.add(sym);
			}
		}
	}

	public void info() { // NO_UCD (unused code): debug method that might be useful in the future
		System.out.println("Start: "+mStart);
		System.out.println("Terminals: "+mTerminals);
		System.out.println("NonTerminals: "+mNonTerminals);
		System.out.println("ExpandRel: "+mExpandsRelation);
		System.out.println("Eliminable: "+mEliminable);
		// print productions
		for (Production p : mProductions.values()) {
			System.out.println(p);
		}
	}

	private boolean hasProductionWithID(String id) {
		// containsKey avoids materializing the key set view
		return mProductions.containsKey(id);
	}

	/**
	 * loads a grammar from a xml specification provided at the url
	 * @param url the specified url of the xml to load
	 */
	@SuppressWarnings("unchecked")
	public void loadXML(URL url) {
		String filename = url.toString();
		try {
			SAXBuilder builder = new SAXBuilder();
			Document doc = builder.build(url);
			Element root = doc.getRootElement();
			// BUGFIX: compare String content with equals(), not reference identity (==)
			if ("simplecfggrammar".equals(root.getName())) {
				mStart = new Symbol(root.getAttributeValue("start"));
				mEnd = new Symbol(root.getAttributeValue("end"));
				// read rules
				for (Object child : root.getChildren()) {
					String id = ((Element)child).getAttributeValue("id");
					if (this.hasProductionWithID(id)) {
						System.out.println("Grammar already has a production with id '"+id+"'. Skipping.");
						continue;
					}
					double prob = Double.parseDouble(((Element)child).getAttributeValue("prob"));
					List<Element> rulesyms = new ArrayList<Element>(((Element) child).getChild("syntax").getChildren());
					boolean firstsym = true;
					Symbol lhs = null;
					ArrayList<Symbol> rhs = new ArrayList<Symbol>();
					for (Element sym : rulesyms) {
						// the first symbol is the lhs, remainings are on the rhs
						if (firstsym) {
							lhs = new Symbol(sym.getTextTrim());
							firstsym = false;
						} else {
							rhs.add(new Symbol(sym.getTextTrim()));
						}
					}
					this.addProduction(id, lhs, rhs, prob);
				}
			} else {
				System.out.println("Grammar file '"+filename+"' does not specify a simplecfggrammar.");
			}
		} catch (IOException e) {
			System.out.println("Grammar file '"+filename+"' was not found.");
		} catch (JDOMException e) {
			// BUGFIX: typo "prase" -> "parse" in the user-facing message
			System.out.println("Could not parse grammar file '"+filename+"':\n"+e);
		}
		this.update();
	}
}
/**
 * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.strata.collect.timeseries;

import static org.testng.Assert.assertEquals;

import java.time.LocalDate;
import java.util.Arrays;
import java.util.Collection;
import java.util.OptionalDouble;

import org.testng.annotations.Test;

import com.google.common.primitives.Doubles;

/**
 * Test LocalDateDoubleTimeSeriesBuilder.
 */
@Test
public class LocalDateDoubleTimeSeriesBuilderTest {

  // An empty builder must build the canonical empty series instance
  @Test
  public void test_buildEmptySeries() {
    assertEquals(LocalDateDoubleTimeSeries.builder().build(), LocalDateDoubleTimeSeries.empty());
  }

  //-------------------------------------------------------------------------
  // get() returns the value for present dates and an empty OptionalDouble otherwise,
  // regardless of the insertion order
  public void test_get() {
    LocalDateDoubleTimeSeriesBuilder test = LocalDateDoubleTimeSeries.builder()
        .put(date(2014, 1, 1), 14)
        .put(date(2012, 1, 1), 12)
        .put(date(2013, 1, 1), 13);
    assertEquals(test.get(date(2012, 1, 1)), OptionalDouble.of(12d));
    assertEquals(test.get(date(2013, 1, 1)), OptionalDouble.of(13d));
    assertEquals(test.get(date(2014, 1, 1)), OptionalDouble.of(14d));
    assertEquals(test.get(date(2015, 1, 1)), OptionalDouble.empty());
  }

  //-------------------------------------------------------------------------
  // merge() combines an existing value with the new one via the supplied operator
  public void test_merge_dateValue() {
    LocalDateDoubleTimeSeriesBuilder test = LocalDateDoubleTimeSeries.builder();
    test.put(date(2013, 1, 1), 2d);
    test.merge(date(2013, 1, 1), 3d, Double::sum);
    assertEquals(test.get(date(2013, 1, 1)), OptionalDouble.of(5d));
  }

  // same as above, but supplying the date/value as a point
  public void test_merge_point() {
    LocalDateDoubleTimeSeriesBuilder test = LocalDateDoubleTimeSeries.builder();
    test.put(date(2013, 1, 1), 2d);
    test.merge(LocalDateDoublePoint.of(date(2013, 1, 1), 3d), Double::sum);
    assertEquals(test.get(date(2013, 1, 1)), OptionalDouble.of(5d));
  }

  //-------------------------------------------------------------------------
  // putAll() pairs up parallel date/value collections
  public void test_putAll_collections() {
    Collection<LocalDate> dates = Arrays.asList(date(2013, 1, 1), date(2014, 1, 1));
    Collection<Double> values = Doubles.asList(2d, 3d);
    LocalDateDoubleTimeSeriesBuilder test = LocalDateDoubleTimeSeries.builder();
    test.putAll(dates, values);
    assertEquals(test.get(date(2013, 1, 1)), OptionalDouble.of(2d));
    assertEquals(test.get(date(2014, 1, 1)), OptionalDouble.of(3d));
  }

  // putAll() also accepts a primitive double array for the values
  public void test_putAll_collection_array() {
    Collection<LocalDate> dates = Arrays.asList(date(2013, 1, 1), date(2014, 1, 1));
    double[] values = new double[]{2d, 3d};
    LocalDateDoubleTimeSeriesBuilder test = LocalDateDoubleTimeSeries.builder();
    test.putAll(dates, values);
    assertEquals(test.get(date(2013, 1, 1)), OptionalDouble.of(2d));
    assertEquals(test.get(date(2014, 1, 1)), OptionalDouble.of(3d));
  }

  // mismatched collection sizes are rejected
  @Test(expectedExceptions = IllegalArgumentException.class)
  public void test_putAll_collectionsMismatch() {
    LocalDateDoubleTimeSeriesBuilder test = LocalDateDoubleTimeSeries.builder();
    test.putAll(Arrays.asList(date(2014, 1, 1)), Doubles.asList(2d, 3d));
  }

  //-------------------------------------------------------------------------
  // putAll(stream) overwrites existing entries for matching dates
  public void test_putAll_stream() {
    Collection<LocalDate> dates = Arrays.asList(date(2013, 1, 1), date(2014, 1, 1));
    Collection<Double> values = Doubles.asList(2d, 3d);
    LocalDateDoubleTimeSeries base = LocalDateDoubleTimeSeries.builder().putAll(dates, values).build();

    LocalDateDoubleTimeSeriesBuilder test = LocalDateDoubleTimeSeries.builder();
    test.put(date(2012, 1, 1), 0d);
    test.put(date(2013, 1, 1), 1d);
    test.putAll(base.stream());
    assertEquals(test.get(date(2012, 1, 1)), OptionalDouble.of(0d));
    assertEquals(test.get(date(2013, 1, 1)), OptionalDouble.of(2d));
    assertEquals(test.get(date(2014, 1, 1)), OptionalDouble.of(3d));
  }

  // putAll(builder) behaves like putAll(stream): later entries win
  public void test_putAll_toBuilder() {
    Collection<LocalDate> dates = Arrays.asList(date(2013, 1, 1), date(2014, 1, 1));
    Collection<Double> values = Doubles.asList(2d, 3d);
    LocalDateDoubleTimeSeries base = LocalDateDoubleTimeSeries.builder().putAll(dates, values).build();

    LocalDateDoubleTimeSeriesBuilder test = LocalDateDoubleTimeSeries.builder();
    test.put(date(2012, 1, 1), 0d);
    test.put(date(2013, 1, 1), 1d);
    test.putAll(base.toBuilder());
    assertEquals(test.get(date(2012, 1, 1)), OptionalDouble.of(0d));
    assertEquals(test.get(date(2013, 1, 1)), OptionalDouble.of(2d));
    assertEquals(test.get(date(2014, 1, 1)), OptionalDouble.of(3d));
  }

  //-------------------------------------------------------------------------
  // entries are sorted by date on build(), whatever the insertion order
  public void test_seriesGetsSorted() {
    LocalDateDoubleTimeSeries test = LocalDateDoubleTimeSeries.builder()
        .put(date(2014, 1, 1), 14)
        .put(date(2012, 1, 1), 12)
        .put(date(2013, 1, 1), 13)
        .build();

    assertEquals(test.size(), 3);
    assertEquals(test.getEarliestDate(), date(2012, 1, 1));
    assertEquals(test.getLatestDate(), date(2014, 1, 1));
    assertEquals(test.get(date(2012, 1, 1)), OptionalDouble.of(12d));
    assertEquals(test.get(date(2013, 1, 1)), OptionalDouble.of(13d));
    assertEquals(test.get(date(2014, 1, 1)), OptionalDouble.of(14d));
  }

  // the last put for a given date wins
  public void test_duplicatesGetOverwritten() {
    LocalDateDoubleTimeSeries test = LocalDateDoubleTimeSeries.builder()
        .put(date(2014, 1, 1), 12)
        .put(date(2014, 1, 1), 14)
        .build();

    assertEquals(test.size(), 1);
    assertEquals(test.get(date(2014, 1, 1)), OptionalDouble.of(14d));
  }

  // toBuilder() allows deriving a modified copy without touching the base series
  public void test_useBuilderToAlterSeries() {
    LocalDateDoubleTimeSeries base = LocalDateDoubleTimeSeries.builder()
        .put(date(2014, 1, 1), 14)
        .put(date(2012, 1, 1), 12)
        .put(date(2013, 1, 1), 13)
        .build();
    LocalDateDoubleTimeSeries test = base.toBuilder()
        .put(date(2013, 1, 1), 23)
        .put(date(2011, 1, 1), 21)
        .build();

    assertEquals(test.size(), 4);
    assertEquals(test.getEarliestDate(), date(2011, 1, 1));
    assertEquals(test.getLatestDate(), date(2014, 1, 1));
    // new value
    assertEquals(test.get(date(2011, 1, 1)), OptionalDouble.of(21d));
    assertEquals(test.get(date(2012, 1, 1)), OptionalDouble.of(12d));
    // updated value
    assertEquals(test.get(date(2013, 1, 1)), OptionalDouble.of(23d));
    assertEquals(test.get(date(2014, 1, 1)), OptionalDouble.of(14d));
  }

  // build() switches between the sparse and dense implementations based on how densely
  // the business-day range is populated (threshold appears to be 70% — see assertions)
  public void densityChoosesImplementation() {

    LocalDateDoubleTimeSeries series1 = LocalDateDoubleTimeSeries.builder()
        .put(date(2015, 1, 5), 14) // Monday
        .put(date(2015, 1, 12), 12)
        .put(date(2015, 1, 19), 13)
        .build();

    assertEquals(series1.getClass(), SparseLocalDateDoubleTimeSeries.class);

    // Now add in a week's worth of data
    LocalDateDoubleTimeSeries series2 = series1.toBuilder()
        .put(date(2015, 1, 6), 14)
        .put(date(2015, 1, 7), 13)
        .put(date(2015, 1, 8), 12)
        .put(date(2015, 1, 9), 13)
        .build();

    // Not yet enough as we have 7/11 populated (i.e. below 70%)
    assertEquals(series2.getClass(), SparseLocalDateDoubleTimeSeries.class);

    // Add in 1 more days giving 8/11 populated
    LocalDateDoubleTimeSeries series3 = series2.toBuilder()
        .put(date(2015, 1, 13), 11)
        .build();

    assertEquals(series3.getClass(), DenseLocalDateDoubleTimeSeries.class);

    // Now add in a weekend date, which means we have 9/15
    LocalDateDoubleTimeSeries series4 = series3.toBuilder()
        .put(date(2015, 1, 10), 12) // Saturday
        .build();

    assertEquals(series4.getClass(), SparseLocalDateDoubleTimeSeries.class);

    // Add in 2 new dates giving 11/15
    LocalDateDoubleTimeSeries series5 = series4.toBuilder()
        .put(date(2015, 1, 14), 11)
        .put(date(2015, 1, 15), 10)
        .build();

    assertEquals(series5.getClass(), DenseLocalDateDoubleTimeSeries.class);
  }

  //-------------------------------------------------------------------------
  // shorthand factory for test dates
  private static LocalDate date(int year, int month, int day) {
    return LocalDate.of(year, month, day);
  }

}
package com.github.theholywaffle.lolchatapi; /* * #%L * League of Legends XMPP Chat Library * %% * Copyright (C) 2014 Bert De Geyter * %% * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
* #L% */ import java.io.IOException; import java.io.StringReader; import java.util.Date; import org.custommonkey.xmlunit.Diff; import org.jdom2.Document; import org.jdom2.Element; import org.jdom2.JDOMException; import org.jdom2.input.SAXBuilder; import org.jdom2.output.XMLOutputter; import org.xml.sax.SAXException; public class LolStatus { public enum Division { NONE, I, II, III, IV, V; } public enum GameStatus { TEAM_SELECT("teamSelect"), HOSTING_NORMAL_GAME("hostingNormalGame"), HOSTING_PRACTICE_GAME("hostingPracticeGame"), HOSTING_RANKED_GAME("hostingRankedGame"), HOSTING_COOP_VS_AI_GAME("hostingCoopVsAIGame"), IN_QUEUE("inQueue"), SPECTATING("spectating"), OUT_OF_GAME("outOfGame"), CHAMPION_SELECT("championSelect"), IN_GAME("inGame"), IN_TEAMBUILDER("inTeamBuilder"), TUTORIAL("tutorial"); private String internal; GameStatus(String internal) { this.internal = internal; } public String internal() { return internal; } } public enum Queue { NONE, NORMAL, NORMAL_3x3, ODIN_UNRANKED, ARAM_UNRANKED_5x5, BOT, BOT_3x3, RANKED_SOLO_5x5, RANKED_TEAM_3x3, RANKED_TEAM_5x5, ONEFORALL_5x5, FIRSTBLOOD_1x1, FIRSTBLOOD_2x2, SR_6x6, CAP_5x5, URF, URF_BOT, NIGHTMARE_BOT; } public enum Tier { UNRANKED, BRONZE, SILVER, GOLD, PLATINUM, DIAMOND, MASTER, CHALLENGER; } private enum XMLProperty { level, rankedLeagueDivision, rankedLosses, rankedRating, leaves, gameQueueType, skinname, profileIcon, rankedLeagueQueue, tier, rankedLeagueName, queueType, timeStamp, rankedWins, odinLeaves, dropInSpectateGameId, statusMsg, rankedLeagueTier, featuredGameData, odinWins, wins, gameStatus, isObservable, mobile, rankedSoloRestricted; @Override public String toString() { return name(); } } private static final XMLOutputter outputter = new XMLOutputter(); private final Document doc; /** * Generate a default LoLStatus that can later be modified and be used to * change the current LolStatus ({@link LolChat#setStatus(LolStatus)}). 
* */ public LolStatus() { outputter .setFormat(outputter.getFormat().setExpandEmptyElements(false)); doc = new Document(new Element("body")); for (final XMLProperty p : XMLProperty.values()) { doc.getRootElement().addContent(new Element(p.toString())); } } /** * This constructor is not intended for usage. * * @param xml * An XML string * @throws JDOMException * Is thrown when the xml string is invalid * @throws IOException * Is thrown when the xml string is invalid */ public LolStatus(String xml) throws JDOMException, IOException { outputter .setFormat(outputter.getFormat().setExpandEmptyElements(false)); final SAXBuilder saxBuilder = new SAXBuilder(); doc = saxBuilder.build(new StringReader(xml)); for (final Element e : doc.getRootElement().getChildren()) { boolean found = false; for (final XMLProperty p : XMLProperty.values()) { if (p.name().equals(e.getName())) { found = true; } } if (!found) { System.err.println("XMLProperty \"" + e.getName() + "\" value: \"" + e.getValue() + "\" not implemented yet!"); } } } @Override public final boolean equals(Object other) { if (other == null) return false; if (other == this) return true; if (!(other instanceof LolStatus)) return false; final LolStatus otherStatus = (LolStatus) other; Diff diff; try { diff = new Diff(otherStatus.toString(), toString()); return diff.similar(); } catch (SAXException | IOException e) { e.printStackTrace(); } return false; } private String get(XMLProperty p) { final Element child = getElement(p); if (child == null) { return ""; } return child.getValue(); } public int getDominionLeaves() { return getInt(XMLProperty.odinLeaves); } public int getDominionWins() { return getInt(XMLProperty.odinWins); } private Element getElement(XMLProperty p) { return doc.getRootElement().getChild(p.toString()); } public String getFeaturedGameData() { return get(XMLProperty.featuredGameData); } public String getGameQueueType() { return get(XMLProperty.gameQueueType); } public GameStatus getGameStatus() { final 
String status = get(XMLProperty.gameStatus); if (!status.isEmpty()) { for (final GameStatus s : GameStatus.values()) { if (s.internal.equals(status)) { return s; } } System.err .println("GameStatus " + status + " not implemented yet!"); } return null; } private int getInt(XMLProperty p) { final String value = get(p); if (value.isEmpty()) { return -1; } return Integer.parseInt(value); } public int getLevel() { return getInt(XMLProperty.level); } private long getLong(XMLProperty p) { final String value = get(p); if (value.isEmpty()) { return -1L; } return Long.parseLong(value); } public int getNormalLeaves() { return getInt(XMLProperty.leaves); } public int getNormalWins() { return getInt(XMLProperty.wins); } public int getProfileIconId() { return getInt(XMLProperty.profileIcon); } /** * Seems like an unused variable of Riot * * @return Empty string */ @Deprecated public String getQueueType() { return get(XMLProperty.queueType); } public Division getRankedLeagueDivision() { final String div = get(XMLProperty.rankedLeagueDivision); if (!div.isEmpty()) { return Division.valueOf(div); } return Division.NONE; } public String getRankedLeagueName() { return get(XMLProperty.rankedLeagueName); } public String getRankedLeagueQueue() { return get(XMLProperty.rankedLeagueQueue); } public Tier getRankedLeagueTier() { final String tier = get(XMLProperty.rankedLeagueTier); if (!tier.isEmpty()) { return Tier.valueOf(tier); } return Tier.UNRANKED; } /** * Seems like an unused variable of Riot. * * @return 0 */ @Deprecated public int getRankedLosses() { return getInt(XMLProperty.rankedLosses); } /** * Seems like an unused variable of Riot. 
* * @return 0 */ @Deprecated public int getRankedRating() { return getInt(XMLProperty.rankedRating); } public int getRankedWins() { return getInt(XMLProperty.rankedWins); } public String getSkin() { return get(XMLProperty.skinname); } public String getSpectatedGameId() { return get(XMLProperty.dropInSpectateGameId); } public String getStatusMessage() { return get(XMLProperty.statusMsg); } public Tier getTier() { final String tier = get(XMLProperty.tier); if (!tier.isEmpty()) { return Tier.valueOf(tier); } return Tier.UNRANKED; } public Date getTimestamp() { final long l = getLong(XMLProperty.timeStamp); if (l > 0) { return new Date(l); } return null; } public boolean isObservable() { return get(XMLProperty.isObservable).equals("ALL"); } public LolStatus setDominionLeaves(int leaves) { setElement(XMLProperty.odinLeaves, leaves); return this; } public LolStatus setDominionWins(int wins) { setElement(XMLProperty.odinWins, wins); return this; } private void setElement(XMLProperty p, int value) { setElement(p, String.valueOf(value)); } private void setElement(XMLProperty p, long value) { setElement(p, String.valueOf(value)); } private void setElement(XMLProperty p, String value) { getElement(p).setText(value); } private void setElement(XMLProperty p, boolean value) { setElement(p, String.valueOf(value)); } public LolStatus setFeaturedGameData(String data) { setElement(XMLProperty.featuredGameData, data); return this; } public LolStatus setGameQueueType(Queue q) { return setGameQueueType(q.name()); } public LolStatus setGameQueueType(String q) { setElement(XMLProperty.gameQueueType, q); return this; } public LolStatus setGameStatus(GameStatus s) { setElement(XMLProperty.gameStatus, s.internal); return this; } public LolStatus setLevel(int level) { setElement(XMLProperty.level, level); return this; } public LolStatus setNormalLeaves(int leaves) { setElement(XMLProperty.leaves, leaves); return this; } public LolStatus setNormalWins(int wins) { setElement(XMLProperty.wins, 
wins); return this; } public LolStatus setObservable() { setElement(XMLProperty.isObservable, "ALL"); return this; } public LolStatus setProfileIconId(int id) { setElement(XMLProperty.profileIcon, id); return this; } @Deprecated public LolStatus setQueueType(Queue q) { setElement(XMLProperty.queueType, q.name()); return this; } public LolStatus setRankedLeagueDivision(Division d) { setElement(XMLProperty.rankedLeagueDivision, d.name()); return this; } public LolStatus setRankedLeagueName(String name) { setElement(XMLProperty.rankedLeagueName, name); return this; } public LolStatus setRankedLeagueQueue(Queue q) { setElement(XMLProperty.rankedLeagueQueue, q.name()); return this; } public LolStatus setRankedLeagueTier(Tier t) { setElement(XMLProperty.rankedLeagueTier, t.name()); return this; } @Deprecated public LolStatus setRankedLosses(int losses) { setElement(XMLProperty.rankedLosses, losses); return this; } @Deprecated public LolStatus setRankedRating(int rating) { setElement(XMLProperty.rankedRating, rating); return this; } public LolStatus setRankedWins(int wins) { setElement(XMLProperty.rankedWins, wins); return this; } public LolStatus setSkin(String name) { setElement(XMLProperty.skinname, name); return this; } public LolStatus setSpectatedGameId(String id) { setElement(XMLProperty.dropInSpectateGameId, id); return this; } public LolStatus setStatusMessage(String message) { setElement(XMLProperty.statusMsg, message); return this; } public LolStatus setTier(Tier t) { setElement(XMLProperty.tier, t.name()); return this; } public LolStatus setTimestamp(Date date) { return setTimestamp(date.getTime()); } public LolStatus setTimestamp(long date) { setElement(XMLProperty.timeStamp, date); return this; } public LolStatus setMobile(String mobile) { setElement(XMLProperty.mobile, mobile); return this; } public String getMobile() { return get(XMLProperty.mobile); } public LolStatus setRankedSoloRestricted(boolean rankedSoloRestricted) { 
setElement(XMLProperty.rankedSoloRestricted, rankedSoloRestricted); return this; } public boolean getRankedSoloRestricted() { return Boolean.valueOf(get(XMLProperty.rankedSoloRestricted)); } @Override public String toString() { return outputter.outputString(doc.getRootElement()); } }
/* * Copyright (C) 2019 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.google.android.accessibility.talkback.dialog; import static com.google.android.accessibility.talkback.Feedback.Focus.Action.RESTORE_ON_NEXT_WINDOW; import static com.google.android.accessibility.utils.Performance.EVENT_ID_UNTRACKED; import android.content.Context; import android.content.DialogInterface; import androidx.annotation.Nullable; import androidx.appcompat.app.AlertDialog; import android.text.TextUtils; import android.view.View; import android.view.WindowManager; import com.google.android.accessibility.talkback.Feedback; import com.google.android.accessibility.talkback.Pipeline.FeedbackReturner; import com.google.android.accessibility.talkback.TalkBackService; import com.google.android.accessibility.talkback.utils.MaterialComponentUtils; import com.google.android.accessibility.utils.widget.DialogUtils; import com.google.android.libraries.accessibility.utils.log.LogUtils; /** * This is a base class to handle show, dismiss and click events from dialogs. If the context is * from {@link TalkBackService}, sets window type to accessibility overlay, registers and * unregisters to {@link TalkBackService} for screen monitor and restores focus. 
*/ public abstract class BaseDialog { private static final String TAG = BaseDialog.class.getSimpleName(); private static final int RESOURCE_ID_UNKNOWN = -1; protected final Context context; private final int dialogTitleResId; private @Nullable AlertDialog dialog; private @Nullable FeedbackReturner pipeline; private boolean isSoftInputMode = false; private boolean needToRestoreFocus = false; private int positiveButtonStringRes; private int negativeButtonStringRes; private int neutralButtonStringRes; public BaseDialog(Context context, int dialogTitleResId, FeedbackReturner pipeline) { this.context = context; this.dialogTitleResId = dialogTitleResId; this.pipeline = pipeline; this.positiveButtonStringRes = android.R.string.ok; this.negativeButtonStringRes = android.R.string.cancel; this.neutralButtonStringRes = RESOURCE_ID_UNKNOWN; } //////////////////////////////////////////////////////////////////////////// // Basic setter for dialog /** Handles Ok and Cancel button click events in dialog. */ public abstract void handleDialogClick(int buttonClicked); /** Handles dialog dismissed event. */ public abstract void handleDialogDismiss(); /** Gets the message string for dialog to display. */ public abstract String getMessageString(); /** Gets the customized view for dialog to display. */ public abstract View getCustomizedView(); //////////////////////////////////////////////////////////////////////////// // Optional setter for dialog /** * Enables the button on the dialog. 
* * @param button the button on the dialog, either {@link DialogInterface#BUTTON_POSITIVE} or * {@link DialogInterface#BUTTON_NEGATIVE} * @param enabled enable status */ public void setButtonEnabled(int button, boolean enabled) { if (dialog == null || (button != DialogInterface.BUTTON_POSITIVE && button != DialogInterface.BUTTON_NEGATIVE && button != DialogInterface.BUTTON_NEUTRAL)) { return; } dialog.getButton(button).setEnabled(enabled); } /** Sets to {@code true} if focus on EditText and needs to launch IME automatically. */ public void setSoftInputMode(boolean isSoftInputMode) { this.isSoftInputMode = isSoftInputMode; } /** * Sets to {@code true} if it needs to restore focus. In general, it will set to true if the * dialog is generated from Talkback context menu and we would like to restore focus to the * original node after context menu dismiss. */ public void setRestoreFocus(boolean needToRestoreFocus) { this.needToRestoreFocus = needToRestoreFocus; } /** * Sets string resource for the default positive button. This method must be called before {@link * #showDialog()} to take effect. */ public void setPositiveButtonStringRes(int res) { this.positiveButtonStringRes = res; } /** * Sets string resource for the default negative button. This method must be called before {@link * #showDialog()} to take effect. */ public void setNegativeButtonStringRes(int res) { this.negativeButtonStringRes = res; } /** * Sets string resource for the neutral button and enable it. This method must be called before * {@link #showDialog()} to take effect and show neutral button. 
*/ public void setNeutralButtonStringRes(int res) { this.neutralButtonStringRes = res; } //////////////////////////////////////////////////////////////////////////// // Status controller for dialog /** * Returns and shows the dialog with ok/cancel button by default, or set string Id of neutral * button by {@link #setNeutralButtonStringRes(int)} to show neutral button for specifical * function before call this function. */ public AlertDialog showDialog() { // Only show one dialog at a time. if (dialog != null && dialog.isShowing()) { return dialog; } final DialogInterface.OnClickListener onClickListener = (dialog, buttonClicked) -> clickDialogInternal(buttonClicked); final DialogInterface.OnDismissListener onDismissListener = dialog -> dismissDialogInternal(); AlertDialog.Builder dialogBuilder = MaterialComponentUtils.alertDialogBuilder(context) .setTitle(dialogTitleResId) .setNegativeButton(negativeButtonStringRes, onClickListener) .setPositiveButton(positiveButtonStringRes, onClickListener) .setOnDismissListener(onDismissListener) .setCancelable(true); if (neutralButtonStringRes != RESOURCE_ID_UNKNOWN) { dialogBuilder.setNeutralButton(neutralButtonStringRes, onClickListener); } String message = getMessageString(); if (!TextUtils.isEmpty(message)) { dialogBuilder.setMessage(message); } View customizedView = getCustomizedView(); if (customizedView != null) { dialogBuilder.setView(customizedView); } dialog = dialogBuilder.create(); if (isSoftInputMode) { dialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_VISIBLE); } if (context instanceof TalkBackService) { DialogUtils.setWindowTypeToDialog(dialog.getWindow()); } else { LogUtils.v( TAG, "Create BaseDialog from context not instance of TalkBackService, class:" + context.getClass()); } dialog.show(); registerServiceDialog(); return dialog; } /** Cancels dialog. */ public void cancelDialog() { if (dialog != null && dialog.isShowing()) { dialog.cancel(); } } /** Dismisses dialog. 
*/ public void dismissDialog() { if (dialog != null) { dialog.dismiss(); } } //////////////////////////////////////////////////////////////////////////////// /** Registers screen monitor for dialog. When the screen turns off, cancel dialog. */ private void registerServiceDialog() { if (context instanceof TalkBackService) { ((TalkBackService) context).registerDialog(dialog); } } /** * Unregisters screen monitor for dialog and restores focus if needToRestoreFocus is {@code true}. */ private void unregisterServiceDialog() { if (context instanceof TalkBackService) { ((TalkBackService) context).unregisterDialog(dialog); } } private void dismissDialogInternal() { handleDialogDismiss(); unregisterServiceDialog(); dialog = null; } private void clickDialogInternal(int buttonClicked) { handleDialogClick(buttonClicked); // If it is triggered by Talkback context menu, restores focus after executing actions by // clicking buttons. if ((buttonClicked == DialogInterface.BUTTON_POSITIVE || buttonClicked == DialogInterface.BUTTON_NEUTRAL || buttonClicked == DialogInterface.BUTTON_NEGATIVE) && context instanceof TalkBackService && needToRestoreFocus && pipeline != null) { pipeline.returnFeedback(EVENT_ID_UNTRACKED, Feedback.focus(RESTORE_ON_NEXT_WINDOW)); } } }
package redis.clients.johm.collections;

import java.lang.reflect.Field;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;

import redis.clients.johm.Indexed;
import redis.clients.johm.JOhm;
import redis.clients.johm.JOhmUtils;
import redis.clients.johm.Nest;
import redis.clients.johm.JOhmUtils.Convertor;
import redis.clients.johm.JOhmUtils.JOhmCollectionDataType;

/**
 * JOhm-internal {@link Map} proxy over a Redis-persisted hash. Entries are
 * loaded lazily from the datastore to keep network traffic low; local state is
 * synced best-effort on add/remove only, without locking, so instances are not
 * thread-safe.
 *
 * Null keys and null values are not supported.
 */
public class RedisMap<K, V> implements Map<K, V> {
    private final Nest<? extends V> nest;
    private final Class<? extends K> keyClazz;
    private final Class<? extends V> valueClazz;
    private final JOhmCollectionDataType johmKeyType;
    private final JOhmCollectionDataType johmValueType;
    private final Field field;
    private final Object owner;

    public RedisMap(final Class<? extends K> keyClazz,
            final Class<? extends V> valueClazz, final Nest<? extends V> nest,
            Field field, Object owner) {
        this.keyClazz = keyClazz;
        this.valueClazz = valueClazz;
        this.nest = nest;
        this.field = field;
        this.owner = owner;
        this.johmKeyType = JOhmUtils.detectJOhmCollectionDataType(keyClazz);
        this.johmValueType = JOhmUtils.detectJOhmCollectionDataType(valueClazz);
    }

    // Adds the owner's id to the search index for this key, if the field is @Indexed.
    private void indexValue(K element) {
        if (!field.isAnnotationPresent(Indexed.class)) {
            return;
        }
        if (johmKeyType == JOhmCollectionDataType.PRIMITIVE) {
            nest.cat(field.getName()).cat(element).sadd(
                    JOhmUtils.getId(owner).toString());
        } else if (johmKeyType == JOhmCollectionDataType.MODEL) {
            nest.cat(field.getName()).cat(JOhmUtils.getId(element)).sadd(
                    JOhmUtils.getId(owner).toString());
        }
    }

    // Removes the owner's id from the search index for this key, if the field is @Indexed.
    private void unindexValue(K element) {
        if (!field.isAnnotationPresent(Indexed.class)) {
            return;
        }
        if (johmKeyType == JOhmCollectionDataType.PRIMITIVE) {
            nest.cat(field.getName()).cat(element).srem(
                    JOhmUtils.getId(owner).toString());
        } else if (johmKeyType == JOhmCollectionDataType.MODEL) {
            nest.cat(field.getName()).cat(JOhmUtils.getId(element)).srem(
                    JOhmUtils.getId(owner).toString());
        }
    }

    public void clear() {
        // Delete every field of the backing hash, then drop the hash key itself.
        Map<String, String> persisted = nest.cat(JOhmUtils.getId(owner)).cat(
                field.getName()).hgetAll();
        for (String storedKey : persisted.keySet()) {
            nest.cat(JOhmUtils.getId(owner)).cat(field.getName()).hdel(
                    storedKey);
        }
        nest.cat(JOhmUtils.getId(owner)).cat(field.getName()).del();
    }

    public boolean containsKey(Object key) {
        return scrollElements().containsKey(key);
    }

    public boolean containsValue(Object value) {
        return scrollElements().containsValue(value);
    }

    public Set<java.util.Map.Entry<K, V>> entrySet() {
        return scrollElements().entrySet();
    }

    @SuppressWarnings("unchecked")
    public V get(Object key) {
        // Look up the raw hash entry under the owner's hash, keyed by the
        // primitive form or the model id of the given key.
        String storedRef = null;
        if (johmKeyType == JOhmCollectionDataType.PRIMITIVE) {
            storedRef = nest.cat(JOhmUtils.getId(owner)).cat(field.getName())
                    .hget(key.toString());
        } else if (johmKeyType == JOhmCollectionDataType.MODEL) {
            storedRef = nest.cat(JOhmUtils.getId(owner)).cat(field.getName())
                    .hget(JOhmUtils.getId(key).toString());
        }
        if (JOhmUtils.isNullOrEmpty(storedRef)) {
            return null;
        }
        // Materialize the value: convert primitives in place, load models by id.
        if (johmValueType == JOhmCollectionDataType.PRIMITIVE) {
            return (V) Convertor.convert(valueClazz, storedRef);
        }
        if (johmValueType == JOhmCollectionDataType.MODEL) {
            return JOhm.<V> get(valueClazz, Integer.parseInt(storedRef));
        }
        return null;
    }

    public boolean isEmpty() {
        return this.size() == 0;
    }

    @SuppressWarnings("unchecked")
    public Set<K> keySet() {
        Set<K> result = new LinkedHashSet<K>();
        for (String rawKey : nest.cat(JOhmUtils.getId(owner)).cat(
                field.getName()).hkeys()) {
            if (johmKeyType == JOhmCollectionDataType.PRIMITIVE) {
                result.add((K) JOhmUtils.Convertor.convert(keyClazz, rawKey));
            } else if (johmKeyType == JOhmCollectionDataType.MODEL) {
                result.add(JOhm.<K> get(keyClazz, Integer.parseInt(rawKey)));
            }
        }
        return result;
    }

    public V put(K key, V value) {
        // Capture the prior mapping before overwriting, per the Map contract.
        V previousValue = get(key);
        internalPut(key, value);
        return previousValue;
    }

    public void putAll(Map<? extends K, ? extends V> mapToCopyIn) {
        for (Map.Entry<? extends K, ? extends V> pair : mapToCopyIn.entrySet()) {
            internalPut(pair.getKey(), pair.getValue());
        }
    }

    @SuppressWarnings("unchecked")
    public V remove(Object key) {
        V value = get(key);
        if (johmKeyType == JOhmCollectionDataType.PRIMITIVE) {
            nest.cat(JOhmUtils.getId(owner)).cat(field.getName()).hdel(
                    key.toString());
        } else if (johmKeyType == JOhmCollectionDataType.MODEL) {
            nest.cat(JOhmUtils.getId(owner)).cat(field.getName()).hdel(
                    JOhmUtils.getId(key).toString());
        }
        unindexValue((K) key);
        return value;
    }

    public int size() {
        return nest.cat(JOhmUtils.getId(owner)).cat(field.getName()).hlen()
                .intValue();
    }

    public Collection<V> values() {
        return scrollElements().values();
    }

    // Serializes the pair to strings and writes it into the backing hash,
    // then updates the search index for the key.
    private V internalPut(final K key, final V value) {
        Map<String, String> toStore = new LinkedHashMap<String, String>();
        String rawKey = null;
        String rawValue = null;
        if (johmKeyType == JOhmCollectionDataType.PRIMITIVE
                && johmValueType == JOhmCollectionDataType.PRIMITIVE) {
            rawKey = key.toString();
            rawValue = value.toString();
        } else if (johmKeyType == JOhmCollectionDataType.PRIMITIVE
                && johmValueType == JOhmCollectionDataType.MODEL) {
            rawKey = key.toString();
            rawValue = JOhmUtils.getId(value).toString();
        } else if (johmKeyType == JOhmCollectionDataType.MODEL
                && johmValueType == JOhmCollectionDataType.PRIMITIVE) {
            rawKey = JOhmUtils.getId(key).toString();
            rawValue = value.toString();
        } else if (johmKeyType == JOhmCollectionDataType.MODEL
                && johmValueType == JOhmCollectionDataType.MODEL) {
            rawKey = JOhmUtils.getId(key).toString();
            rawValue = JOhmUtils.getId(value).toString();
        }
        toStore.put(rawKey, rawValue);
        nest.cat(JOhmUtils.getId(owner)).cat(field.getName()).hmset(toStore);
        indexValue(key);
        return value;
    }

    // Pulls the whole persisted hash and deserializes it into a local map snapshot.
    @SuppressWarnings("unchecked")
    private synchronized Map<K, V> scrollElements() {
        Map<String, String> persisted = nest.cat(JOhmUtils.getId(owner)).cat(
                field.getName()).hgetAll();
        Map<K, V> snapshot = new HashMap<K, V>();
        K materializedKey = null;
        V materializedValue = null;
        for (Map.Entry<String, String> pair : persisted.entrySet()) {
            if (johmKeyType == JOhmCollectionDataType.PRIMITIVE
                    && johmValueType == JOhmCollectionDataType.PRIMITIVE) {
                materializedKey = (K) JOhmUtils.Convertor.convert(keyClazz,
                        pair.getKey());
                materializedValue = (V) JOhmUtils.Convertor.convert(valueClazz,
                        pair.getValue());
            } else if (johmKeyType == JOhmCollectionDataType.PRIMITIVE
                    && johmValueType == JOhmCollectionDataType.MODEL) {
                materializedKey = (K) JOhmUtils.Convertor.convert(keyClazz,
                        pair.getKey());
                materializedValue = JOhm.<V> get(valueClazz, Integer
                        .parseInt(pair.getValue()));
            } else if (johmKeyType == JOhmCollectionDataType.MODEL
                    && johmValueType == JOhmCollectionDataType.PRIMITIVE) {
                materializedKey = JOhm.<K> get(keyClazz, Integer.parseInt(pair
                        .getKey()));
                materializedValue = (V) JOhmUtils.Convertor.convert(valueClazz,
                        pair.getValue());
            } else if (johmKeyType == JOhmCollectionDataType.MODEL
                    && johmValueType == JOhmCollectionDataType.MODEL) {
                materializedKey = JOhm.<K> get(keyClazz, Integer.parseInt(pair
                        .getKey()));
                materializedValue = JOhm.<V> get(valueClazz, Integer
                        .parseInt(pair.getValue()));
            }
            snapshot.put(materializedKey, materializedValue);
        }
        return snapshot;
    }
}
/* * ModeShape (http://www.modeshape.org) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.modeshape.jcr; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import javax.jcr.Binary; import javax.jcr.ImportUUIDBehavior; import javax.jcr.InvalidItemStateException; import javax.jcr.ItemExistsException; import javax.jcr.ItemNotFoundException; import javax.jcr.PathNotFoundException; import javax.jcr.PropertyType; import javax.jcr.RepositoryException; import javax.jcr.Value; import javax.jcr.ValueFactory; import javax.jcr.ValueFormatException; import javax.jcr.nodetype.ConstraintViolationException; import org.modeshape.common.SystemFailureException; import org.modeshape.common.annotation.NotThreadSafe; import org.modeshape.common.collection.Collections; import org.modeshape.common.collection.LinkedHashMultimap; import org.modeshape.common.text.TextDecoder; import org.modeshape.common.text.XmlNameEncoder; import org.modeshape.common.util.Base64; import org.modeshape.common.util.StringUtil; import org.modeshape.jcr.cache.CachedNode; import org.modeshape.jcr.cache.CachedNode.ReferenceType; import org.modeshape.jcr.cache.MutableCachedNode; import 
org.modeshape.jcr.cache.NodeKey; import org.modeshape.jcr.cache.ReferrerCounts; import org.modeshape.jcr.cache.SessionCache; import org.modeshape.jcr.value.Name; import org.modeshape.jcr.value.NameFactory; import org.modeshape.jcr.value.NamespaceRegistry; import org.modeshape.jcr.value.Path; import org.modeshape.jcr.value.PathFactory; import org.modeshape.jcr.value.Property; import org.modeshape.jcr.value.PropertyFactory; import org.modeshape.jcr.value.basic.NodeKeyReference; import org.xml.sax.Attributes; import org.xml.sax.ContentHandler; import org.xml.sax.SAXException; import org.xml.sax.helpers.DefaultHandler; /** * Content handler that provides SAX-based event handling that maps incoming documents to the repository based on the * functionality described in section 7.3 of the JCR 1.0.1 specification. * <p> * Each content handler is only intended to be used once and discarded. This class is <b>NOT</b> thread-safe. * </p> * * @see JcrSession#getImportContentHandler(String, int) * @see JcrWorkspace#getImportContentHandler(String, int) */ @NotThreadSafe class JcrContentHandler extends DefaultHandler { /** * Encoder to properly escape XML names. 
 *
 * @see XmlNameEncoder
 */
    protected static final TextDecoder SYSTEM_VIEW_NAME_DECODER = new XmlNameEncoder();
    protected static final TextDecoder DOCUMENT_VIEW_NAME_DECODER = JcrDocumentViewExporter.NAME_DECODER;
    protected static final TextDecoder DOCUMENT_VIEW_VALUE_DECODER = JcrDocumentViewExporter.VALUE_DECODER;
    // Lower-cased names of mixins that are managed internally (currently only 'mix:versionable').
    protected static final List<String> INTERNAL_MIXINS = Arrays.asList(JcrMixLexicon.VERSIONABLE.getString().toLowerCase());

    // Alternative prefix sometimes used for the XML Schema namespace; remapped in startPrefixMapping(...).
    private static final String ALT_XML_SCHEMA_NAMESPACE_PREFIX = "xsd";

    protected final NamespaceRegistry namespaces;
    protected final int uuidBehavior;
    protected final boolean retentionInfoRetained;
    protected final boolean lifecycleInfoRetained;
    // Candidate nodes that may need fix-up after the import completes; see postProcessNodes().
    protected final List<AbstractJcrNode> nodesForPostProcessing = new LinkedList<>();
    // Maps 'jcr:uuid' values seen in the imported content to the keys of the nodes created for them.
    protected final Map<String, NodeKey> uuidToNodeKeyMapping = new HashMap<>();
    protected final Set<NodeKey> importedNodeKeys = new HashSet<>();
    // Maps the key of each imported share node to the UUID string of the shareable node it refers to.
    protected final Map<NodeKey, String> shareIdsToUUIDMap = new HashMap<>();
    // Referrer counts captured for nodes removed/replaced by the import, restored in processReferences().
    protected final Map<NodeKey, ReferrerCounts> referrersByNodeKey = new HashMap<>();
    // Reference properties are buffered here and only applied once the whole graph is imported.
    protected final LinkedHashMultimap<NodeKey, ReferenceProperty> allReferenceProperties = LinkedHashMultimap.create();
    protected SessionCache cache;
    protected final String primaryTypeName;
    protected final String mixinTypesName;
    protected final String uuidName;
    private final JcrSession session;
    private final ExecutionContext context;
    private final NameFactory nameFactory;
    private final PathFactory pathFactory;
    private final org.modeshape.jcr.value.ValueFactory<String> stringFactory;
    private final ValueFactory jcrValueFactory;
    private final JcrNodeTypeManager nodeTypes;
    private final org.modeshape.jcr.api.NamespaceRegistry jcrNamespaceRegistry;
    private final boolean saveWhenCompleted;
    private final String systemWorkspaceKey;
    private AbstractJcrNode currentNode;
    // SAX events are forwarded to a system-view or document-view handler, chosen in checkDelegate(...).
    private ContentHandler delegate;

    /**
     * Create a content handler that imports content under the supplied parent node.
     *
     * @param session the session performing the import; may not be null
     * @param parent the node under which the imported content is placed
     * @param uuidBehavior one of the {@link ImportUUIDBehavior} constants (asserted below)
     * @param saveWhenCompleted true if the session should be saved when the document ends
     * @param retentionInfoRetained true if retention information should be retained on import
     * @param lifecycleInfoRetained true if lifecycle information should be retained on import
     * @param binaryStoreHint passed through to {@code session.getValueFactory(...)} — presumably
     *        selects a binary store for imported binaries; confirm against JcrSession
     * @throws PathNotFoundException declared for callers; not thrown directly in this body
     * @throws RepositoryException if an error occurs accessing the repository
     */
    JcrContentHandler( JcrSession session,
                       AbstractJcrNode parent,
                       int uuidBehavior,
                       boolean saveWhenCompleted,
                       boolean retentionInfoRetained,
                       boolean lifecycleInfoRetained,
                       String binaryStoreHint ) throws PathNotFoundException, RepositoryException {
        assert session != null;
        assert uuidBehavior == ImportUUIDBehavior.IMPORT_UUID_CREATE_NEW
               || uuidBehavior == ImportUUIDBehavior.IMPORT_UUID_COLLISION_REMOVE_EXISTING
               || uuidBehavior == ImportUUIDBehavior.IMPORT_UUID_COLLISION_REPLACE_EXISTING
               || uuidBehavior == ImportUUIDBehavior.IMPORT_UUID_COLLISION_THROW;
        this.session = session;
        this.session.initBaseVersionKeys();
        this.context = this.session.context();
        this.namespaces = context.getNamespaceRegistry();
        this.nameFactory = context.getValueFactories().getNameFactory();
        this.pathFactory = context.getValueFactories().getPathFactory();
        this.stringFactory = context.getValueFactories().getStringFactory();
        this.uuidBehavior = uuidBehavior;
        this.retentionInfoRetained = retentionInfoRetained;
        this.lifecycleInfoRetained = lifecycleInfoRetained;
        this.saveWhenCompleted = saveWhenCompleted;
        this.cache = session.cache();
        this.currentNode = parent;
        this.jcrValueFactory = session.getValueFactory(binaryStoreHint);
        this.nodeTypes = session.nodeTypeManager();
        this.jcrNamespaceRegistry = (org.modeshape.jcr.api.NamespaceRegistry)session.workspace().getNamespaceRegistry();
        // Pre-compute the prefixed string forms of the names used on every imported node.
        this.primaryTypeName = JcrLexicon.PRIMARY_TYPE.getString(this.namespaces);
        this.mixinTypesName = JcrLexicon.MIXIN_TYPES.getString(this.namespaces);
        this.uuidName = JcrLexicon.UUID.getString(this.namespaces);
        this.systemWorkspaceKey = session.repository().systemWorkspaceKey();
    }

    protected final JcrSession session() {
        return session;
    }

    protected final NamespaceRegistry namespaces() {
        return namespaces;
    }

    protected final JcrNodeTypeManager nodeTypes() {
        return nodeTypes;
    }

    /** Returns the registered node type with the supplied (string) name, or null if there is none. */
    protected final JcrNodeType nodeTypeFor( String name ) {
        return nodeTypes.getNodeType(nameFor(name));
    }

    /**
     * Returns a map from each property name defined by the named node type to its required
     * {@link PropertyType} value; empty when the node type is unknown.
     */
    protected final Map<Name, Integer> propertyTypesFor( String primaryTypeName ) {
        Map<Name, Integer> propertyTypesMap = new HashMap<>();
        JcrNodeType nodeType = nodeTypeFor(primaryTypeName);
        if (nodeType == null) {
            // nt:share falls in this category
            return propertyTypesMap;
        }
        for (JcrPropertyDefinition propertyDefinition : nodeType.getPropertyDefinitions()) {
            propertyTypesMap.put(propertyDefinition.getInternalName(), propertyDefinition.getRequiredType());
        }
        return propertyTypesMap;
    }

    protected final String stringFor( Object name ) {
        return stringFactory.create(name);
    }

    protected final Name nameFor( String name ) {
        return nameFactory.create(name);
    }

    protected final Name nameFor( String namespaceUri, String localName ) {
        return nameFactory.create(namespaceUri, localName);
    }

    protected final Path pathFor( Path parentPath, Name... names ) {
        return pathFactory.create(parentPath, names);
    }

    protected final Value valueFor( String value, int type ) throws ValueFormatException {
        return jcrValueFactory.createValue(value, type);
    }

    /** Creates a binary JCR value from the supplied stream. */
    protected final Value valueFor( InputStream stream ) throws RepositoryException {
        Binary binary = jcrValueFactory.createBinary(stream);
        return jcrValueFactory.createValue(binary);
    }

    protected final SessionCache cache() {
        return cache;
    }

    /** Returns true when the property name is in the jcr, nt, or mode namespaces. */
    protected final boolean isInternal(Name propertyName) {
        return propertyName.getNamespaceUri().equals(JcrLexicon.Namespace.URI)
               || propertyName.getNamespaceUri().equals(JcrNtLexicon.Namespace.URI)
               || propertyName.getNamespaceUri().equals(ModeShapeLexicon.NAMESPACE.getNamespaceUri());
    }

    /**
     * Applies the fix-ups that can only be done once the whole imported graph exists: sets
     * buffered reference properties, then repairs versionable/lockable/lifecycle/retention/share
     * nodes collected in {@link #nodesForPostProcessing}.
     */
    protected void postProcessNodes() throws SAXException {
        try {
            // first make sure all the necessary reference properties have been set
            processReferences();
            for (AbstractJcrNode node : nodesForPostProcessing) {
                MutableCachedNode mutable = node.mutable();

                // ---------------
                // mix:versionable
                // ---------------
                if (node.isNodeType(JcrMixLexicon.VERSIONABLE)) {
                    // Does the versionable node already have a reference to the version history?
                    // If so, then we ignore it because we'll use our own key ...

                    // Does the versionable node already have a base version?
                    AbstractJcrProperty baseVersionProp = node.getProperty(JcrLexicon.BASE_VERSION);
                    if (baseVersionProp != null) {
                        // we rely on the fact that the base version ref is exported with full key
                        NodeKeyReference baseVersionRef = (NodeKeyReference)baseVersionProp.getValue().value();
                        String workspaceKey = baseVersionRef.getNodeKey().getWorkspaceKey();
                        // we only register the base version if it comes from the system workspace (if it doesn't come from the
                        // system workspace, it's not valid - e.g. could be coming from an older version of ModeShape)
                        if (systemWorkspaceKey.equals(workspaceKey)) {
                            session.setDesiredBaseVersionKey(node.key(), baseVersionRef.getNodeKey());
                        }
                    }
                }

                // ---------------
                // mix:lockable
                // ---------------
                if (node.isNodeType(JcrMixLexicon.LOCKABLE) && node.isLocked()) {
                    // Nodes should not be locked upon import ...
                    node.unlock();
                }

                // ---------------
                // mix:lifecycle
                // ---------------
                if (node.isNodeType(JcrMixLexicon.LIFECYCLE)) {
                    if (lifecycleInfoRetained && !isValidReference(node, JcrLexicon.LIFECYCLE_POLICY, false)) {
                        // The 'jcr:lifecyclePolicy' REFERENCE value is not valid or does not reference an existing node,
                        // so the 'jcr:lifecyclePolicy' and 'jcr:currentLifecycleState' properties should be removed...
                        mutable.removeProperty(cache, JcrLexicon.LIFECYCLE_POLICY);
                        mutable.removeProperty(cache, JcrLexicon.CURRENT_LIFECYCLE_STATE);
                    }
                }

                // --------------------
                // mix:managedRetention
                // --------------------
                if (node.isNodeType(JcrMixLexicon.MANAGED_RETENTION)) {
                    if (retentionInfoRetained && !isValidReference(node, JcrLexicon.RETENTION_POLICY, false)) {
                        // The 'jcr:retentionPolicy' REFERENCE value is not valid or does not reference an existing node,
                        // so the 'jcr:retentionPolicy', 'jcr:hold' and 'jcr:isDeep' properties should be removed ...
                        mutable.removeProperty(cache, JcrLexicon.HOLD);
                        mutable.removeProperty(cache, JcrLexicon.IS_DEEP);
                        mutable.removeProperty(cache, JcrLexicon.RETENTION_POLICY);
                    }
                }

                // --------------------
                // mix:share
                // --------------------
                if (node.isNodeType(ModeShapeLexicon.SHARE)) {
                    // get the actual key of the shareable node
                    String shareableNodeUUID = shareIdsToUUIDMap.get(node.key());
                    assert shareableNodeUUID != null;
                    NodeKey shareableNodeKey = uuidToNodeKeyMapping.get(shareableNodeUUID);
                    assert shareableNodeKey != null;
                    // unlink the current key from its parent references
                    NodeKey parentKey = mutable.getParentKey(cache);
                    MutableCachedNode parent = cache.mutable(parentKey);
                    parent.removeChild(cache, node.key());
                    // re-link it with the correct key - that of the shareable node
                    parent.linkChild(cache, shareableNodeKey, node.name());
                }
            }
        } catch (RepositoryException e) {
            throw new EnclosingSAXException(e);
        }
    }

    /**
     * Sets the buffered reference properties on their nodes (validating public hard references
     * with constraints), then restores referrer bookkeeping for nodes replaced by the import.
     */
    private void processReferences() throws RepositoryException {
        // if there were any reference properties imported, they can only be set on the corresponding nodes *after* all
        // the graph has been imported
        for (Map.Entry<NodeKey, ReferenceProperty> entry : this.allReferenceProperties.entries()) {
            AbstractJcrNode node = session.node(entry.getKey(), null);
            ReferenceProperty referenceProperty = entry.getValue();
            AbstractJcrProperty property = null;
            // set the reference property without validating it first
            if (!referenceProperty.isMultiple()) {
                property = node.setProperty(referenceProperty.name(), referenceProperty.value(), true, true, true, false);
            } else {
                property = node.setProperty(referenceProperty.name(), referenceProperty.values(), referenceProperty.type(),
                                            true, true, false, true);
            }
            // check if there are any public hard references with constraints in which case we should validate them
            // this should not look at any internal (protected) references as they may be changed later on
            if (property.getType() == PropertyType.REFERENCE
                && property.getDefinition().getValueConstraints().length != 0
                && !property.getDefinition().isProtected()) {
                if (!isValidReference(property)) {
                    JcrPropertyDefinition defn = property.getDefinition();
                    String name = stringFor(property.name());
                    String path = property.getParent().getPath();
                    throw new ConstraintViolationException(JcrI18n.constraintViolatedOnReference.text(name, path, defn));
                }
            }
        }

        // Restore the back references on the nodes which have been removed/replaced by the import and which have referrers
        // outside the graph of nodes that was imported
        for (Map.Entry<NodeKey, ReferrerCounts> entry : referrersByNodeKey.entrySet()) {
            PropertyFactory propFactory = context.getPropertyFactory();
            MutableCachedNode referred = cache.mutable(entry.getKey());
            ReferrerCounts counts = entry.getValue();
            if (referred != null && counts != null) {
                // Add in the strong and weak referrers (that are outside the import scope) that used to be in the node
                // before it was replaced ...
                for (NodeKey key : counts.getStrongReferrers()) {
                    int count = counts.countStrongReferencesFrom(key);
                    for (int i = 0; i != count; ++i) {
                        // NOTE(review): a synthetic property name is derived from the referrer key plus an index;
                        // presumably only the count matters to the referrer bookkeeping — confirm in MutableCachedNode.
                        Property prop = propFactory.create(nameFor(key.toString() + i));
                        referred.addReferrer(cache, prop, key, ReferenceType.STRONG);
                    }
                }
                for (NodeKey key : counts.getWeakReferrers()) {
                    int count = counts.countWeakReferencesFrom(key);
                    for (int i = 0; i != count; ++i) {
                        Property prop = propFactory.create(nameFor(key.toString() + i));
                        referred.addReferrer(cache, prop, key, ReferenceType.WEAK);
                    }
                }
            }
        }
    }

    /**
     * Checks the validity of the named reference property of a node.
     *
     * @param node the node holding the property
     * @param propertyName the name of the reference property to check
     * @param returnValueIfNoProperty the result to use when the node has no such property
     */
    protected boolean isValidReference( AbstractJcrNode node,
                                        Name propertyName,
                                        boolean returnValueIfNoProperty ) throws RepositoryException {
        AbstractJcrProperty property = node.getProperty(propertyName);
        return property == null ? returnValueIfNoProperty : isValidReference(property);
    }

    /**
     * Returns true when every value of the property can be cast to the definition's required type
     * and satisfies the definition's constraints; false when there is no definition at all.
     */
    protected boolean isValidReference( AbstractJcrProperty property ) throws RepositoryException {
        JcrPropertyDefinition defn = property.getDefinition();
        if (defn == null) return false;
        if (property.isMultiple()) {
            for (Value value : property.getValues()) {
                if (!defn.canCastToTypeAndSatisfyConstraints(value, session)) {
                    // We know it's not valid, so return ...
                    return false;
                }
            }
            // All values appeared to be valid ...
            return true;
        }
        // Just a single value ...
        return defn.canCastToTypeAndSatisfyConstraints(property.getValue(), session);
    }

    @Override
    public void characters( char[] ch,
                            int start,
                            int length ) throws SAXException {
        assert this.delegate != null;
        delegate.characters(ch, start, length);
    }

    @Override
    public void endDocument() throws SAXException {
        // All nodes exist now, so buffered references and mixin fix-ups can be applied ...
        postProcessNodes();
        if (saveWhenCompleted) {
            try {
                session.save();
            } catch (RepositoryException e) {
                throw new SAXException(e);
            }
        }
        super.endDocument();
    }

    @Override
    public void endElement( String uri,
                            String localName,
                            String name ) throws SAXException {
        assert this.delegate != null;
        delegate.endElement(uri, localName, name);
    }

    @Override
    public void startElement( String uri,
                              String localName,
                              String name,
                              Attributes atts ) throws SAXException {
        checkDelegate(uri);
        assert this.delegate != null;
        delegate.startElement(uri, localName, name, atts);
    }

    /**
     * Lazily chooses the delegate on the first element: the system-view handler when the element
     * is in the 'sv' namespace, the document-view handler otherwise.
     */
    private void checkDelegate( String namespaceUri ) {
        if (delegate != null) return;
        if (JcrSvLexicon.Namespace.URI.equals(namespaceUri)) {
            this.delegate = new SystemViewContentHandler(this.currentNode);
        } else {
            this.delegate = new DocumentViewContentHandler(this.currentNode);
        }
    }

    /** Decodes a base-64 string, falling back to the URL-safe alphabet for older exports. */
    protected static byte[] decodeBase64( String value ) throws IOException {
        try {
            return Base64.decode(value.getBytes("UTF-8"));
        } catch (IOException e) {
            // try re-reading, in case this was an export from a prior ModeShape version that used URL_SAFE ...
            return Base64.decode(value, Base64.URL_SAFE);
        }
    }

    /** Decodes a base-64 string and interprets the decoded bytes as UTF-8 text. */
    protected static String decodeBase64AsString( String value ) throws IOException {
        byte[] decoded = decodeBase64(value);
        return new String(decoded, "UTF-8");
    }

    @Override
    public void startPrefixMapping( String prefix,
                                    String uri ) throws SAXException {
        try {
            // Normalize the alternative 'xsd' prefix for the XML Schema namespace ...
            if (ALT_XML_SCHEMA_NAMESPACE_PREFIX.equals(prefix) && uri.equals(JcrNamespaceRegistry.XML_SCHEMA_NAMESPACE_URI)) {
                prefix = JcrNamespaceRegistry.XML_SCHEMA_NAMESPACE_PREFIX;
            }

            // Read from the workspace's ModeShape registry, as its semantics are more friendly
            String existingUri = namespaces.getNamespaceForPrefix(prefix);
            if (existingUri != null) {
                if (existingUri.equals(uri)) {
                    // prefix/uri mapping is already in registry
                    return;
                }
                // The prefix is used and it does not match the registered namespace. Therefore, register using
                // a generated namespace ...
                this.jcrNamespaceRegistry.registerNamespace(uri);
            } else {
                // It is not yet registered, so register through the JCR workspace to ensure consistency
                this.jcrNamespaceRegistry.registerNamespace(prefix, uri);
            }
        } catch (RepositoryException re) {
            throw new EnclosingSAXException(re);
        }
    }

    /** SAXException wrapper used to tunnel repository exceptions out of the SAX callbacks. */
    class EnclosingSAXException extends SAXException {
        private static final long serialVersionUID = -1044992767566435542L;

        EnclosingSAXException( Exception e ) {
            super(e);
        }
    }

    // ----------------------------------------------------------------------------------------------------------------
    // NodeHandler framework ...
// ---------------------------------------------------------------------------------------------------------------- @SuppressWarnings( "unused" ) protected abstract class NodeHandler { public void finish() throws SAXException { } public AbstractJcrNode node() throws SAXException { return null; } public NodeHandler parentHandler() { return null; } public boolean ignoreAllChildren() { return false; } public void addPropertyValue( Name name, String value, boolean forceMultiValued, int propertyType, TextDecoder decoder ) throws EnclosingSAXException { } protected String name() { try { Path path = node().path(); return path.isRoot() ? "" : stringFor(path.getLastSegment()); } catch (Exception e) { throw new SystemFailureException(e); } } @Override public String toString() { NodeHandler parent = parentHandler(); if (parent != null) { return parent.toString() + "/" + name(); } try { return node().getPath(); } catch (Throwable e) { try { return node().toString(); } catch (SAXException e2) { throw new SystemFailureException(e2); } } } } /** * Some nodes need additional post-processing upon import, and this set of property names is used to come up with the nodes * that may need to be post-processed. * <p> * Really, the nodes that need to be post-processed are best found using the node types of each node. However, that is more * expensive to compute. Thus, we'll collect the candidate nodes that are candidates for post-processing, then in the * post-processing we can more effectively and efficiently use the node types. * </p> * <p> * Currently, we want to post-process nodes that contain repository-level semantics. 
In other words, nodes that are of the * following node types: * <ul> * <li><code>mix:versionable</code></li> * <li><code>mix:lockable</code></li> * <li><code>mix:lifecycle</code></li> * <li><code>mix:managedRetention</code></li> * </ul> * The <code>mix:simpleVersionable</code> would normally also be included here, except that the <code>jcr:isCheckedOut</code> * property is a boolean value that doesn't need any particular post-processing. * </p> * <p> * Some of these node types has a mandatory property, so the names of these mandatory properties are used to quickly determine * candidates for post-processing. In cases where there is no mandatory property, then the set of all properties for that node * type are included: * <ul> * <li><code>mix:versionable</code> --> <code>jcr:baseVersion</code> (mandatory)</li> * <li><code>mix:lockable</code> --> <code>jcr:lockOwner</code> and <code>jcr:lockIsDeep</code></li> * <li><code>mix:lifecycle</code> --> <code>jcr:lifecyclePolicy</code> and <code>jcr:currentLifecycleState</code></li> * <li><code>mix:managedRetention</code> --> <code>jcr:hold</code>, <code>jcr:isDeep</code>, and * <code>jcr:retentionPolicy</code></li> * </ul> * </p> */ protected static final Set<Name> PROPERTIES_FOR_POST_PROCESSING = Collections.unmodifiableSet( /* 'mix:lockable' has two optional properties */ JcrLexicon.LOCK_IS_DEEP, JcrLexicon.LOCK_OWNER, /* 'mix:versionable' has several mandatory properties, but we only need to check one */ JcrLexicon.BASE_VERSION, /* 'mix:lifecycle' has two optional properties */ JcrLexicon.LIFECYCLE_POLICY, JcrLexicon.CURRENT_LIFECYCLE_STATE, /* 'mix:managedRetention' has three optional properties */ JcrLexicon.HOLD, JcrLexicon.IS_DEEP, JcrLexicon.RETENTION_POLICY); protected class BasicNodeHandler extends NodeHandler { private final Map<Name, List<Value>> properties; private final Set<Name> multiValuedPropertyNames; private final Name nodeName; private NodeHandler parentHandler; private AbstractJcrNode node; private final 
int uuidBehavior; private boolean postProcessed = false; private boolean ignoreAllChildren = false; protected BasicNodeHandler( Name name, NodeHandler parentHandler, int uuidBehavior ) { this.nodeName = name; this.parentHandler = parentHandler; this.properties = new HashMap<>(); this.multiValuedPropertyNames = new HashSet<>(); this.uuidBehavior = uuidBehavior; } @Override public void finish() throws SAXException { node(); } @Override public boolean ignoreAllChildren() { return ignoreAllChildren; } @Override protected String name() { return stringFor(nodeName); } @Override public AbstractJcrNode node() throws SAXException { if (node == null) create(); assert node != null; return node; } @Override public NodeHandler parentHandler() { return parentHandler; } @Override public void addPropertyValue( Name name, String value, boolean forceMultiValued, int propertyType, TextDecoder decoder ) throws EnclosingSAXException { if (forceMultiValued) { this.multiValuedPropertyNames.add(name); } try { if (node != null) { if (JcrLexicon.PRIMARY_TYPE.equals(name)) return; if (JcrLexicon.MIXIN_TYPES.equals(name)) return; if (JcrLexicon.UUID.equals(name)) return; // The node was already created, so set the property using the editor ... node.setProperty(name, (JcrValue)valueFor(value, propertyType), true, true, true, false); } else { // The node hasn't been created yet, so just enqueue the property value into the map ... 
List<Value> values = properties.get(name); if (values == null) { values = new ArrayList<>(); properties.put(name, values); } if (forceMultiValued && value.indexOf(" ") > 0) { String[] stringValues = value.split(" "); for (String stringValue : stringValues) { processPropertyValue(name, stringValue, propertyType, decoder, values); } } else { processPropertyValue(name, value, propertyType, decoder, values); } } if (!postProcessed && PROPERTIES_FOR_POST_PROCESSING.contains(name)) { postProcessed = true; } } catch (IOException | SAXException | RepositoryException ioe) { throw new EnclosingSAXException(ioe); } } private void processPropertyValue( Name name, String value, int propertyType, TextDecoder decoder, List<Value> values ) throws UnsupportedEncodingException, RepositoryException, SAXException { if (propertyType == PropertyType.BINARY) { Base64.InputStream is = new Base64.InputStream(new ByteArrayInputStream(value.getBytes("UTF-8"))); values.add(valueFor(is)); } else { if (decoder != null) value = decoder.decode(value); if (value != null && propertyType == PropertyType.STRING) { // Strings and binaries can be empty -- other data types cannot values.add(valueFor(value, propertyType)); } else if (!StringUtil.isBlank(value) && isReference(propertyType)) { boolean isInternalReference = isInternal(name); if (!isInternalReference) { // we only prepend the parent information for non-internal references value = parentHandler().node().key().withId(value).toString(); } // we only have the identifier of the node, so try to use the parent to determine the workspace & // source key values.add(valueFor(value, propertyType)); } else if (!StringUtil.isBlank(value)) { values.add(valueFor(value, propertyType)); } } } protected void create() throws SAXException { try { AbstractJcrNode parent = parentHandler.node(); final NodeKey parentKey = parent.key(); assert parent != null; // Figure out the key for the node ... 
NodeKey key = null; List<Value> rawUuid = properties.get(JcrLexicon.UUID); String uuid = null; boolean shareableNodeAlreadyExists = false; if (rawUuid != null) { assert rawUuid.size() == 1; uuid = rawUuid.get(0).getString(); key = parentKey.withId(uuid); try { // Deal with any existing node ... AbstractJcrNode existingNode = session().node(key, null, parentKey); switch (uuidBehavior) { case ImportUUIDBehavior.IMPORT_UUID_COLLISION_REPLACE_EXISTING: parent = existingNode.getParent(); // Attention: this *does not* remove the entry from the DB. Therefore, it's always // accessible // to the workspace cache and thus to the current session !!!. // Therefore, *old properties, mixins etc* will be accessible on the new child created later on // until a session.save() is performed. preRemoveNode(existingNode); existingNode.remove(); break; case ImportUUIDBehavior.IMPORT_UUID_CREATE_NEW: key = cache().getRootKey().withRandomId(); break; case ImportUUIDBehavior.IMPORT_UUID_COLLISION_REMOVE_EXISTING: if (existingNode.path().isAtOrAbove(parent.path())) { String text = JcrI18n.cannotRemoveParentNodeOfTarget.text(existingNode.getPath(), key, parent.getPath()); throw new ConstraintViolationException(text); } // Attention: this *does not* remove the entry from the DB. Therefore, it's always // accessible // to the workspace cache and thus to the current session !!!. // Therefore, *old properties, mixins etc* will be accessible on the new child created later on // until a session.save() is performed. 
preRemoveNode(existingNode); existingNode.remove(); break; case ImportUUIDBehavior.IMPORT_UUID_COLLISION_THROW: if (existingNode.isShareable()) { shareableNodeAlreadyExists = true; } else { throw new ItemExistsException(JcrI18n.itemAlreadyExistsWithUuid.text(key, session().workspace() .getName(), existingNode.getPath())); } } } catch (ItemNotFoundException e) { // there wasn't an existing item, so just continue } } // See if the node was already autocreated by the parent AbstractJcrNode existingNode = parent.getNodeIfExists(nodeName); boolean nodeAlreadyExists = existingNode != null && existingNode.getDefinition().isAutoCreated(); // Create the new node ... AbstractJcrNode child; if (!nodeAlreadyExists) { List<Value> primaryTypeValueList = properties.get(JcrLexicon.PRIMARY_TYPE); String typeName = primaryTypeValueList != null ? primaryTypeValueList.get(0).getString() : null; Name primaryTypeName = nameFor(typeName); if (JcrNtLexicon.SHARE.equals(primaryTypeName)) { assert key != null; assert uuid != null; // check if we already have the key of the shareable node NodeKey shareableNodeKey = uuidToNodeKeyMapping.get(uuid); if (shareableNodeKey != null) { // we already know the key of the shareable node, so we need to just link it and return parent.mutable().linkChild(cache, shareableNodeKey, nodeName); node = session().node(shareableNodeKey, null, parentKey); } else { // we haven't processed the shareable node yet, so we need to make sure we process the share later. 
parent.mutable().linkChild(cache, key, nodeName); node = session().node(key, null, parentKey); // make sure we post-process the share and set the correct id nodesForPostProcessing.add(node); // save the original UUID of the share to be able to track it back to the shareable node shareIdsToUUIDMap.put(key, uuid); } ignoreAllChildren = true; return; } // store the node key that we created for this UUID, so we can create shares uuidToNodeKeyMapping.put(uuid, key); if (shareableNodeAlreadyExists && key != null) { parent.mutable().linkChild(cache, key, nodeName); node = session().node(key, null, parentKey); ignoreAllChildren = true; return; } // Otherwise, it's just a regular node... child = parent.addChildNode(nodeName, primaryTypeName, key, true, false); } else { child = existingNode; } assert child != null; // Set the properties on the new node ... // Set the mixin types first (before we set any properties that may require the mixins to be present) ... List<Value> mixinTypeValueList = properties.get(JcrLexicon.MIXIN_TYPES); if (mixinTypeValueList != null) { for (Value value : mixinTypeValueList) { String mixinName = value.getString(); // in the case when keys are being reused, the old node at that key is visible (with all its properties) // via the WS cache -> db. Therefore, there might be the case when even though the child was created // via addChild(), the old node with all the old properties and mixins is still visible at the key() and // so the "new child" reports the mixin as already present (even though it's not) boolean addMixinInternally = (child.isNodeType(mixinName) && !nodeAlreadyExists) || INTERNAL_MIXINS.contains(mixinName.toLowerCase()); if (addMixinInternally) { child.mutable().addMixin(child.sessionCache(), nameFor(mixinName)); } else { child.addMixin(mixinName); } } } for (Map.Entry<Name, List<Value>> entry : properties.entrySet()) { Name propertyName = entry.getKey(); // These are all handled earlier ... 
if (JcrLexicon.PRIMARY_TYPE.equals(propertyName)) { continue; } if (JcrLexicon.MIXIN_TYPES.equals(propertyName)) { continue; } if (JcrLexicon.UUID.equals(propertyName)) { continue; } List<Value> values = entry.getValue(); boolean allowEmptyValues = !isInternal(propertyName) || JcrLexicon.DATA.equals(propertyName); if (values.size() == 1 && !this.multiValuedPropertyNames.contains(propertyName)) { JcrValue value = (JcrValue)values.get(0); if (!allowEmptyValues && StringUtil.isBlank(value.getString())) { //if an empty value has creeped here it means we weren't able to perform proper type validation earlier continue; } if (isReference(value.getType())) { // if this is a reference, we won't set it on the node until we've finished loading all the nodes ReferenceProperty referenceProperty = new ReferenceProperty(propertyName, value); allReferenceProperties.put(child.key(), referenceProperty); nodesForPostProcessing.add(child); } else { // Don't check references or the protected status ... child.setProperty(propertyName, value, true, true, true, false); } } else { if (!allowEmptyValues) { for (Iterator<Value> iterator = values.iterator(); iterator.hasNext();) { if (StringUtil.isBlank(iterator.next().getString())) { iterator.remove(); } } } if (!values.isEmpty()) { Value[] processedValues = values.toArray(new Value[values.size()]); if (isReference(processedValues[0].getType())) { // if this is a reference, we won't set it on the node until we've finished loading all the nodes ReferenceProperty referenceProperty = new ReferenceProperty(propertyName, processedValues); allReferenceProperties.put(child.key(), referenceProperty); nodesForPostProcessing.add(child); } else { // Don't check references or the protected status ... child.setProperty(propertyName, processedValues, PropertyType.UNDEFINED, true, true, false, true); } } } } node = child; importedNodeKeys.add(node.key()); if (postProcessed) { // This node needs to be post-processed ... 
nodesForPostProcessing.add(node); } } catch (RepositoryException re) { throw new EnclosingSAXException(re); } } /** * Handle any operations before a node is removed or replaced by an imported node. * * @param removedNode the removed node */ protected void preRemoveNode( AbstractJcrNode removedNode ) { // Figure out if the node has backreferences ... CachedNode node; try { node = removedNode.node(); ReferrerCounts referrers = node.getReferrerCounts(cache); if (referrers != null) referrersByNodeKey.put(node.getKey(), referrers); } catch (ItemNotFoundException | InvalidItemStateException err) { // do nothing ... } } } protected boolean isReference( int propertyType ) { return (propertyType == PropertyType.REFERENCE || propertyType == PropertyType.WEAKREFERENCE || propertyType == org.modeshape.jcr.api.PropertyType.SIMPLE_REFERENCE); } protected class ExistingNodeHandler extends NodeHandler { private final AbstractJcrNode node; private final NodeHandler parentHandler; protected ExistingNodeHandler( AbstractJcrNode node, NodeHandler parentHandler ) { this.node = node; this.parentHandler = parentHandler; } @Override public AbstractJcrNode node() { return node; } @Override public NodeHandler parentHandler() { return parentHandler; } @Override public void addPropertyValue( Name propertyName, String value, boolean forceMultiValued, int propertyType, TextDecoder decoder ) { throw new UnsupportedOperationException(); } } protected class JcrRootHandler extends ExistingNodeHandler { protected JcrRootHandler( AbstractJcrNode root ) { super(root, null); } @Override public void addPropertyValue( Name propertyName, String value, boolean forceMultiValued, int propertyType, TextDecoder decoder ) { // do nothing ... 
} }

    /**
     * NodeHandler that silently ignores an entire branch of the imported content. It keeps the
     * parent chain intact so element nesting still balances, but creates no nodes.
     */
    protected class IgnoreBranchHandler extends NodeHandler {
        private NodeHandler parentHandler;

        protected IgnoreBranchHandler( NodeHandler parentHandler ) {
            this.parentHandler = parentHandler;
        }

        @Override
        public NodeHandler parentHandler() {
            return parentHandler;
        }
    }

    /**
     * Handler for the "jcr:system" branch; system content is never imported, so the whole branch
     * is ignored.
     */
    protected class JcrSystemHandler extends IgnoreBranchHandler {

        protected JcrSystemHandler( NodeHandler parentHandler ) {
            super(parentHandler);
        }
    }

    /** Factory that chooses the NodeHandler appropriate for a given imported element. */
    protected interface NodeHandlerFactory {
        NodeHandler createFor( Name nodeName,
                               NodeHandler parentHandler,
                               int uuidBehavior ) throws SAXException;
    }

    /**
     * Default NodeHandlerFactory: ignored branches propagate to their children, "jcr:root" maps
     * to the existing root node, "jcr:system" is skipped, and everything else gets a
     * BasicNodeHandler that creates the node.
     */
    protected class StandardNodeHandlerFactory implements NodeHandlerFactory {
        @Override
        public NodeHandler createFor( Name name,
                                      NodeHandler parentHandler,
                                      int uuidBehavior ) throws SAXException {
            if (parentHandler instanceof IgnoreBranchHandler || parentHandler.ignoreAllChildren()) {
                return new IgnoreBranchHandler(parentHandler);
            }
            if (JcrLexicon.ROOT.equals(name)) {
                try {
                    JcrRootNode rootNode = session().getRootNode();
                    return new JcrRootHandler(rootNode);
                } catch (RepositoryException re) {
                    throw new EnclosingSAXException(re);
                }
            }
            if (JcrLexicon.SYSTEM.equals(name)) {
                // Always do this, regardless of where the "jcr:system" branch is located ...
return new JcrSystemHandler(parentHandler); } return new BasicNodeHandler(name, parentHandler, uuidBehavior); } }

    /**
     * SAX handler for JCR "system view" XML, where nodes are {@code <sv:node>} elements and
     * properties are {@code <sv:property>} elements containing {@code <sv:value>} children. It
     * maintains a stack of NodeHandlers (via {@code current}) that mirrors element nesting.
     */
    private class SystemViewContentHandler extends DefaultHandler {
        // Qualified attribute names ("sv:name", "sv:type", "sv:multiple") resolved once up front.
        private final String svNameName;
        private final String svTypeName;
        private final String svMultipleName;
        // Handler for the node currently being built; acts as the top of the handler stack.
        private NodeHandler current;
        private final NodeHandlerFactory nodeHandlerFactory;
        // State for the <sv:property>/<sv:value> currently being parsed.
        private String currentPropertyName;
        private int currentPropertyType;
        private boolean currentPropertyValueIsBase64Encoded;
        private boolean currentPropertyIsMultiValued;
        private final StringBuilder currentPropertyValue;

        SystemViewContentHandler( AbstractJcrNode parent ) {
            super();
            this.svNameName = JcrSvLexicon.NAME.getString(namespaces());
            this.svTypeName = JcrSvLexicon.TYPE.getString(namespaces());
            this.svMultipleName = JcrSvLexicon.MULTIPLE.getString(namespaces());
            this.current = new ExistingNodeHandler(parent, null);
            this.nodeHandlerFactory = new StandardNodeHandlerFactory();
            this.currentPropertyValue = new StringBuilder();
        }

        @Override
        public void startElement( String uri,
                                  String localName,
                                  String name,
                                  Attributes atts ) throws SAXException {
            // Always reset the string builder at the beginning of an element
            currentPropertyValue.setLength(0);
            currentPropertyValueIsBase64Encoded = false;
            if ("node".equals(localName)) {
                // Finish the parent handler ...
                current.finish();
                // Create a new handler for this element ...
String nodeName = atts.getValue(SYSTEM_VIEW_NAME_DECODER.decode(svNameName)); current = nodeHandlerFactory.createFor(nameFor(nodeName), current, uuidBehavior); } else if ("property".equals(localName)) { currentPropertyName = atts.getValue(SYSTEM_VIEW_NAME_DECODER.decode(svNameName)); currentPropertyType = org.modeshape.jcr.api.PropertyType.valueFromName(atts.getValue(svTypeName)); String svMultiple = atts.getValue(svMultipleName); currentPropertyIsMultiValued = Boolean.TRUE.equals(Boolean.valueOf(svMultiple)); } else if ("value".equals(localName)) { // See if there is an "xsi:type" attribute on this element, which means the property value contained // characters that cannot be represented in XML without escaping. See Section 11.2, Item 11.b ... String xsiType = atts.getValue("http://www.w3.org/2001/XMLSchema-instance", "type"); if (StringUtil.isBlank(xsiType)) { return; } String propertyPrefix = namespaces.getPrefixForNamespaceUri("http://www.w3.org/2001/XMLSchema", false); if (StringUtil.isBlank(propertyPrefix)) { return; } String base64TypeName = propertyPrefix + ":base64Binary"; currentPropertyValueIsBase64Encoded = base64TypeName.equals(xsiType); } else if (!"value".equals(localName)) { throw new IllegalStateException("Unexpected element '" + name + "' in system view"); } } @Override public void characters( char[] ch, int start, int length ) { currentPropertyValue.append(ch, start, length); } @Override public void endElement( String uri, String localName, String name ) throws SAXException { switch (localName) { case "node": current.finish(); // make sure the node is created current = current.parentHandler(); break; case "value": // Add the content for the current property ... String currentPropertyString = currentPropertyValue.toString(); if (currentPropertyValueIsBase64Encoded) { // The current string is a base64 encoded string, so we need to decode it first ... 
try { currentPropertyString = decodeBase64AsString(currentPropertyString); } catch (IOException ioe) { throw new EnclosingSAXException(ioe); } } current.addPropertyValue(nameFor(currentPropertyName), currentPropertyString, currentPropertyIsMultiValued, currentPropertyType, SYSTEM_VIEW_NAME_DECODER); break; case "property": break; default: throw new IllegalStateException("Unexpected element '" + name + "' in system view"); } } } private class DocumentViewContentHandler extends DefaultHandler { private NodeHandler current; private final NodeHandlerFactory nodeHandlerFactory; DocumentViewContentHandler( AbstractJcrNode currentNode ) { super(); this.current = new ExistingNodeHandler(currentNode, null); this.nodeHandlerFactory = new StandardNodeHandlerFactory(); } @Override public void startElement( String uri, String localName, String name, Attributes atts ) throws SAXException { // Create the new handler for the new node ... String decodedLocalName = DOCUMENT_VIEW_NAME_DECODER.decode(localName); current = nodeHandlerFactory.createFor(nameFor(uri, decodedLocalName), current, uuidBehavior); List<String> allTypes = new ArrayList<>(); Map<Name, String> propertiesNamesValues = new HashMap<>(); for (int i = 0; i < atts.getLength(); i++) { Name propertyName = nameFor(atts.getURI(i), DOCUMENT_VIEW_NAME_DECODER.decode(atts.getLocalName(i))); String value = atts.getValue(i); if (value != null) { propertiesNamesValues.put(propertyName, value); if (JcrLexicon.PRIMARY_TYPE.equals(propertyName) || JcrLexicon.MIXIN_TYPES.equals(propertyName)) { allTypes.add(value); } } } Map<Name, Integer> propertyTypes = new HashMap<>(); for (String typeName : allTypes) { propertyTypes.putAll(propertyTypesFor(typeName)); } for (Map.Entry<Name, String> entry : propertiesNamesValues.entrySet()) { Name propertyName = entry.getKey(); Integer propertyDefinitionType = propertyTypes.get(propertyName); int propertyType = propertyDefinitionType != null ? 
propertyDefinitionType : PropertyType.STRING;
                String value = entry.getValue();
                // A space inside the attribute value is treated as a multi-value separator ...
                boolean isMultiValued = value.indexOf(" ") > 0;
                current.addPropertyValue(propertyName, value, isMultiValued, propertyType, DOCUMENT_VIEW_VALUE_DECODER);
            }
            // Now create the node ...
            current.finish();
        }

        @Override
        public void endElement( String uri, String localName, String name ) throws SAXException {
            current.finish();
            current = current.parentHandler();
        }

        /**
         * Whitespace-trimmed text content becomes a 'jcr:xmltext' child node holding the text in a
         * 'jcr:xmlcharacters' property, per the JCR document-view import rules.
         */
        @Override
        public void characters( char[] ch, int start, int length ) throws SAXException {
            String value = new String(ch, start, length);
            value = value.trim();
            if (value.length() == 0) return;
            // Create a 'jcr:xmltext' child node with a single 'jcr:xmlcharacters' property ...
            current = nodeHandlerFactory.createFor(JcrLexicon.XMLTEXT, current, uuidBehavior);
            current.addPropertyValue(JcrLexicon.PRIMARY_TYPE, stringFor(JcrNtLexicon.UNSTRUCTURED), false, PropertyType.NAME,
                                     DOCUMENT_VIEW_NAME_DECODER);
            current.addPropertyValue(JcrLexicon.XMLCHARACTERS, value, false, PropertyType.STRING, null);// don't decode value
            current.finish();
            // Pop the stack ...
            current = current.parentHandler();
        }
    }

    /**
     * Immutable holder for a reference-typed property whose value(s) cannot be set on the target
     * node until every node has been imported (the referenced nodes may appear later in the
     * stream).
     */
    private class ReferenceProperty {
        private final Name name;
        private final Value[] values;
        private final boolean singleValued;

        protected ReferenceProperty(Name name, Value value) {
            assert name != null;
            this.name = name;
            assert value != null;
            this.values = new Value[] {value};
            this.singleValued = true;
        }

        protected ReferenceProperty( Name name, Value[] values ) {
            assert name != null;
            this.name = name;
            assert values != null && values.length > 0;
            this.values = values;
            this.singleValued = false;
        }

        /** @return the JCR property type of the (first) value */
        protected int type() {
            return value().getType();
        }

        protected Name name() {
            return name;
        }

        protected boolean isMultiple() {
            return !singleValued;
        }

        /** @return the first (or only) value */
        protected JcrValue value() {
            return (JcrValue)values[0];
        }

        protected Value[] values() {
            return values;
        }
    }
}
/******************************************************************************* * * Copyright FUJITSU LIMITED 2017 * * Author: jaeger * * Creation Date: 22.01.2009 * * Completion Time: 13.12.2011 * *******************************************************************************/ package org.oscm.configurationservice.bean; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.mockito.Matchers.anyString; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.oscm.test.matchers.BesMatchers.hasAnnotation; import java.lang.annotation.Annotation; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.concurrent.Callable; import javax.ejb.EJBException; import javax.ejb.Lock; import javax.ejb.Schedule; import javax.persistence.TypedQuery; import org.junit.Test; import org.oscm.configurationservice.local.ConfigurationServiceLocal; import org.oscm.dataservice.bean.DataServiceBean; import org.oscm.dataservice.local.DataService; import org.oscm.domobjects.ConfigurationSetting; import org.oscm.internal.types.enumtypes.ConfigurationKey; import org.oscm.internal.vo.VOConfigurationSetting; import org.oscm.test.EJBTestBase; import org.oscm.test.ejb.TestContainer; import org.oscm.test.stubs.ConfigurationServiceStub; import org.oscm.types.constants.Configuration; /** * @author jaeger */ public class ConfigurationServiceBeanIT extends EJBTestBase { private ConfigurationServiceBean confSvc; private ConfigurationServiceLocal confSvcLocal; @Override protected void setup(TestContainer container) throws Exception { container.addBean(new ConfigurationServiceStub()); 
container.addBean(new DataServiceBean()); container.addBean(new ConfigurationServiceBean()); confSvc = container.get(ConfigurationServiceBean.class); confSvcLocal = container.get(ConfigurationServiceLocal.class); confSvc.init(); } @Test(expected = Exception.class) public void testGetConfigurationSetting_ForNullContext() throws Exception { final ConfigurationSetting setting = new ConfigurationSetting( ConfigurationKey.BASE_URL, Configuration.GLOBAL_CONTEXT, "bla"); runTX(new Callable<Void>() { @Override public Void call() throws Exception { confSvcLocal.setConfigurationSetting(setting); return null; } }); confSvc.getVOConfigurationSetting(ConfigurationKey.BASE_URL, null); } @Test public void testGetConfigurationSetting_ForNonNullContext() throws Exception { final ConfigurationSetting initSetting = new ConfigurationSetting( ConfigurationKey.BASE_URL, "context2", "anotherValue"); runTX(new Callable<Void>() { @Override public Void call() throws Exception { confSvcLocal.setConfigurationSetting(initSetting); return null; } }); VOConfigurationSetting setting = confSvc.getVOConfigurationSetting( ConfigurationKey.BASE_URL, "context2"); assertNotNull(setting); } @Test public void testSetConfigurationSetting_NotExistingNonNullContext() throws Exception { final ConfigurationSetting initialSetting = new ConfigurationSetting( ConfigurationKey.BASE_URL, "context3", "testValueForSet"); runTX(new Callable<Void>() { @Override public Void call() throws Exception { confSvcLocal.setConfigurationSetting(initialSetting); return null; } }); VOConfigurationSetting setting = confSvc.getVOConfigurationSetting( ConfigurationKey.BASE_URL, "context3"); assertNotNull(setting); } @Test public void testSetConfigurationSetting_ExistingSettingNonNullContext() throws Exception { final ConfigurationSetting initialSetting = new ConfigurationSetting( ConfigurationKey.BASE_URL, "context", "testValueForSet"); runTX(new Callable<Void>() { @Override public Void call() throws Exception { 
confSvcLocal.setConfigurationSetting(initialSetting); return null; } }); final ConfigurationSetting initialSetting1 = new ConfigurationSetting( ConfigurationKey.BASE_URL, "context", "testValueForSet2"); runTX(new Callable<Void>() { @Override public Void call() throws Exception { confSvcLocal.setConfigurationSetting(initialSetting1); return null; } }); VOConfigurationSetting setting = confSvc.getVOConfigurationSetting( ConfigurationKey.BASE_URL, "context"); assertNotNull(setting); assertEquals("testValueForSet2", setting.getValue()); } @Test public void testSetConfigurationSetting_EmptyValueOptional() throws Exception { // must not be saved final ConfigurationSetting setting = createConfigurationSetting(" ", false); runTX(new Callable<Void>() { @Override public Void call() throws Exception { confSvcLocal.setConfigurationSetting(setting); return null; } }); ConfigurationSetting read = runTX(new Callable<ConfigurationSetting>() { @Override public ConfigurationSetting call() { return confSvcLocal.getConfigurationSetting( setting.getInformationId(), setting.getContextId()); } }); // must be a fresh object created with default value assertEquals(0, read.getKey()); assertEquals(setting.getInformationId().getFallBackValue(), read.getValue()); } @Test public void testSetConfigurationSetting_NullValueOptional() throws Exception { // must not be saved final ConfigurationSetting setting = createConfigurationSetting(null, false); runTX(new Callable<Void>() { @Override public Void call() throws Exception { confSvcLocal.setConfigurationSetting(setting); return null; } }); ConfigurationSetting read = runTX(new Callable<ConfigurationSetting>() { @Override public ConfigurationSetting call() { return confSvcLocal.getConfigurationSetting( setting.getInformationId(), setting.getContextId()); } }); // must be a fresh object created with default value assertEquals(0, read.getKey()); assertEquals(setting.getInformationId().getFallBackValue(), read.getValue()); } @Test public void 
testSetConfigurationSetting_DeleteOptional() throws Exception { // first save it final ConfigurationSetting setting = createConfigurationSetting("test", false); runTX(new Callable<Void>() { @Override public Void call() throws Exception { confSvcLocal.setConfigurationSetting(setting); return null; } }); final ConfigurationSetting read = runTX( new Callable<ConfigurationSetting>() { @Override public ConfigurationSetting call() { return confSvcLocal.getConfigurationSetting( setting.getInformationId(), setting.getContextId()); } }); assertEquals(setting.getInformationId(), read.getInformationId()); assertEquals(setting.getValue(), read.getValue()); // and now delete it by setting value to empty string read.setValue(" "); runTX(new Callable<Void>() { @Override public Void call() throws Exception { confSvcLocal.setConfigurationSetting(read); return null; } }); // now check that the default value is used again ConfigurationSetting read1 = runTX( new Callable<ConfigurationSetting>() { @Override public ConfigurationSetting call() { return confSvcLocal.getConfigurationSetting( setting.getInformationId(), setting.getContextId()); } }); // must be a fresh object created with default value assertEquals(0, read1.getKey()); assertEquals(setting.getInformationId().getFallBackValue(), read1.getValue()); } @Test public void testGetConfigurationSettings_OneHit() throws Exception { runTX(new Callable<Void>() { @Override public Void call() throws Exception { confSvcLocal.setConfigurationSetting( new ConfigurationSetting(ConfigurationKey.BASE_URL, Configuration.GLOBAL_CONTEXT, "initialValue")); return null; } }); List<ConfigurationSetting> result = runTX( new Callable<List<ConfigurationSetting>>() { @Override public List<ConfigurationSetting> call() { return confSvcLocal.getAllConfigurationSettings(); } }); assertNotNull(result); assertEquals(1, result.size()); ConfigurationSetting entry = result.get(0); assertEquals(ConfigurationKey.BASE_URL, entry.getInformationId()); 
assertEquals("initialValue", entry.getValue()); }

    /** With no settings stored, reading all settings must yield an empty (but non-null) list. */
    @Test
    public void testGetConfigurationSettings_NoHits() throws Exception {
        List<ConfigurationSetting> result = runTX(
                new Callable<List<ConfigurationSetting>>() {
                    @Override
                    public List<ConfigurationSetting> call() {
                        return confSvcLocal.getAllConfigurationSettings();
                    }
                });
        assertNotNull(result);
        assertTrue(result.isEmpty());
    }

    /** Reading a mandatory setting that was never stored must raise an exception. */
    @Test(expected = EJBException.class)
    public void testGetConfigurationSetting_NoHitsException() throws Exception {
        final ConfigurationSetting setting = createConfigurationSetting("test", true);
        runTX(new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                confSvcLocal.setConfigurationSetting(setting);
                return null;
            }
        });
        // BASE_URL_HTTPS not set, but mandatory
        confSvcLocal.getConfigurationSetting(ConfigurationKey.BASE_URL_HTTPS, "");
    }

    /** An empty context id must fall back to the setting stored in the global context. */
    @Test
    public void testGetConfigurationSetting_NoHitsUseGlobalContext() throws Exception {
        String value = "test";
        final ConfigurationSetting setting = createConfigurationSetting(value, false);
        runTX(new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                confSvcLocal.setConfigurationSetting(setting);
                return null;
            }
        });
        // empty context id, global context will be used
        assertEquals(value, confSvcLocal
                .getConfigurationSetting(ConfigurationKey.LOG_LEVEL, "")
                .getValue());
    }

    @Test
    public void testGetNodeName() {
        assertEquals("SingleNode", confSvcLocal.getNodeName());
        // NOTE(review): this sets a JVM-wide system property and never restores it, which can
        // leak into other tests running in the same VM — consider restoring it afterwards.
        System.setProperty("bss.nodename", "local");
        assertEquals("local", confSvcLocal.getNodeName());
    }

    /** Two stored settings must both be returned, in stable order, with their values intact. */
    @Test
    public void testGetConfigurationSettings_MultipleHits() throws Exception {
        runTX(new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                confSvcLocal.setConfigurationSetting(
                        new ConfigurationSetting(ConfigurationKey.BASE_URL,
                                Configuration.GLOBAL_CONTEXT, "initialValue"));
                return null;
            }
        });
        runTX(new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                confSvcLocal.setConfigurationSetting(new ConfigurationSetting(
ConfigurationKey.HIDDEN_UI_ELEMENTS, Configuration.GLOBAL_CONTEXT, "initialValue2")); return null; } }); List<ConfigurationSetting> result = runTX( new Callable<List<ConfigurationSetting>>() { @Override public List<ConfigurationSetting> call() { return confSvcLocal.getAllConfigurationSettings(); } }); assertNotNull(result); assertEquals(2, result.size()); ConfigurationSetting entry = result.get(0); assertEquals(ConfigurationKey.BASE_URL, entry.getInformationId()); assertEquals("initialValue", entry.getValue()); entry = result.get(1); assertEquals(ConfigurationKey.HIDDEN_UI_ELEMENTS, entry.getInformationId()); assertEquals("initialValue2", entry.getValue()); } private static ConfigurationSetting createConfigurationSetting(String value, boolean mandatory) { ConfigurationKey key; if (mandatory) { key = ConfigurationKey.LOG_FILE_PATH; } else { key = ConfigurationKey.LOG_LEVEL; } return new ConfigurationSetting(key, Configuration.GLOBAL_CONTEXT, value); } @Test public void init() { // given ConfigurationServiceBean service = spy(new ConfigurationServiceBean()); service.dm = mock(DataService.class); doReturn(mock(TypedQuery.class)).when(service.dm) .createNamedQuery(anyString(), eq(ConfigurationSetting.class)); // when service.init(); // then verify(service, times(1)).refreshCache(); } @Test public void refreshCache() { // when ConfigurationServiceBean service = spy(new ConfigurationServiceBean()); doReturn(givenConfigurationSettings()).when(service) .getAllConfigurationSettings(); // given service.refreshCache(); // then service.cache.containsKey(ConfigurationKey.BASE_URL); } private List<ConfigurationSetting> givenConfigurationSettings() { List<ConfigurationSetting> result = new ArrayList<>(); result.add(new ConfigurationSetting(ConfigurationKey.BASE_URL, "aContext", "aValue")); return result; } /** * Verify if the cache is refreshed every 10 minutes and the method is * locked properly. 
*/ @Test public void refreshCache_checkAnnotations() throws Exception { // given Method method = ConfigurationServiceBean.class .getMethod("refreshCache"); List<Annotation> annotations = givenRefreshCacheAnnotations(); // when assertThat(method, hasAnnotation(annotations)); // then no exception } private List<Annotation> givenRefreshCacheAnnotations() { List<Annotation> result = new ArrayList<>(); Annotation schedule = mock(Annotation.class); doReturn(Schedule.class).when(schedule).annotationType(); doReturn("minute = \"*/10\"").when(schedule).toString(); result.add(schedule); result.add(createLockAnnotation("LockType.WRITE")); return result; } private Annotation createLockAnnotation(String type) { Annotation lock = mock(Annotation.class); doReturn(Lock.class).when(lock).annotationType(); doReturn(type).when(lock).toString(); return lock; } @Test public void setConfigurationSetting_refreshCacheCalled() throws Exception { // given ConfigurationServiceBean service = spy(new ConfigurationServiceBean()); service.dm = mock(DataService.class); doReturn(mock(TypedQuery.class)).when(service.dm) .createNamedQuery(anyString(), eq(ConfigurationSetting.class)); // when service.setConfigurationSetting(new ConfigurationSetting()); // then verify(service, times(1)).refreshCache(); } @Test public void setConfigurationSetting_writeLockSet() throws Exception { // given Method method = ConfigurationServiceBean.class.getMethod( "setConfigurationSetting", ConfigurationSetting.class); List<Annotation> annotations = givenSetConfigurationSettingAnnotations(); // when assertThat(method, hasAnnotation(annotations)); // then no exception } private List<Annotation> givenSetConfigurationSettingAnnotations() { return Arrays.asList(createLockAnnotation("LockType.WRITE")); } @Test public void ConfigurationServiceBean_readLockSet() { // given List<Annotation> annotations = Arrays .asList(createLockAnnotation("LockType.READ")); // when assertThat(ConfigurationServiceBean.class, 
hasAnnotation(annotations)); // then no exception } @Test public void ConfigurationServiceBean_readLockSet1() { // given List<Annotation> annotations = Arrays .asList(createLockAnnotation("LockType.READ")); // when assertThat(ConfigurationServiceBean.class, hasAnnotation(annotations)); // then no exception } @Test public void ConfigurationServiceBean_getBaseUrl_EmptyBaseUrl() throws Exception { // given final ConfigurationSetting settingHttp = new ConfigurationSetting( ConfigurationKey.BASE_URL, Configuration.GLOBAL_CONTEXT, ""); runTX(new Callable<Void>() { @Override public Void call() throws Exception { confSvcLocal.setConfigurationSetting(settingHttp); return null; } }); final ConfigurationSetting settingHttps = new ConfigurationSetting( ConfigurationKey.BASE_URL_HTTPS, Configuration.GLOBAL_CONTEXT, "initialValue"); runTX(new Callable<Void>() { @Override public Void call() throws Exception { confSvcLocal.setConfigurationSetting(settingHttps); return null; } }); // when String result = confSvc.getBaseURL(); // then no exception assertEquals("initialValue", result); } @Test public void ConfigurationServiceBean_getBaseUrl_NullBaseUrl() throws Exception { // given final ConfigurationSetting settingHttp = new ConfigurationSetting( ConfigurationKey.BASE_URL, Configuration.GLOBAL_CONTEXT, null); runTX(new Callable<Void>() { @Override public Void call() throws Exception { confSvcLocal.setConfigurationSetting(settingHttp); return null; } }); final ConfigurationSetting settingHttps = new ConfigurationSetting( ConfigurationKey.BASE_URL_HTTPS, Configuration.GLOBAL_CONTEXT, "initialValue"); runTX(new Callable<Void>() { @Override public Void call() throws Exception { confSvcLocal.setConfigurationSetting(settingHttps); return null; } }); // when String result = confSvc.getBaseURL(); // then no exception assertEquals("initialValue", result); } }
/* * Copyright 2019 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.workbench.common.dmn.client.editors.documentation.common; import java.util.ArrayList; import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.stream.Stream; import java.util.stream.StreamSupport; import javax.inject.Inject; import com.ait.lienzo.client.core.shape.Viewport; import com.ait.lienzo.shared.core.types.DataURLType; import org.kie.workbench.common.dmn.api.definition.HasExpression; import org.kie.workbench.common.dmn.api.definition.HasName; import org.kie.workbench.common.dmn.api.definition.HasVariable; import org.kie.workbench.common.dmn.api.definition.model.DRGElement; import org.kie.workbench.common.dmn.api.property.dmn.DMNExternalLink; import org.kie.workbench.common.dmn.api.property.dmn.QName; import org.kie.workbench.common.dmn.client.common.BoxedExpressionHelper; import org.kie.workbench.common.dmn.client.editors.expressions.ExpressionContainerGrid; import org.kie.workbench.common.dmn.client.editors.expressions.ExpressionEditorView; import org.kie.workbench.common.dmn.client.editors.expressions.ExpressionEditorViewImpl; import org.kie.workbench.common.dmn.client.session.DMNSession; import org.kie.workbench.common.stunner.core.client.api.SessionManager; import org.kie.workbench.common.stunner.core.diagram.Diagram; import org.kie.workbench.common.stunner.core.graph.Edge; import 
org.kie.workbench.common.stunner.core.graph.Graph; import org.kie.workbench.common.stunner.core.graph.Node; import org.kie.workbench.common.stunner.core.graph.content.view.View; public class DMNDocumentationDRDsFactory { static final String NONE = ""; private final SessionManager sessionManager; private final BoxedExpressionHelper expressionHelper; @Inject public DMNDocumentationDRDsFactory(final SessionManager sessionManager, final BoxedExpressionHelper expressionHelper) { this.sessionManager = sessionManager; this.expressionHelper = expressionHelper; } public List<DMNDocumentationDRD> create(final Diagram diagram) { final Optional<String> previousNodeUUID = getExpressionContainerGrid().getNodeUUID(); final List<DMNDocumentationDRD> drds = createDMNDocumentationDRDs(diagram); previousNodeUUID.ifPresent(uuid -> setExpressionContainerGrid(diagram, uuid)); return drds; } String getNodeImage(final Diagram diagram, final Node<View, Edge> node) { if (!hasExpression(node)) { return NONE; } setExpressionContainerGrid(diagram, node.getUUID()); final ExpressionContainerGrid grid = getExpressionContainerGrid(); final Viewport viewport = grid.getViewport(); final int padding = 10; final int wide = (int) (grid.getWidth() + padding); final int high = (int) (grid.getHeight() + padding); viewport.setPixelSize(wide, high); return viewport.toDataURL(DataURLType.PNG); } void clearSelections(final ExpressionContainerGrid grid) { grid.getBaseExpressionGrid().ifPresent(expressionGrid -> { expressionGrid.getModel().clearSelections(); expressionGrid.draw(); }); } void setExpressionContainerGrid(final Diagram diagram, final String uuid) { final Node<View, Edge> node = getNode(diagram, uuid); final Object definition = expressionHelper.getDefinition(node); final HasExpression hasExpression = expressionHelper.getHasExpression(node); final Optional<HasName> hasName = Optional.of((HasName) definition); final ExpressionContainerGrid grid = getExpressionContainerGrid(); 
grid.setExpression(node.getUUID(), hasExpression, hasName, false); clearSelections(grid); } private List<DMNDocumentationDRD> createDMNDocumentationDRDs(final Diagram diagram) { final List<DMNDocumentationDRD> dmnDocumentationDRDS = new ArrayList<>(); getNodeStream(diagram).forEach(node -> { final Object definition = expressionHelper.getDefinition(node); if (definition instanceof DRGElement) { final DRGElement drgElement = (DRGElement) definition; dmnDocumentationDRDS.add(createDMNDocumentationDRD(diagram, node, drgElement)); } }); return dmnDocumentationDRDS; } private DMNDocumentationDRD createDMNDocumentationDRD(final Diagram diagram, final Node<View, Edge> node, final DRGElement drgElement) { final String name = getName(drgElement); final String description = getDescription(drgElement); final String type = getType(drgElement); final String image = getNodeImage(diagram, node); final List<DMNDocumentationExternalLink> externalLinks = getExternalLinks(drgElement); return DMNDocumentationDRD.create(name, type, description, image, externalLinks, !externalLinks.isEmpty()); } private List<DMNDocumentationExternalLink> getExternalLinks(final DRGElement drgElement) { final List<DMNDocumentationExternalLink> list = new ArrayList<>(); if (!Objects.isNull(drgElement.getLinksHolder()) && !Objects.isNull(drgElement.getLinksHolder().getValue())) { for (final DMNExternalLink link : drgElement.getLinksHolder().getValue().getLinks()) { list.add(DMNDocumentationExternalLink.create(link.getDescription(), link.getUrl())); } } return list; } private String getType(final DRGElement drgElement) { if (drgElement instanceof HasVariable) { return getType(((HasVariable) drgElement).getVariable().getTypeRef()); } return NONE; } private String getType(final QName qName) { return Optional .ofNullable(qName) .map(QName::getLocalPart) .orElse(NONE); } private String getName(final DRGElement drgElement) { return drgElement.getName().getValue(); } private String getDescription(final DRGElement 
drgElement) { return drgElement.getDescription().getValue(); } private ExpressionContainerGrid getExpressionContainerGrid() { final ExpressionEditorView.Presenter expressionEditor = getCurrentSession().getExpressionEditor(); return ((ExpressionEditorViewImpl) expressionEditor.getView()).getExpressionContainerGrid(); } private boolean hasExpression(final Node<View, Edge> node) { return expressionHelper.getOptionalHasExpression(node).isPresent(); } private DMNSession getCurrentSession() { return sessionManager.getCurrentSession(); } private Node<View, Edge> getNode(final Diagram diagram, final String uuid) { return getNodeStream(diagram) .filter(node -> Objects.equals(uuid, node.getUUID())) .findFirst() .orElseThrow(UnsupportedOperationException::new); } @SuppressWarnings("unchecked") private Stream<Node<View, Edge>> getNodeStream(final Diagram diagram) { final Graph graph = diagram.getGraph(); final Iterable<Node> nodes = graph.nodes(); return StreamSupport .stream(nodes.spliterator(), false) .map(node -> (Node<View, Edge>) node); } }
package edu.harvard.iq.dataverse.api;

import com.google.common.base.Stopwatch;
import static com.jayway.restassured.RestAssured.given;
import com.jayway.restassured.http.ContentType;
import com.jayway.restassured.internal.path.xml.NodeChildrenImpl;
import com.jayway.restassured.path.json.JsonPath;
import static com.jayway.restassured.path.json.JsonPath.with;
import com.jayway.restassured.path.xml.XmlPath;
import static com.jayway.restassured.path.xml.XmlPath.from;
import com.jayway.restassured.response.Response;
import edu.harvard.iq.dataverse.Dataverse;
import edu.harvard.iq.dataverse.search.IndexServiceBean;
import edu.harvard.iq.dataverse.search.SearchFields;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import static java.lang.Thread.sleep;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.TreeSet;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.json.Json;
import javax.json.JsonArrayBuilder;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
import junit.framework.Assert;
import static junit.framework.Assert.assertEquals;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Integration tests for the Search API: verifies that search visibility
 * (Solr "discoverable by" permission documents) tracks role grants/revokes,
 * publishing, and file deletion across three test users.
 */
public class SearchIT {

    private static final Logger logger = Logger.getLogger(SearchIT.class.getCanonicalName());

    private static final String builtinUserKey = "burrito";
    private static final String keyString = "X-Dataverse-key";
    private static final String EMPTY_STRING = "";
    private static final String idKey = "id";
    private static final String apiTokenKey = "apiToken";
    private static final String usernameKey = "userName";
    private static final String emailKey = "email";
    // Test users created once in setUpClass(); homer is made a superuser.
    private static TestUser homer;
    private static TestUser ned;
    private static TestUser clancy;
    private static final String categoryTestDataverse = "categoryTestDataverse";
    private static final String dvForPermsTesting = "dvForPermsTesting";
    // Global ids (and database ids) of the datasets the tests create.
    private static String dataset1;
    private static String dataset2;
    private static String dataset3;
    private static Integer dataset2Id;
    private static Integer dataset3Id;
    private static long nedAdminOnRootAssignment;
    private static String dataverseToCreateDataset1In = "root";
    /**
     * @todo Figure out why we sometimes get database deadlocks when all tests
     * are enabled: https://github.com/IQSS/dataverse/issues/2460
     */
    private static final boolean disableTestPermsonRootDv = false;
    private static final boolean disableTestPermsOnNewDv = false;
    // NOTE(review): unlike its siblings this flag lacks a "disable" prefix but
    // is used the same way (test returns early when true) — confirm intent.
    private static final boolean homerPublishesVersion2AfterDeletingFile = false;
    private static final boolean disableTestCategory = false;
    private Stopwatch timer;
    private boolean haveToUseCurlForUpload = true;

    public SearchIT() {
    }

    /**
     * Creates the three test users (homer as superuser) and reconciles their
     * ids with the database.
     */
    @BeforeClass
    public static void setUpClass() {
        boolean enabled = true;
        if (!enabled) {
            return;
        }
        logger.info("Running setup...");
        JsonObject homerJsonObject = createUser(getUserAsJsonString("homer", "Homer", "Simpson"));
        homer = new TestUser(homerJsonObject);
        int homerIdFromDatabase = getUserIdFromDatabase(homer.getUsername());
        if (homerIdFromDatabase != homer.getId()) {
            // should never reach here: https://github.com/IQSS/dataverse/issues/2418
            homer.setId(homerIdFromDatabase);
        }
        Response makeSuperUserResponse = makeSuperuser(homer.getUsername());
        assertEquals(200, makeSuperUserResponse.getStatusCode());
        JsonObject nedJsonObject = createUser(getUserAsJsonString("ned", "Ned", "Flanders"));
        ned = new TestUser(nedJsonObject);
        int nedIdFromDatabase = getUserIdFromDatabase(ned.getUsername());
        if (nedIdFromDatabase != ned.getId()) {
            // should never reach here: https://github.com/IQSS/dataverse/issues/2418
            ned.setId(nedIdFromDatabase);
        }
        JsonObject clancyJsonObject = createUser(getUserAsJsonString("clancy", "Clancy", "Wiggum"));
        clancy = new TestUser(clancyJsonObject);
        int clancyIdFromDatabase = getUserIdFromDatabase(clancy.getUsername());
        if (clancyIdFromDatabase != clancy.getId()) {
            // should never reach here: https://github.com/IQSS/dataverse/issues/2418
            clancy.setId(clancyIdFromDatabase);
        }
    }

    /**
     * Granting ned "admin" on the root dataverse must make homer's dataset
     * searchable by ned; revoking the role must hide it again.
     */
    @Test
    public void homerGivesNedPermissionAtRoot() {
        if (disableTestPermsonRootDv) {
            return;
        }
        Response enableNonPublicSearch = enableSetting(SettingsServiceBean.Key.SearchApiNonPublicAllowed);
        assertEquals(200, enableNonPublicSearch.getStatusCode());
        long rootDataverseId = 1;
        String rootDataverseAlias = getDataverseAlias(rootDataverseId, homer.getApiToken());
        if (rootDataverseAlias != null) {
            dataverseToCreateDataset1In = rootDataverseAlias;
        }
        String xmlIn = getDatasetXml(homer.getUsername(), homer.getUsername(), homer.getUsername());
        Response createDataset1Response = createDataset(xmlIn, dataverseToCreateDataset1In, homer.getApiToken());
//        System.out.println(createDataset1Response.prettyPrint());
        assertEquals(201, createDataset1Response.getStatusCode());
        dataset1 = getGlobalId(createDataset1Response);
//        String zipFileName = "1000files.zip";
        String zipFileName = "trees.zip";
        if (haveToUseCurlForUpload) {
            Process uploadZipFileProcess = uploadZipFileWithCurl(dataset1, zipFileName, homer.getApiToken());
//            printCommandOutput(uploadZipFileProcess);
        } else {
            try {
                Response uploadZipFileResponse = uploadZipFile(dataset1, zipFileName, homer.getApiToken());
            } catch (FileNotFoundException ex) {
                System.out.println("Problem uploading " + zipFileName + ": " + ex.getMessage());
            }
        }
        Integer idHomerFound = printDatasetId(dataset1, homer);
        assertEquals(true, idHomerFound != null);
        // ned must not see the dataset before being granted the role
        Integer idNedFoundBeforeBecomingAdmin = printDatasetId(dataset1, ned);
        String roleToAssign = "admin";
        assertEquals(null, idNedFoundBeforeBecomingAdmin);
        timer = Stopwatch.createStarted();
        Response grantNedAdminOnRoot = grantRole(dataverseToCreateDataset1In, roleToAssign, ned.getUsername(), homer.getApiToken());
//        System.out.println(grantNedAdminOnRoot.prettyPrint());
        System.out.println("Method took: " + timer.stop());
        assertEquals(200, grantNedAdminOnRoot.getStatusCode());
        Integer idNedFoundAfterBecomingAdmin = printDatasetId(dataset1, ned);
//        Response contentDocResponse = querySolr("entityId:" + idHomerFound);
//        System.out.println(contentDocResponse.prettyPrint());
//        Response permDocResponse = querySolr("definitionPointDvObjectId:" + idHomerFound);
//        System.out.println(idHomerFound + " was found by homer (user id " + homer.getId() + ")");
//        System.out.println(idNedFoundAfterBecomingAdmin + " was found by ned (user id " + ned.getId() + ")");
        assertEquals(idHomerFound, idNedFoundAfterBecomingAdmin);
        nedAdminOnRootAssignment = getRoleAssignmentId(grantNedAdminOnRoot);
        timer = Stopwatch.createStarted();
        Response revokeNedAdminOnRoot = revokeRole(dataverseToCreateDataset1In, nedAdminOnRootAssignment, homer.getApiToken());
//        System.out.println(revokeNedAdminOnRoot.prettyPrint());
        System.out.println("Method took: " + timer.stop());
        assertEquals(200, revokeNedAdminOnRoot.getStatusCode());
        // after revocation, ned must no longer find the dataset
        Integer idNedFoundAfterNoLongerAdmin = printDatasetId(dataset1, ned);
        assertEquals(null, idNedFoundAfterNoLongerAdmin);
        Response disableNonPublicSearch = deleteSetting(SettingsServiceBean.Key.SearchApiNonPublicAllowed);
        assertEquals(200, disableNonPublicSearch.getStatusCode());
    }

    /**
     * Same role-grant visibility check as above, but in a freshly created
     * dataverse; additionally verifies index timestamps and the Solr
     * permission documents for a randomly chosen uploaded file before and
     * after publishing.
     */
    @Test
    public void homerGivesNedPermissionAtNewDv() {
        if (disableTestPermsOnNewDv) {
            return;
        }
        Response enableNonPublicSearch = enableSetting(SettingsServiceBean.Key.SearchApiNonPublicAllowed);
        assertEquals(200, enableNonPublicSearch.getStatusCode());
        TestDataverse dataverseToCreate = new TestDataverse(dvForPermsTesting, dvForPermsTesting, Dataverse.DataverseType.ORGANIZATIONS_INSTITUTIONS);
        Response createDvResponse = createDataverse(dataverseToCreate, homer);
        assertEquals(201, createDvResponse.getStatusCode());
        String xmlIn = getDatasetXml(homer.getUsername(), homer.getUsername(), homer.getUsername());
        Response createDataset1Response = createDataset(xmlIn, dvForPermsTesting, homer.getApiToken());
        assertEquals(201, createDataset1Response.getStatusCode());
        dataset2 = getGlobalId(createDataset1Response);
        Integer datasetIdHomerFound = printDatasetId(dataset2, homer);
        assertEquals(true, datasetIdHomerFound != null);
        dataset2Id = datasetIdHomerFound;
        // all four index timestamps must be present right after creation
        Map<String, String> datasetTimestampsAfterCreate = checkPermissionsOnDvObject(datasetIdHomerFound, homer.apiToken).jsonPath().getMap("data.timestamps", String.class, String.class);
        assertEquals(true, datasetTimestampsAfterCreate.get(Index.contentChanged) != null);
        assertEquals(true, datasetTimestampsAfterCreate.get(Index.contentIndexed) != null);
        assertEquals(true, datasetTimestampsAfterCreate.get(Index.permsChanged) != null);
        assertEquals(true, datasetTimestampsAfterCreate.get(Index.permsIndexed) != null);
//        String zipFileName = "noSuchFile.zip";
        String zipFileName = "trees.zip";
//        String zipFileName = "100files.zip";
//        String zipFileName = "1000files.zip";
        timer = Stopwatch.createStarted();
        if (haveToUseCurlForUpload) {
            Process uploadZipFileProcess = uploadZipFileWithCurl(dataset2, zipFileName, homer.getApiToken());
//            printCommandOutput(uploadZipFileProcess);
        } else {
            Response uploadZipFileResponse;
            try {
                uploadZipFileResponse = uploadZipFile(dataset2, zipFileName, homer.getApiToken());
            } catch (FileNotFoundException ex) {
                System.out.println("Problem uploading " + zipFileName + ": " + ex.getMessage());
            }
        }
        System.out.println("Uploading zip file took " + timer.stop());
        List<Integer> idsOfFilesUploaded = getIdsOfFilesUploaded(dataset2, datasetIdHomerFound, homer.getApiToken());
        int numFilesFound = idsOfFilesUploaded.size();
        System.out.println("num files found: " + numFilesFound);
        Integer idNedFoundBeforeRoleGranted = printDatasetId(dataset2, ned);
        assertEquals(null, idNedFoundBeforeRoleGranted);
        String roleToAssign = "admin";
        timer = Stopwatch.createStarted();
        Response grantNedAdmin = grantRole(dvForPermsTesting, roleToAssign, ned.getUsername(), homer.getApiToken());
//        System.out.println(grantNedAdmin.prettyPrint());
        System.out.println("granting role took " + timer.stop());
        assertEquals(200, grantNedAdmin.getStatusCode());
        Integer idNedFoundAfterRoleGranted = printDatasetId(dataset2, ned);
        assertEquals(datasetIdHomerFound, idNedFoundAfterRoleGranted);
        // wipe and rebuild the index entries, then re-check the timestamps
        clearIndexTimesOnDvObject(datasetIdHomerFound);
        reindexDataset(datasetIdHomerFound);
        Map<String, String> datasetTimestampsAfterReindex = checkPermissionsOnDvObject(datasetIdHomerFound, homer.apiToken).jsonPath().getMap("data.timestamps", String.class, String.class);
        assertEquals(true, datasetTimestampsAfterReindex.get(Index.contentChanged) != null);
        assertEquals(true, datasetTimestampsAfterReindex.get(Index.contentIndexed) != null);
        assertEquals(true, datasetTimestampsAfterReindex.get(Index.permsChanged) != null);
        assertEquals(true, datasetTimestampsAfterReindex.get(Index.permsIndexed) != null);
        if (!idsOfFilesUploaded.isEmpty()) {
            // pre-publication: draft perm doc must list exactly homer and ned
            Random random = new Random();
            int randomFileIndex = random.nextInt(numFilesFound);
            System.out.println("picking random file with index of " + randomFileIndex + " from list of " + numFilesFound);
            int randomFileId = idsOfFilesUploaded.get(randomFileIndex);
            Set<String> expectedSet = new HashSet<>();
            expectedSet.add(IndexServiceBean.getGroupPerUserPrefix() + homer.getId());
            expectedSet.add(IndexServiceBean.getGroupPerUserPrefix() + ned.getId());
            Response checkPermsReponse = checkPermissionsOnDvObject(randomFileId, homer.getApiToken());
//            checkPermsReponse.prettyPrint();
            // [0] because there's only one "permissions" Solr doc (a draft)
            List<String> permListFromDebugEndpoint = JsonPath.from(checkPermsReponse.getBody().asString()).get("data.perms[0]." + SearchFields.DISCOVERABLE_BY);
            Set<String> setFoundFromPermsDebug = new TreeSet<>();
            for (String perm : permListFromDebugEndpoint) {
                setFoundFromPermsDebug.add(perm);
            }
            Map<String, String> timeStamps = JsonPath.from(checkPermsReponse.getBody().asString()).get("data.timestamps");
            for (Map.Entry<String, String> entry : timeStamps.entrySet()) {
                String key = entry.getKey();
                String value = entry.getValue();
                System.out.println(key + ":" + value);
            }
            assertEquals(expectedSet, setFoundFromPermsDebug);
            // cross-check the same permissions directly against Solr
            Response solrQueryPerms = querySolr(SearchFields.DEFINITION_POINT_DVOBJECT_ID + ":" + randomFileId);
//            solrQueryPerms.prettyPrint();
            Set<String> setFoundFromSolr = new TreeSet<>();
            List<String> perms = JsonPath.from(solrQueryPerms.getBody().asString()).getList("response.docs[0]." + SearchFields.DISCOVERABLE_BY);
            for (String perm : perms) {
                setFoundFromSolr.add(perm);
            }
//            System.out.println(setFoundFromSolr + " found");
            assertEquals(expectedSet, setFoundFromSolr);
            Response solrQueryContent = querySolr(SearchFields.ENTITY_ID + ":" + randomFileId);
//            solrQueryContent.prettyPrint();
        }
        // publish everything, then verify public visibility
        long rootDataverseId = 1;
        String rootDataverseAlias = getDataverseAlias(rootDataverseId, homer.getApiToken());
        Response publishRootDataverseResponse = publishDataverseAsCreator(rootDataverseId);
//        publishRootDataverseResponse.prettyPrint();
        Response publishDataverseResponse = publishDataverse(dvForPermsTesting, homer.apiToken);
//        publishDataverseResponse.prettyPrint();
        Response publishDatasetResponse = publishDatasetViaNative(datasetIdHomerFound, homer.apiToken);
//        publishDatasetResponse.prettyPrint();
        Integer idClancyFoundAfterPublished = printDatasetId(dataset2, clancy);
        assertEquals(datasetIdHomerFound, idClancyFoundAfterPublished);
        if (!idsOfFilesUploaded.isEmpty()) {
            // post-publication: perm doc must list only the public group
            Random random = new Random();
            int randomFileIndex = random.nextInt(numFilesFound);
            System.out.println("picking random file with index of " + randomFileIndex + " from list of " + numFilesFound);
            int randomFileId = idsOfFilesUploaded.get(randomFileIndex);
            Set<String> expectedSet = new HashSet<>();
            expectedSet.add(IndexServiceBean.getPublicGroupString());
            Response checkPermsReponse = checkPermissionsOnDvObject(randomFileId, homer.getApiToken());
//            checkPermsReponse.prettyPrint();
            // [0] because there's only one "permissions" Solr doc (a published file)
            List<String> permListFromDebugEndpoint = JsonPath.from(checkPermsReponse.getBody().asString()).get("data.perms[0]." + SearchFields.DISCOVERABLE_BY);
            Set<String> setFoundFromPermsDebug = new TreeSet<>();
            for (String perm : permListFromDebugEndpoint) {
                setFoundFromPermsDebug.add(perm);
            }
            assertEquals(expectedSet, setFoundFromPermsDebug);
            Response solrQueryPerms = querySolr(SearchFields.DEFINITION_POINT_DVOBJECT_ID + ":" + randomFileId);
//            solrQueryPerms.prettyPrint();
            Set<String> setFoundFromSolr = new TreeSet<>();
            String publishedId = IndexServiceBean.solrDocIdentifierFile + randomFileId + IndexServiceBean.discoverabilityPermissionSuffix;
            List<Map> docs = with(solrQueryPerms.getBody().asString()).param("name", publishedId).get("response.docs.findAll { docs -> docs.id == name }");
            List<String> permsPublished = with(solrQueryPerms.getBody().asString()).param("name", publishedId).getList("response.docs.findAll { docs -> docs.id == name }[0]." + SearchFields.DISCOVERABLE_BY);
            for (String perm : permsPublished) {
                setFoundFromSolr.add(perm);
            }
            assertEquals(expectedSet, setFoundFromSolr);
            String draftId = IndexServiceBean.solrDocIdentifierFile + randomFileId + IndexServiceBean.draftSuffix + IndexServiceBean.discoverabilityPermissionSuffix;
            /**
             * @todo The fact that we're able to find the permissions document
             * for a file that has been published is a bug. It should be
             * deleted, ideally, when the dataset goes from draft to published.
             */
            List<String> permsFormerDraft = with(solrQueryPerms.getBody().asString()).param("name", draftId).getList("response.docs.findAll { docs -> docs.id == name }[0]." + SearchFields.DISCOVERABLE_BY);
//            System.out.println("permsDraft: " + permsFormerDraft);
            Response solrQueryContent = querySolr(SearchFields.ENTITY_ID + ":" + randomFileId);
//            solrQueryContent.prettyPrint();
        }
        Response disableNonPublicSearch = deleteSetting(SettingsServiceBean.Key.SearchApiNonPublicAllowed);
        assertEquals(200, disableNonPublicSearch.getStatusCode());
    }

    /**
     * Publishes v1.0 of a three-file dataset, deletes one file (creating a
     * draft) and checks the file listings seen via SWORD, search, and Solr by
     * creator, non-creator and guest.
     */
    @Test
    public void homerPublishesVersion2AfterDeletingFile() throws InterruptedException {
        if (homerPublishesVersion2AfterDeletingFile) {
            return;
        }
        Response enableNonPublicSearch = enableSetting(SettingsServiceBean.Key.SearchApiNonPublicAllowed);
        assertEquals(200, enableNonPublicSearch.getStatusCode());
        long rootDataverseId = 1;
        String rootDataverseAlias = getDataverseAlias(rootDataverseId, homer.getApiToken());
        if (rootDataverseAlias != null) {
            dataverseToCreateDataset1In = rootDataverseAlias;
        }
        String xmlIn = getDatasetXml(homer.getUsername(), homer.getUsername(), homer.getUsername());
        Response createDatasetResponse = createDataset(xmlIn, dataverseToCreateDataset1In, homer.getApiToken());
//        createDatasetResponse.prettyPrint();
        assertEquals(201, createDatasetResponse.getStatusCode());
        dataset3 = getGlobalId(createDatasetResponse);
//        System.out.println("dataset persistent id: " + dataset3);
        String zipFileName = "3files.zip";
        Process uploadZipFileProcess = uploadZipFileWithCurl(dataset3, zipFileName, homer.getApiToken());
//        printCommandOutput(uploadZipFileProcess);
        // give the async upload/indexing a moment before querying
        sleep(200);
        Integer datasetIdHomerFound = printDatasetId(dataset3, homer);
        assertEquals(true, datasetIdHomerFound != null);
        dataset3Id = datasetIdHomerFound;
        List<Integer> idsOfFilesUploaded = getIdsOfFilesUploaded(dataset3, datasetIdHomerFound, homer.getApiToken());
//        System.out.println("file IDs: " + idsOfFilesUploaded);
        Set<String> expectedInitialFilesHomer = new HashSet<String>() {
            {
                add("file1.txt");
                add("file2.txt");
                add("file3.txt");
            }
        };
        String DRAFT = "DRAFT";
        Response fileDataBeforePublishingV1Homer = getFileSearchData(dataset3, DRAFT, homer.getApiToken());
//        System.out.println("Files before publishing 1.0 as seen by creator...");
//        fileDataBeforePublishingV1Homer.prettyPrint();
        Set<String> actualInitialFilesHomer = getFileData(fileDataBeforePublishingV1Homer);
        assertEquals(expectedInitialFilesHomer, actualInitialFilesHomer);
//        System.out.println("Files before publishing 1.0 as seen by non-creator...");
        Response fileDataBeforePublishingV1Ned = getFileSearchData(dataset3, DRAFT, ned.getApiToken());
//        fileDataBeforePublishingV1Ned.prettyPrint();
        // ned (non-creator) must see no draft files before publication
        Set<String> actualInitialFilesed = getFileData(fileDataBeforePublishingV1Ned);
        assertEquals(new HashSet<String>(), actualInitialFilesed);
        Response publishDatasetResponse = publishDatasetViaSword(dataset3, homer.getApiToken());
//        publishDatasetResponse.prettyPrint();
        Response datasetAsJson = getDatasetAsJson(dataset3Id, homer.getApiToken());
//        datasetAsJson.prettyPrint();
//        Response fileDataAfterPublishingV1Ned = getFileSearchData(dataset3, ned.getApiToken());
        Response fileDataAfterPublishingV1Guest = getFileSearchData(dataset3, DRAFT, EMPTY_STRING);
//        System.out.println("Files after publishing 1.0 as seen by non-creator...");
//        fileDataAfterPublishingV1Guest.prettyPrint();
        // once published, even a guest must see all three files
        Set<String> actualFilesAfterPublishingV1Guest = getFileData(fileDataAfterPublishingV1Guest);
        assertEquals(expectedInitialFilesHomer, actualFilesAfterPublishingV1Guest);
//        getSwordStatement(dataset3, homer.getApiToken()).prettyPrint();
//        List<String> getfiles = getFileNameFromSearchDebug(dataset3, homer.getApiToken());
//        System.out.println("some files: " + getfiles);
        Response datasetFiles = getDatasetFilesEndpoint(dataset3Id, homer.getApiToken());
//        datasetFiles.prettyPrint();
        String fileToDelete = "file2.txt";
//        getSwordStatement(dataset3, homer.getApiToken()).prettyPrint();
//        System.out.println("### BEFORE TOUCHING PUBLISHED DATASET");
        Response atomEntryBeforeDeleteReponse = getSwordAtomEntry(dataset3, homer.getApiToken());
//        atomEntryBeforeDeleteReponse.prettyPrint();
        /**
         * @todo The "SWORD: deleting a file from a published version (not a
         * draft) creates a draft but doesn't delete the file" bug at
         * https://github.com/IQSS/dataverse/issues/2464 means we must first
         * create a draft via the "update metadata" endpoint before deleting the
         * file. Otherwise, the file won't be properly deleted!
         */
        System.out.println("Updating metadata before delete because of https://github.com/IQSS/dataverse/issues/2464");
        Response updateMetadataResponse = updateDatasetMetadataViaSword(dataset3, xmlIn, homer.getApiToken());
//        updateMetadataResponse.prettyPrint();
//        System.out.println("### AFTER UPDATING METADATA");
        Response atomEntryAfterDeleteReponse = getSwordAtomEntry(dataset3, homer.getApiToken());
//        atomEntryAfterDeleteReponse.prettyPrint();
        int fileId = getFileIdFromDatasetEndpointFileListing(datasetFiles, fileToDelete);
        Response deleteFileResponse = deleteFile(fileId, homer.getApiToken());
//        deleteFileResponse.prettyPrint();
        assertEquals(204, deleteFileResponse.statusCode());
//        System.out.println("### AFTER DELETING FILE");
        Response swordStatementAfterDelete = getSwordStatement(dataset3, homer.getApiToken());
//        swordStatementAfterDelete.prettyPrint();
        // the SWORD statement must now list only the two surviving files;
        // index [11] of the split URL is the file name segment
        XmlPath xmlPath = new XmlPath(swordStatementAfterDelete.body().asString());
        String firstFileName = xmlPath.get("feed.entry[0].id").toString().split("/")[11];
//        System.out.println("first file name:" + firstFileName);
        String secondFileName = xmlPath.get("feed.entry[1].id").toString().split("/")[11];
//        System.out.println("second file name: " + secondFileName);
        Set<String> filesFoundInSwordStatement = new HashSet<>();
        filesFoundInSwordStatement.add(firstFileName);
        filesFoundInSwordStatement.add(secondFileName);
        Set<String> expectedFilesInSwordStatementAfterDelete = new HashSet<String>() {
            {
                add("file1.txt");
                add("file3.txt");
            }
        };
        assertEquals(expectedFilesInSwordStatementAfterDelete, filesFoundInSwordStatement);
        NodeChildrenImpl thirdFileNode = xmlPath.get("feed.entry[2].id");
        /**
         * If you get "java.lang.String cannot be cast to
         * com.jayway.restassured.internal.path.xml.NodeChildrenImpl" here it
         * means that the third file was found and not deleted! See the note
         * above about https://github.com/IQSS/dataverse/issues/2464
         */
        assertEquals(true, thirdFileNode.isEmpty());
        // published version 1.0 must still show all three files to a guest
        Set<String> expectedV1FilesAfterDeleteGuest = new HashSet<String>() {
            {
                add("file1.txt");
                add("file2.txt");
                add("file3.txt");
            }
        };
        String v1dot0 = "1.0";
        Response fileDataAfterDelete = getFileSearchData(dataset3, v1dot0, EMPTY_STRING);
//        System.out.println("Files guest sees after Homer deletes a file from 1.0, creating a draft...");
//        fileDataAfterDelete.prettyPrint();
        Set<String> actualFilesAfterDelete = getFileData(fileDataAfterDelete);
        assertEquals(expectedV1FilesAfterDeleteGuest, actualFilesAfterDelete);
        Set<String> expectedDraftFilesAfterDeleteHomerAfterIssue2455Implemented = expectedFilesInSwordStatementAfterDelete;
        Response fileDataAfterDeleteHomer = getFileSearchData(dataset3, DRAFT, homer.getApiToken());
//        System.out.println("Files Homer sees in draft after deleting a file from v1.0...");
//        fileDataAfterDeleteHomer.prettyPrint();
        Set<String> actualDraftFilesAfterDeleteHomer = getFileData(fileDataAfterDeleteHomer);
        Response querySolrResponse = querySolr(SearchFields.PARENT_ID + ":" + dataset3Id);
//        querySolrResponse.prettyPrint();
        logger.info("files found: " + JsonPath.from(querySolrResponse.asString()).get("response.docs.name").toString());
        /**
         * @todo In order for this test to pass we'll probably need to change
         * the indexing rules defined in "Only show draft file card if file has
         * changed from published version"
         * https://github.com/IQSS/dataverse/issues/528 . From the "Use Solr for
         * file listing on dataset page" issue at
         * https://github.com/IQSS/dataverse/issues/2455 we'd like Homer to be
         * able to look at a post v1 draft and see that one of his three files
         * has been deleted in that draft.
With current indexing rules, this is * not possible. There are only three files indexed into Solr and they * all belong to the publish v1 dataset. We don't index drafts unless * the content has changed (again per issue 528). */ System.out.println(new TreeSet(expectedDraftFilesAfterDeleteHomerAfterIssue2455Implemented) + " expected after issue 2455 implemented"); System.out.println(new TreeSet(actualDraftFilesAfterDeleteHomer) + " actual"); // assertEquals(expectedDraftFilesAfterDeleteHomer, actualDraftFilesAfterDeleteHomer); Response disableNonPublicSearch = deleteSetting(SettingsServiceBean.Key.SearchApiNonPublicAllowed); assertEquals(200, disableNonPublicSearch.getStatusCode()); } @Test public void dataverseCategory() { if (disableTestCategory) { return; } Response enableNonPublicSearch = enableSetting(SettingsServiceBean.Key.SearchApiNonPublicAllowed); assertEquals(200, enableNonPublicSearch.getStatusCode()); /** * Unfortunately, it appears that the ability to specify the category of * a dataverse when creating it is a GUI-only feature. It can't * currently be done via the API, to our knowledge. You also can't tell * from the API which category was persisted but it always seems to be * "UNCATEGORIZED" */ TestDataverse dataverseToCreate = new TestDataverse(categoryTestDataverse, categoryTestDataverse, Dataverse.DataverseType.ORGANIZATIONS_INSTITUTIONS); Response createDvResponse = createDataverse(dataverseToCreate, homer); assertEquals(201, createDvResponse.getStatusCode()); TestSearchQuery query = new TestSearchQuery(categoryTestDataverse); Response searchResponse = search(query, homer); // searchResponse.prettyPrint(); JsonPath jsonPath = JsonPath.from(searchResponse.body().asString()); String category = jsonPath.get("data.facets." 
+ SearchFields.DATAVERSE_CATEGORY).toString();
    String msg = "category: " + category;
    // No category facet values come back for this dataverse, so the facet
    // list renders as "[null]".
    assertEquals("category: [null]", msg);
    Response disableNonPublicSearch = deleteSetting(SettingsServiceBean.Key.SearchApiNonPublicAllowed);
    assertEquals(200, disableNonPublicSearch.getStatusCode());
}

/**
 * Deletes the datasets, dataverses and users created by the tests above.
 * Each deletion is guarded by the same flag that guards its test.
 */
@AfterClass
public static void cleanup() {
    // Flip to false to keep the created objects around for manual debugging.
    boolean enabled = true;
    if (!enabled) {
        return;
    }
    logger.info("Running cleanup...");
    /**
     * We revoke roles here just in case an assertion failed because role
     * assignments are currently not deleted when you delete a user per
     * https://github.com/IQSS/dataverse/issues/1929
     *
     * You can also delete the role assignments manually like this:
     *
     * "DELETE FROM roleassignment WHERE assigneeidentifier='@ned';"
     */
    // Response revokeNedAdminOnRoot = revokeRole(dataverseToCreateDataset1In, nedAdminOnRootAssignment, homer.getApiToken());
    // System.out.println(revokeNedAdminOnRoot.prettyPrint());
    // System.out.println("cleanup - status code revoking admin on root from ned: " + revokeNedAdminOnRoot.getStatusCode());
    /**
     *
     */
    if (!disableTestPermsonRootDv) {
        Response deleteDataset1Response = deleteDataset(dataset1, homer.getApiToken());
        assertEquals(204, deleteDataset1Response.getStatusCode());
    }
    if (!disableTestPermsOnNewDv) {
        Response destroyDatasetResponse = destroyDataset(dataset2Id, homer.getApiToken());
        assertEquals(200, destroyDatasetResponse.getStatusCode());
    }
    if (!homerPublishesVersion2AfterDeletingFile) {
        Response destroyDataset = destroyDataset(dataset3Id, homer.getApiToken());
        assertEquals(200, destroyDataset.getStatusCode());
    }
    if (!disableTestCategory) {
        Response deleteCategoryDataverseResponse = deleteDataverse(categoryTestDataverse, homer);
        assertEquals(200, deleteCategoryDataverseResponse.getStatusCode());
    }
    if (!disableTestPermsOnNewDv) {
        Response deleteDvResponse = deleteDataverse(dvForPermsTesting, homer);
        assertEquals(200, deleteDvResponse.getStatusCode());
    }
    deleteUser(homer.getUsername());
    deleteUser(ned.getUsername());
    deleteUser(clancy.getUsername());
}

/** Sets the given admin setting to "true" via the admin API. */
private Response enableSetting(SettingsServiceBean.Key settingKey) {
    Response response = given().body("true").when().put("/api/admin/settings/" + settingKey);
    return response;
}

/** Removes the given admin setting (restores its default). */
private Response deleteSetting(SettingsServiceBean.Key settingKey) {
    Response response = given().when().delete("/api/admin/settings/" + settingKey);
    return response;
}

/** Reads the current value of the given admin setting. */
private Response checkSetting(SettingsServiceBean.Key settingKey) {
    Response response = given().when().get("/api/admin/settings/" + settingKey);
    return response;
}

/** Looks up a dataverse by database id and returns its alias (null if absent). */
private static String getDataverseAlias(long dataverseId, String apiToken) {
    Response getDataverse = given()
            .get("api/dataverses/" + dataverseId + "?key=" + apiToken);
    JsonPath jsonPath = JsonPath.from(getDataverse.body().asString());
    String dataverseAlias = jsonPath.get("data.alias");
    return dataverseAlias;
}

/**
 * Creates a dataverse under :root owned by the creator. Note the requested
 * category is NOT part of the payload -- the dataverseCategory test explains
 * why (category cannot be set via the API).
 */
private static Response createDataverse(TestDataverse dataverseToCreate, TestUser creator) {
    JsonArrayBuilder contactArrayBuilder = Json.createArrayBuilder();
    contactArrayBuilder.add(Json.createObjectBuilder().add("contactEmail", creator.getEmail()));
    JsonArrayBuilder subjectArrayBuilder = Json.createArrayBuilder();
    subjectArrayBuilder.add("Other");
    JsonObject dvData = Json.createObjectBuilder()
            .add("alias", dataverseToCreate.alias)
            .add("name", dataverseToCreate.name)
            .add("dataverseContacts", contactArrayBuilder)
            .add("dataverseSubjects", subjectArrayBuilder)
            .build();
    Response createDataverseResponse = given()
            .body(dvData.toString()).contentType(ContentType.JSON)
            .when().post("/api/dataverses/:root?key=" + creator.apiToken);
    return createDataverseResponse;
}

/** Creates a dataset in the given dataverse via the SWORD v2 collection endpoint. */
private Response createDataset(String xmlIn, String dataverseToCreateDatasetIn, String apiToken) {
    Response createDatasetResponse = given()
            .auth().basic(apiToken, EMPTY_STRING)
            .body(xmlIn)
            .contentType("application/atom+xml")
            .post("/dvn/api/data-deposit/v1.1/swordv2/collection/dataverse/" + dataverseToCreateDatasetIn);
    return createDatasetResponse;
}

private
// Replaces the dataset's metadata via the SWORD "edit" endpoint (used by the
// tests above to force draft creation as a workaround for issue 2464).
Response updateDatasetMetadataViaSword(String persistentId, String xmlIn, String apiToken) {
    return given()
            .auth().basic(apiToken, EMPTY_STRING)
            .body(xmlIn)
            .contentType("application/atom+xml")
            .put("/dvn/api/data-deposit/v1.1/swordv2/edit/study/" + persistentId);
}

/** Queries the local Solr instance directly (bypasses the Dataverse Search API). */
private Response querySolr(String query) {
    Response querySolrResponse = given().get("http://localhost:8983/solr/collection1/select?wt=json&indent=true&q=" + query);
    return querySolrResponse;
}

/**
 * Creates a builtin user from the given JSON and returns a JsonObject holding
 * the new user's id, username and API token.
 */
private static JsonObject createUser(String jsonStr) {
    JsonObjectBuilder createdUser = Json.createObjectBuilder();
    Response response = createUserViaApi(jsonStr, getPassword(jsonStr));
    // response.prettyPrint();
    Assert.assertEquals(200, response.getStatusCode());
    JsonPath jsonPath = JsonPath.from(response.body().asString());
    int userId = jsonPath.getInt("data.user." + idKey);
    createdUser.add(idKey, userId);
    String username = jsonPath.get("data.user." + usernameKey).toString();
    createdUser.add(usernameKey, username);
    createdUser.add(apiTokenKey, jsonPath.get("data." + apiTokenKey).toString());
    return createdUser.build();
}

// Test users' password is simply their username (read back out of the JSON).
private static String getPassword(String jsonStr) {
    String password = JsonPath.from(jsonStr).get(usernameKey);
    return password;
}

/** Builds the JSON payload for creating a builtin user. */
private static String getUserAsJsonString(String username, String firstName, String lastName) {
    JsonObjectBuilder builder = Json.createObjectBuilder();
    builder.add(usernameKey, username);
    builder.add("firstName", firstName);
    builder.add("lastName", lastName);
    builder.add(emailKey, getEmailFromUserName(username));
    String userAsJson = builder.build().toString();
    logger.fine("User to create: " + userAsJson);
    return userAsJson;
}

/** Derives a throwaway mailinator address from the username. */
private static String getEmailFromUserName(String username) {
    return username + "@mailinator.com";
}

/** POSTs the user JSON to the builtin-users endpoint with the chosen password. */
private static Response createUserViaApi(String jsonStr, String password) {
    Response response = given().body(jsonStr).contentType(ContentType.JSON).when().post("/api/builtin-users?key=" + builtinUserKey + "&password=" + password);
    return response;
}

/** Promotes the given user to superuser via the admin API. */
private static Response makeSuperuser(String userToMakeSuperuser) {
    Response response = given().post("/api/admin/superuser/" + userToMakeSuperuser);
    return response;
}

/** Grants a role on a dataverse to "@roleAssignee" using apiToken's authority. */
private Response grantRole(String definitionPoint, String role, String roleAssignee, String apiToken) {
    JsonObjectBuilder roleBuilder = Json.createObjectBuilder();
    roleBuilder.add("assignee", "@" + roleAssignee);
    roleBuilder.add("role", role);
    JsonObject roleObject = roleBuilder.build();
    System.out.println("Granting role on dataverse alias \"" + definitionPoint + "\": " + roleObject);
    return given()
            .body(roleObject).contentType(ContentType.JSON)
            .post("api/dataverses/" + definitionPoint + "/assignments?key=" + apiToken);
}

/** Revokes a role assignment (by assignment id) on the given dataverse. */
private static Response revokeRole(String definitionPoint, long doomed, String apiToken) {
    System.out.println("Attempting to revoke role assignment id " + doomed);
    /**
     * OUTPUT=`curl -s -X DELETE
     * "http://localhost:8080/api/dataverses/$BIRDS_DATAVERSE/assignments/$SPRUCE_ADMIN_ON_BIRDS?key=$FINCHKEY"`
     */
    return given()
            .delete("api/dataverses/" + definitionPoint + "/assignments/" + doomed + "?key=" + apiToken);
}

/** Extracts the dataset's global id from a SWORD create response's entry URL. */
private String getGlobalId(Response createDatasetResponse) {
    String xml = createDatasetResponse.body().asString();
    String datasetSwordIdUrl = from(xml).get("entry.id");
    /**
     * @todo stop assuming the last 22 characters are the doi/globalId
     */
    return datasetSwordIdUrl.substring(datasetSwordIdUrl.length() - 22);
}

/**
 * Assumes you have turned on experimental non-public search
 * https://github.com/IQSS/dataverse/issues/1299
 *
 * curl -X PUT -d true
 * http://localhost:8080/api/admin/settings/:SearchApiNonPublicAllowed
 *
 * @return The Integer found or null.
 */
private static Integer findDatasetIdFromGlobalId(String globalId, String apiToken) {
    Response searchForGlobalId = given()
            .get("api/search?key=" + apiToken + "&q=dsPersistentId:\"" + globalId.replace(":", "\\:") + "\"&show_entity_ids=true");
    JsonPath jsonPath = JsonPath.from(searchForGlobalId.body().asString());
    int id;
    try {
        id = jsonPath.get("data.items[0].entity_id");
    } catch (IllegalArgumentException ex) {
        // No hits for the query -- treat as "not found".
        return null;
    }
    return id;
}

/** Builds minimal Atom entry XML for a SWORD dataset create/update. */
private String getDatasetXml(String title, String author, String description) {
    String xmlIn = "<?xml version=\"1.0\"?>\n"
            + "<entry xmlns=\"http://www.w3.org/2005/Atom\" xmlns:dcterms=\"http://purl.org/dc/terms/\">\n"
            + " <dcterms:title>" + title + "</dcterms:title>\n"
            + " <dcterms:creator>" + author + "</dcterms:creator>\n"
            + " <dcterms:description>" + description + "</dcterms:description>\n"
            + "</entry>\n"
            + "";
    return xmlIn;
}

/** Deletes a dataverse by alias. */
private static Response deleteDataverse(String doomed, TestUser user) {
    // System.out.println("deleting dataverse " + doomed);
    return given().delete("/api/dataverses/" + doomed + "?key=" + user.getApiToken());
}

/** Deletes an unpublished dataset via SWORD (published ones need destroyDataset). */
private static Response deleteDataset(String globalId, String apiToken) {
    return given()
            .auth().basic(apiToken, EMPTY_STRING)
            .relaxedHTTPSValidation()
            .delete("/dvn/api/data-deposit/v1.1/swordv2/edit/study/" + globalId);
}

// "destroy" endpoint works even on published datasets (see cleanup above).
private static Response
destroyDataset(Integer datasetId, String apiToken) {
    return given()
            .header(keyString, apiToken)
            .delete("/api/datasets/" + datasetId + "/destroy");
}

/** Deletes an authenticated user and asserts success. */
private static void deleteUser(String username) {
    Response deleteUserResponse = given().delete("/api/admin/authenticatedUsers/" + username + "/");
    assertEquals(200, deleteUserResponse.getStatusCode());
}

/** Resolves a username to its database id via the admin API. */
private static int getUserIdFromDatabase(String username) {
    Response getUserResponse = given().get("/api/admin/authenticatedUsers/" + username + "/");
    JsonPath getUserJson = JsonPath.from(getUserResponse.body().asString());
    int userIdFromDatabase = getUserJson.getInt("data.id");
    return userIdFromDatabase;
}

/** Pulls the role assignment id out of a grantRole response. */
private long getRoleAssignmentId(Response response) {
    JsonPath jsonPath = JsonPath.from(response.body().asString());
    return jsonPath.getInt("data.id");
}

// Despite the name this no longer prints anything; it just resolves a global
// id to a database id via search (null when not found/visible to the user).
private Integer printDatasetId(String dataset1, TestUser user) {
    Integer datasetIdFound = findDatasetIdFromGlobalId(dataset1, user.getApiToken());
    // System.out.println(dataset1 + " id " + datasetIdFound + " found by " + user);
    return datasetIdFound;
}

/** Runs a search as the given user with facets enabled. */
private Response search(TestSearchQuery query, TestUser user) {
    return given()
            .get("api/search?key=" + user.getApiToken()
                    + "&q=" + query.getQuery()
                    + "&show_facets=" + true
            );
}

/**
 * @todo Get this version that doesn't require curl working. Use body
 * instead of multiPart?
 */
private Response uploadZipFile(String persistentId, String zipFileName, String apiToken) throws FileNotFoundException {
    String pathToFileName = "scripts/search/data/binary/" + zipFileName;
    Path path = Paths.get(pathToFileName);
    // NOTE(review): 'path' and 'data' are unused leftovers from an earlier
    // attempt -- candidates for removal.
    byte[] data = null;
    Response swordStatementResponse = given()
            .multiPart(new File(pathToFileName))
            .header("Packaging", "http://purl.org/net/sword/package/SimpleZip")
            .header("Content-Disposition", "filename=" + zipFileName)
            .auth().basic(apiToken, EMPTY_STRING)
            .post("/dvn/api/data-deposit/v1.1/swordv2/edit-media/study/" + persistentId);
    return swordStatementResponse;
}

/**
 * @todo Delete this once you get the REST-assured version working
 */
private Process uploadZipFileWithCurl(String globalId, String zipfilename, String apiToken) {
    Process p = null;
    try {
        p = Runtime.getRuntime().exec(new String[]{"bash", "-c", "curl -s --insecure --data-binary @scripts/search/data/binary/" + zipfilename + " -H \"Content-Disposition: filename=trees.zip\" -H \"Content-Type: application/zip\" -H \"Packaging: http://purl.org/net/sword/package/SimpleZip\" -u " + apiToken + ": https://localhost:8181/dvn/api/data-deposit/v1.1/swordv2/edit-media/study/" + globalId});
    } catch (IOException ex) {
        Logger.getLogger(SearchIT.class.getName()).log(Level.SEVERE, null, ex);
    }
    return p;
}

/** Waits for the process to finish and echoes its stdout (upload debugging). */
private void printCommandOutput(Process p) {
    try {
        p.waitFor();
    } catch (InterruptedException ex) {
        Logger.getLogger(SearchIT.class.getName()).log(Level.SEVERE, null, ex);
    }
    BufferedReader input = new BufferedReader(new InputStreamReader(p.getInputStream()));
    String line;
    try {
        while ((line = input.readLine()) != null) {
            System.out.println(line);
        }
    } catch (IOException ex) {
        Logger.getLogger(SearchIT.class.getName()).log(Level.SEVERE, null, ex);
    }
    try {
        input.close();
    } catch (IOException ex) {
        Logger.getLogger(SearchIT.class.getName()).log(Level.SEVERE, null, ex);
    }
}

// Returns the ids of the dataset's files per the native API (empty list if none).
// NOTE(review): the SWORD statement fetched first is never used beyond hitting
// the endpoint, and its variable name has a typo ("Statent" -> "Statement").
private List<Integer> getIdsOfFilesUploaded(String persistentId, Integer datasetId, String apiToken) {
    Response swordStatentResponse = getSwordStatement(persistentId, apiToken);
    // swordStatentResponse.prettyPrint();
    if (datasetId != null) {
        List<Integer> fileList = getFilesFromDatasetEndpoint(datasetId, apiToken);
        if (!fileList.isEmpty()) {
            return fileList;
        }
    }
    return Collections.emptyList();
}

/** Fetches the SWORD Atom entry for a dataset. */
private Response getSwordAtomEntry(String persistentId, String apiToken) {
    Response response = given()
            .auth().basic(apiToken, EMPTY_STRING)
            .get("/dvn/api/data-deposit/v1.1/swordv2/edit/study/" + persistentId);
    return response;
}

/** Fetches the SWORD statement (lists the dataset's files) for a dataset. */
private Response getSwordStatement(String persistentId, String apiToken) {
    Response swordStatementResponse = given()
            .auth().basic(apiToken, EMPTY_STRING)
            .get("/dvn/api/data-deposit/v1.1/swordv2/statement/study/" + persistentId);
    return swordStatementResponse;
}

/** Collects the datafile ids from the native ":latest" files listing. */
private List<Integer> getFilesFromDatasetEndpoint(Integer datasetId, String apiToken) {
    List<Integer> fileList = new ArrayList<>();
    Response getDatasetFilesResponse = getDatasetFilesEndpoint(datasetId, apiToken);
    // getDatasetFilesResponse.prettyPrint();
    JsonPath jsonPath = JsonPath.from(getDatasetFilesResponse.body().asString());
    List<Map> filesMap = jsonPath.get("data.datafile");
    for (Map map : filesMap) {
        int fileId = (int) map.get("id");
        fileList.add(fileId);
    }
    return fileList;
}

/** GETs the native files listing for the :latest version of a dataset. */
private Response getDatasetFilesEndpoint(Integer datasetId, String apiToken) {
    Response getDatasetFilesResponse = given()
            .get("api/datasets/" + datasetId + "/versions/:latest/files?key=" + apiToken);
    return getDatasetFilesResponse;
}

/** Admin endpoint: dump the permission debug info Solr holds for a DvObject. */
private Response checkPermissionsOnDvObject(int dvObjectId, String apiToken) {
    Response debugPermsResponse = given()
            .get("api/admin/index/permsDebug/?id=" + dvObjectId + "&key=" + apiToken);
    // debugPermsResponse.prettyPrint();
    return debugPermsResponse;
}

/** Admin endpoint: clear index timestamps so the object will be re-indexed. */
private Response clearIndexTimesOnDvObject(int dvObjectId) {
    Response debugPermsResponse = given()
            .delete("api/admin/index/timestamps/" + dvObjectId);
    return debugPermsResponse;
}

/** Admin endpoint: force re-indexing of a dataset. */
private Response reindexDataset(int datasetId) {
return given().get("api/admin/index/datasets/" + datasetId);
}

/** Publishes a dataverse via the native API (token in the request header). */
private Response publishDataverse(String alias, String apiToken) {
    return given()
            .header(keyString, apiToken)
            .urlEncodingEnabled(false)
            .post("/api/dataverses/" + alias + "/actions/:publish");
}

/** Admin backdoor: publish a dataverse as its creator (no token needed). */
private Response publishDataverseAsCreator(long id) {
    return given()
            .post("/api/admin/publishDataverseAsCreator/" + id);
}

/** Fetches a dataset's native JSON representation. */
private Response getDatasetAsJson(long datasetId, String apiToken) {
    return given()
            .header(keyString, apiToken)
            .urlEncodingEnabled(false)
            .get("/api/datasets/" + datasetId);
}

/** Publishes a dataset via SWORD ("In-Progress: false" finalizes the draft). */
private Response publishDatasetViaSword(String persistentId, String apiToken) {
    return given()
            .auth().basic(apiToken, EMPTY_STRING)
            .header("In-Progress", "false")
            .post("/dvn/api/data-deposit/v1.1/swordv2/edit/study/" + persistentId);
}

/** Publishes a minor version via the native API. */
private Response publishDatasetViaNative(long datasetId, String apiToken) {
    /**
     * This should probably be a POST rather than a GET:
     * https://github.com/IQSS/dataverse/issues/2431
     *
     * Allows version less than v1.0 to be published (i.e. v0.1):
     * https://github.com/IQSS/dataverse/issues/2461
     *
     */
    return given()
            .header(keyString, apiToken)
            .urlEncodingEnabled(false)
            .get("/api/datasets/" + datasetId + "/actions/:publish?type=minor");
}

/** Calls the admin filesearch endpoint for the given dataset version. */
private Response getFileSearchData(String persistentId, String semanticVersion, String apiToken) {
    /**
     * Note In all commands below, dataset versions can be referred to as:
     *
     * :draft the draft version, if any
     *
     * :latest either a draft (if exists) or the latest published version.
     *
     * :latest-published the latest published version
     *
     * x.y a specific version, where x is the major version number and y is
     * the minor version number.
     *
     * x same as x.0
     *
     * http://guides.dataverse.org/en/latest/api/native-api.html#datasets
     */
    // String semanticVersion = null;
    return given()
            .header(keyString, apiToken)
            .urlEncodingEnabled(false)
            .get("/api/admin/index/filesearch?persistentId=" + persistentId + "&semanticVersion=" + semanticVersion);
}

/** Deletes a file via the SWORD edit-media endpoint. */
private Response deleteFile(int fileId, String apiToken) {
    // System.out.println("deleting file id " + fileId);
    return given()
            .auth().basic(apiToken, EMPTY_STRING)
            .delete("/dvn/api/data-deposit/v1.1/swordv2/edit-media/file/" + fileId);
}

/** Debug helper: returns the DRAFT file cards for a dataset. */
private List<String> getFileNameFromSearchDebug(String datasetPersistentId, String apiToken) {
    Response fileDataResponse = getFileSearchData(datasetPersistentId, "DRAFT", apiToken);
    // fileDataResponse.prettyPrint();
    return JsonPath.from(fileDataResponse.body().asString()).getList("data.cards");
}

/** Finds a datafile id by its label in a native files listing (GPath query). */
private int getFileIdFromDatasetEndpointFileListing(Response datasetFiles, String filename) {
    return with(datasetFiles.getBody().asString())
            .param("name", filename)
            .getInt("data.findAll { data -> data.label == name }[0].datafile.id");
}

/** Collects the file "cards" from a filesearch response into a set. */
private Set<String> getFileData(Response fileDataResponse) {
    Set<String> filesFound = new HashSet<>();
    List<String> files1 = JsonPath.from(fileDataResponse.body().asString()).getList("data.cards");
    for (String file : files1) {
        filesFound.add(file);
    }
    return filesFound;
}

// Lightweight holder for a created test user (id, username, API token).
private static class TestUser {

    private long id;
    private String username;
    private String apiToken;

    private TestUser(JsonObject json) {
        this.id = json.getInt(idKey);
        this.username = json.getString(usernameKey);
        this.apiToken = json.getString(apiTokenKey);
    }

    public long getId() {
        return id;
    }

    public void setId(long id) {
        this.id = id;
    }

    public String getUsername() {
        return username;
    }

    public String getApiToken() {
        return apiToken;
    }

    public String getEmail() {
        return getEmailFromUserName(username);
    }

    @Override
    public String toString() {
        return "TestUser{" + "id=" + id + ", username=" + username + '}';
    }
}

// Parameters for a dataverse the tests create.
private static class TestDataverse {

    String alias;
    String name;
    Dataverse.DataverseType category;

    public TestDataverse(String alias, String name, Dataverse.DataverseType category) {
        this.alias = alias;
        this.name = name;
        this.category = category;
    }
}

// A search query string plus optional filter queries.
private static class TestSearchQuery {

    private String query;
    private List<String> filterQueries = new ArrayList<>();

    private TestSearchQuery(String query) {
        this.query = query;
    }

    public TestSearchQuery(String query, List<String> filterQueries) {
        this.query = query;
        if (!filterQueries.isEmpty()) {
            this.filterQueries = filterQueries;
        }
    }

    public String getQuery() {
        return query;
    }

    public List<String> getFilterQueries() {
        return filterQueries;
    }
}
}
package com.compositesw.services.system.admin;

import javax.jws.WebMethod;
import javax.jws.WebParam;
import javax.jws.WebResult;
import javax.jws.WebService;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.ws.Holder;
import javax.xml.ws.RequestWrapper;
import javax.xml.ws.ResponseWrapper;
import com.compositesw.services.system.admin.archive.ArchiveContents;
import com.compositesw.services.system.admin.archive.ExportSettings;
import com.compositesw.services.system.admin.archive.ImportSettings;
import com.compositesw.services.system.util.common.MessageEntryList;
import com.compositesw.services.system.util.common.OperationStatus;

/**
 * This class was generated by the JAX-WS RI.
 * JAX-WS RI 2.2.1-b01-
 * Generated source version: 2.2
 *
 */
// NOTE(review): generated code -- regenerate from the WSDL rather than
// hand-editing. All parameters marked mode = WebParam.Mode.OUT are holders
// populated by the service on return.
@WebService(name = "archivePortType", targetNamespace = "http://www.compositesw.com/services/system/admin")
@XmlSeeAlso({
    com.compositesw.services.system.admin.archive.ObjectFactory.class,
    com.compositesw.services.system.util.common.ObjectFactory.class,
    com.compositesw.services.system.util.security.ObjectFactory.class,
    com.compositesw.services.system.admin.resource.ObjectFactory.class,
    com.compositesw.services.system.admin.user.ObjectFactory.class,
    com.compositesw.services.system.admin.server.ObjectFactory.class,
    com.compositesw.services.system.admin.execute.ObjectFactory.class
})
public interface ArchivePortType {


    /**
     * Invokes the "cancelArchive" SOAP operation.
     *
     * @param status
     * @param archiveId
     * @param archiveReport
     * @throws CancelArchiveSoapFault
     */
    @WebMethod(action = "cancelArchive")
    @RequestWrapper(localName = "cancelArchive", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", className = "com.compositesw.services.system.admin.archive.CancelArchiveRequest")
    @ResponseWrapper(localName = "cancelArchiveResponse", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", className = "com.compositesw.services.system.admin.archive.CancelArchiveResponse")
    public void cancelArchive(
        @WebParam(name = "archiveId", targetNamespace = "http://www.compositesw.com/services/system/admin/archive")
        String archiveId,
        @WebParam(name = "status", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", mode = WebParam.Mode.OUT)
        Holder<OperationStatus> status,
        @WebParam(name = "archiveReport", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", mode = WebParam.Mode.OUT)
        Holder<MessageEntryList> archiveReport)
        throws CancelArchiveSoapFault
    ;

    /**
     * Invokes the "createExportArchive" SOAP operation.
     *
     * @param settings
     * @return
     *     returns java.lang.String
     * @throws CreateExportArchiveSoapFault
     */
    @WebMethod(action = "createExportArchive")
    @WebResult(name = "archiveId", targetNamespace = "http://www.compositesw.com/services/system/admin/archive")
    @RequestWrapper(localName = "createExportArchive", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", className = "com.compositesw.services.system.admin.archive.CreateExportArchiveRequest")
    @ResponseWrapper(localName = "createExportArchiveResponse", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", className = "com.compositesw.services.system.admin.archive.CreateExportArchiveResponse")
    public String createExportArchive(
        @WebParam(name = "settings", targetNamespace = "http://www.compositesw.com/services/system/admin/archive")
        ExportSettings settings)
        throws CreateExportArchiveSoapFault
    ;

    /**
     * Invokes the "createImportArchive" SOAP operation.
     *
     * @param data
     * @return
     *     returns java.lang.String
     * @throws CreateImportArchiveSoapFault
     */
    @WebMethod(action = "createImportArchive")
    @WebResult(name = "archiveId", targetNamespace = "http://www.compositesw.com/services/system/admin/archive")
    @RequestWrapper(localName = "createImportArchive", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", className = "com.compositesw.services.system.admin.archive.CreateImportArchiveRequest")
    @ResponseWrapper(localName = "createImportArchiveResponse", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", className = "com.compositesw.services.system.admin.archive.CreateImportArchiveResponse")
    public String createImportArchive(
        @WebParam(name = "data", targetNamespace = "http://www.compositesw.com/services/system/admin/archive")
        byte[] data)
        throws CreateImportArchiveSoapFault
    ;

    /**
     * Invokes the "getArchiveContents" SOAP operation.
     *
     * @param archiveId
     * @return
     *     returns com.compositesw.services.system.admin.archive.ArchiveContents
     * @throws GetArchiveContentsSoapFault
     */
    @WebMethod(action = "getArchiveContents")
    @WebResult(name = "contents", targetNamespace = "http://www.compositesw.com/services/system/admin/archive")
    @RequestWrapper(localName = "getArchiveContents", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", className = "com.compositesw.services.system.admin.archive.GetArchiveContentsRequest")
    @ResponseWrapper(localName = "getArchiveContentsResponse", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", className = "com.compositesw.services.system.admin.archive.GetArchiveContentsResponse")
    public ArchiveContents getArchiveContents(
        @WebParam(name = "archiveId", targetNamespace = "http://www.compositesw.com/services/system/admin/archive")
        String archiveId)
        throws GetArchiveContentsSoapFault
    ;

    /**
     * Invokes the "getArchiveExportData" SOAP operation.
     *
     * @param status
     * @param archiveId
     * @param archiveReport
     * @param data
     * @param maxBytes
     * @throws GetArchiveExportDataSoapFault
     */
    @WebMethod(action = "getArchiveExportData")
    @RequestWrapper(localName = "getArchiveExportData", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", className = "com.compositesw.services.system.admin.archive.GetArchiveExportDataRequest")
    @ResponseWrapper(localName = "getArchiveExportDataResponse", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", className = "com.compositesw.services.system.admin.archive.GetArchiveExportDataResponse")
    public void getArchiveExportData(
        @WebParam(name = "archiveId", targetNamespace = "http://www.compositesw.com/services/system/admin/archive")
        String archiveId,
        @WebParam(name = "maxBytes", targetNamespace = "http://www.compositesw.com/services/system/admin/archive")
        Integer maxBytes,
        @WebParam(name = "status", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", mode = WebParam.Mode.OUT)
        Holder<OperationStatus> status,
        @WebParam(name = "archiveReport", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", mode = WebParam.Mode.OUT)
        Holder<MessageEntryList> archiveReport,
        @WebParam(name = "data", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", mode = WebParam.Mode.OUT)
        Holder<byte[]> data)
        throws GetArchiveExportDataSoapFault
    ;

    /**
     * Invokes the "getArchiveExportSettings" SOAP operation.
     *
     * @param archiveId
     * @return
     *     returns com.compositesw.services.system.admin.archive.ExportSettings
     * @throws GetArchiveExportSettingsSoapFault
     */
    @WebMethod(action = "getArchiveExportSettings")
    @WebResult(name = "settings", targetNamespace = "http://www.compositesw.com/services/system/admin/archive")
    @RequestWrapper(localName = "getArchiveExportSettings", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", className = "com.compositesw.services.system.admin.archive.GetArchiveExportSettingsRequest")
    @ResponseWrapper(localName = "getArchiveExportSettingsResponse", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", className = "com.compositesw.services.system.admin.archive.GetArchiveExportSettingsResponse")
    public ExportSettings getArchiveExportSettings(
        @WebParam(name = "archiveId", targetNamespace = "http://www.compositesw.com/services/system/admin/archive")
        String archiveId)
        throws GetArchiveExportSettingsSoapFault
    ;

    /**
     * Invokes the "getArchiveImportReport" SOAP operation.
     *
     * @param status
     * @param archiveId
     * @param archiveReport
     * @param isBlocking
     * @throws GetArchiveImportReportSoapFault
     */
    @WebMethod(action = "getArchiveImportReport")
    @RequestWrapper(localName = "getArchiveImportReport", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", className = "com.compositesw.services.system.admin.archive.GetArchiveImportReportRequest")
    @ResponseWrapper(localName = "getArchiveImportReportResponse", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", className = "com.compositesw.services.system.admin.archive.GetArchiveImportReportResponse")
    public void getArchiveImportReport(
        @WebParam(name = "archiveId", targetNamespace = "http://www.compositesw.com/services/system/admin/archive")
        String archiveId,
        @WebParam(name = "isBlocking", targetNamespace = "http://www.compositesw.com/services/system/admin/archive")
        boolean isBlocking,
        @WebParam(name = "status", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", mode = WebParam.Mode.OUT)
        Holder<OperationStatus> status,
        @WebParam(name = "archiveReport", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", mode = WebParam.Mode.OUT)
        Holder<MessageEntryList> archiveReport)
        throws GetArchiveImportReportSoapFault
    ;

    /**
     * Invokes the "getArchiveImportSettings" SOAP operation.
     *
     * @param archiveId
     * @return
     *     returns com.compositesw.services.system.admin.archive.ImportSettings
     * @throws GetArchiveImportSettingsSoapFault
     */
    @WebMethod(action = "getArchiveImportSettings")
    @WebResult(name = "settings", targetNamespace = "http://www.compositesw.com/services/system/admin/archive")
    @RequestWrapper(localName = "getArchiveImportSettings", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", className = "com.compositesw.services.system.admin.archive.GetArchiveImportSettingsRequest")
    @ResponseWrapper(localName = "getArchiveImportSettingsResponse", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", className = "com.compositesw.services.system.admin.archive.GetArchiveImportSettingsResponse")
    public ImportSettings getArchiveImportSettings(
        @WebParam(name = "archiveId", targetNamespace = "http://www.compositesw.com/services/system/admin/archive")
        String archiveId)
        throws GetArchiveImportSettingsSoapFault
    ;

    /**
     * Invokes the "performArchiveImport" SOAP operation.
     *
     * @param status
     * @param archiveId
     * @param archiveReport
     * @param isBlocking
     * @throws PerformArchiveImportSoapFault
     */
    @WebMethod(action = "performArchiveImport")
    @RequestWrapper(localName = "performArchiveImport", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", className = "com.compositesw.services.system.admin.archive.PerformArchiveImportRequest")
    @ResponseWrapper(localName = "performArchiveImportResponse", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", className = "com.compositesw.services.system.admin.archive.PerformArchiveImportResponse")
    public void performArchiveImport(
        @WebParam(name = "archiveId", targetNamespace = "http://www.compositesw.com/services/system/admin/archive")
        String archiveId,
        @WebParam(name = "isBlocking", targetNamespace = "http://www.compositesw.com/services/system/admin/archive")
        boolean isBlocking,
        @WebParam(name = "status", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", mode = WebParam.Mode.OUT)
        Holder<OperationStatus> status,
        @WebParam(name = "archiveReport", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", mode = WebParam.Mode.OUT)
        Holder<MessageEntryList> archiveReport)
        throws PerformArchiveImportSoapFault
    ;

    /**
     * Invokes the "updateArchiveExportSettings" SOAP operation.
     *
     * @param archiveId
     * @param settings
     * @throws UpdateArchiveExportSettingsSoapFault
     */
    @WebMethod(action = "updateArchiveExportSettings")
    @RequestWrapper(localName = "updateArchiveExportSettings", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", className = "com.compositesw.services.system.admin.archive.UpdateArchiveExportSettingsRequest")
    @ResponseWrapper(localName = "updateArchiveExportSettingsResponse", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", className = "com.compositesw.services.system.admin.archive.UpdateArchiveExportSettingsResponse")
    public void updateArchiveExportSettings(
        @WebParam(name = "archiveId", targetNamespace = "http://www.compositesw.com/services/system/admin/archive")
        String archiveId,
        @WebParam(name = "settings", targetNamespace = "http://www.compositesw.com/services/system/admin/archive")
        ExportSettings settings)
        throws UpdateArchiveExportSettingsSoapFault
    ;

    /**
     * Invokes the "updateArchiveImportSettings" SOAP operation.
     *
     * @param archiveId
     * @param settings
     * @throws UpdateArchiveImportSettingsSoapFault
     */
    @WebMethod(action = "updateArchiveImportSettings")
    @RequestWrapper(localName = "updateArchiveImportSettings", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", className = "com.compositesw.services.system.admin.archive.UpdateArchiveImportSettingsRequest")
    @ResponseWrapper(localName = "updateArchiveImportSettingsResponse", targetNamespace = "http://www.compositesw.com/services/system/admin/archive", className = "com.compositesw.services.system.admin.archive.UpdateArchiveImportSettingsResponse")
    public void updateArchiveImportSettings(
        @WebParam(name = "archiveId", targetNamespace = "http://www.compositesw.com/services/system/admin/archive")
        String archiveId,
        @WebParam(name = "settings", targetNamespace = "http://www.compositesw.com/services/system/admin/archive")
        ImportSettings settings)
        throws UpdateArchiveImportSettingsSoapFault
    ;

}
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.actionSystem.impl; import com.intellij.ide.DataManager; import com.intellij.ide.IdeEventQueue; import com.intellij.ide.ui.UISettings; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.actionSystem.impl.actionholder.ActionRef; import com.intellij.openapi.application.impl.LaterInvocator; import com.intellij.openapi.ui.JBPopupMenu; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.IconLoader; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.wm.IdeFocusManager; import com.intellij.openapi.wm.IdeFrame; import com.intellij.openapi.wm.StatusBar; import com.intellij.ui.components.JBMenu; import com.intellij.ui.plaf.beg.IdeaMenuUI; import com.intellij.ui.plaf.gtk.GtkMenuUI; import com.intellij.util.ReflectionUtil; import com.intellij.util.SingleAlarm; import com.intellij.util.ui.JBUI; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.NotNull; import javax.swing.*; import javax.swing.event.MenuEvent; import javax.swing.event.MenuListener; import javax.swing.plaf.MenuItemUI; import javax.swing.plaf.synth.SynthMenuUI; import java.awt.*; import java.awt.event.AWTEventListener; import java.awt.event.ComponentEvent; import java.awt.event.KeyEvent; import java.awt.event.MouseEvent; import java.beans.PropertyChangeEvent; import 
java.beans.PropertyChangeListener;

/**
 * A {@link JBMenu} whose items are driven by an {@link ActionGroup}: the menu's
 * text, icon, mnemonic, enabled and visible state all mirror the group's
 * {@link Presentation}, and the item list is (re)built from the group each time
 * the menu is selected. Contains several platform workarounds (macOS system
 * menu, GTK/Ambiance theming) — see the inline comments before touching them.
 */
public final class ActionMenu extends JBMenu {
  private final String myPlace;
  private DataContext myContext;                       // may be null; fillMenu() then derives one from focus
  private final ActionRef<ActionGroup> myGroup;        // indirect ref to the backing action group
  private final PresentationFactory myPresentationFactory;
  private final Presentation myPresentation;           // presentation of the group itself (menu title/icon/state)
  private boolean myMnemonicEnabled;
  private MenuItemSynchronizer myMenuItemSynchronizer; // keeps this menu in sync with myPresentation
  private StubItem myStubItem;  // A PATCH!!! Do not remove this code, otherwise you will lose all keyboard navigation in JMenuBar.
  private final boolean myTopLevel;
  private Disposable myDisposable;                     // lifetime of the per-selection UsabilityHelper

  public ActionMenu(final DataContext context,
                    @NotNull final String place,
                    final ActionGroup group,
                    final PresentationFactory presentationFactory,
                    final boolean enableMnemonics,
                    final boolean topLevel) {
    myContext = context;
    myPlace = place;
    myGroup = ActionRef.fromAction(group);
    myPresentationFactory = presentationFactory;
    myPresentation = myPresentationFactory.getPresentation(group);
    myMnemonicEnabled = enableMnemonics;
    myTopLevel = topLevel;

    updateUI();

    init();

    // addNotify won't be called for menus in MacOS system menu
    if (SystemInfo.isMacSystemMenu) {
      installSynchronizer();
    }

    if (UIUtil.isUnderIntelliJLaF()) {
      setOpaque(true);
    }

    // Triggering initialization of private field "popupMenu" from JMenu with our own JBPopupMenu
    getPopupMenu();
  }

  /** Replaces the data context later used by {@link #fillMenu()}. */
  public void updateContext(DataContext context) {
    myContext = context;
  }

  @Override
  public void addNotify() {
    super.addNotify();
    installSynchronizer();
  }

  /** Idempotent: starts mirroring action/presentation property changes into this menu. */
  private void installSynchronizer() {
    if (myMenuItemSynchronizer == null) {
      myMenuItemSynchronizer = new MenuItemSynchronizer();
      myGroup.getAction().addPropertyChangeListener(myMenuItemSynchronizer);
      myPresentation.addPropertyChangeListener(myMenuItemSynchronizer);
    }
  }

  @Override
  public void removeNotify() {
    uninstallSynchronizer();
    super.removeNotify();
    if (myDisposable != null) {
      Disposer.dispose(myDisposable);
      myDisposable = null;
    }
  }

  /** Counterpart of {@link #installSynchronizer()}; also idempotent. */
  private void uninstallSynchronizer() {
    if (myMenuItemSynchronizer != null) {
      myGroup.getAction().removePropertyChangeListener(myMenuItemSynchronizer);
      myPresentation.removePropertyChangeListener(myMenuItemSynchronizer);
      myMenuItemSynchronizer = null;
    }
  }

  // Our replacement for JMenu's private "popupMenu"; injected via reflection below.
  private JPopupMenu mySpecialMenu;

  @Override
  public JPopupMenu getPopupMenu() {
    if (mySpecialMenu == null) {
      mySpecialMenu = new JBPopupMenu();
      mySpecialMenu.setInvoker(this);
      popupListener = createWinListener(mySpecialMenu);
      // Force JMenu to use our JBPopupMenu instead of lazily creating a plain JPopupMenu.
      ReflectionUtil.setField(JMenu.class, this, JPopupMenu.class, "popupMenu", mySpecialMenu);
    }
    return super.getPopupMenu();
  }

  @Override
  public void updateUI() {
    // GTK "Ambiance" top-level menus need a special foreground; clear a stale one first.
    boolean isAmbiance = UIUtil.isUnderGTKLookAndFeel() && "Ambiance".equalsIgnoreCase(UIUtil.getGtkThemeName());
    if (myTopLevel && !isAmbiance && UIUtil.GTK_AMBIANCE_TEXT_COLOR.equals(getForeground())) {
      setForeground(null);
    }

    if (UIUtil.isStandardMenuLAF()) {
      super.updateUI();
    }
    else {
      setUI(IdeaMenuUI.createUI(this));
      setFont(UIUtil.getMenuFont());

      JPopupMenu popupMenu = getPopupMenu();
      if (popupMenu != null) {
        popupMenu.updateUI();
      }
    }

    if (myTopLevel && isAmbiance) {
      setForeground(UIUtil.GTK_AMBIANCE_TEXT_COLOR);
    }

    // Pad undersized GTK top-level menu items up to a usable hit area.
    if (myTopLevel && UIUtil.isUnderGTKLookAndFeel()) {
      Insets insets = getInsets();
      @SuppressWarnings("UseDPIAwareInsets")
      Insets newInsets = new Insets(insets.top, insets.left, insets.bottom, insets.right);
      if (insets.top + insets.bottom < JBUI.scale(6)) {
        newInsets.top = newInsets.bottom = JBUI.scale(3);
      }
      if (insets.left + insets.right < JBUI.scale(12)) {
        newInsets.left = newInsets.right = JBUI.scale(6);
      }
      if (!newInsets.equals(insets)) {
        setBorder(BorderFactory.createEmptyBorder(newInsets.top, newInsets.left, newInsets.bottom, newInsets.right));
      }
    }
  }

  @Override
  public void setUI(MenuItemUI ui) {
    // Wrap Synth's UI for non-top-level GTK menus; otherwise use the delegate as-is.
    MenuItemUI newUi = !myTopLevel && UIUtil.isUnderGTKLookAndFeel() && ui instanceof SynthMenuUI ? new GtkMenuUI((SynthMenuUI)ui) : ui;
    super.setUI(newUi);
  }

  private void init() {
    boolean macSystemMenu = SystemInfo.isMacSystemMenu && myPlace.equals(ActionPlaces.MAIN_MENU);

    // No stub item in the macOS system main menu (see field comment on myStubItem).
    myStubItem = macSystemMenu ? null : new StubItem();
    addStubItem();
    addMenuListener(new MenuListenerImpl());
    setBorderPainted(false);

    setVisible(myPresentation.isVisible());
    setEnabled(myPresentation.isEnabled());
    setText(myPresentation.getText());
    updateIcon();

    setMnemonicEnabled(myMnemonicEnabled);
  }

  private void addStubItem() {
    if (myStubItem != null) {
      add(myStubItem);
    }
  }

  public void setMnemonicEnabled(boolean enable) {
    myMnemonicEnabled = enable;
    setMnemonic(myPresentation.getMnemonic());
    setDisplayedMnemonicIndex(myPresentation.getDisplayedMnemonicIndex());
  }

  @Override
  public void setDisplayedMnemonicIndex(final int index) throws IllegalArgumentException {
    // -1 suppresses mnemonic underlining when mnemonics are disabled.
    super.setDisplayedMnemonicIndex(myMnemonicEnabled ? index : -1);
  }

  @Override
  public void setMnemonic(int mnemonic) {
    super.setMnemonic(myMnemonicEnabled ? mnemonic : 0);
  }

  /** Pulls the (disabled-)icon from the presentation, honoring the "show icons in menus" setting. */
  private void updateIcon() {
    UISettings settings = UISettings.getInstance();
    if (settings != null && settings.SHOW_ICONS_IN_MENUS) {
      final Presentation presentation = myPresentation;
      final Icon icon = presentation.getIcon();
      setIcon(icon);
      if (presentation.getDisabledIcon() != null) {
        setDisabledIcon(presentation.getDisabledIcon());
      }
      else {
        setDisabledIcon(IconLoader.getDisabledIcon(icon));
      }
    }
  }

  @Override
  public void menuSelectionChanged(boolean isIncluded) {
    super.menuSelectionChanged(isIncluded);
    showDescriptionInStatusBar(isIncluded, this, myPresentation.getDescription());
  }

  /** Shows {@code description} in the owning frame's status bar while the item is selected, clears it otherwise. */
  public static void showDescriptionInStatusBar(boolean isIncluded, Component component, String description) {
    IdeFrame frame = (IdeFrame)(component instanceof IdeFrame ? component : SwingUtilities.getAncestorOfClass(IdeFrame.class, component));
    StatusBar statusBar;
    if (frame != null && (statusBar = frame.getStatusBar()) != null) {
      statusBar.setInfo(isIncluded ? description : null);
    }
  }

  private class MenuListenerImpl implements MenuListener {
    @Override
    public void menuCanceled(MenuEvent e) {
      clearItems();
      addStubItem();
    }

    @Override
    public void menuDeselected(MenuEvent e) {
      if (myDisposable != null) {
        Disposer.dispose(myDisposable);
        myDisposable = null;
      }
      clearItems();
      addStubItem();
    }

    @Override
    public void menuSelected(MenuEvent e) {
      // Helper lives until the menu is deselected or this component is removed.
      UsabilityHelper helper = new UsabilityHelper(ActionMenu.this);
      if (myDisposable == null) {
        myDisposable = Disposer.newDisposable();
      }
      Disposer.register(myDisposable, helper);
      fillMenu();
    }
  }

  /** Removes all items; on the macOS system main menu also recursively tears down child menus first. */
  private void clearItems() {
    if (SystemInfo.isMacSystemMenu && myPlace.equals(ActionPlaces.MAIN_MENU)) {
      for (Component menuComponent : getMenuComponents()) {
        if (menuComponent instanceof ActionMenu) {
          ((ActionMenu)menuComponent).clearItems();
          if (SystemInfo.isMacSystemMenu) {
            // hideNotify is not called on Macs
            ((ActionMenu)menuComponent).uninstallSynchronizer();
          }
        }
        else if (menuComponent instanceof ActionMenuItem) {
          // Looks like an old-fashioned ugly workaround
          // JDK 1.7 on Mac works wrong with such functional keys
          if (!(SystemInfo.isJavaVersionAtLeast("1.7") && SystemInfo.isMac)) {
            ((ActionMenuItem)menuComponent).setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_F24, 0));
          }
        }
      }
    }

    removeAll();
    validate();
  }

  /** Populates this menu from the action group, resolving a data context from focus when none was supplied. */
  private void fillMenu() {
    DataContext context;
    boolean mayContextBeInvalid;

    if (myContext != null) {
      context = myContext;
      mayContextBeInvalid = false;
    }
    else {
      @SuppressWarnings("deprecation")
      DataContext contextFromFocus = DataManager.getInstance().getDataContext();
      context = contextFromFocus;
      if (PlatformDataKeys.CONTEXT_COMPONENT.getData(context) == null) {
        // Focus-derived context had no component; fall back to the last focused component of our frame.
        IdeFrame frame = UIUtil.getParentOfType(IdeFrame.class, this);
        context = DataManager.getInstance().getDataContext(IdeFocusManager.getGlobalInstance().getLastFocusedFor(frame));
      }
      mayContextBeInvalid = true;
    }

    Utils.fillMenu(myGroup.getAction(), this, myMnemonicEnabled, myPresentationFactory, context, myPlace, true, mayContextBeInvalid, LaterInvocator.isInModalContext());
  }

  /** Mirrors Presentation property changes (visibility, text, icon, mnemonic, ...) into this menu component. */
  private class MenuItemSynchronizer implements PropertyChangeListener {
    @Override
    public void propertyChange(PropertyChangeEvent e) {
      String name = e.getPropertyName();
      if (Presentation.PROP_VISIBLE.equals(name)) {
        setVisible(myPresentation.isVisible());
        if (SystemInfo.isMacSystemMenu && myPlace.equals(ActionPlaces.MAIN_MENU)) {
          validateTree();
        }
      }
      else if (Presentation.PROP_ENABLED.equals(name)) {
        setEnabled(myPresentation.isEnabled());
      }
      else if (Presentation.PROP_MNEMONIC_KEY.equals(name)) {
        setMnemonic(myPresentation.getMnemonic());
      }
      else if (Presentation.PROP_MNEMONIC_INDEX.equals(name)) {
        setDisplayedMnemonicIndex(myPresentation.getDisplayedMnemonicIndex());
      }
      else if (Presentation.PROP_TEXT.equals(name)) {
        setText(myPresentation.getText());
      }
      else if (Presentation.PROP_ICON.equals(name) || Presentation.PROP_DISABLED_ICON.equals(name)) {
        updateIcon();
      }
    }
  }

  /**
   * Keeps a submenu open while the mouse travels diagonally toward it.
   * Mouse events inside the triangle (last mouse point, upper/lower popup corner)
   * are held back and re-dispatched after a short delay; events outside it are
   * re-dispatched almost immediately. A watchdog alarm re-dispatches a stalled
   * event after ~1.5 s so the menu never gets stuck.
   */
  private static class UsabilityHelper implements IdeEventQueue.EventDispatcher, AWTEventListener, Disposable {

    private Component myComponent;          // the menu whose submenu popup we guard
    private Point myLastMousePoint;         // screen coords of the previous mouse event
    private Point myUpperTargetPoint;       // popup corner nearer the menu (top)
    private Point myLowerTargetPoint;       // popup corner nearer the menu (bottom)
    private SingleAlarm myCallbackAlarm;    // re-dispatches the withheld mouse event
    private MouseEvent myEventToRedispatch;
    private long myLastEventTime = 0L;
    private boolean myInBounds = false;
    private SingleAlarm myCheckAlarm;       // self-rescheduling watchdog (every 100 ms)

    private UsabilityHelper(Component component) {
      myCallbackAlarm = new SingleAlarm(() -> {
        Disposer.dispose(myCallbackAlarm);
        myCallbackAlarm = null;
        if (myEventToRedispatch != null) {
          IdeEventQueue.getInstance().dispatchEvent(myEventToRedispatch);
        }
      }, 50, this);

      myCheckAlarm = new SingleAlarm(() -> {
        // No mouse activity for 1.5 s outside the component: release the held event.
        if (myLastEventTime > 0 && System.currentTimeMillis() - myLastEventTime > 1500) {
          if (!myInBounds && myCallbackAlarm != null && !myCallbackAlarm.isDisposed()) {
            myCallbackAlarm.request();
          }
        }
        myCheckAlarm.request();
      }, 100, this);

      myComponent = component;
      PointerInfo info = MouseInfo.getPointerInfo();
      myLastMousePoint = info != null ? info.getLocation() : null;
      if (myLastMousePoint != null) {
        Toolkit.getDefaultToolkit().addAWTEventListener(this, AWTEvent.COMPONENT_EVENT_MASK);
        IdeEventQueue.getInstance().addDispatcher(this, this);
      }
    }

    @Override
    public void eventDispatched(AWTEvent event) {
      // Capture the popup's near-edge corners once our submenu's popup appears.
      if (event instanceof ComponentEvent) {
        ComponentEvent componentEvent = (ComponentEvent)event;
        Component component = componentEvent.getComponent();
        JPopupMenu popup = UIUtil.getParentOfType(JPopupMenu.class, component);
        if (popup != null && popup.getInvoker() == myComponent) {
          Rectangle bounds = popup.getBounds();
          if (bounds.isEmpty()) return;
          bounds.setLocation(popup.getLocationOnScreen());
          if (myLastMousePoint.x < bounds.x) {
            // Popup opens to the right of the mouse.
            myUpperTargetPoint = new Point(bounds.x, bounds.y);
            myLowerTargetPoint = new Point(bounds.x, bounds.y + bounds.height);
          }
          if (myLastMousePoint.x > bounds.x + bounds.width) {
            // Popup opens to the left of the mouse.
            myUpperTargetPoint = new Point(bounds.x + bounds.width, bounds.y);
            myLowerTargetPoint = new Point(bounds.x + bounds.width, bounds.y + bounds.height);
          }
        }
      }
    }

    @Override
    public boolean dispatch(AWTEvent e) {
      if (e instanceof MouseEvent && myUpperTargetPoint != null && myLowerTargetPoint != null && myCallbackAlarm != null) {
        // Clicks always pass through untouched.
        if (e.getID() == MouseEvent.MOUSE_PRESSED || e.getID() == MouseEvent.MOUSE_RELEASED || e.getID() == MouseEvent.MOUSE_CLICKED) {
          return false;
        }
        Point point = ((MouseEvent)e).getLocationOnScreen();
        Rectangle bounds = myComponent.getBounds();
        bounds.setLocation(myComponent.getLocationOnScreen());
        myInBounds = bounds.contains(point);
        // Inside the component, or inside the triangle toward the popup, counts as "heading to the submenu".
        boolean isMouseMovingTowardsSubmenu = myInBounds || new Polygon(
          new int[]{myLastMousePoint.x, myUpperTargetPoint.x, myLowerTargetPoint.x},
          new int[]{myLastMousePoint.y, myUpperTargetPoint.y, myLowerTargetPoint.y}, 3).contains(point);

        myEventToRedispatch = (MouseEvent)e;
        myLastEventTime = System.currentTimeMillis();

        if (!isMouseMovingTowardsSubmenu) {
          myCallbackAlarm.request();
        }
        else {
          myCallbackAlarm.cancel();
        }
        myLastMousePoint = point;
        return true;  // swallow the event; it will be re-dispatched by the alarm if needed
      }
      return false;
    }

    @Override
    public void dispose() {
      myComponent = null;
      myEventToRedispatch = null;
      myLastMousePoint = myUpperTargetPoint = myLowerTargetPoint = null;
      Toolkit.getDefaultToolkit().removeAWTEventListener(this);
    }
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapreduce.v2.app; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.net.InetSocketAddress; import java.util.EnumSet; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.WrappedJvmID; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.JobID; import org.apache.hadoop.mapreduce.JobStatus.State; import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.mapreduce.OutputCommitter; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.TypeConverter; import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent; import org.apache.hadoop.mapreduce.jobhistory.NormalizedResourceEvent; import org.apache.hadoop.mapreduce.security.TokenCache; import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager; import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo; import 
org.apache.hadoop.mapreduce.v2.api.records.JobId; import org.apache.hadoop.mapreduce.v2.api.records.JobReport; import org.apache.hadoop.mapreduce.v2.api.records.JobState; import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId; import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport; import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState; import org.apache.hadoop.mapreduce.v2.api.records.TaskReport; import org.apache.hadoop.mapreduce.v2.api.records.TaskState; import org.apache.hadoop.mapreduce.v2.app.client.ClientService; import org.apache.hadoop.mapreduce.v2.app.client.MRClientService; import org.apache.hadoop.mapreduce.v2.app.commit.CommitterEvent; import org.apache.hadoop.mapreduce.v2.app.commit.CommitterEventHandler; import org.apache.hadoop.mapreduce.v2.app.job.Job; import org.apache.hadoop.mapreduce.v2.app.job.JobStateInternal; import org.apache.hadoop.mapreduce.v2.app.job.Task; import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt; import org.apache.hadoop.mapreduce.v2.app.job.TaskAttemptStateInternal; import org.apache.hadoop.mapreduce.v2.app.job.TaskStateInternal; import org.apache.hadoop.mapreduce.v2.app.job.event.JobEvent; import org.apache.hadoop.mapreduce.v2.app.job.event.JobEventType; import org.apache.hadoop.mapreduce.v2.app.job.event.JobFinishEvent; import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerAssignedEvent; import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent; import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent; import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType; import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl; import org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl; import org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl; import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncher; import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent; import 
org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocator; import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocatorEvent; import org.apache.hadoop.mapreduce.v2.app.rm.RMHeartbeatHandler; import org.apache.hadoop.mapreduce.v2.app.rm.preemption.AMPreemptionPolicy; import org.apache.hadoop.mapreduce.v2.util.MRApps; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.service.Service; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.security.ContainerTokenIdentifier; import org.apache.hadoop.yarn.state.StateMachine; import org.apache.hadoop.yarn.state.StateMachineFactory; import org.apache.hadoop.yarn.util.Clock; import org.apache.hadoop.yarn.util.SystemClock; import org.junit.Assert; /** * Mock MRAppMaster. Doesn't start RPC servers. * No threads are started except of the event Dispatcher thread. */ @SuppressWarnings("unchecked") public class MRApp extends MRAppMaster { private static final Log LOG = LogFactory.getLog(MRApp.class); /** * The available resource of each container allocated. 
*/ private Resource resource; int maps; int reduces; private File testWorkDir; private Path testAbsPath; private ClusterInfo clusterInfo; // Queue to pretend the RM assigned us private String assignedQueue; public static String NM_HOST = "localhost"; public static int NM_PORT = 1234; public static int NM_HTTP_PORT = 8042; //if true, tasks complete automatically as soon as they are launched protected boolean autoComplete = false; static ApplicationId applicationId; static { applicationId = ApplicationId.newInstance(0, 0); } public MRApp(int maps, int reduces, boolean autoComplete, String testName, boolean cleanOnStart, Clock clock) { this(maps, reduces, autoComplete, testName, cleanOnStart, 1, clock, null); } public MRApp(int maps, int reduces, boolean autoComplete, String testName, boolean cleanOnStart, Clock clock, boolean unregistered) { this(maps, reduces, autoComplete, testName, cleanOnStart, 1, clock, unregistered); } public MRApp(int maps, int reduces, boolean autoComplete, String testName, boolean cleanOnStart) { this(maps, reduces, autoComplete, testName, cleanOnStart, 1); } public MRApp(int maps, int reduces, boolean autoComplete, String testName, boolean cleanOnStart, String assignedQueue) { this(maps, reduces, autoComplete, testName, cleanOnStart, 1, SystemClock.getInstance(), assignedQueue); } public MRApp(int maps, int reduces, boolean autoComplete, String testName, boolean cleanOnStart, boolean unregistered) { this(maps, reduces, autoComplete, testName, cleanOnStart, 1, unregistered); } @Override protected void initJobCredentialsAndUGI(Configuration conf) { // Fake a shuffle secret that normally is provided by the job client. 
String shuffleSecret = "fake-shuffle-secret"; TokenCache.setShuffleSecretKey(shuffleSecret.getBytes(), getCredentials()); } private static ApplicationAttemptId getApplicationAttemptId( ApplicationId applicationId, int startCount) { ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.newInstance(applicationId, startCount); return applicationAttemptId; } private static ContainerId getContainerId(ApplicationId applicationId, int startCount) { ApplicationAttemptId appAttemptId = getApplicationAttemptId(applicationId, startCount); ContainerId containerId = ContainerId.newContainerId(appAttemptId, startCount); return containerId; } public MRApp(int maps, int reduces, boolean autoComplete, String testName, boolean cleanOnStart, int startCount) { this(maps, reduces, autoComplete, testName, cleanOnStart, startCount, SystemClock.getInstance(), null); } public MRApp(int maps, int reduces, boolean autoComplete, String testName, boolean cleanOnStart, int startCount, boolean unregistered) { this(maps, reduces, autoComplete, testName, cleanOnStart, startCount, SystemClock.getInstance(), unregistered); } public MRApp(int maps, int reduces, boolean autoComplete, String testName, boolean cleanOnStart, int startCount, Clock clock, boolean unregistered) { this(getApplicationAttemptId(applicationId, startCount), getContainerId( applicationId, startCount), maps, reduces, autoComplete, testName, cleanOnStart, startCount, clock, unregistered, null); } public MRApp(int maps, int reduces, boolean autoComplete, String testName, boolean cleanOnStart, int startCount, Clock clock, String assignedQueue) { this(getApplicationAttemptId(applicationId, startCount), getContainerId( applicationId, startCount), maps, reduces, autoComplete, testName, cleanOnStart, startCount, clock, true, assignedQueue); } public MRApp(ApplicationAttemptId appAttemptId, ContainerId amContainerId, int maps, int reduces, boolean autoComplete, String testName, boolean cleanOnStart, int startCount, boolean 
unregistered) { this(appAttemptId, amContainerId, maps, reduces, autoComplete, testName, cleanOnStart, startCount, SystemClock.getInstance(), unregistered, null); } public MRApp(ApplicationAttemptId appAttemptId, ContainerId amContainerId, int maps, int reduces, boolean autoComplete, String testName, boolean cleanOnStart, int startCount) { this(appAttemptId, amContainerId, maps, reduces, autoComplete, testName, cleanOnStart, startCount, SystemClock.getInstance(), true, null); } public MRApp(ApplicationAttemptId appAttemptId, ContainerId amContainerId, int maps, int reduces, boolean autoComplete, String testName, boolean cleanOnStart, int startCount, Clock clock, boolean unregistered, String assignedQueue) { super(appAttemptId, amContainerId, NM_HOST, NM_PORT, NM_HTTP_PORT, clock, System.currentTimeMillis()); this.testWorkDir = new File("target", testName); testAbsPath = new Path(testWorkDir.getAbsolutePath()); LOG.info("PathUsed: " + testAbsPath); if (cleanOnStart) { testAbsPath = new Path(testWorkDir.getAbsolutePath()); try { FileContext.getLocalFSFileContext().delete(testAbsPath, true); } catch (Exception e) { LOG.warn("COULD NOT CLEANUP: " + testAbsPath, e); throw new YarnRuntimeException("could not cleanup test dir", e); } } this.maps = maps; this.reduces = reduces; this.autoComplete = autoComplete; // If safeToReportTerminationToUser is set to true, we can verify whether // the job can reaches the final state when MRAppMaster shuts down. 
this.successfullyUnregistered.set(unregistered); this.assignedQueue = assignedQueue; this.resource = Resource.newInstance(1234, 2); } @Override protected void serviceInit(Configuration conf) throws Exception { try { //Create the staging directory if it does not exist String user = UserGroupInformation.getCurrentUser().getShortUserName(); Path stagingDir = MRApps.getStagingAreaDir(conf, user); FileSystem fs = getFileSystem(conf); fs.mkdirs(stagingDir); } catch (Exception e) { throw new YarnRuntimeException("Error creating staging dir", e); } super.serviceInit(conf); if (this.clusterInfo != null) { getContext().getClusterInfo().setMaxContainerCapability( this.clusterInfo.getMaxContainerCapability()); } else { getContext().getClusterInfo().setMaxContainerCapability( Resource.newInstance(10240, 1)); } } public Job submit(Configuration conf) throws Exception { //TODO: fix the bug where the speculator gets events with //not-fully-constructed objects. For now, disable speculative exec return submit(conf, false, false); } public Job submit(Configuration conf, boolean mapSpeculative, boolean reduceSpeculative) throws Exception { String user = conf.get(MRJobConfig.USER_NAME, UserGroupInformation .getCurrentUser().getShortUserName()); conf.set(MRJobConfig.USER_NAME, user); conf.set(MRJobConfig.MR_AM_STAGING_DIR, testAbsPath.toString()); conf.setBoolean(MRJobConfig.MR_AM_CREATE_JH_INTERMEDIATE_BASE_DIR, true); // TODO: fix the bug where the speculator gets events with // not-fully-constructed objects. 
// For now, disable speculative exec
    conf.setBoolean(MRJobConfig.MAP_SPECULATIVE, mapSpeculative);
    conf.setBoolean(MRJobConfig.REDUCE_SPECULATIVE, reduceSpeculative);

    init(conf);
    start();
    DefaultMetricsSystem.shutdown();
    Job job = getContext().getAllJobs().values().iterator().next();
    if (assignedQueue != null) {
      job.setQueueName(assignedQueue);
    }

    // Write job.xml
    String jobFile = MRApps.getJobFile(conf, user,
        TypeConverter.fromYarn(job.getID()));
    LOG.info("Writing job conf to " + jobFile);
    new File(jobFile).getParentFile().mkdirs();
    conf.writeXml(new FileOutputStream(jobFile));

    return job;
  }

  // Polls (500ms intervals, up to 20 tries) until the job reaches the
  // expected internal state, then asserts it was reached.
  public void waitForInternalState(JobImpl job, JobStateInternal finalState)
      throws Exception {
    int timeoutSecs = 0;
    JobStateInternal iState = job.getInternalState();
    while (!finalState.equals(iState) && timeoutSecs++ < 20) {
      System.out.println("Job Internal State is : " + iState
          + " Waiting for Internal state : " + finalState);
      Thread.sleep(500);
      iState = job.getInternalState();
    }
    // NOTE(review): the two messages below say "Task" although this method
    // waits on a Job -- looks like copy-paste from the Task variant; confirm
    // before relying on log output.
    System.out.println("Task Internal State is : " + iState);
    Assert.assertEquals("Task Internal state is not correct (timedout)",
        finalState, iState);
  }

  // Same polling pattern as above, for a task's internal state.
  public void waitForInternalState(TaskImpl task, TaskStateInternal finalState)
      throws Exception {
    int timeoutSecs = 0;
    TaskReport report = task.getReport();
    TaskStateInternal iState = task.getInternalState();
    while (!finalState.equals(iState) && timeoutSecs++ < 20) {
      System.out.println("Task Internal State is : " + iState
          + " Waiting for Internal state : " + finalState
          + " progress : " + report.getProgress());
      Thread.sleep(500);
      report = task.getReport();
      iState = task.getInternalState();
    }
    System.out.println("Task Internal State is : " + iState);
    Assert.assertEquals("Task Internal state is not correct (timedout)",
        finalState, iState);
  }

  // Same polling pattern, for a task attempt's internal state.
  public void waitForInternalState(TaskAttemptImpl attempt,
      TaskAttemptStateInternal finalState) throws Exception {
    int timeoutSecs = 0;
    TaskAttemptReport report = attempt.getReport();
    TaskAttemptStateInternal iState = attempt.getInternalState();
    while (!finalState.equals(iState) && timeoutSecs++ < 20) {
      System.out.println("TaskAttempt Internal State is : " + iState
          + " Waiting for Internal state : " + finalState
          + " progress : " + report.getProgress());
      Thread.sleep(500);
      report = attempt.getReport();
      iState = attempt.getInternalState();
    }
    System.out.println("TaskAttempt Internal State is : " + iState);
    Assert.assertEquals("TaskAttempt Internal state is not correct (timedout)",
        finalState, iState);
  }

  // Polls the externally-reported attempt state (via reports), not the
  // internal state machine state.
  public void waitForState(TaskAttempt attempt, TaskAttemptState finalState)
      throws Exception {
    int timeoutSecs = 0;
    TaskAttemptReport report = attempt.getReport();
    while (!finalState.equals(report.getTaskAttemptState())
        && timeoutSecs++ < 20) {
      System.out.println("TaskAttempt State is : "
          + report.getTaskAttemptState()
          + " Waiting for state : " + finalState
          + " progress : " + report.getProgress());
      report = attempt.getReport();
      Thread.sleep(500);
    }
    System.out.println("TaskAttempt State is : "
        + report.getTaskAttemptState());
    Assert.assertEquals("TaskAttempt state is not correct (timedout)",
        finalState, report.getTaskAttemptState());
  }

  public void waitForState(Task task, TaskState finalState) throws Exception {
    int timeoutSecs = 0;
    TaskReport report = task.getReport();
    while (!finalState.equals(report.getTaskState()) && timeoutSecs++ < 20) {
      System.out.println("Task State for " + task.getID() + " is : "
          + report.getTaskState() + " Waiting for state : " + finalState
          + " progress : " + report.getProgress());
      report = task.getReport();
      Thread.sleep(500);
    }
    System.out.println("Task State is : " + report.getTaskState());
    Assert.assertEquals("Task state is not correct (timedout)", finalState,
        report.getTaskState());
  }

  public void waitForState(Job job, JobState finalState) throws Exception {
    int timeoutSecs = 0;
    JobReport report = job.getReport();
    while (!finalState.equals(report.getJobState()) && timeoutSecs++ < 20) {
      System.out.println("Job State is : " + report.getJobState()
          + " Waiting for state : " + finalState
          + " map progress : " + report.getMapProgress()
          + " reduce progress : " + report.getReduceProgress());
      report = job.getReport();
      Thread.sleep(500);
    }
    System.out.println("Job State is : " + report.getJobState());
    // Note: the assertion re-queries job.getState() rather than reusing the
    // last report fetched in the loop.
    Assert.assertEquals("Job state is not correct (timedout)", finalState,
        job.getState());
  }

  // Waits for the MRApp service itself to reach a lifecycle state; STOPPED
  // is handled specially via waitForServiceToStop.
  public void waitForState(Service.STATE finalState) throws Exception {
    if (finalState == Service.STATE.STOPPED) {
      Assert.assertTrue("Timeout while waiting for MRApp to stop",
          waitForServiceToStop(20 * 1000));
    } else {
      int timeoutSecs = 0;
      while (!finalState.equals(getServiceState()) && timeoutSecs++ < 20) {
        System.out.println("MRApp State is : " + getServiceState()
            + " Waiting for state : " + finalState);
        Thread.sleep(500);
      }
      System.out.println("MRApp State is : " + getServiceState());
      Assert.assertEquals("MRApp state is not correct (timedout)", finalState,
          getServiceState());
    }
  }

  // Sanity-checks start/finish timestamps of every job, task and attempt.
  public void verifyCompleted() {
    for (Job job : getContext().getAllJobs().values()) {
      JobReport jobReport = job.getReport();
      System.out.println("Job start time :" + jobReport.getStartTime());
      System.out.println("Job finish time :" + jobReport.getFinishTime());
      Assert.assertTrue("Job start time is not less than finish time",
          jobReport.getStartTime() <= jobReport.getFinishTime());
      Assert.assertTrue("Job finish time is in future",
          jobReport.getFinishTime() <= System.currentTimeMillis());
      for (Task task : job.getTasks().values()) {
        TaskReport taskReport = task.getReport();
        System.out.println("Task start time : " + taskReport.getStartTime());
        System.out.println("Task finish time : " + taskReport.getFinishTime());
        Assert.assertTrue("Task start time is not less than finish time",
            taskReport.getStartTime() <= taskReport.getFinishTime());
        for (TaskAttempt attempt : task.getAttempts().values()) {
          TaskAttemptReport attemptReport = attempt.getReport();
          Assert.assertTrue("Attempt start time is not less than finish time",
              attemptReport.getStartTime() <= attemptReport.getFinishTime());
        }
      }
    }
  }

  // Creates a TestJob (below), registers it in the app context, and wires a
  // JobFinishEvent handler that stops this app when the job finishes.
  @Override
  protected Job createJob(Configuration conf, JobStateInternal forcedState,
      String diagnostic) {
    UserGroupInformation currentUser = null;
    try {
      currentUser = UserGroupInformation.getCurrentUser();
    } catch (IOException e) {
      throw new YarnRuntimeException(e);
    }
    Job newJob = new TestJob(getJobId(), getAttemptID(), conf,
        getDispatcher().getEventHandler(),
        getTaskAttemptListener(), getContext().getClock(),
        getCommitter(), isNewApiCommitter(),
        currentUser.getUserName(), getContext(),
        forcedState, diagnostic);
    ((AppContext) getContext()).getAllJobs().put(newJob.getID(), newJob);

    getDispatcher().register(JobFinishEvent.Type.class,
        new EventHandler<JobFinishEvent>() {
          @Override
          public void handle(JobFinishEvent event) {
            stop();
          }
        });

    return newJob;
  }

  // Registering an attempt with this monitor immediately reports the
  // container as completed (no real containers in the mock).
  @Override
  protected TaskAttemptFinishingMonitor createTaskAttemptFinishingMonitor(
      EventHandler eventHandler) {
    return new TaskAttemptFinishingMonitor(eventHandler) {
      @Override
      public synchronized void register(TaskAttemptId attemptID) {
        getContext().getEventHandler().handle(
            new TaskAttemptEvent(attemptID,
                TaskAttemptEventType.TA_CONTAINER_COMPLETED));
      }
    };
  }

  // No-op listener; only getAddress() returns a value (a fixed local addr).
  @Override
  protected TaskAttemptListener createTaskAttemptListener(
      AppContext context, AMPreemptionPolicy policy) {
    return new TaskAttemptListener() {
      @Override
      public InetSocketAddress getAddress() {
        return NetUtils.createSocketAddr("localhost:54321");
      }

      @Override
      public void registerLaunchedTask(TaskAttemptId attemptID,
          WrappedJvmID jvmID) {
      }

      @Override
      public void unregister(TaskAttemptId attemptID, WrappedJvmID jvmID) {
      }

      @Override
      public void registerPendingTask(org.apache.hadoop.mapred.Task task,
          WrappedJvmID jvmID) {
      }
    };
  }

  @Override
  protected EventHandler<JobHistoryEvent> createJobHistoryHandler(
      AppContext context) { // disable history
    return new EventHandler<JobHistoryEvent>() {
      @Override
      public void handle(JobHistoryEvent event) {
      }
    };
  }

  @Override
  protected ContainerLauncher createContainerLauncher(AppContext context) {
    return new MockContainerLauncher();
  }

  // Launcher that fakes container launch/cleanup purely via events.
  protected class MockContainerLauncher implements ContainerLauncher {

    // We are running locally so set the shuffle port to -1
    int shufflePort = -1;

    public MockContainerLauncher() {
    }

    @Override
    public void handle(ContainerLauncherEvent event) {
      switch (event.getType()) {
      case CONTAINER_REMOTE_LAUNCH:
        containerLaunched(event.getTaskAttemptID(), shufflePort);
        attemptLaunched(event.getTaskAttemptID());
        break;
      case CONTAINER_REMOTE_CLEANUP:
        getContext().getEventHandler().handle(
            new TaskAttemptEvent(event.getTaskAttemptID(),
                TaskAttemptEventType.TA_CONTAINER_CLEANED));
        break;
      case CONTAINER_COMPLETED:
        break;
      }
    }
  }

  protected void containerLaunched(TaskAttemptId attemptID, int shufflePort) {
    getContext().getEventHandler().handle(
        new TaskAttemptContainerLaunchedEvent(attemptID, shufflePort));
  }

  // Hook for subclasses; by default completes the attempt when autoComplete
  // is set.
  protected void attemptLaunched(TaskAttemptId attemptID) {
    if (autoComplete) {
      // send the done event
      getContext().getEventHandler().handle(
          new TaskAttemptEvent(attemptID, TaskAttemptEventType.TA_DONE));
    }
  }

  @Override
  protected ContainerAllocator createContainerAllocator(
      ClientService clientService, final AppContext context) {
    return new MRAppContainerAllocator();
  }

  // Allocator that hands out synthetic containers with fabricated tokens,
  // and emits the normalized-resource history events for the job.
  protected class MRAppContainerAllocator
      implements ContainerAllocator, RMHeartbeatHandler {
    private int containerCount;

    @Override
    public void handle(ContainerAllocatorEvent event) {
      ContainerId cId =
          ContainerId.newContainerId(getContext().getApplicationAttemptId(),
              containerCount++);
      NodeId nodeId = NodeId.newInstance(NM_HOST, NM_PORT);
      ContainerTokenIdentifier containerTokenIdentifier =
          new ContainerTokenIdentifier(cId, nodeId.toString(), "user",
              resource, System.currentTimeMillis() + 10000, 42, 42,
              Priority.newInstance(0), 0);
      Token containerToken = newContainerToken(nodeId, "password".getBytes(),
          containerTokenIdentifier);
      Container container = Container.newInstance(cId, nodeId,
          NM_HOST + ":" + NM_HTTP_PORT, resource, null, containerToken);
      JobID id = TypeConverter.fromYarn(applicationId);
      JobId jobId = TypeConverter.toYarn(id);
      getContext().getEventHandler().handle(new JobHistoryEvent(jobId,
          new NormalizedResourceEvent(
              org.apache.hadoop.mapreduce.TaskType.REDUCE, 100)));
      getContext().getEventHandler().handle(new JobHistoryEvent(jobId,
          new NormalizedResourceEvent(
              org.apache.hadoop.mapreduce.TaskType.MAP, 100)));
      getContext().getEventHandler().handle(
          new TaskAttemptContainerAssignedEvent(event.getAttemptID(),
              container, null));
    }

    @Override
    public long getLastHeartbeatTime() {
      return getContext().getClock().getTime();
    }

    @Override
    public void runOnNextHeartbeat(Runnable callback) {
      callback.run();
    }
  }

  // Wraps the real committer: job-level methods delegate, task-level
  // methods are stubbed out (needsTaskCommit always false).
  @Override
  protected EventHandler<CommitterEvent> createCommitterEventHandler(
      AppContext context, final OutputCommitter committer) {
    // create an output committer with the task methods stubbed out
    OutputCommitter stubbedCommitter = new OutputCommitter() {
      @Override
      public void setupJob(JobContext jobContext) throws IOException {
        committer.setupJob(jobContext);
      }

      @SuppressWarnings("deprecation")
      @Override
      public void cleanupJob(JobContext jobContext) throws IOException {
        committer.cleanupJob(jobContext);
      }

      @Override
      public void commitJob(JobContext jobContext) throws IOException {
        committer.commitJob(jobContext);
      }

      @Override
      public void abortJob(JobContext jobContext, State state)
          throws IOException {
        committer.abortJob(jobContext, state);
      }

      @Override
      public boolean isRecoverySupported(JobContext jobContext)
          throws IOException {
        return committer.isRecoverySupported(jobContext);
      }

      @SuppressWarnings("deprecation")
      @Override
      public boolean isRecoverySupported() {
        return committer.isRecoverySupported();
      }

      @Override
      public void setupTask(TaskAttemptContext taskContext)
          throws IOException {
      }

      @Override
      public boolean needsTaskCommit(TaskAttemptContext taskContext)
          throws IOException {
        return false;
      }

      @Override
      public void commitTask(TaskAttemptContext taskContext)
          throws IOException {
      }

      @Override
      public void abortTask(TaskAttemptContext taskContext)
          throws IOException {
      }

      @Override
      public void recoverTask(TaskAttemptContext taskContext)
          throws IOException {
      }
    };
    return new CommitterEventHandler(context, stubbedCommitter,
        getRMHeartbeatHandler());
  }

  @Override
  protected ClientService createClientService(AppContext context) {
    return new MRClientService(context) {
      @Override
      public InetSocketAddress getBindAddress() {
        return NetUtils.createSocketAddr("localhost:9876");
      }

      @Override
      public int getHttpPort() {
        return -1;
      }
    };
  }

  public void setClusterInfo(ClusterInfo clusterInfo) {
    // Only useful if set before a job is started.
    if (getServiceState() == Service.STATE.NOTINITED
        || getServiceState() == Service.STATE.INITED) {
      this.clusterInfo = clusterInfo;
    } else {
      throw new IllegalStateException(
          "ClusterInfo can only be set before the App is STARTED");
    }
  }

  public void setAllocatedContainerResource(Resource resource) {
    this.resource = resource;
  }

  // JobImpl whose NEW -> INITED/FAILED transition is replaced by
  // TestInitTransition (so no real split files are needed).
  class TestJob extends JobImpl {
    // override the init transition
    private final TestInitTransition initTransition =
        new TestInitTransition(maps, reduces);

    StateMachineFactory<JobImpl, JobStateInternal, JobEventType, JobEvent>
        localFactory = stateMachineFactory.addTransition(JobStateInternal.NEW,
            EnumSet.of(JobStateInternal.INITED, JobStateInternal.FAILED),
            JobEventType.JOB_INIT,
            // This is abusive.
            initTransition);

    private final StateMachine<JobStateInternal, JobEventType, JobEvent>
        localStateMachine;

    @Override
    protected StateMachine<JobStateInternal, JobEventType, JobEvent>
        getStateMachine() {
      return localStateMachine;
    }

    @SuppressWarnings("rawtypes")
    public TestJob(JobId jobId, ApplicationAttemptId applicationAttemptId,
        Configuration conf, EventHandler eventHandler,
        TaskAttemptListener taskAttemptListener, Clock clock,
        OutputCommitter committer, boolean newApiCommitter,
        String user, AppContext appContext,
        JobStateInternal forcedState, String diagnostic) {
      super(jobId, getApplicationAttemptId(applicationId, getStartCount()),
          conf, eventHandler, taskAttemptListener,
          new JobTokenSecretManager(), new Credentials(), clock,
          getCompletedTaskFromPreviousRun(), metrics, committer,
          newApiCommitter, user, System.currentTimeMillis(),
          getAllAMInfos(), appContext, forcedState, diagnostic);

      // This "this leak" is okay because the retained pointer is in an
      // instance variable.
      localStateMachine = localFactory.make(this);
    }
  }

  // Override InitTransition to not look for split files etc
  static class TestInitTransition extends JobImpl.InitTransition {
    private int maps;
    private int reduces;

    TestInitTransition(int maps, int reduces) {
      this.maps = maps;
      this.reduces = reduces;
    }

    @Override
    protected void setup(JobImpl job) throws IOException {
      super.setup(job);
      job.conf.setInt(MRJobConfig.NUM_REDUCES, reduces);
      job.remoteJobConfFile = new Path("test");
    }

    @Override
    protected TaskSplitMetaInfo[] createSplits(JobImpl job, JobId jobId) {
      // Fabricate one empty split per map task instead of reading real
      // split meta info from disk.
      TaskSplitMetaInfo[] splits = new TaskSplitMetaInfo[maps];
      for (int i = 0; i < maps; i++) {
        splits[i] = new TaskSplitMetaInfo();
      }
      return splits;
    }
  }

  public static Token newContainerToken(NodeId nodeId, byte[] password,
      ContainerTokenIdentifier tokenIdentifier) {
    // RPC layer client expects ip:port as service for tokens
    InetSocketAddress addr =
        NetUtils.createSocketAddrForHost(nodeId.getHost(), nodeId.getPort());
    // NOTE: use SecurityUtil.setTokenService if this becomes a "real" token
    Token containerToken =
        Token.newInstance(tokenIdentifier.getBytes(),
            ContainerTokenIdentifier.KIND.toString(), password,
            SecurityUtil.buildTokenService(addr).toString());
    return containerToken;
  }

  public static ContainerId newContainerId(int appId, int appAttemptId,
      long timestamp, int containerId) {
    ApplicationId applicationId = ApplicationId.newInstance(timestamp, appId);
    ApplicationAttemptId applicationAttemptId =
        ApplicationAttemptId.newInstance(applicationId, appAttemptId);
    return ContainerId.newContainerId(applicationAttemptId, containerId);
  }

  public static ContainerTokenIdentifier newContainerTokenIdentifier(
      Token containerToken) throws IOException {
    org.apache.hadoop.security.token.Token<ContainerTokenIdentifier> token =
        new org.apache.hadoop.security.token.Token<ContainerTokenIdentifier>(
            containerToken.getIdentifier().array(),
            containerToken.getPassword().array(),
            new Text(containerToken.getKind()),
            new Text(containerToken.getService()));
    return token.decodeIdentifier();
  }

  @Override
  protected void shutdownTaskLog() {
    // Avoid closing the logging system during unit tests,
    // otherwise subsequent MRApp instances in the same test
    // will fail to log anything.
  }

  @Override
  protected void shutdownLogManager() {
    // Avoid closing the logging system during unit tests,
    // otherwise subsequent MRApp instances in the same test
    // will fail to log anything.
  }
}
/**
 */
package isostdisois_13584_32ed_1techxmlschemaontomlSimplified;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import org.eclipse.emf.common.util.Enumerator;

/**
 * <!-- begin-user-doc -->
 * A representation of the literals of the enumeration '<em><b>SIPREFIX Type</b></em>',
 * and utility methods for working with them.
 * <!-- end-user-doc -->
 * @see isostdisois_13584_32ed_1techxmlschemaontomlSimplified.Isostdisois_13584_32ed_1techxmlschemaontomlSimplifiedPackage#getSIPREFIXType()
 * @model
 * @generated
 */
public enum SIPREFIXType implements Enumerator {
	/** The '<em><b>EXA</b></em>' literal object. @see #EXA_VALUE */
	EXA(0, "EXA", "EXA"),
	/** The '<em><b>PETA</b></em>' literal object. @see #PETA_VALUE */
	PETA(1, "PETA", "PETA"),
	/** The '<em><b>TERA</b></em>' literal object. @see #TERA_VALUE */
	TERA(2, "TERA", "TERA"),
	/** The '<em><b>GIGA</b></em>' literal object. @see #GIGA_VALUE */
	GIGA(3, "GIGA", "GIGA"),
	/** The '<em><b>MEGA</b></em>' literal object. @see #MEGA_VALUE */
	MEGA(4, "MEGA", "MEGA"),
	/** The '<em><b>KILO</b></em>' literal object. @see #KILO_VALUE */
	KILO(5, "KILO", "KILO"),
	/** The '<em><b>HECTO</b></em>' literal object. @see #HECTO_VALUE */
	HECTO(6, "HECTO", "HECTO"),
	/** The '<em><b>DECA</b></em>' literal object. @see #DECA_VALUE */
	DECA(7, "DECA", "DECA"),
	/** The '<em><b>DECI</b></em>' literal object. @see #DECI_VALUE */
	DECI(8, "DECI", "DECI"),
	/** The '<em><b>CENTI</b></em>' literal object. @see #CENTI_VALUE */
	CENTI(9, "CENTI", "CENTI"),
	/** The '<em><b>MILLI</b></em>' literal object. @see #MILLI_VALUE */
	MILLI(10, "MILLI", "MILLI"),
	/** The '<em><b>MICRO</b></em>' literal object. @see #MICRO_VALUE */
	MICRO(11, "MICRO", "MICRO"),
	/** The '<em><b>NANO</b></em>' literal object. @see #NANO_VALUE */
	NANO(12, "NANO", "NANO"),
	/** The '<em><b>PICO</b></em>' literal object. @see #PICO_VALUE */
	PICO(13, "PICO", "PICO"),
	/** The '<em><b>FEMTO</b></em>' literal object. @see #FEMTO_VALUE */
	FEMTO(14, "FEMTO", "FEMTO"),
	/** The '<em><b>ATTO</b></em>' literal object. @see #ATTO_VALUE */
	ATTO(15, "ATTO", "ATTO");

	/** The '<em><b>EXA</b></em>' literal value. @see #EXA */
	public static final int EXA_VALUE = 0;
	/** The '<em><b>PETA</b></em>' literal value. @see #PETA */
	public static final int PETA_VALUE = 1;
	/** The '<em><b>TERA</b></em>' literal value. @see #TERA */
	public static final int TERA_VALUE = 2;
	/** The '<em><b>GIGA</b></em>' literal value. @see #GIGA */
	public static final int GIGA_VALUE = 3;
	/** The '<em><b>MEGA</b></em>' literal value. @see #MEGA */
	public static final int MEGA_VALUE = 4;
	/** The '<em><b>KILO</b></em>' literal value. @see #KILO */
	public static final int KILO_VALUE = 5;
	/** The '<em><b>HECTO</b></em>' literal value. @see #HECTO */
	public static final int HECTO_VALUE = 6;
	/** The '<em><b>DECA</b></em>' literal value. @see #DECA */
	public static final int DECA_VALUE = 7;
	/** The '<em><b>DECI</b></em>' literal value. @see #DECI */
	public static final int DECI_VALUE = 8;
	/** The '<em><b>CENTI</b></em>' literal value. @see #CENTI */
	public static final int CENTI_VALUE = 9;
	/** The '<em><b>MILLI</b></em>' literal value. @see #MILLI */
	public static final int MILLI_VALUE = 10;
	/** The '<em><b>MICRO</b></em>' literal value. @see #MICRO */
	public static final int MICRO_VALUE = 11;
	/** The '<em><b>NANO</b></em>' literal value. @see #NANO */
	public static final int NANO_VALUE = 12;
	/** The '<em><b>PICO</b></em>' literal value. @see #PICO */
	public static final int PICO_VALUE = 13;
	/** The '<em><b>FEMTO</b></em>' literal value. @see #FEMTO */
	public static final int FEMTO_VALUE = 14;
	/** The '<em><b>ATTO</b></em>' literal value. @see #ATTO */
	public static final int ATTO_VALUE = 15;

	/**
	 * A public read-only list of all the '<em><b>SIPREFIX Type</b></em>' enumerators,
	 * in declaration order.
	 */
	public static final List<SIPREFIXType> VALUES =
			Collections.unmodifiableList(Arrays.asList(values()));

	/**
	 * Returns the '<em><b>SIPREFIX Type</b></em>' literal with the specified literal value.
	 * @param literal the literal.
	 * @return the matching enumerator or <code>null</code>.
	 */
	public static SIPREFIXType get(String literal) {
		for (SIPREFIXType candidate : VALUES) {
			if (candidate.toString().equals(literal)) {
				return candidate;
			}
		}
		return null;
	}

	/**
	 * Returns the '<em><b>SIPREFIX Type</b></em>' literal with the specified name.
	 * @param name the name.
	 * @return the matching enumerator or <code>null</code>.
	 */
	public static SIPREFIXType getByName(String name) {
		for (SIPREFIXType candidate : VALUES) {
			if (candidate.getName().equals(name)) {
				return candidate;
			}
		}
		return null;
	}

	/**
	 * Returns the '<em><b>SIPREFIX Type</b></em>' literal with the specified integer value.
	 * @param value the integer value.
	 * @return the matching enumerator or <code>null</code>.
	 */
	public static SIPREFIXType get(int value) {
		for (SIPREFIXType candidate : VALUES) {
			if (candidate.value == value) {
				return candidate;
			}
		}
		return null;
	}

	/** The integer value of this enumerator. */
	private final int value;

	/** The name of this enumerator. */
	private final String name;

	/** The literal text of this enumerator. */
	private final String literal;

	/**
	 * Only this class can construct instances.
	 */
	private SIPREFIXType(int value, String name, String literal) {
		this.value = value;
		this.name = name;
		this.literal = literal;
	}

	public int getValue() {
		return value;
	}

	public String getName() {
		return name;
	}

	public String getLiteral() {
		return literal;
	}

	/**
	 * Returns the literal value of the enumerator, which is its string representation.
	 */
	@Override
	public String toString() {
		return literal;
	}

} //SIPREFIXType
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.datanode; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.EOFException; import java.io.IOException; import java.io.InterruptedIOException; import java.io.OutputStream; import java.net.InetSocketAddress; import java.net.Socket; import java.net.SocketException; import java.nio.channels.ClosedChannelException; import org.apache.commons.logging.Log; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.DataTransferProtocol; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.FSConstants; import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier; import org.apache.hadoop.hdfs.security.token.block.BlockTokenSecretManager; import org.apache.hadoop.hdfs.server.common.HdfsConstants; import org.apache.hadoop.hdfs.server.datanode.FSDatasetInterface.MetaDataInputStream; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.MD5Hash; import org.apache.hadoop.io.Text; import org.apache.hadoop.net.NetUtils; import 
org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.SecretManager.InvalidToken; import org.apache.hadoop.util.DataChecksum; import org.apache.hadoop.util.StringUtils; import static org.apache.hadoop.hdfs.server.datanode.DataNode.DN_CLIENTTRACE_FORMAT; /** * Thread for processing incoming/outgoing data stream. */ class DataXceiver extends Thread implements Runnable, FSConstants { public static final Log LOG = DataNode.LOG; static final Log ClientTraceLog = DataNode.ClientTraceLog; Socket s; final String remoteAddress; // address of remote side final String localAddress; // local address of this daemon DataNode datanode; DataXceiverServer dataXceiverServer; private int socketKeepaliveTimeout; public DataXceiver(Socket s, DataNode datanode, DataXceiverServer dataXceiverServer) { super(datanode.threadGroup, "DataXceiver (initializing)"); this.s = s; this.datanode = datanode; this.dataXceiverServer = dataXceiverServer; dataXceiverServer.childSockets.put(s, s); remoteAddress = s.getRemoteSocketAddress().toString(); localAddress = s.getLocalSocketAddress().toString(); socketKeepaliveTimeout = datanode.getConf().getInt( DFSConfigKeys.DFS_DATANODE_SOCKET_REUSE_KEEPALIVE_KEY, DFSConfigKeys.DFS_DATANODE_SOCKET_REUSE_KEEPALIVE_DEFAULT); LOG.debug("Number of active connections is: " + datanode.getXceiverCount()); updateThreadName("waiting for handshake"); } /** * Update the thread name to contain the current status. */ private void updateThreadName(String status) { StringBuilder sb = new StringBuilder(); sb.append("DataXceiver for client ").append(remoteAddress); if (status != null) { sb.append(" [").append(status).append("]"); } this.setName(sb.toString()); } /** * Read/write data from/to the DataXceiverServer. 
*/ public void run() { DataInputStream in=null; int opsProcessed = 0; try { in = new DataInputStream( new BufferedInputStream(NetUtils.getInputStream(s), SMALL_BUFFER_SIZE)); boolean local = s.getInetAddress().equals(s.getLocalAddress()); int stdTimeout = s.getSoTimeout(); // We process requests in a loop, and stay around for a short timeout. // This optimistic behaviour allows the other end to reuse connections. // Setting keepalive timeout to 0 disable this behavior. do { updateThreadName("Waiting for operation #" + (opsProcessed + 1)); byte op; try { if (opsProcessed != 0) { assert socketKeepaliveTimeout > 0; s.setSoTimeout(socketKeepaliveTimeout); } short version = in.readShort(); if ( version != DataTransferProtocol.DATA_TRANSFER_VERSION ) { throw new IOException( "Version Mismatch" ); } op = in.readByte(); } catch (InterruptedIOException ignored) { // Time out while waiting for client RPC break; } catch (IOException err) { // Since we optimistically expect the next op, it's quite normal to get EOF here. 
// Tail of run()'s keepalive loop: an EOF / reset AFTER at least one
// successful op means the client simply dropped a cached connection —
// expected behaviour, not an error worth propagating.
if (opsProcessed > 0 && (err instanceof EOFException
    || err instanceof ClosedChannelException
    || err.getMessage().contains("Connection reset by peer"))) {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Cached " + s.toString() + " closing after " + opsProcessed + " ops");
  }
} else {
  throw err;
}
break;
}

// restore normal timeout
if (opsProcessed != 0) {
  s.setSoTimeout(stdTimeout);
}

// Indentation is left alone here so that patches merge easier from 0.20.20x
// Make sure the xciver count is not exceeded
int curXceiverCount = datanode.getXceiverCount();
if (curXceiverCount > dataXceiverServer.maxXceiverCount) {
  throw new IOException("xceiverCount " + curXceiverCount
                        + " exceeds the limit of concurrent xcievers "
                        + dataXceiverServer.maxXceiverCount);
}

// Dispatch on the opcode; each handler reads its own request header from
// 'in'.  Per-op latency and local/remote counters go into datanode metrics.
long startTime = DataNode.now();
switch ( op ) {
case DataTransferProtocol.OP_READ_BLOCK:
  readBlock( in );
  datanode.myMetrics.readBlockOp.inc(DataNode.now() - startTime);
  if (local)
    datanode.myMetrics.readsFromLocalClient.inc();
  else
    datanode.myMetrics.readsFromRemoteClient.inc();
  break;
case DataTransferProtocol.OP_WRITE_BLOCK:
  writeBlock( in );
  datanode.myMetrics.writeBlockOp.inc(DataNode.now() - startTime);
  if (local)
    datanode.myMetrics.writesFromLocalClient.inc();
  else
    datanode.myMetrics.writesFromRemoteClient.inc();
  break;
case DataTransferProtocol.OP_REPLACE_BLOCK: // for balancing purpose; send to a destination
  replaceBlock(in);
  datanode.myMetrics.replaceBlockOp.inc(DataNode.now() - startTime);
  break;
case DataTransferProtocol.OP_COPY_BLOCK:
  // for balancing purpose; send to a proxy source
  copyBlock(in);
  datanode.myMetrics.copyBlockOp.inc(DataNode.now() - startTime);
  break;
case DataTransferProtocol.OP_BLOCK_CHECKSUM: //get the checksum of a block
  getBlockChecksum(in);
  datanode.myMetrics.blockChecksumOp.inc(DataNode.now() - startTime);
  break;
default:
  throw new IOException("Unknown opcode " + op + " in data stream");
}
++opsProcessed;
// Keep the connection open for further ops only while keepalive is enabled.
} while (!s.isClosed() && socketKeepaliveTimeout > 0);
} catch (Throwable t) {
  LOG.error(datanode.dnRegistration + ":DataXceiver",t);
} finally {
  // Always log the connection count and release socket/stream resources,
  // regardless of how the op loop exited.
  LOG.debug(datanode.dnRegistration + ":Number of active connections is: "
      + datanode.getXceiverCount());
  updateThreadName("Cleaning up");
  IOUtils.closeStream(in);
  IOUtils.closeSocket(s);
  dataXceiverServer.childSockets.remove(s);
}
}

/**
 * Read a block from the disk.
 *
 * Wire format read from {@code in}: blockId, generation stamp, start offset,
 * length, client name, access token (see the matching writer in DFSClient).
 *
 * @param in The stream to read from
 * @throws IOException
 */
private void readBlock(DataInputStream in) throws IOException {
  //
  // Read in the header
  //
  long blockId = in.readLong();
  Block block = new Block( blockId, 0 , in.readLong());

  long startOffset = in.readLong();
  long length = in.readLong();
  String clientName = Text.readString(in);
  Token<BlockTokenIdentifier> accessToken = new Token<BlockTokenIdentifier>();
  accessToken.readFields(in);

  OutputStream baseStream = NetUtils.getOutputStream(s,
      datanode.socketWriteTimeout);
  DataOutputStream out = new DataOutputStream(
      new BufferedOutputStream(baseStream, SMALL_BUFFER_SIZE));

  // When block tokens are enabled, reject unauthorized readers up front:
  // reply with an access-token error status, then fail the op.
  if (datanode.isBlockTokenEnabled) {
    try {
      datanode.blockTokenSecretManager.checkAccess(accessToken, null, block,
          BlockTokenSecretManager.AccessMode.READ);
    } catch (InvalidToken e) {
      try {
        out.writeShort(DataTransferProtocol.OP_STATUS_ERROR_ACCESS_TOKEN);
        out.flush();
        throw new IOException("Access token verification failed, for client "
            + remoteAddress + " for OP_READ_BLOCK for block " + block);
      } finally {
        IOUtils.closeStream(out);
      }
    }
  }
  // send the block
  BlockSender blockSender = null;
  // The client-trace line is only formatted when a client name was sent and
  // the trace log is enabled; otherwise a plain "served" message is used.
  final String clientTraceFmt =
    clientName.length() > 0 && ClientTraceLog.isInfoEnabled() ?
      String.format(DN_CLIENTTRACE_FORMAT, localAddress, remoteAddress,
          "%d", "HDFS_READ", clientName, "%d",
          datanode.dnRegistration.getStorageID(), block, "%d")
      :
      datanode.dnRegistration + " Served block " + block + " to " +
          s.getInetAddress();
  updateThreadName("sending block " + block);
  try {
    try {
      blockSender = new BlockSender(block, startOffset, length,
          true, true, false, datanode, clientTraceFmt);
    } catch(IOException e) {
      // Could not open the block (missing/corrupt): report an error status
      // back to the client before failing the op.
      sendResponse(s, (short)DataTransferProtocol.OP_STATUS_ERROR,
          datanode.socketWriteTimeout);
      throw e;
    }

    out.writeShort(DataTransferProtocol.OP_STATUS_SUCCESS); // send op status
    long read = blockSender.sendBlock(out, baseStream, null); // send data

    if (blockSender.didSendEntireByteRange()) {
      // If client verification succeeded, and if it's for the whole block,
      // tell the DataBlockScanner that it's good. This is an optional response
      // from client. If absent, we close the connection (which is what we
      // always do anyways).
      try {
        short status = in.readShort();
        if (status == DataTransferProtocol.OP_STATUS_CHECKSUM_OK) {
          if (blockSender.isBlockReadFully() && datanode.blockScanner != null) {
            datanode.blockScanner.verifiedByClient(block);
          }
        }
      } catch (IOException ioe) {
        LOG.debug("Error reading client status response. Will close connection.", ioe);
        IOUtils.closeStream(out);
      }
    } else {
      LOG.info("didnt send entire byte range, closing");
      IOUtils.closeStream(out);
    }
    datanode.myMetrics.bytesRead.inc((int) read);
    datanode.myMetrics.blocksRead.inc();
  } catch ( SocketException ignored ) {
    // Its ok for remote side to close the connection anytime.
    datanode.myMetrics.blocksRead.inc();
    IOUtils.closeStream(out);
  } catch ( IOException ioe ) {
    /* What exactly should we do here?
     * Earlier version shutdown() datanode if there is disk error.
     */
    LOG.warn(datanode.dnRegistration + ":Got exception while serving " +
        block + " to " + s.getInetAddress() + ":\n" +
        StringUtils.stringifyException(ioe) );
    throw ioe;
  } finally {
    IOUtils.closeStream(blockSender);
  }
}

/**
 * Write a block to disk.
 *
 * Receives a block from the upstream node/client and, when the pipeline has
 * further targets, mirrors every packet to the next datanode.  Acks flow back
 * upstream through {@code replyOut}.
 *
 * @param in The stream to read from
 * @throws IOException
 */
private void writeBlock(DataInputStream in) throws IOException {
  DatanodeInfo srcDataNode = null;
  LOG.debug("writeBlock receive buf size " + s.getReceiveBufferSize() +
            " tcp no delay " + s.getTcpNoDelay());
  //
  // Read in the header
  //
  Block block = new Block(in.readLong(),
      dataXceiverServer.estimateBlockSize, in.readLong());
  LOG.info("Receiving block " + block +
           " src: " + remoteAddress +
           " dest: " + localAddress);
  int pipelineSize = in.readInt(); // num of datanodes in entire pipeline
  boolean isRecovery = in.readBoolean(); // is this part of recovery?
  String client = Text.readString(in); // working on behalf of this client
  boolean hasSrcDataNode = in.readBoolean(); // is src node info present
  if (hasSrcDataNode) {
    srcDataNode = new DatanodeInfo();
    srcDataNode.readFields(in);
  }
  int numTargets = in.readInt();
  if (numTargets < 0) {
    throw new IOException("Mislabelled incoming datastream.");
  }
  DatanodeInfo targets[] = new DatanodeInfo[numTargets];
  for (int i = 0; i < targets.length; i++) {
    DatanodeInfo tmp = new DatanodeInfo();
    tmp.readFields(in);
    targets[i] = tmp;
  }
  Token<BlockTokenIdentifier> accessToken = new Token<BlockTokenIdentifier>();
  accessToken.readFields(in);
  DataOutputStream replyOut = null;   // stream to prev target
  replyOut = new DataOutputStream(new BufferedOutputStream(
      NetUtils.getOutputStream(s, datanode.socketWriteTimeout)));
  if (datanode.isBlockTokenEnabled) {
    try {
      datanode.blockTokenSecretManager.checkAccess(accessToken, null, block,
          BlockTokenSecretManager.AccessMode.WRITE);
    } catch (InvalidToken e) {
      try {
        // Only client writes get an in-band error status; replication
        // requests (empty client name) just fail with the IOException below.
        if (client.length() != 0) {
          replyOut.writeShort((short)DataTransferProtocol.OP_STATUS_ERROR_ACCESS_TOKEN);
          Text.writeString(replyOut, datanode.dnRegistration.getName());
          replyOut.flush();
        }
        throw new IOException("Access token verification failed, for client "
            + remoteAddress + " for OP_WRITE_BLOCK for block " + block);
      } finally {
        IOUtils.closeStream(replyOut);
      }
    }
  }

  DataOutputStream mirrorOut = null;  // stream to next target
  DataInputStream mirrorIn = null;    // reply from next target
  Socket mirrorSock = null;           // socket to next target
  BlockReceiver blockReceiver = null; // responsible for data handling
  String mirrorNode = null;           // the name:port of next target
  String firstBadLink = "";           // first datanode that failed in connection setup
  updateThreadName("receiving block " + block + " client=" + client);
  short mirrorInStatus = (short)DataTransferProtocol.OP_STATUS_SUCCESS;
  try {
    // open a block receiver and check if the block does not exist
    blockReceiver = new BlockReceiver(block, in,
        s.getRemoteSocketAddress().toString(),
        s.getLocalSocketAddress().toString(),
        isRecovery, client, srcDataNode, datanode);

    //
    // Open network conn to backup machine, if
    // appropriate
    //
    if (targets.length > 0) {
      InetSocketAddress mirrorTarget = null;
      // Connect to backup machine
      mirrorNode = targets[0].getName();
      mirrorTarget = NetUtils.createSocketAddr(mirrorNode);
      mirrorSock = datanode.newSocket();
      try {
        // Timeouts grow with the remaining pipeline depth so downstream
        // failures surface before upstream sockets give up.
        int timeoutValue = datanode.socketTimeout +
            (HdfsConstants.READ_TIMEOUT_EXTENSION * numTargets);
        int writeTimeout = datanode.socketWriteTimeout +
            (HdfsConstants.WRITE_TIMEOUT_EXTENSION * numTargets);
        NetUtils.connect(mirrorSock, mirrorTarget, timeoutValue);
        mirrorSock.setSoTimeout(timeoutValue);
        mirrorSock.setSendBufferSize(DEFAULT_DATA_SOCKET_SIZE);
        mirrorOut = new DataOutputStream(
            new BufferedOutputStream(
                NetUtils.getOutputStream(mirrorSock, writeTimeout),
                SMALL_BUFFER_SIZE));
        mirrorIn = new DataInputStream(NetUtils.getInputStream(mirrorSock));

        // Write header: Copied from DFSClient.java!
        mirrorOut.writeShort( DataTransferProtocol.DATA_TRANSFER_VERSION );
        mirrorOut.write( DataTransferProtocol.OP_WRITE_BLOCK );
        mirrorOut.writeLong( block.getBlockId() );
        mirrorOut.writeLong( block.getGenerationStamp() );
        mirrorOut.writeInt( pipelineSize );
        mirrorOut.writeBoolean( isRecovery );
        Text.writeString( mirrorOut, client );
        mirrorOut.writeBoolean(hasSrcDataNode);
        if (hasSrcDataNode) { // pass src node information
          srcDataNode.write(mirrorOut);
        }
        // Remaining targets (this node consumes targets[0]).
        mirrorOut.writeInt( targets.length - 1 );
        for ( int i = 1; i < targets.length; i++ ) {
          targets[i].write( mirrorOut );
        }
        accessToken.write(mirrorOut);

        blockReceiver.writeChecksumHeader(mirrorOut);
        mirrorOut.flush();

        // read connect ack (only for clients, not for replication req)
        if (client.length() != 0) {
          mirrorInStatus = mirrorIn.readShort();
          firstBadLink = Text.readString(mirrorIn);
          if (LOG.isDebugEnabled() ||
              mirrorInStatus != DataTransferProtocol.OP_STATUS_SUCCESS) {
            LOG.info("Datanode " + targets.length +
                     " got response for connect ack " +
                     " from downstream datanode with firstbadlink as " +
                     firstBadLink);
          }
        }

      } catch (IOException e) {
        // Mirror setup failed: tell the client which node broke, tear down
        // the mirror streams, and for replication requests keep writing
        // locally without the mirror.
        if (client.length() != 0) {
          replyOut.writeShort((short)DataTransferProtocol.OP_STATUS_ERROR);
          Text.writeString(replyOut, mirrorNode);
          replyOut.flush();
        }
        IOUtils.closeStream(mirrorOut);
        mirrorOut = null;
        IOUtils.closeStream(mirrorIn);
        mirrorIn = null;
        IOUtils.closeSocket(mirrorSock);
        mirrorSock = null;
        if (client.length() > 0) {
          throw e;
        } else {
          LOG.info(datanode.dnRegistration + ":Exception transfering block " +
                   block + " to mirror " + mirrorNode +
                   ". continuing without the mirror.\n" +
                   StringUtils.stringifyException(e));
        }
      }
    }

    // send connect ack back to source (only for clients)
    if (client.length() != 0) {
      if (LOG.isDebugEnabled() ||
          mirrorInStatus != DataTransferProtocol.OP_STATUS_SUCCESS) {
        LOG.info("Datanode " + targets.length +
                 " forwarding connect ack to upstream firstbadlink is " +
                 firstBadLink);
      }
      replyOut.writeShort(mirrorInStatus);
      Text.writeString(replyOut, firstBadLink);
      replyOut.flush();
    }

    // receive the block and mirror to the next target
    String mirrorAddr = (mirrorSock == null) ? null : mirrorNode;
    blockReceiver.receiveBlock(mirrorOut, mirrorIn, replyOut,
        mirrorAddr, null, targets.length);

    // if this write is for a replication request (and not
    // from a client), then confirm block. For client-writes,
    // the block is finalized in the PacketResponder.
    if (client.length() == 0) {
      datanode.notifyNamenodeReceivedBlock(block, DataNode.EMPTY_DEL_HINT);
      LOG.info("Received block " + block +
               " src: " + remoteAddress +
               " dest: " + localAddress +
               " of size " + block.getNumBytes());
    }

    if (datanode.blockScanner != null) {
      datanode.blockScanner.addBlock(block);
    }

  } catch (IOException ioe) {
    LOG.info("writeBlock " + block + " received exception " + ioe);
    throw ioe;
  } finally {
    // close all opened streams
    IOUtils.closeStream(mirrorOut);
    IOUtils.closeStream(mirrorIn);
    IOUtils.closeStream(replyOut);
    IOUtils.closeSocket(mirrorSock);
    IOUtils.closeStream(blockReceiver);
  }
}

/**
 * Get block checksum (MD5 of CRC32).
* @param in */ void getBlockChecksum(DataInputStream in) throws IOException { final Block block = new Block(in.readLong(), 0 , in.readLong()); Token<BlockTokenIdentifier> accessToken = new Token<BlockTokenIdentifier>(); accessToken.readFields(in); DataOutputStream out = new DataOutputStream(NetUtils.getOutputStream(s, datanode.socketWriteTimeout)); if (datanode.isBlockTokenEnabled) { try { datanode.blockTokenSecretManager.checkAccess(accessToken, null, block, BlockTokenSecretManager.AccessMode.READ); } catch (InvalidToken e) { try { out.writeShort(DataTransferProtocol.OP_STATUS_ERROR_ACCESS_TOKEN); out.flush(); throw new IOException( "Access token verification failed, for client " + remoteAddress + " for OP_BLOCK_CHECKSUM for block " + block); } finally { IOUtils.closeStream(out); } } } final MetaDataInputStream metadataIn = datanode.data.getMetaDataInputStream(block); final DataInputStream checksumIn = new DataInputStream(new BufferedInputStream( metadataIn, BUFFER_SIZE)); updateThreadName("getting checksum for block " + block); try { //read metadata file final BlockMetadataHeader header = BlockMetadataHeader.readHeader(checksumIn); final DataChecksum checksum = header.getChecksum(); final int bytesPerCRC = checksum.getBytesPerChecksum(); final long crcPerBlock = (metadataIn.getLength() - BlockMetadataHeader.getHeaderSize())/checksum.getChecksumSize(); //compute block checksum final MD5Hash md5 = MD5Hash.digest(checksumIn); if (LOG.isDebugEnabled()) { LOG.debug("block=" + block + ", bytesPerCRC=" + bytesPerCRC + ", crcPerBlock=" + crcPerBlock + ", md5=" + md5); } //write reply out.writeShort(DataTransferProtocol.OP_STATUS_SUCCESS); out.writeInt(bytesPerCRC); out.writeLong(crcPerBlock); md5.write(out); out.flush(); } finally { IOUtils.closeStream(out); IOUtils.closeStream(checksumIn); IOUtils.closeStream(metadataIn); } } /** * Read a block from the disk and then sends it to a destination. 
 *
 * Used by the balancer: streams the local block to the requesting node,
 * rate-limited by the shared balance throttler.
 *
 * @param in The stream to read from
 * @throws IOException
 */
private void copyBlock(DataInputStream in) throws IOException {
  // Read in the header
  long blockId = in.readLong(); // read block id
  Block block = new Block(blockId, 0, in.readLong());
  Token<BlockTokenIdentifier> accessToken = new Token<BlockTokenIdentifier>();
  accessToken.readFields(in);
  if (datanode.isBlockTokenEnabled) {
    try {
      datanode.blockTokenSecretManager.checkAccess(accessToken, null, block,
          BlockTokenSecretManager.AccessMode.COPY);
    } catch (InvalidToken e) {
      LOG.warn("Invalid access token in request from "
          + remoteAddress + " for OP_COPY_BLOCK for block " + block);
      sendResponse(s,
          (short) DataTransferProtocol.OP_STATUS_ERROR_ACCESS_TOKEN,
          datanode.socketWriteTimeout);
      return;
    }
  }

  // Balancer copies are throttled; refuse when the thread quota is used up.
  if (!dataXceiverServer.balanceThrottler.acquire()) { // not able to start
    LOG.info("Not able to copy block " + blockId + " to "
        + s.getRemoteSocketAddress() + " because threads quota is exceeded.");
    sendResponse(s, (short)DataTransferProtocol.OP_STATUS_ERROR,
        datanode.socketWriteTimeout);
    return;
  }

  BlockSender blockSender = null;
  DataOutputStream reply = null;
  boolean isOpSuccess = true;
  updateThreadName("Copying block " + block);
  try {
    // check if the block exists or not
    blockSender = new BlockSender(block, 0, -1, false, false, false,
        datanode);

    // set up response stream
    OutputStream baseStream = NetUtils.getOutputStream(
        s, datanode.socketWriteTimeout);
    reply = new DataOutputStream(new BufferedOutputStream(
        baseStream, SMALL_BUFFER_SIZE));

    // send status first
    reply.writeShort((short)DataTransferProtocol.OP_STATUS_SUCCESS);
    // send block content to the target
    long read = blockSender.sendBlock(reply, baseStream,
                                      dataXceiverServer.balanceThrottler);

    datanode.myMetrics.bytesRead.inc((int) read);
    datanode.myMetrics.blocksRead.inc();

    LOG.info("Copied block " + block + " to " + s.getRemoteSocketAddress());
  } catch (IOException ioe) {
    isOpSuccess = false;
    throw ioe;
  } finally {
    dataXceiverServer.balanceThrottler.release();
    if (isOpSuccess) {
      try {
        // send one last byte to indicate that the resource is cleaned.
        // replaceBlock() on the receiver waits for this marker.
        reply.writeChar('d');
      } catch (IOException ignored) {
      }
    }
    IOUtils.closeStream(reply);
    IOUtils.closeStream(blockSender);
  }
}

/**
 * Receive a block and write it to disk, it then notifies the namenode to
 * remove the copy from the source.
 *
 * Used by the balancer: pulls the block from a proxy datanode via
 * OP_COPY_BLOCK, stores it locally, then reports it received with the
 * source node as the deletion hint.
 *
 * @param in The stream to read from
 * @throws IOException
 */
private void replaceBlock(DataInputStream in) throws IOException {
  /* read header */
  long blockId = in.readLong();
  Block block = new Block(blockId, dataXceiverServer.estimateBlockSize,
      in.readLong()); // block id & generation stamp
  String sourceID = Text.readString(in); // read del hint
  DatanodeInfo proxySource = new DatanodeInfo(); // read proxy source
  proxySource.readFields(in);
  Token<BlockTokenIdentifier> accessToken = new Token<BlockTokenIdentifier>();
  accessToken.readFields(in);
  if (datanode.isBlockTokenEnabled) {
    try {
      datanode.blockTokenSecretManager.checkAccess(accessToken, null, block,
          BlockTokenSecretManager.AccessMode.REPLACE);
    } catch (InvalidToken e) {
      LOG.warn("Invalid access token in request from "
          + remoteAddress + " for OP_REPLACE_BLOCK for block " + block);
      sendResponse(s,
          (short)DataTransferProtocol.OP_STATUS_ERROR_ACCESS_TOKEN,
          datanode.socketWriteTimeout);
      return;
    }
  }

  if (!dataXceiverServer.balanceThrottler.acquire()) { // not able to start
    LOG.warn("Not able to receive block " + blockId + " from "
        + s.getRemoteSocketAddress() + " because threads quota is exceeded.");
    sendResponse(s, (short)DataTransferProtocol.OP_STATUS_ERROR,
        datanode.socketWriteTimeout);
    return;
  }

  Socket proxySock = null;
  DataOutputStream proxyOut = null;
  short opStatus = DataTransferProtocol.OP_STATUS_SUCCESS;
  BlockReceiver blockReceiver = null;
  DataInputStream proxyReply = null;
  updateThreadName("replacing block " + block + " from " + sourceID);
  try {
    // get the output stream to the proxy
    InetSocketAddress proxyAddr = NetUtils.createSocketAddr(
        proxySource.getName());
    proxySock = datanode.newSocket();
    NetUtils.connect(proxySock, proxyAddr, datanode.socketTimeout);
    proxySock.setSoTimeout(datanode.socketTimeout);

    OutputStream baseStream = NetUtils.getOutputStream(proxySock,
        datanode.socketWriteTimeout);
    proxyOut = new DataOutputStream(
        new BufferedOutputStream(baseStream, SMALL_BUFFER_SIZE));

    /* send request to the proxy */
    proxyOut.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION); // transfer version
    proxyOut.writeByte(DataTransferProtocol.OP_COPY_BLOCK); // op code
    proxyOut.writeLong(block.getBlockId()); // block id
    proxyOut.writeLong(block.getGenerationStamp()); // block id
    accessToken.write(proxyOut);
    proxyOut.flush();

    // receive the response from the proxy
    proxyReply = new DataInputStream(new BufferedInputStream(
        NetUtils.getInputStream(proxySock), BUFFER_SIZE));
    short status = proxyReply.readShort();
    if (status != DataTransferProtocol.OP_STATUS_SUCCESS) {
      if (status == DataTransferProtocol.OP_STATUS_ERROR_ACCESS_TOKEN) {
        throw new IOException("Copy block " + block + " from "
            + proxySock.getRemoteSocketAddress()
            + " failed due to access token error");
      }
      throw new IOException("Copy block " + block + " from "
          + proxySock.getRemoteSocketAddress() + " failed");
    }
    // open a block receiver and check if the block does not exist
    blockReceiver = new BlockReceiver(
        block, proxyReply, proxySock.getRemoteSocketAddress().toString(),
        proxySock.getLocalSocketAddress().toString(),
        false, "", null, datanode);

    // receive a block
    blockReceiver.receiveBlock(null, null, null, null,
        dataXceiverServer.balanceThrottler, -1);

    // notify name node
    datanode.notifyNamenodeReceivedBlock(block, sourceID);

    LOG.info("Moved block " + block +
        " from " + s.getRemoteSocketAddress());

  } catch (IOException ioe) {
    opStatus = DataTransferProtocol.OP_STATUS_ERROR;
    throw ioe;
  } finally {
    // receive the last byte that indicates the proxy released its thread resource
    if (opStatus == DataTransferProtocol.OP_STATUS_SUCCESS) {
      try {
        proxyReply.readChar();
      } catch (IOException ignored) {
      }
    }

    // now release the thread resource
    dataXceiverServer.balanceThrottler.release();

    // send response back
    try {
      sendResponse(s, opStatus, datanode.socketWriteTimeout);
    } catch (IOException ioe) {
      LOG.warn("Error writing reply back to " + s.getRemoteSocketAddress());
    }
    IOUtils.closeStream(proxyOut);
    IOUtils.closeStream(blockReceiver);
    IOUtils.closeStream(proxyReply);
  }
}

/**
 * Utility function for sending a response.
 * @param s socket to write to
 * @param opStatus status message to write
 * @param timeout send timeout
 **/
private void sendResponse(Socket s, short opStatus,
    long timeout) throws IOException {
  DataOutputStream reply =
    new DataOutputStream(NetUtils.getOutputStream(s, timeout));
  reply.writeShort(opStatus);
  reply.flush();
}
}
package pl.droidsonroids.gif; import android.content.ContentResolver; import android.content.res.AssetFileDescriptor; import android.content.res.AssetManager; import android.content.res.Resources; import android.net.Uri; import java.io.File; import java.io.FileDescriptor; import java.io.IOException; import java.io.InputStream; import java.io.Serializable; import java.nio.ByteBuffer; import java.util.Locale; /** * Lightweight version of {@link GifDrawable} used to retrieve metadata of GIF only, * without having to allocate the memory for its pixels. */ public class GifAnimationMetaData implements Serializable { private static final long serialVersionUID = 6518019337497570800L; //[w,h,imageCount,loopCount,duration] private final int[] mMetaData = new int[5]; /** * Retrieves from resource. * * @param res Resources to read from * @param id resource id * @throws android.content.res.Resources.NotFoundException if the given ID does not exist. * @throws java.io.IOException when opening failed * @throws NullPointerException if res is null */ public GifAnimationMetaData(Resources res, int id) throws Resources.NotFoundException, IOException { this(res.openRawResourceFd(id)); } /** * Retrieves metadata from asset. * * @param assets AssetManager to read from * @param assetName name of the asset * @throws java.io.IOException when opening failed * @throws NullPointerException if assets or assetName is null */ public GifAnimationMetaData(AssetManager assets, String assetName) throws IOException { this(assets.openFd(assetName)); } /** * Constructs metadata from given file path.<br> * Only metadata is read, no graphic data is decoded here. * In practice can be called from main thread. 
However it will violate * {@link android.os.StrictMode} policy if disk reads detection is enabled.<br> * * @param filePath path to the GIF file * @throws java.io.IOException when opening failed * @throws NullPointerException if filePath is null */ public GifAnimationMetaData(String filePath) throws IOException { if (filePath == null) throw new NullPointerException("Source is null"); init(GifDrawable.openFile(mMetaData, filePath, true)); } /** * Equivalent to {@code} GifMetadata(file.getPath())} * * @param file the GIF file * @throws java.io.IOException when opening failed * @throws NullPointerException if file is null */ public GifAnimationMetaData(File file) throws IOException { if (file == null) throw new NullPointerException("Source is null"); init(GifDrawable.openFile(mMetaData, file.getPath(), true)); } /** * Retrieves metadata from InputStream. * InputStream must support marking, IllegalArgumentException will be thrown otherwise. * * @param stream stream to read from * @throws java.io.IOException when opening failed * @throws IllegalArgumentException if stream does not support marking * @throws NullPointerException if stream is null */ public GifAnimationMetaData(InputStream stream) throws IOException { if (stream == null) throw new NullPointerException("Source is null"); if (!stream.markSupported()) throw new IllegalArgumentException("InputStream does not support marking"); init(GifDrawable.openStream(mMetaData, stream, true)); } /** * Retrieves metadata from AssetFileDescriptor. 
* Convenience wrapper for {@link pl.droidsonroids.gif.GifAnimationMetaData#GifAnimationMetaData(java.io.FileDescriptor)} * * @param afd source * @throws NullPointerException if afd is null * @throws java.io.IOException when opening failed */ public GifAnimationMetaData(AssetFileDescriptor afd) throws IOException { if (afd == null) throw new NullPointerException("Source is null"); FileDescriptor fd = afd.getFileDescriptor(); try { init(GifDrawable.openFd(mMetaData, fd, afd.getStartOffset(), true)); } catch (IOException ex) { afd.close(); throw ex; } } /** * Retrieves metadata from FileDescriptor * * @param fd source * @throws java.io.IOException when opening failed * @throws NullPointerException if fd is null */ public GifAnimationMetaData(FileDescriptor fd) throws IOException { if (fd == null) throw new NullPointerException("Source is null"); init(GifDrawable.openFd(mMetaData, fd, 0, true)); } /** * Retrieves metadata from byte array.<br> * It can be larger than size of the GIF data. Bytes beyond GIF terminator are not accessed. * * @param bytes raw GIF bytes * @throws java.io.IOException if bytes does not contain valid GIF data * @throws NullPointerException if bytes are null */ public GifAnimationMetaData(byte[] bytes) throws IOException { if (bytes == null) throw new NullPointerException("Source is null"); init(GifDrawable.openByteArray(mMetaData, bytes, true)); } /** * Retrieves metadata from {@link java.nio.ByteBuffer}. Only direct buffers are supported. * Buffer can be larger than size of the GIF data. Bytes beyond GIF terminator are not accessed. 
* * @param buffer buffer containing GIF data * @throws java.io.IOException if buffer does not contain valid GIF data * @throws IllegalArgumentException if buffer is indirect * @throws NullPointerException if buffer is null */ public GifAnimationMetaData(ByteBuffer buffer) throws IOException { if (buffer == null) throw new NullPointerException("Source is null"); if (!buffer.isDirect()) throw new IllegalArgumentException("ByteBuffer is not direct"); init(GifDrawable.openDirectByteBuffer(mMetaData, buffer, true)); } /** * Retrieves metadata from {@link android.net.Uri} which is resolved using {@code resolver}. * {@link android.content.ContentResolver#openAssetFileDescriptor(android.net.Uri, String)} * is used to open an Uri. * * @param uri GIF Uri, cannot be null. * @param resolver resolver, cannot be null. * @throws java.io.IOException if resolution fails or destination is not a GIF. */ public GifAnimationMetaData(ContentResolver resolver, Uri uri) throws IOException { this(resolver.openAssetFileDescriptor(uri, "r")); } private void init(final int gifInfoPtr) { mMetaData[3] = GifDrawable.getLoopCount(gifInfoPtr); mMetaData[4] = GifDrawable.getDuration(gifInfoPtr); GifDrawable.free(gifInfoPtr); } /** * @return width od the GIF canvas in pixels */ public int getWidth() { return mMetaData[0]; } /** * @return height od the GIF canvas in pixels */ public int getHeight() { return mMetaData[1]; } /** * @return number of frames in GIF, at least one */ public int getNumberOfFrames() { return mMetaData[2]; } /** * Returns loop count previously read from GIF's application extension block. * Defaults to 0 (infinite loop) if there is no such extension. * * @return loop count, 0 means that animation is infinite */ public int getLoopCount() { return mMetaData[3]; } /** * Retrieves duration of one loop of the animation. * If there is no data (no Graphics Control Extension blocks) 0 is returned. 
* Note that one-frame GIFs can have non-zero duration defined in Graphics Control Extension block, * use {@link #getNumberOfFrames()} to determine if there is one or more frames. * * @return duration of of one loop the animation in milliseconds. Result is always multiple of 10. */ public int getDuration() { return mMetaData[4]; } /** * @return true if GIF is animated (has at least 2 frames and positive duration), false otherwise */ public boolean isAnimated() { return mMetaData[2]>1 && mMetaData[4]>0; } @Override public String toString() { String loopCount=mMetaData[3] == 0 ? "Infinity" : Integer.toString(mMetaData[3]); String suffix= String.format(Locale.US, "GIF: size: %dx%d, frames: %d, loops: %s, duration: %d", mMetaData[0], mMetaData[1], mMetaData[2], loopCount, mMetaData[4]); return isAnimated() ? "Animated " + suffix : suffix; } }
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 */

package com.microsoft.azure.management.network.v2019_09_01.implementation;

import com.microsoft.azure.arm.resources.models.implementation.GroupableResourceCoreImpl;
import com.microsoft.azure.management.network.v2019_09_01.VirtualNetworkGateway;
import rx.Observable;
import java.util.List;
import com.microsoft.azure.management.network.v2019_09_01.VirtualNetworkGatewayIPConfiguration;
import com.microsoft.azure.management.network.v2019_09_01.VirtualNetworkGatewayType;
import com.microsoft.azure.management.network.v2019_09_01.VpnType;
import com.microsoft.azure.management.network.v2019_09_01.VpnGatewayGeneration;
import com.microsoft.azure.SubResource;
import com.microsoft.azure.management.network.v2019_09_01.VirtualNetworkGatewaySku;
import com.microsoft.azure.management.network.v2019_09_01.VpnClientConfiguration;
import com.microsoft.azure.management.network.v2019_09_01.BgpSettings;
import com.microsoft.azure.management.network.v2019_09_01.AddressSpace;
import com.microsoft.azure.management.network.v2019_09_01.ProvisioningState;

/**
 * Fluent implementation of {@link VirtualNetworkGateway}: a thin wrapper that
 * delegates every getter/setter to the wrapped {@link VirtualNetworkGatewayInner}
 * model and routes create/update/refresh through the generated
 * {@code VirtualNetworkGatewaysInner} client.
 *
 * NOTE: this file is generated by AutoRest — do not hand-edit logic here;
 * regenerate instead.
 */
class VirtualNetworkGatewayImpl
    extends GroupableResourceCoreImpl<VirtualNetworkGateway, VirtualNetworkGatewayInner, VirtualNetworkGatewayImpl, NetworkManager>
    implements VirtualNetworkGateway, VirtualNetworkGateway.Definition, VirtualNetworkGateway.Update {

    VirtualNetworkGatewayImpl(String name, VirtualNetworkGatewayInner inner, NetworkManager manager) {
        super(name, inner, manager);
    }

    // Create and update both map to PUT (createOrUpdate) on the service.
    @Override
    public Observable<VirtualNetworkGateway> createResourceAsync() {
        VirtualNetworkGatewaysInner client = this.manager().inner().virtualNetworkGateways();
        return client.createOrUpdateAsync(this.resourceGroupName(), this.name(), this.inner())
            .map(innerToFluentMap(this));
    }

    @Override
    public Observable<VirtualNetworkGateway> updateResourceAsync() {
        VirtualNetworkGatewaysInner client = this.manager().inner().virtualNetworkGateways();
        return client.createOrUpdateAsync(this.resourceGroupName(), this.name(), this.inner())
            .map(innerToFluentMap(this));
    }

    // Refresh path: re-fetch the inner model by resource group + name.
    @Override
    protected Observable<VirtualNetworkGatewayInner> getInnerAsync() {
        VirtualNetworkGatewaysInner client = this.manager().inner().virtualNetworkGateways();
        return client.getByResourceGroupAsync(this.resourceGroupName(), this.name());
    }

    // A resource without an id has not been created on the service yet.
    @Override
    public boolean isInCreateMode() {
        return this.inner().id() == null;
    }

    // ----- read-only views over the inner model -----

    @Override
    public Boolean activeActive() {
        return this.inner().activeActive();
    }

    @Override
    public BgpSettings bgpSettings() {
        return this.inner().bgpSettings();
    }

    @Override
    public AddressSpace customRoutes() {
        return this.inner().customRoutes();
    }

    @Override
    public Boolean enableBgp() {
        return this.inner().enableBgp();
    }

    @Override
    public Boolean enableDnsForwarding() {
        return this.inner().enableDnsForwarding();
    }

    @Override
    public String etag() {
        return this.inner().etag();
    }

    @Override
    public SubResource gatewayDefaultSite() {
        return this.inner().gatewayDefaultSite();
    }

    @Override
    public VirtualNetworkGatewayType gatewayType() {
        return this.inner().gatewayType();
    }

    @Override
    public String inboundDnsForwardingEndpoint() {
        return this.inner().inboundDnsForwardingEndpoint();
    }

    @Override
    public List<VirtualNetworkGatewayIPConfiguration> ipConfigurations() {
        return this.inner().ipConfigurations();
    }

    @Override
    public ProvisioningState provisioningState() {
        return this.inner().provisioningState();
    }

    @Override
    public String resourceGuid() {
        return this.inner().resourceGuid();
    }

    @Override
    public VirtualNetworkGatewaySku sku() {
        return this.inner().sku();
    }

    @Override
    public VpnClientConfiguration vpnClientConfiguration() {
        return this.inner().vpnClientConfiguration();
    }

    @Override
    public VpnGatewayGeneration vpnGatewayGeneration() {
        return this.inner().vpnGatewayGeneration();
    }

    @Override
    public VpnType vpnType() {
        return this.inner().vpnType();
    }

    // ----- fluent setters: mutate the inner model and return this -----

    @Override
    public VirtualNetworkGatewayImpl withActiveActive(Boolean activeActive) {
        this.inner().withActiveActive(activeActive);
        return this;
    }

    @Override
    public VirtualNetworkGatewayImpl withBgpSettings(BgpSettings bgpSettings) {
        this.inner().withBgpSettings(bgpSettings);
        return this;
    }

    @Override
    public VirtualNetworkGatewayImpl withCustomRoutes(AddressSpace customRoutes) {
        this.inner().withCustomRoutes(customRoutes);
        return this;
    }

    @Override
    public VirtualNetworkGatewayImpl withEnableBgp(Boolean enableBgp) {
        this.inner().withEnableBgp(enableBgp);
        return this;
    }

    @Override
    public VirtualNetworkGatewayImpl withEnableDnsForwarding(Boolean enableDnsForwarding) {
        this.inner().withEnableDnsForwarding(enableDnsForwarding);
        return this;
    }

    @Override
    public VirtualNetworkGatewayImpl withGatewayDefaultSite(SubResource gatewayDefaultSite) {
        this.inner().withGatewayDefaultSite(gatewayDefaultSite);
        return this;
    }

    @Override
    public VirtualNetworkGatewayImpl withGatewayType(VirtualNetworkGatewayType gatewayType) {
        this.inner().withGatewayType(gatewayType);
        return this;
    }

    @Override
    public VirtualNetworkGatewayImpl withIpConfigurations(List<VirtualNetworkGatewayIPConfiguration> ipConfigurations) {
        this.inner().withIpConfigurations(ipConfigurations);
        return this;
    }

    @Override
    public VirtualNetworkGatewayImpl withSku(VirtualNetworkGatewaySku sku) {
        this.inner().withSku(sku);
        return this;
    }

    @Override
    public VirtualNetworkGatewayImpl withVpnClientConfiguration(VpnClientConfiguration vpnClientConfiguration) {
        this.inner().withVpnClientConfiguration(vpnClientConfiguration);
        return this;
    }

    @Override
    public VirtualNetworkGatewayImpl withVpnGatewayGeneration(VpnGatewayGeneration vpnGatewayGeneration) {
        this.inner().withVpnGatewayGeneration(vpnGatewayGeneration);
        return this;
    }

    @Override
    public VirtualNetworkGatewayImpl withVpnType(VpnType vpnType) {
        this.inner().withVpnType(vpnType);
        return this;
    }

}
/* * Copyright 2014-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.rules; import com.facebook.buck.model.BuildFileTree; import com.facebook.buck.model.BuildTarget; import com.facebook.buck.model.BuildTargetPattern; import com.facebook.buck.model.BuildTargets; import com.facebook.buck.model.Flavor; import com.facebook.buck.model.HasBuildTarget; import com.facebook.buck.parser.NoSuchBuildTargetException; import com.facebook.buck.rules.coercer.TypeCoercerFactory; import com.facebook.buck.util.ExceptionWithHumanReadableMessage; import com.facebook.buck.util.HumanReadableException; import com.google.common.base.Function; import com.google.common.base.Optional; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Sets; import com.google.common.hash.HashCode; import java.lang.reflect.Field; import java.nio.file.Path; /** * A {@link TargetNode} represents a node in the target graph which is created by the * {@link com.facebook.buck.parser.Parser} as a result of parsing BUCK files in a project. It is * responsible for processing the raw (python) inputs of a build rule, and gathering any build * targets and paths referenced from those inputs. 
*/ public class TargetNode<T> implements Comparable<TargetNode<?>>, HasBuildTarget { private final HashCode rawInputsHashCode; private final TypeCoercerFactory typeCoercerFactory; private final BuildRuleFactoryParams ruleFactoryParams; private final Function<Optional<String>, Path> cellRoots; private final Description<T> description; private final T constructorArg; private final ImmutableSet<Path> pathsReferenced; private final ImmutableSet<BuildTarget> declaredDeps; private final ImmutableSortedSet<BuildTarget> extraDeps; private final ImmutableSet<BuildTargetPattern> visibilityPatterns; @SuppressWarnings("unchecked") public TargetNode( HashCode rawInputsHashCode, Description<T> description, T constructorArg, TypeCoercerFactory typeCoercerFactory, BuildRuleFactoryParams params, ImmutableSet<BuildTarget> declaredDeps, ImmutableSet<BuildTargetPattern> visibilityPatterns, Function<Optional<String>, Path> cellRoots) throws NoSuchBuildTargetException, InvalidSourcePathInputException { this.rawInputsHashCode = rawInputsHashCode; this.description = description; this.constructorArg = constructorArg; this.typeCoercerFactory = typeCoercerFactory; this.ruleFactoryParams = params; this.cellRoots = cellRoots; final ImmutableSet.Builder<Path> paths = ImmutableSet.builder(); final ImmutableSortedSet.Builder<BuildTarget> extraDeps = ImmutableSortedSet.naturalOrder(); // Scan the input to find possible BuildTargets, necessary for loading dependent rules. 
T arg = description.createUnpopulatedConstructorArg(); for (Field field : arg.getClass().getFields()) { ParamInfo info = new ParamInfo(typeCoercerFactory, field); if (info.isDep() && info.isInput() && info.hasElementTypes(BuildTarget.class, SourcePath.class, Path.class)) { detectBuildTargetsAndPathsForConstructorArg(extraDeps, paths, info, constructorArg); } } if (description instanceof ImplicitDepsInferringDescription) { extraDeps .addAll( ((ImplicitDepsInferringDescription<T>) description) .findDepsForTargetFromConstructorArgs(params.target, cellRoots, constructorArg)); } if (description instanceof ImplicitInputsInferringDescription) { paths .addAll( ((ImplicitInputsInferringDescription<T>) description) .inferInputsFromConstructorArgs( params.target.getUnflavoredBuildTarget(), constructorArg)); } this.extraDeps = ImmutableSortedSet.copyOf(Sets.difference(extraDeps.build(), declaredDeps)); this.pathsReferenced = ruleFactoryParams.enforceBuckPackageBoundary() ? verifyPaths(paths.build()) : paths.build(); this.declaredDeps = declaredDeps; this.visibilityPatterns = visibilityPatterns; } /** * @return A hash of the raw input from the build file used to construct the node. 
*/ public HashCode getRawInputsHashCode() { return rawInputsHashCode; } public Description<T> getDescription() { return description; } public BuildRuleType getType() { return description.getBuildRuleType(); } public T getConstructorArg() { return constructorArg; } @Override public BuildTarget getBuildTarget() { return ruleFactoryParams.target; } public ImmutableSet<Path> getInputs() { return pathsReferenced; } public ImmutableSet<BuildTarget> getDeclaredDeps() { return declaredDeps; } public ImmutableSet<BuildTarget> getExtraDeps() { return extraDeps; } public ImmutableSet<BuildTarget> getDeps() { ImmutableSet.Builder<BuildTarget> builder = ImmutableSet.builder(); builder.addAll(getDeclaredDeps()); builder.addAll(getExtraDeps()); return builder.build(); } public BuildRuleFactoryParams getRuleFactoryParams() { return ruleFactoryParams; } /** * TODO(andrewjcg): It'd be nice to eventually move this implementation to an * `AbstractDescription` base class, so that the various types of descriptions * can install their own implementations. However, we'll probably want to move * most of what is now `BuildRuleParams` to `DescriptionParams` and set them up * while building the target graph. */ public boolean isVisibleTo(BuildTarget other) { return BuildTargets.isVisibleTo( getBuildTarget(), visibilityPatterns, other); } public void checkVisibility(BuildTarget other) { if (!isVisibleTo(other)) { throw new HumanReadableException( "%s depends on %s, which is not visible", other, getBuildTarget()); } } /** * Type safe checked cast of the constructor arg. 
*/ @SuppressWarnings("unchecked") public <U> Optional<TargetNode<U>> castArg(Class<U> cls) { if (cls.isInstance(constructorArg)) { return Optional.of((TargetNode<U>) this); } else { return Optional.absent(); } } private void detectBuildTargetsAndPathsForConstructorArg( final ImmutableSet.Builder<BuildTarget> depsBuilder, final ImmutableSet.Builder<Path> pathsBuilder, ParamInfo info, T constructorArg) throws NoSuchBuildTargetException { // We'll make no test for optionality here. Let's assume it's done elsewhere. try { info.traverse( new ParamInfo.Traversal() { @Override public void traverse(Object object) { if (object instanceof PathSourcePath) { pathsBuilder.add(((PathSourcePath) object).getRelativePath()); } else if (object instanceof BuildTargetSourcePath) { depsBuilder.add(((BuildTargetSourcePath) object).getTarget()); } else if (object instanceof Path) { pathsBuilder.add((Path) object); } else if (object instanceof BuildTarget) { depsBuilder.add((BuildTarget) object); } } }, constructorArg); } catch (RuntimeException e) { if (e.getCause() instanceof NoSuchBuildTargetException) { throw (NoSuchBuildTargetException) e.getCause(); } } } private ImmutableSet<Path> verifyPaths(ImmutableSet<Path> paths) throws InvalidSourcePathInputException { Path basePath = getBuildTarget().getBasePath(); BuildFileTree buildFileTree = ruleFactoryParams.getBuildFileTree(); for (Path path : paths) { if (!basePath.toString().isEmpty() && !path.startsWith(basePath)) { throw new InvalidSourcePathInputException( "'%s' in '%s' refers to a parent directory.", basePath.relativize(path), getBuildTarget()); } Optional<Path> ancestor = buildFileTree.getBasePathOfAncestorTarget(path); // It should not be possible for us to ever get an Optional.absent() for this because that // would require one of two conditions: // 1) The source path references parent directories, which we check for above. 
// 2) You don't have a build file above this file, which is impossible if it is referenced in // a build file *unless* you happen to be referencing something that is ignored. if (!ancestor.isPresent()) { throw new InvalidSourcePathInputException( "'%s' in '%s' crosses a buck package boundary. This is probably caused by " + "specifying one of the folders in '%s' in your .buckconfig under `project.ignore`.", path, getBuildTarget(), path); } if (!ancestor.get().equals(basePath)) { throw new InvalidSourcePathInputException( "'%s' in '%s' crosses a buck package boundary. This file is owned by '%s'. Find " + "the owning rule that references '%s', and use a reference to that rule instead " + "of referencing the desired file directly.", path, getBuildTarget(), ancestor.get(), path); } } return paths; } @Override public int compareTo(TargetNode<?> o) { return getBuildTarget().compareTo(o.getBuildTarget()); } @Override public final String toString() { return getBuildTarget().getFullyQualifiedName(); } /** * Return a copy of the current TargetNode, with the {@link Description} used for creating * {@link BuildRule} instances switched out. */ @SuppressWarnings({"rawtypes", "unchecked"}) public TargetNode<?> withDescription(Description<?> description) { try { return new TargetNode( rawInputsHashCode, description, constructorArg, typeCoercerFactory, ruleFactoryParams, declaredDeps, visibilityPatterns, cellRoots); } catch (InvalidSourcePathInputException | NoSuchBuildTargetException e) { // This is extremely unlikely to happen --- we've already created a TargetNode with these // values before. 
throw new RuntimeException(e); } } public TargetNode<T> withFlavors(Iterable<Flavor> flavors) { try { return new TargetNode<>( rawInputsHashCode, description, constructorArg, typeCoercerFactory, ruleFactoryParams.withFlavors(flavors), declaredDeps, visibilityPatterns, cellRoots); } catch (InvalidSourcePathInputException | NoSuchBuildTargetException e) { // This is extremely unlikely to happen --- we've already created a TargetNode with these // values before. throw new RuntimeException(e); } } public Function<Optional<String>, Path> getCellNames() { return cellRoots; } @SuppressWarnings("serial") public static class InvalidSourcePathInputException extends Exception implements ExceptionWithHumanReadableMessage{ private InvalidSourcePathInputException(String message, Object...objects) { super(String.format(message, objects)); } @Override public String getHumanReadableErrorMessage() { return getMessage(); } } }
/** * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE * file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file * to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at * <p/> * http://www.apache.org/licenses/LICENSE-2.0 * <p/> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package org.apache.kafka.clients.consumer.internals; import org.apache.kafka.clients.ClientResponse; import org.apache.kafka.common.KafkaException; import org.apache.kafka.common.MetricName; import org.apache.kafka.common.Node; import org.apache.kafka.common.errors.DisconnectException; import org.apache.kafka.common.errors.IllegalGenerationException; import org.apache.kafka.common.errors.RebalanceInProgressException; import org.apache.kafka.common.errors.UnknownMemberIdException; import org.apache.kafka.common.metrics.Measurable; import org.apache.kafka.common.metrics.MetricConfig; import org.apache.kafka.common.metrics.Metrics; import org.apache.kafka.common.metrics.Sensor; import org.apache.kafka.common.metrics.stats.Avg; import org.apache.kafka.common.metrics.stats.Count; import org.apache.kafka.common.metrics.stats.Max; import org.apache.kafka.common.metrics.stats.Rate; import org.apache.kafka.common.protocol.ApiKeys; import org.apache.kafka.common.protocol.Errors; import org.apache.kafka.common.requests.GroupMetadataRequest; import org.apache.kafka.common.requests.GroupMetadataResponse; import org.apache.kafka.common.requests.HeartbeatRequest; import 
org.apache.kafka.common.requests.HeartbeatResponse; import org.apache.kafka.common.requests.JoinGroupRequest; import org.apache.kafka.common.requests.JoinGroupResponse; import org.apache.kafka.common.requests.OffsetCommitRequest; import org.apache.kafka.common.requests.SyncGroupRequest; import org.apache.kafka.common.requests.SyncGroupResponse; import org.apache.kafka.common.utils.Time; import org.apache.kafka.common.utils.Utils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; /** * AbstractCoordinator implements group management for a single group member by interacting with * a designated Kafka broker (the coordinator). Group semantics are provided by extending this class. * See {@link ConsumerCoordinator} for example usage. * * From a high level, Kafka's group management protocol consists of the following sequence of actions: * * <ol> * <li>Group Registration: Group members register with the coordinator providing their own metadata * (such as the set of topics they are interested in).</li> * <li>Group/Leader Selection: The coordinator select the members of the group and chooses one member * as the leader.</li> * <li>State Assignment: The leader collects the metadata from all the members of the group and * assigns state.</li> * <li>Group Stabilization: Each member receives the state assigned by the leader and begins * processing.</li> * </ol> * * To leverage this protocol, an implementation must define the format of metadata provided by each * member for group registration in {@link #metadata()} and the format of the state assignment provided * by the leader in {@link #performAssignment(String, String, Map)} and becomes available to members in * {@link #onJoinComplete(int, String, String, ByteBuffer)}. 
* */ public abstract class AbstractCoordinator { private static final Logger log = LoggerFactory.getLogger(AbstractCoordinator.class); private final Heartbeat heartbeat; private final HeartbeatTask heartbeatTask; private final int sessionTimeoutMs; private final GroupCoordinatorMetrics sensors; protected final String groupId; protected final ConsumerNetworkClient client; protected final Time time; protected final long retryBackoffMs; protected final long requestTimeoutMs; private boolean needsJoinPrepare = true; private boolean rejoinNeeded = true; protected Node coordinator; protected String memberId; protected String protocol; protected int generation; /** * Initialize the coordination manager. */ public AbstractCoordinator(ConsumerNetworkClient client, String groupId, int sessionTimeoutMs, int heartbeatIntervalMs, Metrics metrics, String metricGrpPrefix, Map<String, String> metricTags, Time time, long requestTimeoutMs, long retryBackoffMs) { this.client = client; this.time = time; this.generation = OffsetCommitRequest.DEFAULT_GENERATION_ID; this.memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID; this.groupId = groupId; this.coordinator = null; this.sessionTimeoutMs = sessionTimeoutMs; this.heartbeat = new Heartbeat(this.sessionTimeoutMs, heartbeatIntervalMs, time.milliseconds()); this.heartbeatTask = new HeartbeatTask(); this.sensors = new GroupCoordinatorMetrics(metrics, metricGrpPrefix, metricTags); this.requestTimeoutMs = requestTimeoutMs; this.retryBackoffMs = retryBackoffMs; } /** * Unique identifier for the class of protocols implements (e.g. "consumer" or "copycat"). * @return Non-null protocol type name */ protected abstract String protocolType(); /** * Get the current list of protocols and their associated metadata supported * by the local member. The order of the protocols in the map indicates the preference * of the protocol (the first entry is the most preferred). 
The coordinator takes this * preference into account when selecting the generation protocol (generally more preferred * protocols will be selected as long as all members support them and there is no disagreement * on the preference). * @return Non-empty map of supported protocols and metadata */ protected abstract LinkedHashMap<String, ByteBuffer> metadata(); /** * Invoked prior to each group join or rejoin. This is typically used to perform any * cleanup from the previous generation (such as committing offsets for the consumer) * @param generation The previous generation or -1 if there was none * @param memberId The identifier of this member in the previous group or "" if there was none */ protected abstract void onJoinPrepare(int generation, String memberId); /** * Perform assignment for the group. This is used by the leader to push state to all the members * of the group (e.g. to push partition assignments in the case of the new consumer) * @param leaderId The id of the leader (which is this member) * @param allMemberMetadata Metadata from all members of the group * @return A map from each member to their state assignment */ protected abstract Map<String, ByteBuffer> performAssignment(String leaderId, String protocol, Map<String, ByteBuffer> allMemberMetadata); /** * Invoked when a group member has successfully joined a group. * @param generation The generation that was joined * @param memberId The identifier for the local member in the group * @param protocol The protocol selected by the coordinator * @param memberAssignment The assignment propagated from the group leader */ protected abstract void onJoinComplete(int generation, String memberId, String protocol, ByteBuffer memberAssignment); /** * Block until the coordinator for this group is known. 
*/ public void ensureCoordinatorKnown() { while (coordinatorUnknown()) { RequestFuture<Void> future = sendGroupMetadataRequest(); client.poll(future, requestTimeoutMs); if (future.failed()) { if (future.isRetriable()) client.awaitMetadataUpdate(); else throw future.exception(); } } } /** * Check whether the group should be rejoined (e.g. if metadata changes) * @return true if it should, false otherwise */ protected boolean needRejoin() { return rejoinNeeded; } /** * Reset the generation/memberId tracked by this member */ public void resetGeneration() { this.generation = OffsetCommitRequest.DEFAULT_GENERATION_ID; this.memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID; rejoinNeeded = true; } /** * Ensure that the group is active (i.e. joined and synced) */ public void ensureActiveGroup() { if (!needRejoin()) return; if (needsJoinPrepare) { onJoinPrepare(generation, memberId); needsJoinPrepare = false; } while (needRejoin()) { ensureCoordinatorKnown(); // ensure that there are no pending requests to the coordinator. This is important // in particular to avoid resending a pending JoinGroup request. 
if (client.pendingRequestCount(this.coordinator) > 0) { client.awaitPendingRequests(this.coordinator); continue; } RequestFuture<ByteBuffer> future = performGroupJoin(); client.poll(future); if (future.succeeded()) { onJoinComplete(generation, memberId, protocol, future.value()); needsJoinPrepare = true; heartbeatTask.reset(); } else { RuntimeException exception = future.exception(); if (exception instanceof UnknownMemberIdException || exception instanceof RebalanceInProgressException || exception instanceof IllegalGenerationException) continue; else if (!future.isRetriable()) throw exception; Utils.sleep(retryBackoffMs); } } } private class HeartbeatTask implements DelayedTask { public void reset() { // start or restart the heartbeat task to be executed at the next chance long now = time.milliseconds(); heartbeat.resetSessionTimeout(now); client.unschedule(this); client.schedule(this, now); } @Override public void run(final long now) { if (generation < 0 || needRejoin() || coordinatorUnknown()) { // no need to send the heartbeat we're not using auto-assignment or if we are // awaiting a rebalance return; } if (heartbeat.sessionTimeoutExpired(now)) { // we haven't received a successful heartbeat in one session interval // so mark the coordinator dead coordinatorDead(); return; } if (!heartbeat.shouldHeartbeat(now)) { // we don't need to heartbeat now, so reschedule for when we do client.schedule(this, now + heartbeat.timeToNextHeartbeat(now)); } else { heartbeat.sentHeartbeat(now); RequestFuture<Void> future = sendHeartbeatRequest(); future.addListener(new RequestFutureListener<Void>() { @Override public void onSuccess(Void value) { long now = time.milliseconds(); heartbeat.receiveHeartbeat(now); long nextHeartbeatTime = now + heartbeat.timeToNextHeartbeat(now); client.schedule(HeartbeatTask.this, nextHeartbeatTime); } @Override public void onFailure(RuntimeException e) { client.schedule(HeartbeatTask.this, time.milliseconds() + retryBackoffMs); } }); } } } /** * 
Join the group and return the assignment for the next generation. This function handles both * JoinGroup and SyncGroup, delegating to {@link #performAssignment(String, String, Map)} if * elected leader by the coordinator. * @return A request future which wraps the assignment returned from the group leader */ private RequestFuture<ByteBuffer> performGroupJoin() { if (coordinatorUnknown()) return RequestFuture.coordinatorNotAvailable(); // send a join group request to the coordinator log.debug("(Re-)joining group {}", groupId); List<JoinGroupRequest.GroupProtocol> protocols = new ArrayList<>(); for (Map.Entry<String, ByteBuffer> metadataEntry : metadata().entrySet()) protocols.add(new JoinGroupRequest.GroupProtocol(metadataEntry.getKey(), metadataEntry.getValue())); JoinGroupRequest request = new JoinGroupRequest( groupId, this.sessionTimeoutMs, this.memberId, protocolType(), protocols); // create the request for the coordinator log.debug("Issuing request ({}: {}) to coordinator {}", ApiKeys.JOIN_GROUP, request, this.coordinator.id()); return client.send(coordinator, ApiKeys.JOIN_GROUP, request) .compose(new JoinGroupResponseHandler()); } private class JoinGroupResponseHandler extends CoordinatorResponseHandler<JoinGroupResponse, ByteBuffer> { @Override public JoinGroupResponse parse(ClientResponse response) { return new JoinGroupResponse(response.responseBody()); } @Override public void handle(JoinGroupResponse joinResponse, RequestFuture<ByteBuffer> future) { // process the response short errorCode = joinResponse.errorCode(); if (errorCode == Errors.NONE.code()) { log.debug("Joined group: {}", joinResponse.toStruct()); AbstractCoordinator.this.memberId = joinResponse.memberId(); AbstractCoordinator.this.generation = joinResponse.generationId(); AbstractCoordinator.this.rejoinNeeded = false; AbstractCoordinator.this.protocol = joinResponse.groupProtocol(); sensors.joinLatency.record(response.requestLatencyMs()); if (joinResponse.isLeader()) { 
onJoinLeader(joinResponse).chain(future); } else { onJoinFollower().chain(future); } } else if (errorCode == Errors.UNKNOWN_MEMBER_ID.code()) { // reset the member id and retry immediately AbstractCoordinator.this.memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID; log.info("Attempt to join group {} failed due to unknown member id, resetting and retrying.", groupId); future.raise(Errors.UNKNOWN_MEMBER_ID); } else if (errorCode == Errors.GROUP_COORDINATOR_NOT_AVAILABLE.code() || errorCode == Errors.NOT_COORDINATOR_FOR_GROUP.code()) { // re-discover the coordinator and retry with backoff coordinatorDead(); log.info("Attempt to join group {} failed due to obsolete coordinator information, retrying.", groupId); future.raise(Errors.forCode(errorCode)); } else if (errorCode == Errors.INCONSISTENT_GROUP_PROTOCOL.code() || errorCode == Errors.INVALID_SESSION_TIMEOUT.code() || errorCode == Errors.INVALID_GROUP_ID.code()) { // log the error and re-throw the exception Errors error = Errors.forCode(errorCode); log.error("Attempt to join group {} failed due to: {}", groupId, error.exception().getMessage()); future.raise(error); } else { // unexpected error, throw the exception future.raise(new KafkaException("Unexpected error in join group response: " + Errors.forCode(joinResponse.errorCode()).exception().getMessage())); } } } private RequestFuture<ByteBuffer> onJoinFollower() { // send follower's sync group with an empty assignment SyncGroupRequest request = new SyncGroupRequest(groupId, generation, memberId, Collections.<String, ByteBuffer>emptyMap()); log.debug("Issuing follower SyncGroup ({}: {}) to coordinator {}", ApiKeys.SYNC_GROUP, request, this.coordinator.id()); return sendSyncGroupRequest(request); } private RequestFuture<ByteBuffer> onJoinLeader(JoinGroupResponse joinResponse) { try { // perform the leader synchronization and send back the assignment for the group Map<String, ByteBuffer> groupAssignment = performAssignment(joinResponse.leaderId(), 
joinResponse.groupProtocol(), joinResponse.members()); SyncGroupRequest request = new SyncGroupRequest(groupId, generation, memberId, groupAssignment); log.debug("Issuing leader SyncGroup ({}: {}) to coordinator {}", ApiKeys.SYNC_GROUP, request, this.coordinator.id()); return sendSyncGroupRequest(request); } catch (RuntimeException e) { return RequestFuture.failure(e); } } private RequestFuture<ByteBuffer> sendSyncGroupRequest(SyncGroupRequest request) { if (coordinatorUnknown()) return RequestFuture.coordinatorNotAvailable(); return client.send(coordinator, ApiKeys.SYNC_GROUP, request) .compose(new SyncGroupRequestHandler()); } private class SyncGroupRequestHandler extends CoordinatorResponseHandler<SyncGroupResponse, ByteBuffer> { @Override public SyncGroupResponse parse(ClientResponse response) { return new SyncGroupResponse(response.responseBody()); } @Override public void handle(SyncGroupResponse syncResponse, RequestFuture<ByteBuffer> future) { short errorCode = syncResponse.errorCode(); if (errorCode == Errors.NONE.code()) { future.complete(syncResponse.memberAssignment()); sensors.syncLatency.record(response.requestLatencyMs()); } else { AbstractCoordinator.this.rejoinNeeded = true; future.raise(Errors.forCode(errorCode)); } } } /** * Discover the current coordinator for the group. Sends a GroupMetadata request to * one of the brokers. The returned future should be polled to get the result of the request. * @return A request future which indicates the completion of the metadata request */ private RequestFuture<Void> sendGroupMetadataRequest() { // initiate the group metadata request // find a node to ask about the coordinator Node node = this.client.leastLoadedNode(); if (node == null) { // TODO: If there are no brokers left, perhaps we should use the bootstrap set // from configuration? 
return RequestFuture.noBrokersAvailable(); } else { // create a group metadata request log.debug("Issuing group metadata request to broker {}", node.id()); GroupMetadataRequest metadataRequest = new GroupMetadataRequest(this.groupId); return client.send(node, ApiKeys.GROUP_METADATA, metadataRequest) .compose(new RequestFutureAdapter<ClientResponse, Void>() { @Override public void onSuccess(ClientResponse response, RequestFuture<Void> future) { handleGroupMetadataResponse(response, future); } }); } } private void handleGroupMetadataResponse(ClientResponse resp, RequestFuture<Void> future) { log.debug("Group metadata response {}", resp); // parse the response to get the coordinator info if it is not disconnected, // otherwise we need to request metadata update if (resp.wasDisconnected()) { future.raise(new DisconnectException()); } else if (!coordinatorUnknown()) { // We already found the coordinator, so ignore the request future.complete(null); } else { GroupMetadataResponse groupMetadataResponse = new GroupMetadataResponse(resp.responseBody()); // use MAX_VALUE - node.id as the coordinator id to mimic separate connections // for the coordinator in the underlying network client layer // TODO: this needs to be better handled in KAFKA-1935 if (groupMetadataResponse.errorCode() == Errors.NONE.code()) { this.coordinator = new Node(Integer.MAX_VALUE - groupMetadataResponse.node().id(), groupMetadataResponse.node().host(), groupMetadataResponse.node().port()); client.tryConnect(coordinator); // start sending heartbeats only if we have a valid generation if (generation > 0) heartbeatTask.reset(); future.complete(null); } else { future.raise(Errors.forCode(groupMetadataResponse.errorCode())); } } } /** * Check if we know who the coordinator is and we have an active connection * @return true if the coordinator is unknown */ public boolean coordinatorUnknown() { if (coordinator == null) return true; if (client.connectionFailed(coordinator)) { coordinatorDead(); return true; } 
return false; } /** * Mark the current coordinator as dead. */ protected void coordinatorDead() { if (this.coordinator != null) { log.info("Marking the coordinator {} dead.", this.coordinator.id()); this.coordinator = null; } } /** * Send a heartbeat request now (visible only for testing). */ public RequestFuture<Void> sendHeartbeatRequest() { HeartbeatRequest req = new HeartbeatRequest(this.groupId, this.generation, this.memberId); return client.send(coordinator, ApiKeys.HEARTBEAT, req) .compose(new HeartbeatCompletionHandler()); } private class HeartbeatCompletionHandler extends CoordinatorResponseHandler<HeartbeatResponse, Void> { @Override public HeartbeatResponse parse(ClientResponse response) { return new HeartbeatResponse(response.responseBody()); } @Override public void handle(HeartbeatResponse heartbeatResponse, RequestFuture<Void> future) { sensors.heartbeatLatency.record(response.requestLatencyMs()); short error = heartbeatResponse.errorCode(); if (error == Errors.NONE.code()) { log.debug("Received successful heartbeat response."); future.complete(null); } else if (error == Errors.GROUP_COORDINATOR_NOT_AVAILABLE.code() || error == Errors.NOT_COORDINATOR_FOR_GROUP.code()) { log.info("Attempt to heart beat failed since coordinator is either not started or not valid, marking it as dead."); coordinatorDead(); future.raise(Errors.forCode(error)); } else if (error == Errors.REBALANCE_IN_PROGRESS.code()) { log.info("Attempt to heart beat failed since the group is rebalancing, try to re-join group."); AbstractCoordinator.this.rejoinNeeded = true; future.raise(Errors.REBALANCE_IN_PROGRESS); } else if (error == Errors.ILLEGAL_GENERATION.code()) { log.info("Attempt to heart beat failed since generation id is not legal, try to re-join group."); AbstractCoordinator.this.rejoinNeeded = true; future.raise(Errors.ILLEGAL_GENERATION); } else if (error == Errors.UNKNOWN_MEMBER_ID.code()) { log.info("Attempt to heart beat failed since member id is not valid, reset it and 
try to re-join group."); memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID; AbstractCoordinator.this.rejoinNeeded = true; future.raise(Errors.UNKNOWN_MEMBER_ID); } else { future.raise(new KafkaException("Unexpected error in heartbeat response: " + Errors.forCode(error).exception().getMessage())); } } } protected abstract class CoordinatorResponseHandler<R, T> extends RequestFutureAdapter<ClientResponse, T> { protected ClientResponse response; public abstract R parse(ClientResponse response); public abstract void handle(R response, RequestFuture<T> future); @Override public void onSuccess(ClientResponse clientResponse, RequestFuture<T> future) { this.response = clientResponse; if (clientResponse.wasDisconnected()) { int correlation = response.request().request().header().correlationId(); log.debug("Cancelled request {} with correlation id {} due to coordinator {} being disconnected", response.request(), correlation, response.request().request().destination()); // mark the coordinator as dead coordinatorDead(); future.raise(new DisconnectException()); return; } try { R response = parse(clientResponse); handle(response, future); } catch (RuntimeException e) { if (!future.isDone()) future.raise(e); } } } private class GroupCoordinatorMetrics { public final Metrics metrics; public final String metricGrpName; public final Sensor heartbeatLatency; public final Sensor joinLatency; public final Sensor syncLatency; public GroupCoordinatorMetrics(Metrics metrics, String metricGrpPrefix, Map<String, String> tags) { this.metrics = metrics; this.metricGrpName = metricGrpPrefix + "-coordinator-metrics"; this.heartbeatLatency = metrics.sensor("heartbeat-latency"); this.heartbeatLatency.add(new MetricName("heartbeat-response-time-max", this.metricGrpName, "The max time taken to receive a response to a heartbeat request", tags), new Max()); this.heartbeatLatency.add(new MetricName("heartbeat-rate", this.metricGrpName, "The average number of heartbeats per second", tags), new Rate(new 
Count())); this.joinLatency = metrics.sensor("join-latency"); this.joinLatency.add(new MetricName("join-time-avg", this.metricGrpName, "The average time taken for a group rejoin", tags), new Avg()); this.joinLatency.add(new MetricName("join-time-max", this.metricGrpName, "The max time taken for a group rejoin", tags), new Avg()); this.joinLatency.add(new MetricName("join-rate", this.metricGrpName, "The number of group joins per second", tags), new Rate(new Count())); this.syncLatency = metrics.sensor("sync-latency"); this.syncLatency.add(new MetricName("sync-time-avg", this.metricGrpName, "The average time taken for a group sync", tags), new Avg()); this.syncLatency.add(new MetricName("sync-time-max", this.metricGrpName, "The max time taken for a group sync", tags), new Avg()); this.syncLatency.add(new MetricName("sync-rate", this.metricGrpName, "The number of group syncs per second", tags), new Rate(new Count())); Measurable lastHeartbeat = new Measurable() { public double measure(MetricConfig config, long now) { return TimeUnit.SECONDS.convert(now - heartbeat.lastHeartbeatSend(), TimeUnit.MILLISECONDS); } }; metrics.addMetric(new MetricName("last-heartbeat-seconds-ago", this.metricGrpName, "The number of seconds since the last controller heartbeat", tags), lastHeartbeat); } } }
/*************************GO-LICENSE-START********************************* * Copyright 2014 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *************************GO-LICENSE-END***********************************/ package com.thoughtworks.go.server.service; import java.io.IOException; import java.sql.SQLException; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.UUID; import java.util.concurrent.Semaphore; import com.thoughtworks.go.config.*; import com.thoughtworks.go.config.GoConfigDao; import com.thoughtworks.go.config.materials.MaterialConfigs; import com.thoughtworks.go.config.materials.dependency.DependencyMaterial; import com.thoughtworks.go.config.materials.mercurial.HgMaterial; import com.thoughtworks.go.config.materials.mercurial.HgMaterialConfig; import com.thoughtworks.go.domain.AgentInstance; import com.thoughtworks.go.domain.Stages; import com.thoughtworks.go.domain.activity.AgentAssignment; import com.thoughtworks.go.domain.builder.Builder; import com.thoughtworks.go.domain.DefaultSchedulingContext; import com.thoughtworks.go.domain.EnvironmentPipelineMatcher; import com.thoughtworks.go.domain.builder.FetchArtifactBuilder; import com.thoughtworks.go.domain.JobInstance; import com.thoughtworks.go.domain.JobPlan; import com.thoughtworks.go.domain.JobResult; import com.thoughtworks.go.domain.JobState; import com.thoughtworks.go.domain.MaterialRevision; import com.thoughtworks.go.domain.MaterialRevisions; 
import com.thoughtworks.go.domain.Pipeline; import com.thoughtworks.go.domain.Stage; import com.thoughtworks.go.domain.StageIdentifier; import com.thoughtworks.go.domain.StageResult; import com.thoughtworks.go.domain.StageState; import com.thoughtworks.go.domain.buildcause.BuildCause; import com.thoughtworks.go.domain.materials.Material; import com.thoughtworks.go.domain.materials.Modification; import com.thoughtworks.go.domain.materials.svn.Subversion; import com.thoughtworks.go.domain.materials.svn.SvnCommand; import com.thoughtworks.go.fixture.PipelineWithTwoStages; import com.thoughtworks.go.helper.AgentMother; import com.thoughtworks.go.helper.SvnTestRepo; import com.thoughtworks.go.helper.TestRepo; import com.thoughtworks.go.remote.AgentIdentifier; import com.thoughtworks.go.remote.work.BuildWork; import com.thoughtworks.go.remote.work.DeniedAgentWork; import com.thoughtworks.go.remote.work.NoWork; import com.thoughtworks.go.remote.work.Work; import com.thoughtworks.go.server.cache.GoCache; import com.thoughtworks.go.server.dao.DatabaseAccessHelper; import com.thoughtworks.go.server.dao.JobInstanceDao; import com.thoughtworks.go.server.dao.PipelineDao; import com.thoughtworks.go.server.dao.StageDao; import com.thoughtworks.go.server.domain.Username; import com.thoughtworks.go.server.persistence.MaterialRepository; import com.thoughtworks.go.server.scheduling.ScheduleHelper; import com.thoughtworks.go.server.service.builders.BuilderFactory; import com.thoughtworks.go.server.transaction.TransactionTemplate; import com.thoughtworks.go.util.*; import com.thoughtworks.go.util.GoConfigFileHelper; import com.thoughtworks.go.utils.SerializationTester; import org.hamcrest.Matchers; import org.junit.*; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import static 
com.thoughtworks.go.helper.ModificationsMother.modifyNoFiles;
import static com.thoughtworks.go.helper.ModificationsMother.modifySomeFiles;
import static com.thoughtworks.go.util.GoConstants.DEFAULT_APPROVED_BY;
import static com.thoughtworks.go.util.TestUtils.sleepQuietly;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.core.IsInstanceOf.instanceOf;
import static org.hamcrest.core.IsNot.not;
import static org.hamcrest.core.IsNull.nullValue;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Spring-wired integration tests for {@link BuildAssignmentService}.
 * <p>
 * Exercises how scheduled jobs are handed out to (or withheld from) agents, and how in-flight
 * jobs are rescheduled or cancelled when the configuration changes underneath them. Runs against
 * a real database (via {@code DatabaseAccessHelper}) and a file-backed config
 * (via {@code GoConfigFileHelper}); it is NOT a unit test.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {
        "classpath:WEB-INF/applicationContext-global.xml",
        "classpath:WEB-INF/applicationContext-dataLocalAccess.xml",
        "classpath:WEB-INF/applicationContext-acegi-security.xml"
})
public class BuildAssignmentServiceTest {
    @Autowired private BuildAssignmentService buildAssignmentService;
    @Autowired private GoConfigService goConfigService;
    @Autowired private GoConfigDao goConfigDao;
    @Autowired private PipelineDao pipelineDao;
    @Autowired private JobInstanceDao jobInstanceDao;
    @Autowired private AgentService agentService;
    @Autowired private AgentAssignment agentAssignment;
    @Autowired private ScheduleService scheduleService;
    @Autowired private MaterialRepository materialRepository;
    @Autowired private DatabaseAccessHelper dbHelper;
    @Autowired private ScheduleHelper scheduleHelper;
    @Autowired private GoCache goCache;
    @Autowired private StageDao stageDao;
    @Autowired private JobInstanceService jobInstanceService;
    @Autowired private PipelineService pipelineService;
    @Autowired private EnvironmentConfigService environmentConfigService;
    @Autowired private TimeProvider timeProvider;
    @Autowired private TransactionTemplate transactionTemplate;
    @Autowired private BuilderFactory builderFactory;
    @Autowired private InstanceFactory instanceFactory;

    // Pipeline "evolve" with a single stage STAGE_NAME and job "unit"; re-created per test in setUp().
    private PipelineConfig evolveConfig;
    private static final String STAGE_NAME = "dev";
    private GoConfigFileHelper configHelper;
    private ScheduleTestUtil u;
    public Subversion repository;
    // Shared SVN repository, created once per class (see setupRepos / tearDownConfigFileLocation).
    public static TestRepo testRepo;
    private PipelineWithTwoStages fixture;
    private String md5 = "md5-test";
    private Username loserUser = new Username(new CaseInsensitiveString("loser"));

    /** Creates the class-wide SVN test repository used as material for the test pipelines. */
    @BeforeClass
    public static void setupRepos() throws IOException {
        testRepo = new SvnTestRepo("testSvnRepo");
    }

    /** Deletes the class-wide test repository created in {@link #setupRepos()}. */
    @AfterClass
    public static void tearDownConfigFileLocation() throws IOException {
        TestRepo.internalTearDown();
    }

    /**
     * Sets up config + database state for each test: a two-stage fixture pipeline plus three
     * SVN-backed pipelines ("evolve", "anotherPipeline", "thirdPipeline"), then clears caches.
     */
    @Before
    public void setUp() throws Exception {
        configHelper = new GoConfigFileHelper().usingCruiseConfigDao(goConfigDao);
        configHelper.onSetUp();
        dbHelper.onSetUp();
        fixture = new PipelineWithTwoStages(materialRepository, transactionTemplate);
        fixture.usingConfigHelper(configHelper).usingDbHelper(dbHelper).onSetUp();
        repository = new SvnCommand(null, testRepo.projectRepositoryUrl());
        evolveConfig = configHelper.addPipeline("evolve", STAGE_NAME, repository, "unit");
        configHelper.addPipeline("anotherPipeline", STAGE_NAME, repository, "anotherTest");
        configHelper.addPipeline("thirdPipeline", STAGE_NAME, repository, "yetAnotherTest");
        // Make sure services see the freshly written config before each test body runs.
        goConfigService.forceNotifyListeners();
        goCache.clear();
        u = new ScheduleTestUtil(transactionTemplate, materialRepository, dbHelper, configHelper);
    }

    /** Reverses everything setUp() did: caches, agents, fixture pipelines, database, config, artifacts. */
    @After
    public void teardown() throws Exception {
        goCache.clear();
        agentService.clearAll();
        fixture.onTearDown();
        dbHelper.onTearDown();
        configHelper.onTearDown();
        FileUtil.deleteFolder(goConfigService.artifactsDir());
        agentAssignment.clear();
    }

    /**
     * If the same agent asks for work again while it already holds a build, the original build
     * must be marked Rescheduled and ignored rather than left dangling.
     */
    @Test
    public void shouldRescheduleAbandonedBuild() throws SQLException {
        AgentIdentifier instance = agent(AgentMother.localAgent());
        Pipeline pipeline = instanceFactory.createPipelineInstance(evolveConfig, modifyNoFiles(evolveConfig), new DefaultSchedulingContext(
                DEFAULT_APPROVED_BY), md5, new TimeProvider());
        dbHelper.savePipelineWithStagesAndMaterials(pipeline);
        buildAssignmentService.onConfigChange(goConfigService.getCurrentConfig());
        buildAssignmentService.onTimer();
        buildAssignmentService.assignWorkToAgent(instance);
        long firstAssignedBuildId = buildOf(pipeline).getId();

        //somehow agent abandoned its original build...

        buildAssignmentService.assignWorkToAgent(instance);
        JobInstance reloaded = jobInstanceDao.buildByIdWithTransitions(firstAssignedBuildId);
        assertThat(reloaded.getState(), is(JobState.Rescheduled));
        assertThat(reloaded.isIgnored(), is(true));
    }

    /** A disabled agent must receive DeniedAgentWork, never a real build. */
    @Test
    public void shouldNotAssignWorkToDeniedAgent() throws Exception {
        AgentConfig deniedAgentConfig = AgentMother.localAgent();
        deniedAgentConfig.disable();
        Work assignedWork = buildAssignmentService.assignWorkToAgent(agent(deniedAgentConfig));
        assertThat(assignedWork, instanceOf(DeniedAgentWork.class));
    }

    /**
     * If the pipeline's materials changed after the job was scheduled (the scheduled build cause
     * is stale w.r.t. current config), no work should be handed out.
     */
    @Test
    public void shouldNotAssignWorkWhenPipelineScheduledWithStaleMaterials() {
        AgentIdentifier instance = agent(AgentMother.localAgent());
        Pipeline pipeline = instanceFactory.createPipelineInstance(evolveConfig, modifyNoFiles(evolveConfig), new DefaultSchedulingContext(DEFAULT_APPROVED_BY), md5, new TimeProvider());
        dbHelper.savePipelineWithStagesAndMaterials(pipeline);
        // Swap the pipeline's material for a different one after scheduling.
        evolveConfig.setMaterialConfigs(new MaterialConfigs(new HgMaterialConfig("foo", null)));
        configHelper.removePipeline(CaseInsensitiveString.str(evolveConfig.name()));
        configHelper.addPipeline(evolveConfig);
        buildAssignmentService.onConfigChange(goConfigService.getCurrentConfig());
        JobInstance job = buildOf(pipeline);
        jobInstanceDao.updateStateAndResult(job);
        assertThat(buildAssignmentService.assignWorkToAgent(instance), is((Work) BuildAssignmentService.NO_WORK));
    }

    /** A job cancelled before assignment must never be handed to an agent. */
    @Test
    public void shouldNotAssignCancelledJob() throws Exception {
        AgentIdentifier instance = agent(AgentMother.localAgent());
        Pipeline pipeline = instanceFactory.createPipelineInstance(evolveConfig, modifyNoFiles(evolveConfig), new DefaultSchedulingContext(DEFAULT_APPROVED_BY), md5, new TimeProvider());
        dbHelper.savePipelineWithStagesAndMaterials(pipeline);
        buildAssignmentService.onConfigChange(goConfigService.getCurrentConfig());
        JobInstance job = buildOf(pipeline);
        job.cancel();
        jobInstanceDao.updateStateAndResult(job);
        assertThat(buildAssignmentService.assignWorkToAgent(instance), is((Work) BuildAssignmentService.NO_WORK));
    }

    /** Assigning real work to a remote agent should bump the active-remote-agent count by one. */
    @Test
    public void shouldUpdateNumberOfActiveRemoteAgentsAfterAssigned() {
        AgentConfig agentConfig = AgentMother.remoteAgent();
        configHelper.addAgent(agentConfig);
        fixture.createPipelineWithFirstStageScheduled();
        buildAssignmentService.onTimer();
        int before = agentService.numberOfActiveRemoteAgents();
        Work work = buildAssignmentService.assignWorkToAgent(agent(agentConfig));
        assertThat(work, instanceOf(BuildWork.class));
        assertThat(agentService.numberOfActiveRemoteAgents(), is(before + 1));
    }

    /** Removing a scheduled job's stage from config must cancel the already-scheduled build. */
    @Test
    public void shouldCancelOutOfDateBuilds() throws Exception {
        fixture.createPipelineWithFirstStageScheduled();
        buildAssignmentService.onTimer();
        configHelper.removeStage(fixture.pipelineName, fixture.devStage);
        buildAssignmentService.onConfigChange(goConfigService.getCurrentConfig());
        Pipeline pipeline = pipelineDao.mostRecentPipeline(fixture.pipelineName);
        JobInstance job = pipeline.getFirstStage().getJobInstances().first();
        assertThat(job.getState(), is(JobState.Completed));
        assertThat(job.getResult(), is(JobResult.Cancelled));
    }

    /**
     * If the whole pipeline was deleted from config, an assignment attempt must return NO_WORK
     * and the orphaned job/stage must end up Cancelled.
     */
    @Test
    public void shouldCancelBuildBelongingToNonExistentPipeline() throws Exception {
        fixture.createPipelineWithFirstStageScheduled();
        buildAssignmentService.onTimer();
        configHelper.removePipeline(fixture.pipelineName);
        AgentConfig agentConfig = AgentMother.localAgent();
        agentConfig.addResource(new Resource("some-other-resource"));
        assertThat((NoWork) buildAssignmentService.assignWorkToAgent(agent(agentConfig)), Matchers.is(BuildAssignmentService.NO_WORK));
        Pipeline pipeline = pipelineDao.mostRecentPipeline(fixture.pipelineName);
        JobInstance job = pipeline.getFirstStage().getJobInstances().first();
        assertThat(job.getState(), is(JobState.Completed));
        assertThat(job.getResult(), is(JobResult.Cancelled));
        Stage stage = stageDao.findStageWithIdentifier(job.getIdentifier().getStageIdentifier());
        assertThat(stage.getState(), is(StageState.Cancelled));
        assertThat(stage.getResult(), is(StageResult.Cancelled));
    }

    /**
     * Races a timer-triggered job-plan reload against an in-flight assignment: while an agent is
     * being assigned (loader blocked via semaphore + sleep), onTimer() must NOT re-query the
     * scheduled job plans (verified via the mocked JobInstanceService call count).
     */
    @Test
    public void shouldNotReloadScheduledJobPlansWhenAgentWorkAssignmentIsInProgress() throws Exception {
        fixture.createPipelineWithFirstStageScheduled();
        Pipeline pipeline = pipelineDao.mostRecentPipeline(fixture.pipelineName);
        JobInstance job = pipeline.getFirstStage().getJobInstances().first();

        final JobInstanceService mockJobInstanceService = mock(JobInstanceService.class);

        final Pipeline pipeline1 = pipeline;
        final Semaphore sem = new Semaphore(1);
        sem.acquire();

        when(mockJobInstanceService.orderedScheduledBuilds()).thenReturn(jobInstanceService.orderedScheduledBuilds());
        when(mockJobInstanceService.buildByIdWithTransitions(job.getId())).thenReturn(jobInstanceService.buildByIdWithTransitions(job.getId()));

        ScheduledPipelineLoader scheduledPipelineLoader = new ScheduledPipelineLoader(null, null, null, null, null, null, null, null) {
            @Override
            public Pipeline pipelineWithPasswordAwareBuildCauseByBuildId(long buildId) {
                // Let the main thread proceed, then stall so its onTimer() overlaps this assignment.
                sem.release();
                sleepQuietly(1000);
                verify(mockJobInstanceService, times(1)).orderedScheduledBuilds();
                return pipeline1;
            }
        };

        final BuildAssignmentService buildAssignmentServiceUnderTest = new BuildAssignmentService(goConfigService, mockJobInstanceService, scheduleService,
                agentService, environmentConfigService, timeProvider, transactionTemplate, scheduledPipelineLoader, pipelineService, builderFactory);

        final Throwable[] fromThread = new Throwable[1];
        buildAssignmentServiceUnderTest.onTimer();

        Thread assigner = new Thread(new Runnable() {
            public void run() {
                try {
                    final AgentConfig agentConfig = AgentMother.localAgentWithResources("some-other-resource");
                    buildAssignmentServiceUnderTest.assignWorkToAgent(agent(agentConfig));
                } catch (Throwable e) {
                    e.printStackTrace();
                    fromThread[0] = e;
                } finally {

                }
            }
        }, "assignmentThread");
        assigner.start();

        sem.acquire();
        buildAssignmentServiceUnderTest.onTimer();

        assigner.join();
        // Any assertion failure inside the worker thread is surfaced here.
        assertThat(fromThread[0], is(nullValue()));
    }

    /**
     * If loading the scheduled pipeline blows up with PipelineNotFoundException during work
     * creation, the exception propagates AND the orphaned job/stage are cancelled.
     */
    @Test
    public void shouldCancelBuildBelongingToNonExistentPipelineWhenCreatingWork() throws Exception {
        fixture.createPipelineWithFirstStageScheduled();
        Pipeline pipeline = pipelineDao.mostRecentPipeline(fixture.pipelineName);

        ScheduledPipelineLoader scheduledPipelineLoader = mock(ScheduledPipelineLoader.class);
        when(scheduledPipelineLoader.pipelineWithPasswordAwareBuildCauseByBuildId(pipeline.getFirstStage().getJobInstances().first().getId())).thenThrow(
                new PipelineNotFoundException("thrown by mockPipelineService"));

        GoConfigService mockGoConfigService = mock(GoConfigService.class);
        CruiseConfig config = configHelper.currentConfig();
        configHelper.removePipeline(fixture.pipelineName, config);
        when(mockGoConfigService.getCurrentConfig()).thenReturn(config);

        buildAssignmentService = new BuildAssignmentService(mockGoConfigService, jobInstanceService, scheduleService, agentService, environmentConfigService, timeProvider,
                transactionTemplate, scheduledPipelineLoader, pipelineService, builderFactory);
        buildAssignmentService.onTimer();

        AgentConfig agentConfig = AgentMother.localAgent();
        agentConfig.addResource(new Resource("some-other-resource"));

        try {
            buildAssignmentService.assignWorkToAgent(agent(agentConfig));
            fail("should have thrown PipelineNotFoundException");
        } catch (PipelineNotFoundException e) {
            // ok
        }

        pipeline = pipelineDao.mostRecentPipeline(fixture.pipelineName);

        JobInstance job = pipeline.getFirstStage().getJobInstances().first();
        assertThat(job.getState(), is(JobState.Completed));
        assertThat(job.getResult(), is(JobResult.Cancelled));
        Stage stage = stageDao.findStageWithIdentifier(job.getIdentifier().getStageIdentifier());
        assertThat(stage.getState(), is(StageState.Cancelled));
        assertThat(stage.getResult(), is(StageResult.Cancelled));
    }

    /** BuildWork handed to agents must survive a serialize/deserialize round trip intact. */
    @Test
    public void shouldBeAbleToSerializeAndDeserializeBuildWork() throws Exception {
        Pipeline pipeline1 = instanceFactory.createPipelineInstance(evolveConfig, modifySomeFiles(evolveConfig), new DefaultSchedulingContext(DEFAULT_APPROVED_BY), md5, new TimeProvider());
        dbHelper.savePipelineWithStagesAndMaterials(pipeline1);

        buildAssignmentService.onTimer();
        BuildWork work = (BuildWork) buildAssignmentService.assignWorkToAgent(agent(AgentMother.localAgent()));

        BuildWork deserialized = (BuildWork) SerializationTester.serializeAndDeserialize(work);

        assertThat(deserialized.getAssignment().materialRevisions(), is(work.getAssignment().materialRevisions()));

        assertThat(deserialized.getAssignment(), is(work.getAssignment()));
        assertThat(deserialized, is(work));
    }

    /** The stage-level fetchMaterials flag must be carried through to the assigned work's plan. */
    @Test
    public void shouldCreateWorkWithFetchMaterialsFlagFromStageConfig() throws Exception {
        evolveConfig.getFirstStageConfig().setFetchMaterials(true);
        Pipeline pipeline1 = instanceFactory.createPipelineInstance(evolveConfig, modifySomeFiles(evolveConfig), new DefaultSchedulingContext(DEFAULT_APPROVED_BY), md5, new TimeProvider());
        dbHelper.savePipelineWithStagesAndMaterials(pipeline1);

        buildAssignmentService.onTimer();
        BuildWork work = (BuildWork) buildAssignmentService.assignWorkToAgent(agent(AgentMother.localAgent()));

        assertThat("should have set fetchMaterials on assignment", work.getAssignment().getPlan().shouldFetchMaterials(), is(true));
    }

    /**
     * (uppest/2/uppest-stage/1)
     * |------------------> upper-peer -------
     * |  ...................................|...............................................
     * |  .                                  |                                              .
     * [ uppest-stage ......................|...................... {bar.zip uppest/upper-peer/downer}
     *                      V                                        .
     * uppest  uppest-stage-2 ------> upper ------> downer ------> downest {foo.zip uppest/upper/downer}
     *         (uppest/1/uppest-stage-2/1)
     *         uppest-stage-3 ]
     * <p/>
     * ....  :: fetch artifact call
     * --->  :: material dependency
     */
    @Test
    public void shouldCreateWork_withAncestorFetchArtifactCalls_resolvedToRelevantStage() throws Exception {
        // Build the diamond-shaped ancestry pictured above: downest depends (transitively) on two
        // different runs/stages of "uppest", via upper and upper-peer.
        configHelper.addPipeline("uppest", "uppest-stage");
        configHelper.addStageToPipeline("uppest", "uppest-stage-2");
        PipelineConfig uppest = configHelper.addStageToPipeline("uppest", "uppest-stage-3");

        configHelper.addPipeline("upper", "upper-stage");
        DependencyMaterial upper_sMaterial = new DependencyMaterial(new CaseInsensitiveString("uppest"), new CaseInsensitiveString("uppest-stage-2"));
        PipelineConfig upper = configHelper.setMaterialConfigForPipeline("upper", upper_sMaterial.config());

        configHelper.addPipeline("upper-peer", "upper-peer-stage");
        DependencyMaterial upperPeer_sMaterial = new DependencyMaterial(new CaseInsensitiveString("uppest"), new CaseInsensitiveString("uppest-stage"));
        PipelineConfig upperPeer = configHelper.setMaterialConfigForPipeline("upper-peer", upperPeer_sMaterial.config());

        configHelper.addPipeline("downer", "downer-stage");
        DependencyMaterial downer_sUpperMaterial = new DependencyMaterial(new CaseInsensitiveString("upper"), new CaseInsensitiveString("upper-stage"));
        configHelper.setMaterialConfigForPipeline("downer", downer_sUpperMaterial.config());
        DependencyMaterial downer_sUpperPeerMaterial = new DependencyMaterial(new CaseInsensitiveString("upper-peer"), new CaseInsensitiveString("upper-peer-stage"));
        PipelineConfig downer = configHelper.addMaterialToPipeline("downer", downer_sUpperPeerMaterial.config());

        configHelper.addPipeline("downest", "downest-stage");
        DependencyMaterial downest_sMaterial = new DependencyMaterial(new CaseInsensitiveString("downer"), new CaseInsensitiveString("downer-stage"));
        configHelper.setMaterialConfigForPipeline("downest", downest_sMaterial.config());

        // downest's job fetches foo.zip via the "upper" path and bar.zip via the "upper-peer" path.
        Tasks allFetchTasks = new Tasks();
        allFetchTasks.add(new FetchTask(new CaseInsensitiveString("uppest/upper/downer"), new CaseInsensitiveString("uppest-stage"), new CaseInsensitiveString("unit"), "foo.zip", "bar"));
        allFetchTasks.add(new FetchTask(new CaseInsensitiveString("uppest/upper-peer/downer"), new CaseInsensitiveString("uppest-stage"), new CaseInsensitiveString("unit"), "bar.zip", "baz"));
        configHelper.replaceAllJobsInStage("downest", "downest-stage", new JobConfig(new CaseInsensitiveString("fetcher"), new Resources("fetcher"), new ArtifactPlans(), allFetchTasks));
        PipelineConfig downest = goConfigService.getCurrentConfig().pipelineConfigByName(new CaseInsensitiveString("downest"));

        // Run the two ancestor instances of "uppest" and pass the stages each path depends on.
        DefaultSchedulingContext defaultSchedulingCtx = new DefaultSchedulingContext(DEFAULT_APPROVED_BY);
        Pipeline uppestInstanceForUpper = instanceFactory.createPipelineInstance(uppest, modifySomeFiles(uppest), defaultSchedulingCtx, md5, new TimeProvider());
        dbHelper.savePipelineWithStagesAndMaterials(uppestInstanceForUpper);
        dbHelper.passStage(uppestInstanceForUpper.findStage("uppest-stage"));
        Stage upper_sMaterialStage = dbHelper.scheduleStage(uppestInstanceForUpper, uppest.getStage(new CaseInsensitiveString("uppest-stage-2")));
        dbHelper.passStage(upper_sMaterialStage);

        Pipeline uppestInstanceForUpperPeer = instanceFactory.createPipelineInstance(uppest, modifySomeFiles(uppest), new DefaultSchedulingContext("super-hero"), md5, new TimeProvider());
        dbHelper.savePipelineWithStagesAndMaterials(uppestInstanceForUpperPeer);
        Stage upperPeer_sMaterialStage = uppestInstanceForUpperPeer.findStage("uppest-stage");
        dbHelper.passStage(upperPeer_sMaterialStage);

        Pipeline upperInstance = instanceFactory.createPipelineInstance(upper, buildCauseForDependency(upper_sMaterial, upper_sMaterialStage), defaultSchedulingCtx, md5, new TimeProvider());
        dbHelper.savePipelineWithStagesAndMaterials(upperInstance);
        Stage downer_sUpperMaterialStage = upperInstance.findStage("upper-stage");
        dbHelper.passStage(downer_sUpperMaterialStage);

        Pipeline upperPeerInstance = instanceFactory.createPipelineInstance(upperPeer, buildCauseForDependency(upperPeer_sMaterial, upperPeer_sMaterialStage), defaultSchedulingCtx, md5, new TimeProvider());
        dbHelper.savePipelineWithStagesAndMaterials(upperPeerInstance);
        Stage downer_sUpperPeerMaterialStage = upperPeerInstance.findStage("upper-peer-stage");
        dbHelper.passStage(downer_sUpperPeerMaterialStage);

        MaterialRevisions downer_sMaterialRevisions = new MaterialRevisions(
                materialRevisionForDownstream(downer_sUpperMaterial, downer_sUpperMaterialStage),
                materialRevisionForDownstream(downer_sUpperPeerMaterial, downer_sUpperPeerMaterialStage));

        Pipeline downerInstance = instanceFactory.createPipelineInstance(downer, BuildCause.createManualForced(downer_sMaterialRevisions, loserUser), defaultSchedulingCtx, md5, new TimeProvider());
        dbHelper.savePipelineWithStagesAndMaterials(downerInstance);
        Stage downest_sMaterialStage = downerInstance.findStage("downer-stage");
        dbHelper.passStage(downest_sMaterialStage);

        Pipeline downestInstance = instanceFactory.createPipelineInstance(downest, buildCauseForDependency(downest_sMaterial, downest_sMaterialStage), defaultSchedulingCtx, md5, new TimeProvider());
        dbHelper.savePipelineWithStagesAndMaterials(downestInstance);

        buildAssignmentService.onTimer();
        AgentConfig agentConfig = AgentMother.localAgent();
        agentConfig.addResource(new Resource("fetcher"));
        BuildWork work = (BuildWork) buildAssignmentService.assignWorkToAgent(agent(agentConfig));

        // Each fetch must resolve to the correct ancestor RUN of uppest-stage, not just the name.
        List<Builder> builders = work.getAssignment().getBuilders();
        FetchArtifactBuilder fooZipFetch = (FetchArtifactBuilder) builders.get(0);
        assertThat(fooZipFetch.artifactLocator(), is("uppest/1/uppest-stage/latest/unit/foo.zip"));
        FetchArtifactBuilder barZipFetch = (FetchArtifactBuilder) builders.get(1);
        assertThat(barZipFetch.artifactLocator(), is("uppest/2/uppest-stage/1/unit/bar.zip"));
    }

    /** Builds a manual-forced BuildCause whose single revision points at the given upstream stage. */
    private BuildCause buildCauseForDependency(DependencyMaterial material, Stage upstreamStage) {
        return BuildCause.createManualForced(new MaterialRevisions(materialRevisionForDownstream(material, upstreamStage)), loserUser);
    }

    /** Converts a completed upstream stage into the MaterialRevision a downstream pipeline would see. */
    private MaterialRevision materialRevisionForDownstream(DependencyMaterial material, Stage upstreamStage) {
        StageIdentifier identifier = upstreamStage.getIdentifier();
        String rev = identifier.getStageLocator();
        String pipelineLabel = identifier.getPipelineLabel();
        return new MaterialRevision(material, new Modification(new Date(), rev, pipelineLabel, upstreamStage.getPipelineId()));
    }

    /** Registers + approves the agent with the AgentService and returns its identifier. */
    private AgentIdentifier agent(AgentConfig agentConfig) {
        agentService.sync(new Agents(agentConfig));
        agentService.approve(agentConfig.getUuid());
        return agentService.findAgent(agentConfig.getUuid()).getAgentIdentifier();
    }

    /** Job requires a resource; agent declares none -> NO_WORK, job stays Scheduled/unassigned. */
    @Test
    public void shouldNotScheduleIfAgentDoesNotHaveResources() throws Exception {
        JobConfig plan = evolveConfig.findBy(new CaseInsensitiveString(STAGE_NAME)).jobConfigByInstanceName("unit", true);
        plan.addResource("some-resource");

        scheduleHelper.schedule(evolveConfig, modifySomeFiles(evolveConfig), DEFAULT_APPROVED_BY);
        Work work = buildAssignmentService.assignWorkToAgent(agent(AgentMother.localAgent()));

        Pipeline pipeline = pipelineDao.mostRecentPipeline(CaseInsensitiveString.str(evolveConfig.name()));
        JobInstance job = pipeline.findStage(STAGE_NAME).findJob("unit");

        assertThat(work, is((Work) BuildAssignmentService.NO_WORK));
        assertThat(job.getState(), is(JobState.Scheduled));
        assertThat(job.getAgentUuid(), is(nullValue()));
    }

    /** Job requires a resource; agent declares a DIFFERENT one -> NO_WORK, job stays Scheduled. */
    @Test
    public void shouldNotScheduleIfAgentDoesNotHaveMatchingResources() throws Exception {
        JobConfig plan = evolveConfig.findBy(new CaseInsensitiveString(STAGE_NAME)).jobConfigByInstanceName("unit", true);
        plan.addResource("some-resource");

        scheduleHelper.schedule(evolveConfig, modifySomeFiles(evolveConfig), DEFAULT_APPROVED_BY);

        AgentConfig agentConfig = AgentMother.localAgent();
        agentConfig.addResource(new Resource("some-other-resource"));

        Work work = buildAssignmentService.assignWorkToAgent(agent(agentConfig));
        assertThat(work, is((Work) BuildAssignmentService.NO_WORK));

        Pipeline pipeline = pipelineDao.mostRecentPipeline(CaseInsensitiveString.str(evolveConfig.name()));
        JobInstance job = pipeline.findStage(STAGE_NAME).findJob("unit");

        assertThat(job.getState(), is(JobState.Scheduled));
        assertThat(job.getAgentUuid(), is(nullValue()));
    }

    /** Matching resource -> job is assigned to the agent and carries the plan's resources. */
    @Test
    public void shouldScheduleIfAgentMatchingResources() throws Exception {
        JobConfig plan = evolveConfig.findBy(new CaseInsensitiveString(STAGE_NAME)).jobConfigByInstanceName("unit", true);
        plan.addResource("some-resource");

        scheduleHelper.schedule(evolveConfig, modifySomeFiles(evolveConfig), DEFAULT_APPROVED_BY);

        AgentConfig agentConfig = AgentMother.localAgent();
        agentConfig.addResource(new Resource("some-resource"));
        buildAssignmentService.onTimer();
        Work work = buildAssignmentService.assignWorkToAgent(agent(agentConfig));
        assertThat(work, is(not((Work) BuildAssignmentService.NO_WORK)));

        Pipeline pipeline = pipelineDao.mostRecentPipeline(CaseInsensitiveString.str(evolveConfig.name()));
        JobInstance job = pipeline.findStage(STAGE_NAME).findJob("unit");

        JobPlan loadedPlan = jobInstanceDao.loadPlan(job.getId());
        assertThat(loadedPlan.getResources(), is((List<Resource>) plan.resources()));

        assertThat(job.getState(), is(JobState.Assigned));
        assertThat(job.getAgentUuid(), is(agentConfig.getUuid()));
    }

    /**
     * After a reschedule, the new job instance must still honour resource matching: a
     * resource-less agent gets NO_WORK, the originally-matching agent gets the rescheduled job.
     */
    @Test
    public void shouldReScheduleToCorrectAgent() throws Exception {
        JobConfig plan = evolveConfig.findBy(new CaseInsensitiveString(STAGE_NAME)).jobConfigByInstanceName("unit", true);
        plan.addResource("some-resource");

        scheduleHelper.schedule(evolveConfig, modifySomeFiles(evolveConfig), DEFAULT_APPROVED_BY);

        buildAssignmentService.onTimer();

        AgentConfig agentConfig = AgentMother.localAgent();
        agentConfig.addResource(new Resource("some-resource"));
        Work work = buildAssignmentService.assignWorkToAgent(agent(agentConfig));
        assertThat(work, is(not((Work) BuildAssignmentService.NO_WORK)));

        Pipeline pipeline = pipelineDao.mostRecentPipeline(CaseInsensitiveString.str(evolveConfig.name()));
        JobInstance job = pipeline.findStage(STAGE_NAME).findJob("unit");

        JobInstance runningJob = jobInstanceDao.buildByIdWithTransitions(job.getId());

        scheduleService.rescheduleJob(runningJob);

        pipeline = pipelineDao.mostRecentPipeline(CaseInsensitiveString.str(evolveConfig.name()));
        JobInstance rescheduledJob = pipeline.findStage(STAGE_NAME).findJob("unit");

        assertThat(rescheduledJob.getId(), not(runningJob.getId()));

        buildAssignmentService.onTimer();
        Work noResourcesWork = buildAssignmentService.assignWorkToAgent(agent(AgentMother.localAgentWithResources("WITHOUT_RESOURCES")));
        assertThat(noResourcesWork, is((Work) BuildAssignmentService.NO_WORK));

        buildAssignmentService.onTimer();
        Work correctAgentWork = buildAssignmentService.assignWorkToAgent(agent(agentConfig));
        assertThat(correctAgentWork, is(not((Work) BuildAssignmentService.NO_WORK)));
    }

    /**
     * Reverting config to a version without p1/p2 must drop their job plans from the service's
     * internal "jobPlans" list (inspected via reflection, as there is no public accessor).
     */
    @Test
    public void shouldRemoveAllJobPlansThatAreNotInConfig() {
        CruiseConfig oldConfig = goConfigService.getCurrentConfig();
        ScheduleTestUtil.AddedPipeline p1 = u.saveConfigWith("p1", "s1", u.m(new HgMaterial("hg", null)));
        Pipeline p1_1 = instanceFactory.createPipelineInstance(p1.config, modifyNoFiles(p1.config), new DefaultSchedulingContext(
                DEFAULT_APPROVED_BY), md5, new TimeProvider());
        ScheduleTestUtil.AddedPipeline p2 = u.saveConfigWith("p2", "s1", u.m(new HgMaterial("hg", null)));
        Pipeline p2_1 = instanceFactory.createPipelineInstance(p2.config, modifyNoFiles(p2.config), new DefaultSchedulingContext(
                DEFAULT_APPROVED_BY), md5, new TimeProvider());
        dbHelper.savePipelineWithStagesAndMaterials(p1_1);
        dbHelper.savePipelineWithStagesAndMaterials(p2_1);
        CruiseConfig cruiseConfig = goConfigService.getCurrentConfig();
        buildAssignmentService.onConfigChange(cruiseConfig);
        buildAssignmentService.onTimer();

        List<JobPlan> plans = (List<JobPlan>) ReflectionUtil.getField(buildAssignmentService, "jobPlans");
        assertThat(plans.isEmpty(), is(false));
        assertThat(plans.size(), is(2));

        configHelper.writeConfigFile(oldConfig);

        plans = (List<JobPlan>) ReflectionUtil.getField(buildAssignmentService, "jobPlans");
        assertThat("Actual size is " + plans.size(), plans.isEmpty(), is(true));
    }

    /**
     * Renaming a pipeline to a different CASE plus renaming its stage counts as removal of the
     * old pipeline: the stage scheduled under the old names must be cancelled.
     */
    @Test
    public void shouldCancelAScheduledJobInCaseThePipelineIsRemovedFromTheConfig_SpecificallyAPipelineRenameToADifferentCaseAndStageNameToADifferentName() throws Exception {
        Material hgMaterial = new HgMaterial("url", "folder");
        String[] hgRevs = new String[]{"h1"};
        u.checkinInOrder(hgMaterial, hgRevs);
        ScheduleTestUtil.AddedPipeline p1 = u.saveConfigWith("PIPELINE_WHICH_WILL_EVENTUALLY_CHANGE_CASE", u.m(hgMaterial));
        u.scheduleWith(p1, hgRevs);
        ScheduleTestUtil.AddedPipeline renamedPipeline = u.renamePipelineAndFirstStage(p1, "pipeline_which_will_eventually_change_case", "NEW_RANDOM_STAGE_NAME" + UUID.randomUUID());
        Pipeline p1_2 = u.scheduleWith(renamedPipeline, hgRevs);
        CruiseConfig cruiseConfig = configHelper.load();
        buildAssignmentService.onTimer(); // To Reload Job Plans
        buildAssignmentService.onConfigChange(cruiseConfig);

        Stages allStages = stageDao.findAllStagesFor(p1_2.getName(), p1_2.getCounter());
        assertThat(allStages.byName(CaseInsensitiveString.str(p1.config.first().name())).getState(), is(StageState.Cancelled));
    }

    /** Convenience accessor: first job of the first stage of the given pipeline instance. */
    private JobInstance buildOf(Pipeline pipeline) {
        return pipeline.getStages().first().getJobInstances().first();
    }
}
package org.ambientdynamix.contextplugins.hue;

//Copyright (c) 2013 Stephan Jaetzold.
//
//Licensed under the Apache License, Version 2.0 (the "License");
//You may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//  http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and limitations under the License.
//
//This Source may have been modified for this project; the original is maintained and can be found at https://github.com/jaetzold/philips-hue-java-sdk

import org.json2.JSONArray;
import org.json2.JSONObject;
import org.json2.JSONStringer;
import org.json2.JSONWriter;

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.ambientdynamix.contextplugins.hue.HueBridgeComm.RM.*;
import static org.ambientdynamix.contextplugins.hue.HueLight.ColorMode.*;

/**
 * This class represents a single light bulb. Use it to query or manipulate the state of a single light bulb.
 * <p>
 * An instance of this class is not created directly. Instead query a {@link HueBridge} for its lights using either
 * {@link HueBridge#getLights()} or {@link HueBridge#getLight(Integer)}.
 * </p>
 * <p>
 * When querying for state the actual value is automatically updated with the current value on the bridge if its local cache is 'too old'.
 * This behaviour can be tuned (or turned off) using {@link #setAutoSyncInterval(Integer)}.
 * </p>
 *
 * As a general note: A state value here is always only a cached version that may already be incorrect.
 * Even if no one else is controlling the lights. I've observed that e.g. the brightness value changes if a light is just switched on.
*
 * <p>See <a href="http://developers.meethue.com/1_lightsapi.html">Philips hue API, Section 1</a> for further reference.</p>
 *
 * @author Stephan Jaetzold <p><small>Created at 20.03.13, 14:59</small>
 */
public class HueLightBulb implements HueLight, Comparable<HueLightBulb> {
    final Integer id;
    final HueBridge bridge;

    // Locally cached light state; refreshed from the bridge device by sync()/checkSync().
    String name;
    boolean on;
    int brightness;
    int hue;
    int saturation;
    double ciex;
    double ciey;
    int colorTemperature;
    Effect effect;
    ColorMode colorMode;

    Integer transitionTime;
    Integer autoSyncInterval = 1000;
    long lastSyncTime;

    /**
     * This constructor is package private, since lights are not to be created. A {@link HueBridge} is queried for them.
     *
     * @param bridge the bridge this light belongs to, must not be null
     * @param id the non-negative light id assigned by the bridge
     * @throws IllegalArgumentException if id is null/negative or bridge is null
     */
    public HueLightBulb(HueBridge bridge, Integer id) {
        if(id==null || id<0) {
            throw new IllegalArgumentException("id has to be non-negative and non-null");
        }
        if(bridge==null) {
            throw new IllegalArgumentException("bridge may not be null");
        }
        this.bridge = bridge;
        this.id = id;
    }

    @Override
    public Integer getId() {
        return id;
    }

    @Override
    public HueBridge getBridge() {
        return bridge;
    }

    @Override
    public Integer getTransitionTime() {
        return transitionTime;
    }

    @Override
    public void setTransitionTime(Integer transitionTime) {
        this.transitionTime = transitionTime;
    }

    /**
     * The time in milliseconds that a queried state value may be old before it needs to be updated with the current value from the bridge device.
     *
     * @return null, if values are never automatically updated with the current state from the bridge device.
     */
    public Integer getAutoSyncInterval() {
        return autoSyncInterval;
    }

    /**
     * Set the time in milliseconds that a queried state value may be old before it needs to be updated with the current value from the bridge device.
     * This may be null if no automatic syncing should take place. Manual syncing can be done by calling {@link #sync()}.
     * The default value is 1000 milliseconds.
     *
     * @param autoSyncInterval The time in milliseconds that a cached state value is used until it is updated again.
     */
    public void setAutoSyncInterval(Integer autoSyncInterval) {
        this.autoSyncInterval = autoSyncInterval;
    }

    @Override
    public String getName() {
        checkSync();
        return name;
    }

    /**
     * Renames this light on the bridge device and updates the local cache with the
     * name the bridge actually stored (the bridge may alter it, e.g. for uniqueness).
     *
     * @param name the new name; at most 32 characters after trimming, not null
     * @throws IllegalArgumentException if name is null or too long
     */
    @Override
    public void setName(String name) {
        if(name==null || name.trim().length()>32) {
            // Message fixed: the check allows up to (and including) 32 characters.
            throw new IllegalArgumentException("Name (without leading or trailing whitespace) must not be null and has to be at most 32 characters long");
        }
        final JSONObject response = bridge.checkedSuccessRequest(PUT, "/lights/" +id, JO().key("name").value(name.trim())).get(0);
        final String actualName = response.getJSONObject("success").optString("/lights/" + id + "/name");
        // Prefer the name echoed back by the bridge, fall back to what was requested.
        this.name = actualName!=null ? actualName : name;
    }

    /**
     * Whether this light is in 'on' or 'off' state.
     *
     * @return true if this light is on, false otherwise.
     */
    public boolean isOn() {
        checkSync();
        return on;
    }

    /**
     * Sets on/off, hue, brightness and saturation in a single bridge request and
     * forces {@link Effect#NONE} / {@link Alert#NONE}.
     *
     * @throws IllegalArgumentException if any argument is null or out of range
     */
    @Override
    public void setState(Boolean on, Integer hue, Integer brightness, Integer saturation) {
        if(on==null || hue==null || brightness==null || saturation==null) {
            throw new IllegalArgumentException("on, hue, brightness and saturation may not be null");
        }
        if(brightness<0 || brightness>255) {
            throw new IllegalArgumentException("Brightness must be between 0-255");
        }
        if(hue<0 || hue>65535) {
            throw new IllegalArgumentException("Hue must be between 0-65535");
        }
        if(saturation<0 || saturation>255) {
            throw new IllegalArgumentException("Saturation must be between 0-255");
        }
        Map<String, Object> state = new HashMap<String, Object>();
        state.put("on", on);
        state.put("hue", hue);
        state.put("bri", brightness);
        state.put("sat", saturation);
        // NOTE(review): the enum objects (not their .name field, as setEffect()/setAlert()
        // use) are put into the request map here — verify the serializer emits the
        // intended string for them.
        state.put("effect", Effect.NONE);
        state.put("alert", Alert.NONE);
        stateChange(state);
        this.on = on;
        this.hue = hue;
        this.brightness = brightness;
        this.saturation = saturation;
        this.colorMode = ColorMode.HS;
        this.effect = Effect.NONE;
    }

    /**
     * @return a snapshot of the locally cached on/hue/brightness/saturation/effect state.
     */
    public Map<String,Object> getStateKeyValueMap() {
        Map<String,Object> map = new HashMap<String,Object>();
        map.put("on", on);
        map.put("hue", hue);
        map.put("brightness", brightness);
        map.put("saturation", saturation);
        map.put("effect", effect);
        return map;
    }

    /**
     * Applies a string-keyed state description (as e.g. parsed from user input):
     * validates and converts it, sends it to the bridge and updates the local cache.
     */
    public void setStateUsingKeyValueMap(Map<String,String> new_state) {
        Map<String,Object> new_state_converted = convert(new_state);
        stateChange(new_state_converted);
        setStateFromMap(new_state_converted);
    }

    /**
     * Parses {@code raw} as an int and checks it against [min, max].
     *
     * @throws NumberFormatException if raw is not a valid int
     * @throws IllegalArgumentException if the value is out of range
     */
    private static int parseBoundedInt(String raw, int min, int max, String label) {
        final int v = Integer.parseInt(raw);
        if(v<min || v>max) {
            throw new IllegalArgumentException(label + " must be between " + min + "-" + max);
        }
        return v;
    }

    /**
     * Converts a user-supplied string map into the bridge's parameter names and types.
     * Exactly one color scheme may be used at a time: hue/saturation/brightness OR
     * rgb OR colorname (the latter two are translated into hue values).
     * Unknown keys are silently ignored.
     */
    private Map<String,Object> convert(Map<String,String> map) {
        Map<String,Object> new_map = new HashMap<String,Object>();
        boolean usesHsv = false;
        boolean usesRgb = false;
        boolean usesColorName = false;
        for (String key : map.keySet()) {
            String raw = map.get(key);
            if (key.equals("brightness")) {
                usesHsv = true;
                new_map.put("bri", parseBoundedInt(raw, 0, 255, "Brightness"));
            } else if (key.equals("saturation")) {
                usesHsv = true;
                new_map.put("sat", parseBoundedInt(raw, 0, 255, "Saturation"));
            } else if (key.equals("colorTemperature")) {
                new_map.put("ct", parseBoundedInt(raw, 153, 500, "Color Temperature"));
            } else if (key.equals("hue")) {
                usesHsv = true;
                new_map.put("hue", parseBoundedInt(raw, 0, 65535, "Hue"));
            } else if (key.equals("transitiontime")) {
                new_map.put("transitiontime", parseBoundedInt(raw, 0, 65535, "Transitiontime"));
            } else if (key.equals("on")) {
                new_map.put("on", Boolean.valueOf(raw));
            } else if (key.equals("effect") || key.equals("alarm")) {
                new_map.put(key, raw);
            } else if (key.equals("rgb")) {
                usesRgb = true;
                new_map.put("rgb", raw);
            } else if (key.equals("colorname")) {
                usesColorName = true;
                new_map.put("colorname", raw);
            }
        }
        if ((usesRgb ? 1 : 0) + (usesHsv ? 1 : 0) + (usesColorName ? 1 : 0) > 1) {
            throw new IllegalArgumentException("Please use only one color information at a time (hue,saturation,brightness OR rgb OR colorname)");
        }
        if (new_map.containsKey("colorname")) {
            // Translate a color name to an rgb string first; it is then handled below.
            String colorname = (String)new_map.remove("colorname");
            String rgb = ColorHelper.convertName2RGB(colorname);
            if (rgb == null) {
                throw new IllegalArgumentException("No color with name "+colorname+" found!");
            }
            new_map.put("rgb", rgb);
        }
        if (new_map.containsKey("rgb")) {
            // rgb replaces any hue/sat/bri values.
            String rgbcolor = (String)new_map.remove("rgb");
            new_map.remove("hue");
            new_map.remove("sat");
            new_map.remove("bri");
            Map<String,Integer> hue = ColorHelper.convertRGB2Hue(rgbcolor);
            new_map.putAll(hue);
        }
        return new_map;
    }

    /**
     * Copies already-converted bridge parameters into the local state cache.
     */
    private void setStateFromMap(Map<String,Object> map) {
        for (String key : map.keySet()) {
            Object value = map.get(key);
            if (key.equals("bri")) {
                this.brightness = (Integer) value;
            } else if (key.equals("sat")) {
                this.saturation = (Integer) value;
            } else if (key.equals("hue")) {
                this.hue = (Integer) value;
            } else if (key.equals("on")) {
                this.on = (Boolean) value;
            } else if (key.equals("effect")) {
                this.effect = Effect.fromName((String)value);
            }
            // NOTE(review): colorMode is set to HS for every processed key, even for
            // e.g. a pure "ct" update — kept as-is, but verify this is intended.
            colorMode = ColorMode.HS;
        }
    }

    /**
     * Whether this light is in 'on' or 'off' state.
     *
     * @return true if this light is on, false otherwise.
     */
    public Boolean getOn() {
        return isOn();
    }

    @Override
    public void setOn(Boolean on) {
        if(on==null) {
            throw new IllegalArgumentException("on may not be null");
        }
        stateChange("on", on);
        this.on = on;
    }

    /**
     * Get the brightness of this light.
     *
     * <p>See <a href="http://developers.meethue.com/1_lightsapi.html#14_get_light_attributes_and_state">Philips hue API, Section 1.4</a> for further reference.</p>
     *
     * @return the brightness value between 0 (lowest that is not off) and 255 (highest)
     */
    public Integer getBrightness() {
        checkSync();
        return brightness;
    }

    @Override
    public void setBrightness(Integer brightness) {
        if(brightness<0 || brightness>255) {
            throw new IllegalArgumentException("Brightness must be between 0-255");
        }
        stateChange("bri", brightness);
        this.brightness = brightness;
    }

    /**
     * Get the hue of this light. Note that the validity of this value may depend on {@link #getColorMode()}.
     *
     * <p>See <a href="http://developers.meethue.com/1_lightsapi.html#14_get_light_attributes_and_state">Philips hue API, Section 1.4</a> for further reference.</p>
     *
     * @see #HUE_RED
     * @see #HUE_GREEN
     * @see #HUE_BLUE
     *
     * @return the hue value between 0 and 65535 (both red)
     */
    public Integer getHue() {
        checkSync();
        return hue;
    }

    @Override
    public void setHue(Integer hue) {
        if(hue<0 || hue>65535) {
            throw new IllegalArgumentException("Hue must be between 0-65535");
        }
        stateChange("hue", hue);
        this.hue = hue;
        colorMode = ColorMode.HS;
    }

    /**
     * Get the saturation of this light. Note that the validity of this value may depend on {@link #getColorMode()}.
     *
     * <p>See <a href="http://developers.meethue.com/1_lightsapi.html#14_get_light_attributes_and_state">Philips hue API, Section 1.4</a> for further reference.</p>
     *
     * @return the saturation value between 0 (white) and 255 (colored)
     */
    public Integer getSaturation() {
        checkSync();
        return saturation;
    }

    @Override
    public void setSaturation(Integer saturation) {
        if(saturation<0 || saturation>255) {
            throw new IllegalArgumentException("Saturation must be between 0-255");
        }
        stateChange("sat", saturation);
        this.saturation = saturation;
        colorMode = ColorMode.HS;
    }

    /**
     * Get the x coordinate in CIE color space of this light. Note that the validity of this value may depend on {@link #getColorMode()}.
     *
     * <p>See <a href="http://developers.meethue.com/1_lightsapi.html#14_get_light_attributes_and_state">Philips hue API, Section 1.4</a> for further reference.</p>
     *
     * @return the x coordinate in CIE color space between 0 and 1
     */
    public Double getCiex() {
        checkSync();
        return ciex;
    }

    /**
     * Set the x coordinate of a color in CIE color space. For y the currently cached value is used.
     *
     * <p>See <a href="http://developers.meethue.com/1_lightsapi.html#16_set_light_state">Philips hue API, Section 1.6</a> for further reference.</p>
     *
     * @param ciex the x coordinate in CIE color space between 0 and 1
     */
    public void setCiex(Double ciex) {
        setCieXY(ciex, ciey);
        this.ciex = ciex;
    }

    /**
     * Get the y coordinate in CIE color space of this light. Note that the validity of this value may depend on {@link #getColorMode()}.
     *
     * <p>See <a href="http://developers.meethue.com/1_lightsapi.html#14_get_light_attributes_and_state">Philips hue API, Section 1.4</a> for further reference.</p>
     *
     * @return the y coordinate in CIE color space between 0 and 1
     */
    public Double getCiey() {
        checkSync();
        return ciey;
    }

    /**
     * Set the y coordinate of a color in CIE color space. For x the currently cached value is used.
     *
     * <p>See <a href="http://developers.meethue.com/1_lightsapi.html#16_set_light_state">Philips hue API, Section 1.6</a> for further reference.</p>
     *
     * @param ciey the y coordinate in CIE color space between 0 and 1
     */
    public void setCiey(Double ciey) {
        setCieXY(ciex, ciey);
        this.ciey = ciey;
    }

    @Override
    public void setCieXY(Double ciex, Double ciey) {
        if(ciex<0 || ciex>1 || ciey<0 || ciey>1) {
            throw new IllegalArgumentException("A cie coordinate must be between 0.0-1.0");
        }
        stateChange("xy", new JSONArray(Arrays.asList(ciex.floatValue(),ciey.floatValue())));
        this.ciex = ciex;
        this.ciey = ciey;
        colorMode = ColorMode.XY;
    }

    /**
     * Get the mired color temperature of this light. Note that the validity of this value may depend on {@link #getColorMode()}.
     *
     * <p>See <a href="http://developers.meethue.com/1_lightsapi.html#14_get_light_attributes_and_state">Philips hue API, Section 1.4</a> for further reference.</p>
     *
     * @return the color temperature value in mired between 153 (6500K) and 500 (2000K)
     */
    public Integer getColorTemperature() {
        checkSync();
        return colorTemperature;
    }

    @Override
    public void setColorTemperature(Integer colorTemperature) {
        if(colorTemperature<153 || colorTemperature>500) {
            throw new IllegalArgumentException("ColorTemperature must be between 153-500");
        }
        stateChange("ct", colorTemperature);
        this.colorTemperature = colorTemperature;
        colorMode = ColorMode.CT;
    }

    /**
     * Get the current dynamic effect of this light.
     *
     * <p>See <a href="http://developers.meethue.com/1_lightsapi.html#14_get_light_attributes_and_state">Philips hue API, Section 1.4</a> for further reference.</p>
     *
     * @return the current dynamic effect of this light.
     */
    public Effect getEffect() {
        checkSync();
        return effect;
    }

    @Override
    public void setEffect(Effect effect) {
        stateChange("effect", effect.name);
        this.effect = effect;
    }

    @Override
    public void setAlert(Alert alert) {
        stateChange("alert", alert.name);
    }

    /**
     * Get the mode with which the current color of the light has been set.
     *
     * <p>See <a href="http://developers.meethue.com/1_lightsapi.html#14_get_light_attributes_and_state">Philips hue API, Section 1.4</a> for further reference.</p>
     *
     * @return The current mode with which the current color of the light has been set.
     */
    public ColorMode getColorMode() {
        return colorMode;
    }

    @Override
    public String toString() {
        return getId() +"(" +getName() +")" +"["
               +(isOn() ? "ON" : "OFF") +","
               +(getColorMode()==ColorMode.CT ? "CT:"+getColorTemperature() : "")
               +(getColorMode()==ColorMode.HS ? "HS:"+getHue() +"/" +getSaturation() : "")
               +(getColorMode()==ColorMode.XY ? "XY:"+getCiex() +"/" +getCiey() : "")
               + "," +"BRI:" +getBrightness()
               +(getEffect()!=Effect.NONE ? ","+getEffect() : "")
               +"]";
    }

    /**
     * Runs {@code changes} inside a single state-change transaction: all state
     * changes are collected and committed as one bridge request. On any failure
     * the local cache is re-synced from the bridge (a kind of rollback).
     */
    @Override
    public void stateChangeTransaction(Integer transitionTime, Runnable changes) {
        openStateChangeTransaction(transitionTime);
        try {
            try {
                changes.run();
            } catch(Throwable t) {
                // Discard collected changes so commit becomes a no-op, then rethrow.
                stateTransactionJson.set(null);
                //noinspection ThrowCaughtLocally
                throw t;
            } finally {
                commitStateChangeTransaction();
            }
        } catch(Throwable t) {
            // do kind of rollback by syncing the state from the bridge
            sync();
        }
    }

    /**
     * Update the local state cache with values from the bridge device.
     */
    public void sync() {
        // Guard against re-entrant syncing on the same thread.
        if(syncing.get() == null || !syncing.get()) {
            try {
                syncing.set(true);
                final JSONObject response = bridge.request(GET, "/lights/" + getId(), "").get(0);
                if(response.has("error")) {
                    throw new HueCommException(response.getJSONObject("error"));
                } else {
                    parseLight(response);
                }
            } finally {
                syncing.set(false);
            }
        }
    }

    // *****************************************
    // Implementation internal methods
    // *****************************************

    // Per-thread buffer for an open state change transaction; null when no
    // transaction is open on the current thread.
    private ThreadLocal<JSONObject> stateTransactionJson = new ThreadLocal<JSONObject>();

    private void openStateChangeTransaction(Integer transitionTime) {
        if(stateTransactionJson.get()==null) {
            stateTransactionJson.set(new JSONObject());
            if(transitionTime!=null) {
                stateTransactionJson.get().put("transitiontime", transitionTime);
            }
        } else {
            throw new IllegalStateException("Have an open state change transaction already");
        }
    }

    /**
     * Sends the collected transaction changes to the bridge, if any.
     *
     * @return the bridge response, or null when no transaction was open.
     */
    private List<JSONObject> commitStateChangeTransaction() {
        final JSONObject json = stateTransactionJson.get();
        stateTransactionJson.set(null);
        if(json!=null) {
            return bridge.checkedSuccessRequest(PUT, "/lights/" + getId() + "/state", json);
        } else {
            return null;
        }
    }

    private List<JSONObject> stateChange(String param, Object value) {
        Map<String,Object> map = new HashMap<String,Object>();
        map.put(param, value);
        return stateChange(map);
    }

    /**
     * Sends the given state parameters to the bridge immediately, or collects
     * them if a state change transaction is open on this thread.
     *
     * @return the bridge response, or null when collected into a transaction.
     */
    private List<JSONObject> stateChange(Map<String,Object> param_value) {
        if(param_value==null) {
            throw new IllegalArgumentException("A value of null is not allowed for any of the lights states");
        }
        final JSONObject stateTransaction = stateTransactionJson.get();
        if(stateTransaction==null) {
            JSONWriter json = JO();
            if(transitionTime!=null) {
                json = json.key("transitiontime").value(transitionTime);
            }
            for (String s : param_value.keySet()) {
                json.key(s).value(param_value.get(s));
            }
            return bridge.checkedSuccessRequest(PUT, "/lights/" + getId() + "/state", json);
        } else {
            for (String s : param_value.keySet()) {
                stateTransaction.put(s, param_value.get(s));
            }
            return null;
        }
    }

    // Re-syncs the cached state when it is older than autoSyncInterval.
    private void checkSync() {
        final long now = System.currentTimeMillis();
        if(autoSyncInterval!=null && now-lastSyncTime>autoSyncInterval) {
            sync();
        }
    }

    // True on a thread while that thread is inside sync().
    private ThreadLocal<Boolean> syncing = new ThreadLocal<Boolean>();

    /**
     * Fills the local state cache from the bridge's JSON description of this light.
     */
    void parseLight(JSONObject lightJson) {
        name = lightJson.getString("name");
        if(lightJson.has("state")) {
            final JSONObject state = lightJson.getJSONObject("state");
            on = state.getBoolean("on");
            brightness = state.getInt("bri");
            hue = state.getInt("hue");
            saturation = state.getInt("sat");
            ciex = state.getJSONArray("xy").getDouble(0);
            ciey = state.getJSONArray("xy").getDouble(1);
            colorTemperature = state.getInt("ct");
            // Fail with a diagnostic instead of an ArrayIndexOutOfBoundsException
            // when the bridge reports an unknown color mode.
            final int modeIndex = Arrays.asList("hs", "xy", "ct").indexOf(state.getString("colormode").toLowerCase());
            if(modeIndex<0) {
                throw new HueCommException("Can not find color mode named \"" +state.getString("colormode") +"\"");
            }
            colorMode = new ColorMode[]{HS,XY,CT}[modeIndex];
            final Effect effect = Effect.fromName(state.getString("effect"));
            if(effect==null) {
                throw new HueCommException("Can not find effect named \"" +state.getString("effect") +"\"");
            }
            this.effect = effect;
            lastSyncTime = System.currentTimeMillis();
        } else {
            // No state delivered: fetch the full light description instead.
            sync();
        }
    }

    /**
     * Helper method to shorten creation of a JSONObject String.
     * @return A JSONStringer with an object already 'open' and auto-object-end on a call to toString()
     */
    private static JSONStringer JO() {
        return new JSONStringer() {
            { object(); }
            @Override
            public String toString() {
                return writer.toString()+(mode!='d' ? "}" :"");
            }
        };
    }

    /**
     * Orders by id value, then by name, then distinguishes differing cached state.
     * (Fixed: ids were previously compared with '==', i.e. by Integer reference
     * identity, which is only reliable for small cached Integer values.)
     */
    @Override
    public int compareTo(HueLightBulb other) {
        if (!this.id.equals(other.id)) {
            return this.id.compareTo(other.id);
        }
        if (!this.name.equals(other.name)) {
            return this.name.compareTo(other.name);
        }
        final boolean sameState =
                (this.on==other.on)
                && (this.hue==other.hue)
                && (this.brightness==other.brightness)
                && (this.saturation==other.saturation)
                && (this.colorTemperature==other.colorTemperature)
                && (this.ciex==other.ciex)
                && (this.ciey==other.ciey)
                && (this.colorMode==other.colorMode)
                && (this.effect==other.effect);
        return sameState ? 0 : 1;
    }
}
package it.uniroma3.controller;

import java.util.GregorianCalendar;
import java.util.List;

import it.uniroma3.facade.CustomerFacade;
import it.uniroma3.facade.OrderFacade;
import it.uniroma3.facade.ProductFacade;
import it.uniroma3.helper.ContextHelper;
import it.uniroma3.model.Customer;
import it.uniroma3.model.OrderLine;
import it.uniroma3.model.Orders;
import it.uniroma3.model.Product;

import javax.ejb.EJB;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.ManagedProperty;

/**
 * System operations for closed/evaded Order management
 *
 * @author Andrea
 *
 */
@ManagedBean
public class OrderController {

    @EJB
    private OrderFacade orderFacade;
    @EJB
    private CustomerFacade customerFacade;
    @EJB
    private ProductFacade productFacade;

    private ContextHelper ch;

    // Order id injected from the request parameter "id".
    @ManagedProperty(value="#{param.id}")
    private Long id;

    private Orders order;
    private Customer orderCustomer;

    public OrderController(){
        this.ch = new ContextHelper();
    }

    /**
     * Loads the order with the given id and caches both the order and its
     * customer on this controller.
     */
    public Orders findOrderAndCustomer(Long idOrder){
        this.order = orderFacade.getOrder(idOrder);
        this.orderCustomer = order.getCustomer();
        return this.order;
    }

    /**
     * Shared lookup used by the order listing methods below.
     *
     * @return the id of the currently logged customer, or null when nobody is
     *         logged in or the session customer carries the sentinel id -1.
     */
    private Long getLoggedCustomerId(){
        CustomerController customerController = new CustomerController();
        if(customerController.isLogged()){
            Long fetchedId = customerController.getCurrentCustomer().getId();
            if(fetchedId != -1)
                return fetchedId;
        }
        return null;
    }

    /** All orders of the logged customer, or null when nobody is logged in. */
    public List<Orders> getOrders(){
        Long customerId = getLoggedCustomerId();
        return customerId == null ? null : orderFacade.getAllOrdersFromCustomer(customerId);
    }

    /** All closed orders, regardless of customer. */
    public List<Orders> getAllClosedOrders(){
        return orderFacade.getAllClosedOrders();
    }

    /** Closed orders of the logged customer, or null when nobody is logged in. */
    public List<Orders> getClosedOrders(){
        Long customerId = getLoggedCustomerId();
        return customerId == null ? null : orderFacade.getAllClosedOrdersFromCustomer(customerId);
    }

    /** Evaded orders of the logged customer, or null when nobody is logged in. */
    public List<Orders> getEvadedOrders(){
        Long customerId = getLoggedCustomerId();
        return customerId == null ? null : orderFacade.getAllEvadedOrdersFromCustomer(customerId);
    }

    /**
     * An order can be evaded only by a logged admin and only when it has not
     * been evaded yet (no evasion time recorded).
     */
    public boolean canEvade(Long idOrder){
        if(!new AdminController().isLogged())
            return false;
        Orders myOrder = orderFacade.getOrder(idOrder);
        if(myOrder != null)
            return myOrder.getEvasionTime() == null;
        System.out.println("canEvade(): Order is null");
        return false;
    }

    public List<Orders> getLastOrders(int numOrders){
        return orderFacade.getLastOrders(numOrders);
    }

    /** @return the JSF navigation outcome for the detail page of the given order. */
    public String findOrder(Long orderId){
        return "order?id=" + orderId + "&faces-redirect=true&includeViewParams=true";
    }

    public String findOrder(){
        return findOrder(this.id);
    }

    /**
     * Sum of quantity * unit price over all lines of the order.
     *
     * NOTE(review): double is used for money here (and by Product.getPrice());
     * a BigDecimal-based model would avoid rounding issues, but changing the
     * return type would break callers.
     */
    public double getOrderTotal(Long orderId){
        double sum = 0;
        for(OrderLine ol : orderFacade.getOrderLines(orderId))
            sum += ol.getQuantity()*ol.getProduct().getPrice();
        return sum;
    }

    public String tryEvadeOrder() {
        return tryEvadeOrder(id);
    }

    /**
     * Tries to evade the given order: verifies permissions and storage, then
     * decreases the storage quantities and stamps the evasion time.
     *
     * @return a human-readable outcome message ("Success" or an error text)
     */
    public String tryEvadeOrder(Long idOrder){
        System.out.println("Trying to evade order number... ");
        System.out.println(idOrder);
        try {
            Orders closedOrder = orderFacade.getOrder(idOrder);
            if(closedOrder.canEvadeAllLines() && canEvade(idOrder)){
                evadeOrder(closedOrder);
                closedOrder.setEvasionTime(new GregorianCalendar().getTime());
                this.orderFacade.updateOrder(closedOrder);
                System.out.println("Success!");
                return "Success";
            } else {
                System.out.println("Error! Not logged or insufficient storage.");
                return "Error: couldn't evade the order (insufficient products)";
            }
        } catch(Exception e){
            // Broad catch kept on purpose: the view only needs an error string.
            // But log the cause instead of swallowing it silently.
            System.out.println("Unknown Error! " + e);
            return "Error: couldn't evade the order";
        }
    }

    /** Decreases the storage quantity of every product in the order. */
    public void evadeOrder(Orders order) {
        for(OrderLine ol : order.getOrderLines()){
            Product product = ol.getProduct();
            product.decreaseStorageQuantity(ol.getQuantity());
            productFacade.updateProduct(product);
        }
    }

    /** GETTER AND SETTERS **/

    public OrderFacade getOrderFacade() {
        return orderFacade;
    }

    public void setOrderFacade(OrderFacade orderFacade) {
        this.orderFacade = orderFacade;
    }

    public Orders getOrder() {
        return order;
    }

    public void setOrder(Orders order) {
        this.order = order;
    }

    public ContextHelper getCh() {
        return ch;
    }

    public void setCh(ContextHelper ch) {
        this.ch = ch;
    }

    public Customer getOrderCustomer() {
        return orderCustomer;
    }

    public void setOrderCustomer(Customer customer) {
        this.orderCustomer = customer;
    }

    public CustomerFacade getCustomerFacade() {
        return customerFacade;
    }

    public void setCustomerFacade(CustomerFacade customerFacade) {
        this.customerFacade = customerFacade;
    }

    public ProductFacade getProductFacade() {
        return productFacade;
    }

    public void setProductFacade(ProductFacade productFacade) {
        this.productFacade = productFacade;
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }
}
package com.whitepages.whiteelephant;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

import org.apache.pig.Accumulator;
import org.apache.pig.Algebraic;
import org.apache.pig.EvalFunc;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.DataType;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.apache.pig.impl.logicalLayer.schema.Schema;

/**
 * Generates the median of values in bag given in first field of a tuple.
 *
 * Example:
 *   A = LOAD 'mydata' as (key: int, value: int);
 *   keys = GROUP A BY key;
 *   medians = FOREACH keys GENERATE group AS key, MEDIAN(A.value) AS median;
 */
public class Median extends EvalFunc<Integer> implements Accumulator<Integer>, Algebraic {
    private static TupleFactory mTupleFactory = TupleFactory.getInstance();

    @Override
    public Integer exec(Tuple input) throws IOException {
        try {
            // the input is a bag of tuples of ints
            Map<String, Integer> counts = getCounts(input);
            // Iterate through to find the median
            return median(counts);
        } catch (Exception e) {
            String msg = "Error while computing median in " + this.getClass().getSimpleName();
            // Preserve the original exception as the cause (it was dropped before).
            throw new IOException(msg + ": " + e.getMessage(), e);
        }
    }

    // For the Algebraic interface
    @Override
    public String getInitial() { return Initial.class.getName(); }

    @Override
    public String getIntermed() { return Intermediate.class.getName(); }

    @Override
    public String getFinal() { return Final.class.getName(); }

    // Get the counts for the initial bags passed in
    static public class Initial extends EvalFunc<Tuple> {
        @Override
        public Tuple exec(Tuple input) throws IOException {
            try {
                Map<String, Integer> counts = getCounts(input);
                return TupleFactory.getInstance().newTuple(counts);
            } catch (Exception e) {
                throw new IOException(e.getMessage(), e);
            }
        }
    }

    // Combine the counts previously generated, output intermediate counts
    static public class Intermediate extends EvalFunc<Tuple> {
        @Override
        public Tuple exec(Tuple input) throws IOException {
            try {
                Map<String, Integer> aggCounts = combineCounts(input);
                return mTupleFactory.newTuple(aggCounts);
            } catch (Exception e) {
                throw new IOException(e.getMessage(), e);
            }
        }
    }

    // Combine the counts previously generated, output their median
    static public class Final extends EvalFunc<Integer> {
        @Override
        public Integer exec(Tuple input) throws IOException {
            try {
                Map<String,Integer> counts = combineCounts(input);
                return median(counts);
            } catch (Exception e) {
                throw new IOException(e.getMessage(), e);
            }
        }
    }

    // Count the number of times each value occurs, return a map
    // from value -> count
    static protected Map<String, Integer> getCounts(Tuple input) throws ExecException {
        // the input is a tuple of a bag of tuples of ints
        DataBag bag = (DataBag)input.get(0);
        Iterator<Tuple> it = bag.iterator();

        // pig only supports String->Object maps, so values are keyed by their
        // String form
        Map<String, Integer> counts = new TreeMap<String, Integer>();

        // sum the count of each value
        while (it.hasNext()){
            Tuple t = it.next();
            if (t != null && t.size() > 0 && t.get(0) != null ) {
                String value = t.get(0).toString();
                int oldCount = counts.containsKey(value) ? counts.get(value) : 0;
                counts.put(value, oldCount+1);
            }
        }
        return counts;
    }

    // Combine the intermediate counts to a total count, a map from value -> count
    static protected Map<String,Integer> combineCounts(Tuple input) throws ExecException {
        // the input is a tuple of a bag of maps
        DataBag bag = (DataBag)input.get(0);
        Iterator<Tuple> it = bag.iterator();

        Map<String, Integer> aggCounts = new TreeMap<String, Integer>();

        // get each map of sub-counts and aggregate them
        while (it.hasNext()){
            Tuple t = it.next();
            if (t != null && t.size() > 0 && t.get(0) != null ) {
                @SuppressWarnings("unchecked")
                Map<String,Integer> subCounts = (Map<String,Integer>)t.get(0);
                combineCountMaps(aggCounts, subCounts);
            }
        }
        return aggCounts;
    }

    // Combine counts from subCounts to aggCounts
    static protected void combineCountMaps(Map<String, Integer> aggCounts, Map<String, Integer> subCounts) {
        for (Map.Entry<String, Integer> entry : subCounts.entrySet()) {
            String value = entry.getKey();
            int aggCount = aggCounts.containsKey(value) ? aggCounts.get(value) : 0;
            aggCounts.put(value, aggCount + entry.getValue());
        }
    }

    // Return the median value from the given values
    static protected int median(Map<String, Integer> counts) {
        long total = 0;
        for (int count : counts.values()) {
            total += count;
        }

        // BUGFIX: the map is keyed by the String form of each value, so the
        // TreeMap's natural ordering is lexicographic ("10" sorts before "2")
        // and walking it directly picks the wrong median whenever the values
        // span different digit lengths. Order the keys numerically first.
        List<String> orderedValues = new ArrayList<String>(counts.keySet());
        Collections.sort(orderedValues, new Comparator<String>() {
            @Override
            public int compare(String a, String b) {
                return Integer.valueOf(a).compareTo(Integer.valueOf(b));
            }
        });

        Iterator<String> countedIt = orderedValues.iterator();
        long sum = 0;
        String value = null;
        while (countedIt.hasNext()) {
            value = countedIt.next();
            sum += counts.get(value);
            // if we land exactly on the middle of an even number of values,
            // the median is the mean of the two middle values
            if (total % 2 == 0 && sum == total/2.0 && countedIt.hasNext()) {
                return (Integer.parseInt(value) + Integer.parseInt(countedIt.next())) / 2;
            } else if (sum >= total/2.0) {
                return Integer.parseInt(value);
            }
        }
        return Integer.parseInt(value);
    }

    @Override
    public Schema outputSchema(Schema input) {
        return new Schema(new Schema.FieldSchema(null, DataType.INTEGER));
    }

    /* Accumulator interface implementation */

    // Accumulated counts for the current key: value -> count; null until the
    // first accumulate() call and after cleanup().
    private Map<String, Integer> aggCounts = null;

    @Override
    public void accumulate(Tuple b) throws IOException {
        try {
            // first time being called for this value, so start fresh
            if ( aggCounts == null ) {
                aggCounts = getCounts(b);
            } else {
                // else combine the counts
                Map<String, Integer> subCounts = getCounts(b);
                combineCountMaps(aggCounts, subCounts);
            }
        } catch (Exception e) {
            throw new IOException(e.getMessage(), e);
        }
    }

    @Override
    public void cleanup() {
        aggCounts = null;
    }

    @Override
    public Integer getValue() {
        // NOTE(review): throws NullPointerException if accumulate() was never
        // called for this key — assumed Pig never does that; verify.
        return median(aggCounts);
    }
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package org.jetbrains.plugins.gradle.service.project; import com.intellij.build.events.MessageEvent; import com.intellij.diagnostic.Activity; import com.intellij.diagnostic.ActivityCategory; import com.intellij.diagnostic.StartUpMeasurer; import com.intellij.execution.configurations.ParametersList; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.externalSystem.importing.ProjectResolverPolicy; import com.intellij.openapi.externalSystem.model.DataNode; import com.intellij.openapi.externalSystem.model.ExternalSystemException; import com.intellij.openapi.externalSystem.model.ProjectKeys; import com.intellij.openapi.externalSystem.model.project.*; import com.intellij.openapi.externalSystem.model.task.ExternalSystemTaskId; import com.intellij.openapi.externalSystem.model.task.ExternalSystemTaskNotificationListener; import com.intellij.openapi.externalSystem.service.project.ExternalSystemProjectResolver; import com.intellij.openapi.externalSystem.service.project.PerformanceTrace; import com.intellij.openapi.externalSystem.util.ExternalSystemDebugEnvironment; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.progress.util.ProgressIndicatorUtils; import com.intellij.openapi.util.Factory; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.util.text.StringUtil; import com.intellij.util.ExceptionUtil; import com.intellij.util.Function; import com.intellij.util.SmartList; import com.intellij.util.containers.CollectionFactory; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.MultiMap; import org.gradle.tooling.CancellationTokenSource; import org.gradle.tooling.ProjectConnection; import 
org.gradle.tooling.model.ProjectModel; import org.gradle.tooling.model.build.BuildEnvironment; import org.gradle.tooling.model.idea.IdeaModule; import org.gradle.tooling.model.idea.IdeaProject; import org.gradle.util.GradleVersion; import org.jetbrains.annotations.ApiStatus; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.plugins.gradle.execution.target.TargetBuildLauncher; import org.jetbrains.plugins.gradle.issue.DeprecatedGradleVersionIssue; import org.jetbrains.plugins.gradle.model.*; import org.jetbrains.plugins.gradle.model.data.BuildParticipant; import org.jetbrains.plugins.gradle.model.data.CompositeBuildData; import org.jetbrains.plugins.gradle.model.data.GradleSourceSetData; import org.jetbrains.plugins.gradle.remote.impl.GradleLibraryNamesMixer; import org.jetbrains.plugins.gradle.service.execution.GradleExecutionHelper; import org.jetbrains.plugins.gradle.settings.DistributionType; import org.jetbrains.plugins.gradle.settings.GradleBuildParticipant; import org.jetbrains.plugins.gradle.settings.GradleExecutionSettings; import org.jetbrains.plugins.gradle.util.GradleConstants; import java.io.File; import java.util.*; import java.util.concurrent.CountDownLatch; import java.util.function.Predicate; import java.util.stream.Stream; import static com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil.*; import static org.jetbrains.plugins.gradle.service.project.GradleProjectResolverUtil.getDefaultModuleTypeId; import static org.jetbrains.plugins.gradle.service.project.GradleProjectResolverUtil.getModuleId; /** * @author Denis Zhdanov, Vladislav Soroka */ public class GradleProjectResolver implements ExternalSystemProjectResolver<GradleExecutionSettings> { private static final Logger LOG = Logger.getInstance(GradleProjectResolver.class); @NotNull private final GradleExecutionHelper myHelper; private final GradleLibraryNamesMixer myLibraryNamesMixer = new GradleLibraryNamesMixer(); private final 
// --- cancellation bookkeeping ---------------------------------------------------------------
// Maps a running sync task id to the token source(s) that can abort its Gradle invocation(s).
// Concurrent map: resolveProjectInfo()/ProjectConnectionDataNodeFunction register entries on
// resolver threads while cancelTask() reads them from another thread.
MultiMap<ExternalSystemTaskId, CancellationTokenSource> myCancellationMap = MultiMap.createConcurrent();

// User-data keys for intermediate lookup tables hung off the project DataNode during a single
// resolve: populated in convertData(), consumed by mergeLibraryAndModuleDependencyData(), and
// cleared (set to null) before the node tree is returned to the caller.
public static final Key<Map<String/* module id */, Pair<DataNode<GradleSourceSetData>, ExternalSourceSet>>> RESOLVED_SOURCE_SETS = Key.create("resolvedSourceSets");
public static final Key<Map<String/* output path */, Pair<String /* module id*/, ExternalSystemSourceType>>> MODULES_OUTPUTS = Key.create("moduleOutputsMap");
public static final Key<MultiMap<ExternalSystemSourceType, String /* output path*/>> GRADLE_OUTPUTS = Key.create("gradleOutputs");
public static final Key<Map<String/* artifact path */, String /* module id*/>> CONFIGURATION_ARTIFACTS = Key.create("gradleArtifactsMap");

// Gradle distribution home discovered while populating modules; reused for buildSrc imports.
private static final Key<File> GRADLE_HOME_DIR = Key.create("gradleHomeDir");

// This constructor is called by external system API, see AbstractExternalSystemFacadeImpl class constructor.
@SuppressWarnings("UnusedDeclaration")
public GradleProjectResolver() {
  this(new GradleExecutionHelper());
}

public GradleProjectResolver(@NotNull GradleExecutionHelper helper) {
  myHelper = helper;
}

/**
 * Entry point of the resolver: builds the {@link DataNode} project tree for the Gradle project
 * at {@code projectPath}.
 *
 * <p>In preview mode a minimal tree (project + one module + content root) is synthesized
 * without contacting Gradle at all. Otherwise the full resolve is delegated through
 * {@link GradleExecutionHelper#execute} to {@link #doResolveProjectInfo}, and buildSrc
 * projects are auto-discovered afterwards.
 *
 * @param resolverPolicy optional partial-resolve policy; only {@link GradlePartialResolverPolicy}
 *                       is supported — any other implementation is rejected with an exception
 */
@Nullable
@Override
public DataNode<ProjectData> resolveProjectInfo(@NotNull final ExternalSystemTaskId syncTaskId,
                                                @NotNull final String projectPath,
                                                final boolean isPreviewMode,
                                                @Nullable final GradleExecutionSettings settings,
                                                @Nullable ProjectResolverPolicy resolverPolicy,
                                                @NotNull final ExternalSystemTaskNotificationListener listener)
  throws ExternalSystemException, IllegalArgumentException, IllegalStateException {
  GradlePartialResolverPolicy gradleResolverPolicy = null;
  if (resolverPolicy != null) {
    if (resolverPolicy instanceof GradlePartialResolverPolicy) {
      gradleResolverPolicy = (GradlePartialResolverPolicy)resolverPolicy;
    }
    else {
      throw new ExternalSystemException("Unsupported project resolver policy: " + resolverPolicy.getClass().getName());
    }
  }
  if (isPreviewMode) {
    // Create project preview model w/o request to gradle, there are two main reasons for it:
    // * Slow project open - even the simplest project info provided by gradle can be gathered too long
    //   (mostly because of new gradle distribution download and downloading build script dependencies)
    // * Ability to open invalid projects (e.g. with errors in build scripts)
    String projectName = new File(projectPath).getName();
    ProjectData projectData = new ProjectData(GradleConstants.SYSTEM_ID, projectName, projectPath, projectPath);
    DataNode<ProjectData> projectDataNode = new DataNode<>(ProjectKeys.PROJECT, projectData, null);

    final String ideProjectPath = settings == null ? null : settings.getIdeProjectPath();
    final String mainModuleFileDirectoryPath = ideProjectPath == null ? projectPath : ideProjectPath;

    projectDataNode
      .createChild(ProjectKeys.MODULE, new ModuleData(projectName, GradleConstants.SYSTEM_ID, getDefaultModuleTypeId(),
                                                      projectName, mainModuleFileDirectoryPath, projectPath))
      .createChild(ProjectKeys.CONTENT_ROOT, new ContentRootData(GradleConstants.SYSTEM_ID, projectPath));
    return projectDataNode;
  }

  DefaultProjectResolverContext resolverContext =
    new DefaultProjectResolverContext(syncTaskId, projectPath, settings, listener, gradleResolverPolicy, false);
  final CancellationTokenSource cancellationTokenSource = resolverContext.getCancellationTokenSource();
  // register so cancelTask() can abort this resolve; removed again in the finally block below
  myCancellationMap.putValue(resolverContext.getExternalSystemTaskId(), cancellationTokenSource);

  try {
    if (settings != null) {
      myHelper.ensureInstalledWrapper(syncTaskId, projectPath, settings, listener, cancellationTokenSource.token());
    }
    Predicate<GradleProjectResolverExtension> extensionsFilter =
      gradleResolverPolicy != null ? gradleResolverPolicy.getExtensionsFilter() : null;
    final GradleProjectResolverExtension projectResolverChain = createProjectResolverChain(resolverContext, extensionsFilter);
    final DataNode<ProjectData> projectDataNode = myHelper.execute(
      projectPath, settings, syncTaskId, listener, cancellationTokenSource,
      getProjectDataFunction(resolverContext, projectResolverChain, false));

    // auto-discover buildSrc projects of the main and included builds
    File gradleUserHome = resolverContext.getUserData(GRADLE_HOME_DIR);
    new GradleBuildSrcProjectsResolver(this, resolverContext, gradleUserHome, settings, listener, syncTaskId, projectResolverChain)
      .discoverAndAppendTo(projectDataNode);

    return projectDataNode;
  }
  finally {
    myCancellationMap.remove(resolverContext.getExternalSystemTaskId(), cancellationTokenSource);
  }
}

/**
 * Wraps the resolver chain into the function executed inside an open {@link ProjectConnection}.
 * Package-private so GradleBuildSrcProjectsResolver can reuse it for buildSrc imports.
 */
@NotNull
Function<ProjectConnection, DataNode<ProjectData>> getProjectDataFunction(DefaultProjectResolverContext resolverContext,
                                                                          GradleProjectResolverExtension projectResolverChain,
                                                                          boolean isBuildSrcProject) {
  return new ProjectConnectionDataNodeFunction(resolverContext, projectResolverChain, isBuildSrcProject);
}

@NotNull
GradleExecutionHelper getHelper() {
  return myHelper;
}

/**
 * Cancels every Gradle invocation registered for the given task id.
 * Always reports success; actual interruption is asynchronous via the token sources.
 */
@Override
public boolean cancelTask(@NotNull ExternalSystemTaskId id, @NotNull ExternalSystemTaskNotificationListener listener) {
  for (CancellationTokenSource cancellationTokenSource : myCancellationMap.get(id)) {
    cancellationTokenSource.cancel();
  }
  return true;
}

/**
 * Performs the actual Gradle model fetch inside an open connection:
 * configures the resolver-extension chain, builds and runs the {@link ProjectImportAction},
 * then hands the fetched models to {@link #convertData} to produce the DataNode tree.
 *
 * <p>The {@code buildFinishWaiter} latch guarantees that the buildFinished callback of every
 * extension has run (or the fetch failed) before this method proceeds past the fetch block.
 */
@NotNull
private DataNode<ProjectData> doResolveProjectInfo(@NotNull final DefaultProjectResolverContext resolverCtx,
                                                   @NotNull final GradleProjectResolverExtension projectResolverChain,
                                                   boolean isBuildSrcProject)
  throws IllegalArgumentException, IllegalStateException {
  final PerformanceTrace performanceTrace = new PerformanceTrace();
  // decorate the chain so each extension call is timed into performanceTrace
  final GradleProjectResolverExtension tracedResolverChain = new TracedProjectResolverExtension(projectResolverChain, performanceTrace);

  final BuildEnvironment buildEnvironment = GradleExecutionHelper.getBuildEnvironment(resolverCtx);
  GradleVersion gradleVersion = null;

  boolean useCustomSerialization = Registry.is("gradle.tooling.custom.serializer", true);
  boolean isCompositeBuildsSupported = false;
  if (buildEnvironment != null) {
    gradleVersion = GradleVersion.version(buildEnvironment.getGradle().getGradleVersion());
    // composite builds appeared in Gradle 3.1
    isCompositeBuildsSupported = gradleVersion.compareTo(GradleVersion.version("3.1")) >= 0;
    resolverCtx.setBuildEnvironment(buildEnvironment);
    if (!isCustomSerializationSupported(resolverCtx, gradleVersion, isCompositeBuildsSupported)) {
      useCustomSerialization = false;
    }
  }
  final ProjectImportAction projectImportAction =
    useCustomSerialization
    ? new ProjectImportActionWithCustomSerializer(resolverCtx.isPreviewMode(), isCompositeBuildsSupported)
    : new ProjectImportAction(resolverCtx.isPreviewMode(), isCompositeBuildsSupported);

  boolean useParallelModelsFetch = Registry.is("gradle.tooling.models.parallel.fetch", false);
  projectImportAction.setParallelModelsFetch(useParallelModelsFetch);
  GradleExecutionSettings executionSettings = resolverCtx.getSettings();
  if (executionSettings == null) {
    executionSettings = new GradleExecutionSettings(null, null, DistributionType.BUNDLED, false);
  }

  configureExecutionArgumentsAndVmOptions(executionSettings, resolverCtx, isBuildSrcProject);
  final Set<Class<?>> toolingExtensionClasses = new HashSet<>();
  for (GradleProjectResolverExtension resolverExtension = tracedResolverChain;
       resolverExtension != null;
       resolverExtension = resolverExtension.getNext()) {
    // inject ProjectResolverContext into gradle project resolver extensions
    resolverExtension.setProjectResolverContext(resolverCtx);
    // pre-import checks
    resolverExtension.preImportCheck();

    projectImportAction.addTargetTypes(resolverExtension.getTargetTypes());

    // register classes of extra gradle project models required for extensions
    // (e.g. com.android.builder.model.AndroidProject)
    try {
      ProjectImportModelProvider modelProvider = resolverExtension.getModelProvider();
      if (modelProvider != null) {
        projectImportAction.addProjectImportModelProvider(modelProvider);
      }
      ProjectImportModelProvider projectsLoadedModelProvider = resolverExtension.getProjectsLoadedModelProvider();
      if (projectsLoadedModelProvider != null) {
        projectImportAction.addProjectImportModelProvider(projectsLoadedModelProvider, true);
      }
    }
    catch (Throwable t) {
      // a misbehaving extension must not break the whole import
      LOG.warn(t);
    }

    // collect tooling extensions classes
    try {
      toolingExtensionClasses.addAll(resolverExtension.getToolingExtensionsClasses());
    }
    catch (Throwable t) {
      LOG.warn(t);
    }
  }

  GradleExecutionHelper.attachTargetPathMapperInitScript(executionSettings);
  File initScript = GradleExecutionHelper.generateInitScript(isBuildSrcProject, toolingExtensionClasses);
  if (initScript != null) {
    executionSettings.withArguments(GradleConstants.INIT_SCRIPT_CMD_OPTION, initScript.getAbsolutePath());
  }

  BuildActionRunner buildActionRunner = new BuildActionRunner(resolverCtx, projectImportAction, executionSettings, myHelper);
  resolverCtx.checkCancelled();

  final long startTime = System.currentTimeMillis();
  Activity activity = StartUpMeasurer.startActivity("project data obtaining", ActivityCategory.GRADLE_IMPORT);
  ProjectImportAction.AllModels allModels;
  CountDownLatch buildFinishWaiter = new CountDownLatch(1);
  try {
    allModels = buildActionRunner.fetchModels(
      models -> {
        for (GradleProjectResolverExtension resolver = tracedResolverChain; resolver != null; resolver = resolver.getNext()) {
          resolver.projectsLoaded(models);
        }
      },
      (exception) -> {
        try {
          for (GradleProjectResolverExtension resolver = tracedResolverChain; resolver != null; resolver = resolver.getNext()) {
            resolver.buildFinished(exception);
          }
        }
        finally {
          buildFinishWaiter.countDown();
        }
      });
    if (gradleVersion != null && DeprecatedGradleVersionIssue.isDeprecated(gradleVersion)) {
      resolverCtx.report(MessageEvent.Kind.WARNING, new DeprecatedGradleVersionIssue(gradleVersion, resolverCtx.getProjectPath()));
    }
    performanceTrace.addTrace(allModels.getPerformanceTrace());
  }
  catch (Throwable t) {
    // release the latch so the finally block below cannot wait forever on a failed fetch
    buildFinishWaiter.countDown();
    throw t;
  }
  finally {
    ProgressIndicatorUtils.awaitWithCheckCanceled(buildFinishWaiter);
    activity.end();
    final long timeInMs = (System.currentTimeMillis() - startTime);
    performanceTrace.logPerformance("Gradle data obtained", timeInMs);
    LOG.debug(String.format("Gradle data obtained in %d ms", timeInMs));
  }

  resolverCtx.checkCancelled();
  if (useCustomSerialization) {
    assert gradleVersion != null;
    allModels.initToolingSerializer();
  }
  allModels.setBuildEnvironment(buildEnvironment);

  try (GradleTargetPathsConverter pathsConverter = new GradleTargetPathsConverter(executionSettings)) {
    pathsConverter.mayBeApplyTo(allModels);
    return convertData(allModels, executionSettings, resolverCtx, gradleVersion,
                       tracedResolverChain, performanceTrace, isBuildSrcProject, useCustomSerialization);
  }
}

/**
 * Converts the fetched Gradle tooling models into the external-system {@link DataNode} tree.
 *
 * <p>Phases, in order: extract ExternalProject models; let ProjectModelContributor extensions
 * contribute; create one ModuleData node per Gradle module (ids must be unique); populate
 * content roots / outputs / tasks per module; populate dependencies; merge source-set content
 * roots; substitute library dependencies with module dependencies when resolving per source
 * set; finally clear the temporary user-data maps and deduplicate library names.
 */
@NotNull
private DataNode<ProjectData> convertData(@NotNull ProjectImportAction.AllModels allModels,
                                          @NotNull GradleExecutionSettings executionSettings,
                                          @NotNull DefaultProjectResolverContext resolverCtx,
                                          @Nullable GradleVersion gradleVersion,
                                          @NotNull GradleProjectResolverExtension tracedResolverChain,
                                          @NotNull PerformanceTrace performanceTrace,
                                          boolean isBuildSrcProject,
                                          boolean useCustomSerialization) {
  final long startDataConversionTime = System.currentTimeMillis();
  Activity activity = StartUpMeasurer.startActivity("project data converting", ActivityCategory.GRADLE_IMPORT);

  extractExternalProjectModels(allModels, resolverCtx, useCustomSerialization);

  String projectName = allModels.getMainBuild().getName();

  ModifiableGradleProjectModelImpl modifiableGradleProjectModel =
    new ModifiableGradleProjectModelImpl(projectName, resolverCtx.getProjectPath());
  ToolingModelsProvider modelsProvider = new ToolingModelsProviderImpl(allModels);
  ProjectModelContributor.EP_NAME.forEachExtensionSafe(extension -> {
    resolverCtx.checkCancelled();
    final long starResolveTime = System.currentTimeMillis();
    extension.accept(modifiableGradleProjectModel, modelsProvider, resolverCtx);
    final long resolveTimeInMs = (System.currentTimeMillis() - starResolveTime);
    performanceTrace.logPerformance("Project model contributed by " + extension.getClass().getSimpleName(), resolveTimeInMs);
    LOG.debug(String.format("Project model contributed by `" + extension.getClass().getSimpleName() + "` in %d ms", resolveTimeInMs));
  });

  DataNode<ProjectData> projectDataNode = modifiableGradleProjectModel.buildDataNodeGraph();
  DataNode<PerformanceTrace> performanceTraceNode = new DataNode<>(PerformanceTrace.TRACE_NODE_KEY, performanceTrace, projectDataNode);
  projectDataNode.addChild(performanceTraceNode);

  Set<? extends IdeaModule> gradleModules = Collections.emptySet();
  IdeaProject ideaProject = allModels.getModel(IdeaProject.class);
  if (ideaProject != null) {
    tracedResolverChain.populateProjectExtraModels(ideaProject, projectDataNode);
    gradleModules = ideaProject.getModules();
    if (gradleModules == null || gradleModules.isEmpty()) {
      throw new IllegalStateException("No modules found for the target project: " + ideaProject);
    }
  }
  Collection<IdeaModule> includedModules = exposeCompositeBuild(allModels, resolverCtx, projectDataNode);

  final Map<String /* module id */, Pair<DataNode<ModuleData>, IdeaModule>> moduleMap = new HashMap<>();
  final Map<String /* module id */, Pair<DataNode<GradleSourceSetData>, ExternalSourceSet>> sourceSetsMap = new HashMap<>();
  projectDataNode.putUserData(RESOLVED_SOURCE_SETS, sourceSetsMap);

  final Map<String/* output path */, Pair<String /* module id*/, ExternalSystemSourceType>> moduleOutputsMap =
    CollectionFactory.createFilePathMap();
  projectDataNode.putUserData(MODULES_OUTPUTS, moduleOutputsMap);

  final Map<String/* artifact path */, String /* module id*/> artifactsMap = CollectionFactory.createFilePathMap();
  projectDataNode.putUserData(CONFIGURATION_ARTIFACTS, artifactsMap);

  // import modules data
  for (IdeaModule gradleModule : ContainerUtil.concat(gradleModules, includedModules)) {
    if (gradleModule == null) {
      continue;
    }
    resolverCtx.checkCancelled();
    if (ExternalSystemDebugEnvironment.DEBUG_ORPHAN_MODULES_PROCESSING) {
      LOG.info(String.format("Importing module data: %s", gradleModule));
    }
    final String moduleName = gradleModule.getName();
    if (moduleName == null) {
      throw new IllegalStateException("Module with undefined name detected: " + gradleModule);
    }
    DataNode<ModuleData> moduleDataNode = tracedResolverChain.createModule(gradleModule, projectDataNode);
    if (moduleDataNode == null) continue;
    String mainModuleId = getModuleId(resolverCtx, gradleModule);
    if (moduleMap.containsKey(mainModuleId)) {
      // we should ensure deduplicated module names in the scope of single import
      throw new IllegalStateException("Duplicate modules names detected: " + gradleModule);
    }
    moduleMap.put(mainModuleId, Pair.create(moduleDataNode, gradleModule));
  }
  executionSettings.getExecutionWorkspace().setModuleIdIndex(moduleMap);

  File gradleHomeDir = null;
  // populate modules nodes
  for (final Pair<DataNode<ModuleData>, IdeaModule> pair : moduleMap.values()) {
    final DataNode<ModuleData> moduleDataNode = pair.first;
    final IdeaModule ideaModule = pair.second;

    if (gradleHomeDir == null) {
      final BuildScriptClasspathModel buildScriptClasspathModel =
        resolverCtx.getExtraProject(ideaModule, BuildScriptClasspathModel.class);
      if (buildScriptClasspathModel != null) {
        gradleHomeDir = buildScriptClasspathModel.getGradleHomeDir();
      }
    }

    tracedResolverChain.populateModuleContentRoots(ideaModule, moduleDataNode);
    tracedResolverChain.populateModuleCompileOutputSettings(ideaModule, moduleDataNode);
    if (!isBuildSrcProject) {
      tracedResolverChain.populateModuleTasks(ideaModule, moduleDataNode, projectDataNode);
    }

    // record compile-output paths and (for source-set modules) artifact paths for later
    // dependency substitution in mergeLibraryAndModuleDependencyData()
    final List<DataNode<? extends ModuleData>> modules = new SmartList<>();
    modules.add(moduleDataNode);
    modules.addAll(findAll(moduleDataNode, GradleSourceSetData.KEY));

    final ExternalSystemSourceType[] sourceTypes = new ExternalSystemSourceType[]{
      ExternalSystemSourceType.SOURCE,
      ExternalSystemSourceType.RESOURCE,
      ExternalSystemSourceType.TEST,
      ExternalSystemSourceType.TEST_RESOURCE
    };
    for (DataNode<? extends ModuleData> module : modules) {
      final ModuleData moduleData = module.getData();
      for (ExternalSystemSourceType sourceType : sourceTypes) {
        final String path = moduleData.getCompileOutputPath(sourceType);
        if (path != null) {
          moduleOutputsMap.put(path, Pair.create(moduleData.getId(), sourceType));
        }
      }

      if (moduleData instanceof GradleSourceSetData) {
        for (File artifactFile : moduleData.getArtifacts()) {
          artifactsMap.put(toCanonicalPath(artifactFile.getPath()), moduleData.getId());
        }
      }
    }
  }
  // reuse same gradle home (for auto-discovered buildSrc projects) also for partial imports
  // which don't request BuildScriptClasspathModel
  if (gradleHomeDir == null && executionSettings.getGradleHome() != null) {
    gradleHomeDir = new File(executionSettings.getGradleHome());
  }
  resolverCtx.putUserData(GRADLE_HOME_DIR, gradleHomeDir);

  for (final Pair<DataNode<ModuleData>, IdeaModule> pair : moduleMap.values()) {
    final DataNode<ModuleData> moduleDataNode = pair.first;
    final IdeaModule ideaModule = pair.second;
    tracedResolverChain.populateModuleDependencies(ideaModule, moduleDataNode, projectDataNode);
    tracedResolverChain.populateModuleExtraModels(ideaModule, moduleDataNode);
  }
  mergeSourceSetContentRoots(moduleMap, resolverCtx);
  if (resolverCtx.isResolveModulePerSourceSet()) {
    mergeLibraryAndModuleDependencyData(resolverCtx, projectDataNode, resolverCtx.getGradleUserHome(), gradleHomeDir, gradleVersion);
  }

  for (GradleProjectResolverExtension resolver = tracedResolverChain; resolver != null; resolver = resolver.getNext()) {
    resolver.resolveFinished(projectDataNode);
  }

  // the intermediate maps are only valid during this resolve — drop them from the node tree
  projectDataNode.putUserData(RESOLVED_SOURCE_SETS, null);
  projectDataNode.putUserData(MODULES_OUTPUTS, null);
  projectDataNode.putUserData(CONFIGURATION_ARTIFACTS, null);

  // ensure unique library names
  Collection<DataNode<LibraryData>> libraries = getChildren(projectDataNode, ProjectKeys.LIBRARY);
  myLibraryNamesMixer.mixNames(libraries);

  activity.end();
  final long timeConversionInMs = (System.currentTimeMillis() - startDataConversionTime);
  performanceTrace.logPerformance("Gradle project data processed", timeConversionInMs);
  LOG.debug(String.format("Project data resolved in %d ms", timeConversionInMs));
  return projectDataNode;
}

/**
 * Whether the custom tooling serializer may be used for this connection.
 * True for composite-build-capable Gradle (>= 3.1, passed in), target-environment builds,
 * or any Gradle whose base version is at least 3.0.
 */
private static boolean isCustomSerializationSupported(@NotNull DefaultProjectResolverContext resolverCtx,
                                                      GradleVersion gradleVersion,
                                                      boolean isCompositeBuildsSupported) {
  return isCompositeBuildsSupported ||
         resolverCtx.getConnection().newBuild() instanceof TargetBuildLauncher ||
         gradleVersion.getBaseVersion().compareTo(GradleVersion.version("3.0")) >= 0;
}

/**
 * Adds the sync-related system properties, composite-build arguments and per-extension
 * JVM options / command-line arguments to {@code executionSettings} before the build is run.
 */
private static void configureExecutionArgumentsAndVmOptions(@NotNull GradleExecutionSettings executionSettings,
                                                            @NotNull DefaultProjectResolverContext resolverCtx,
                                                            boolean isBuildSrcProject) {
  executionSettings.withArgument("-Didea.sync.active=true");
  if (resolverCtx.isResolveModulePerSourceSet()) {
    executionSettings.withArgument("-Didea.resolveSourceSetDependencies=true");
  }
  if (!isBuildSrcProject) {
    for (GradleBuildParticipant buildParticipant : executionSettings.getExecutionWorkspace().getBuildParticipants()) {
      executionSettings.withArguments(GradleConstants.INCLUDE_BUILD_CMD_OPTION, buildParticipant.getProjectPath());
    }
  }
  GradleImportCustomizer importCustomizer = GradleImportCustomizer.get();
  GradleProjectResolverUtil.createProjectResolvers(resolverCtx).forEachOrdered(extension -> {
    if (importCustomizer == null || importCustomizer.useExtraJvmArgs()) {
      // collect extra JVM arguments provided by gradle project resolver extensions
      ParametersList parametersList = new ParametersList();
      for (Pair<String, String> jvmArg : extension.getExtraJvmArgs()) {
        parametersList.addProperty(jvmArg.first, jvmArg.second);
      }
      executionSettings.withVmOptions(parametersList.getParameters());
    }
    // collect extra command-line arguments
    executionSettings.withArguments(extension.getExtraCommandLineArgs());
  });
}

/**
 * Publishes composite-build (included build) information as a {@link CompositeBuildData} child
 * of the project node and returns the IdeaModules of all included builds.
 * Returns an empty list when explicit build participants are already configured in settings.
 */
@NotNull
private static Collection<IdeaModule> exposeCompositeBuild(ProjectImportAction.AllModels allModels,
                                                           DefaultProjectResolverContext resolverCtx,
                                                           DataNode<ProjectData> projectDataNode) {
  if (resolverCtx.getSettings() != null && !resolverCtx.getSettings().getExecutionWorkspace().getBuildParticipants().isEmpty()) {
    return Collections.emptyList();
  }
  CompositeBuildData compositeBuildData;
  List<IdeaModule> gradleIncludedModules = new SmartList<>();
  List<Build> includedBuilds = allModels.getIncludedBuilds();
  if (!includedBuilds.isEmpty()) {
    ProjectData projectData = projectDataNode.getData();
    compositeBuildData = new CompositeBuildData(projectData.getLinkedExternalProjectPath());
    for (Build build : includedBuilds) {
      if (!build.getProjects().isEmpty()) {
        IdeaProject ideaProject = allModels.getModel(build, IdeaProject.class);
        if (ideaProject != null) {
          gradleIncludedModules.addAll(ideaProject.getModules());
        }
        String rootProjectName = build.getName();
        BuildParticipant buildParticipant = new BuildParticipant();
        String projectPath = toCanonicalPath(build.getBuildIdentifier().getRootDir().getPath());
        buildParticipant.setRootProjectName(rootProjectName);
        buildParticipant.setRootPath(projectPath);
        if (ideaProject != null) {
          for (IdeaModule module : ideaProject.getModules()) {
            String modulePath = toCanonicalPath(module.getGradleProject().getProjectDirectory().getPath());
            buildParticipant.getProjects().add(modulePath);
          }
        }
        compositeBuildData.getCompositeParticipants().add(buildParticipant);
      }
    }
    projectDataNode.createChild(CompositeBuildData.KEY, compositeBuildData);
  }
  return gradleIncludedModules;
}

/**
 * Replaces library dependencies that point at another module's output/artifact with proper
 * module dependencies, using the lookup maps stored on the project node by convertData().
 * Asserts therefore require this to run before those maps are cleared.
 */
private static void mergeLibraryAndModuleDependencyData(@NotNull ProjectResolverContext context,
                                                        @NotNull DataNode<ProjectData> projectDataNode,
                                                        @NotNull File gradleUserHomeDir,
                                                        @Nullable File gradleHomeDir,
                                                        @Nullable GradleVersion gradleVersion) {
  final Map<String, Pair<DataNode<GradleSourceSetData>, ExternalSourceSet>> sourceSetMap =
    projectDataNode.getUserData(RESOLVED_SOURCE_SETS);
  assert sourceSetMap != null;

  final Map<String, Pair<String, ExternalSystemSourceType>> moduleOutputsMap =
    projectDataNode.getUserData(MODULES_OUTPUTS);
  assert moduleOutputsMap != null;

  final Map<String, String> artifactsMap = projectDataNode.getUserData(CONFIGURATION_ARTIFACTS);
  assert artifactsMap != null;

  final Collection<DataNode<LibraryDependencyData>> libraryDependencies =
    findAllRecursively(projectDataNode, ProjectKeys.LIBRARY_DEPENDENCY);
  LibraryDataNodeSubstitutor librarySubstitutor = new LibraryDataNodeSubstitutor(
    context, gradleUserHomeDir, gradleHomeDir, gradleVersion, sourceSetMap, moduleOutputsMap, artifactsMap);
  for (DataNode<LibraryDependencyData> libraryDependencyDataNode : libraryDependencies) {
    librarySubstitutor.run(libraryDependencyDataNode);
  }
}

/**
 * Registers ExternalProject models (main build and included builds) with {@code models},
 * keyed per Gradle project, so later phases can look them up by project path.
 * With custom serialization the fetched model already is a DefaultExternalProject; otherwise
 * it is defensively wrapped.
 */
private static void extractExternalProjectModels(@NotNull ProjectImportAction.AllModels models,
                                                 @NotNull ProjectResolverContext resolverCtx,
                                                 boolean useCustomSerialization) {
  resolverCtx.setModels(models);
  final Class<? extends ExternalProject> modelClazz = resolverCtx.isPreviewMode() ? ExternalProjectPreview.class : ExternalProject.class;
  final ExternalProject externalRootProject = models.getModel(modelClazz);
  if (externalRootProject == null) return;

  final DefaultExternalProject wrappedExternalRootProject =
    useCustomSerialization ? (DefaultExternalProject)externalRootProject : new DefaultExternalProject(externalRootProject);
  models.addModel(wrappedExternalRootProject, ExternalProject.class);

  final Map<String, DefaultExternalProject> externalProjectsMap = createExternalProjectsMap(wrappedExternalRootProject);

  Collection<Project> projects = models.getMainBuild().getProjects();
  for (Project project : projects) {
    ExternalProject externalProject = externalProjectsMap.get(project.getProjectIdentifier().getProjectPath());
    if (externalProject != null) {
      models.addModel(externalProject, ExternalProject.class, project);
    }
  }

  for (Build includedBuild : models.getIncludedBuilds()) {
    final ExternalProject externalIncludedRootProject = models.getModel(includedBuild, modelClazz);
    if (externalIncludedRootProject == null) continue;
    final DefaultExternalProject wrappedExternalIncludedRootProject =
      useCustomSerialization ? (DefaultExternalProject)externalIncludedRootProject
                             : new DefaultExternalProject(externalIncludedRootProject);
    // attach the included build's root project as a child of the main root project
    wrappedExternalRootProject.getChildProjects().put(wrappedExternalIncludedRootProject.getName(), wrappedExternalIncludedRootProject);
    final Map<String, DefaultExternalProject> externalIncludedProjectsMap =
      createExternalProjectsMap(wrappedExternalIncludedRootProject);
    for (ProjectModel project : includedBuild.getProjects()) {
      ExternalProject externalProject = externalIncludedProjectsMap.get(project.getProjectIdentifier().getProjectPath());
      if (externalProject != null) {
        models.addModel(externalProject, ExternalProject.class, project);
      }
    }
  }
}

/**
 * Flattens the project hierarchy rooted at {@code rootExternalProject} into a map keyed by
 * qualified name (breadth-first traversal of child projects).
 */
@NotNull
private static Map<String, DefaultExternalProject> createExternalProjectsMap(@Nullable DefaultExternalProject rootExternalProject) {
  final Map<String, DefaultExternalProject> externalProjectMap = new HashMap<>();
  if (rootExternalProject == null) return externalProjectMap;
  ArrayDeque<DefaultExternalProject> queue = new ArrayDeque<>();
  queue.add(rootExternalProject);
  DefaultExternalProject externalProject;
  while ((externalProject = queue.pollFirst()) != null) {
    queue.addAll(externalProject.getChildProjects().values());
    externalProjectMap.put(externalProject.getQName(), externalProject);
  }
  return externalProjectMap;
}

// Mutable int box used as the per-path weight counter in mergeSourceSetContentRoots().
private static class Counter {
  int count;

  void increment() {
    count++;
  }

  @Override
  public String toString() {
    return String.valueOf(count);
  }
}

/**
 * First pass of content-root merging: weights every ancestor directory of every content root
 * (a weight > 1 means the directory is shared by several roots), then asks
 * {@link #mergeModuleContentRoots} to collapse the roots of each module / source-set module.
 */
private static void mergeSourceSetContentRoots(@NotNull Map<String, Pair<DataNode<ModuleData>, IdeaModule>> moduleMap,
                                               @NotNull ProjectResolverContext resolverCtx) {
  final Factory<Counter> counterFactory = () -> new Counter();

  final Map<String, Counter> weightMap = new HashMap<>();
  for (final Pair<DataNode<ModuleData>, IdeaModule> pair : moduleMap.values()) {
    final DataNode<ModuleData> moduleNode = pair.first;
    for (DataNode<ContentRootData> contentRootNode : findAll(moduleNode, ProjectKeys.CONTENT_ROOT)) {
      File file = new File(contentRootNode.getData().getRootPath());
      while (file != null) {
        ContainerUtil.getOrCreate(weightMap, file.getPath(), counterFactory).increment();
        file = file.getParentFile();
      }
    }

    for (DataNode<GradleSourceSetData> sourceSetNode : findAll(moduleNode, GradleSourceSetData.KEY)) {
      // count each ancestor path at most once per source set
      final Set<String> set = new HashSet<>();
      for (DataNode<ContentRootData> contentRootNode : findAll(sourceSetNode, ProjectKeys.CONTENT_ROOT)) {
        File file = new File(contentRootNode.getData().getRootPath());
        while (file != null) {
          set.add(file.getPath());
          file = file.getParentFile();
        }
      }
      for (String path : set) {
        ContainerUtil.getOrCreate(weightMap, path, counterFactory).increment();
      }
    }
  }
  for (final Pair<DataNode<ModuleData>, IdeaModule> pair : moduleMap.values()) {
    final DataNode<ModuleData> moduleNode = pair.first;
    final ExternalProject externalProject = resolverCtx.getExtraProject(pair.second, ExternalProject.class);
    if (externalProject == null) continue;

    if (resolverCtx.isResolveModulePerSourceSet()) {
      for (DataNode<GradleSourceSetData> sourceSetNode : findAll(moduleNode, GradleSourceSetData.KEY)) {
        mergeModuleContentRoots(weightMap, externalProject, sourceSetNode);
      }
    }
    else {
      mergeModuleContentRoots(weightMap, externalProject, moduleNode);
    }
  }
}

/**
 * Second pass of content-root merging for one module node: hoists each root up to the highest
 * un-shared ancestor (weight <= 1), merges roots that become ancestors/descendants of each
 * other, re-stores all source paths into the merged roots, and replaces the old content-root
 * children with the merged ones. Roots under the Gradle build directory are left alone.
 */
private static void mergeModuleContentRoots(@NotNull Map<String, Counter> weightMap,
                                            @NotNull ExternalProject externalProject,
                                            @NotNull DataNode<? extends ModuleData> moduleNode) {
  final File buildDir = externalProject.getBuildDir();
  final MultiMap<String, ContentRootData> sourceSetRoots = MultiMap.create();
  Collection<DataNode<ContentRootData>> contentRootNodes = findAll(moduleNode, ProjectKeys.CONTENT_ROOT);
  if (contentRootNodes.size() <= 1) return;

  for (DataNode<ContentRootData> contentRootNode : contentRootNodes) {
    File root = new File(contentRootNode.getData().getRootPath());
    if (FileUtil.isAncestor(buildDir, root, true)) continue;

    // climb while the parent directory is not shared with any other root
    while (weightMap.containsKey(root.getParent()) && weightMap.get(root.getParent()).count <= 1) {
      root = root.getParentFile();
    }

    ContentRootData mergedContentRoot = null;
    String rootPath = toCanonicalPath(root.getPath());
    Set<String> paths = new HashSet<>(sourceSetRoots.keySet());
    for (String path : paths) {
      if (FileUtil.isAncestor(rootPath, path, true)) {
        Collection<ContentRootData> values = sourceSetRoots.remove(path);
        if (values != null) {
          sourceSetRoots.putValues(rootPath, values);
        }
      }
      else if (FileUtil.isAncestor(path, rootPath, false)) {
        Collection<ContentRootData> contentRoots = sourceSetRoots.get(path);
        for (ContentRootData rootData : contentRoots) {
          if (StringUtil.equals(rootData.getRootPath(), path)) {
            mergedContentRoot = rootData;
            break;
          }
        }
        if (mergedContentRoot == null) {
          mergedContentRoot = contentRoots.iterator().next();
        }
        break;
      }
      if (sourceSetRoots.size() == 1) break;
    }
    if (mergedContentRoot == null) {
      mergedContentRoot = new ContentRootData(GradleConstants.SYSTEM_ID, root.getPath());
      sourceSetRoots.putValue(mergedContentRoot.getRootPath(), mergedContentRoot);
    }

    for (ExternalSystemSourceType sourceType : ExternalSystemSourceType.values()) {
      for (ContentRootData.SourceRoot sourceRoot : contentRootNode.getData().getPaths(sourceType)) {
        mergedContentRoot.storePath(sourceType, sourceRoot.getPath(), sourceRoot.getPackagePrefix());
      }
    }

    contentRootNode.clear(true);
  }

  for (Map.Entry<String, Collection<ContentRootData>> entry : sourceSetRoots.entrySet()) {
    final String rootPath = entry.getKey();
    final ContentRootData ideContentRoot = new ContentRootData(GradleConstants.SYSTEM_ID, rootPath);
    for (ContentRootData rootData : entry.getValue()) {
      for (ExternalSystemSourceType sourceType : ExternalSystemSourceType.values()) {
        Collection<ContentRootData.SourceRoot> roots = rootData.getPaths(sourceType);
        for (ContentRootData.SourceRoot sourceRoot : roots) {
          ideContentRoot.storePath(sourceType, sourceRoot.getPath(), sourceRoot.getPackagePrefix());
        }
      }
    }
    moduleNode.createChild(ProjectKeys.CONTENT_ROOT, ideContentRoot);
  }
}

/**
 * Adapter that runs {@link #doResolveProjectInfo} against an open {@link ProjectConnection},
 * keeping the cancellation map up to date and converting failures into user-friendly
 * {@link ExternalSystemException}s via the resolver chain.
 */
private final class ProjectConnectionDataNodeFunction implements Function<ProjectConnection, DataNode<ProjectData>> {
  @NotNull private final GradleProjectResolverExtension myProjectResolverChain;
  private final boolean myIsBuildSrcProject;
  private final DefaultProjectResolverContext myResolverContext;

  private ProjectConnectionDataNodeFunction(@NotNull DefaultProjectResolverContext resolverContext,
                                            @NotNull GradleProjectResolverExtension projectResolverChain,
                                            boolean isBuildSrcProject) {
    myResolverContext = resolverContext;
    myProjectResolverChain = projectResolverChain;
    myIsBuildSrcProject = isBuildSrcProject;
  }

  @Override
  public DataNode<ProjectData> fun(ProjectConnection connection) {
    try {
      myCancellationMap.putValue(myResolverContext.getExternalSystemTaskId(), myResolverContext.getCancellationTokenSource());
      myResolverContext.setConnection(connection);
      return doResolveProjectInfo(myResolverContext, myProjectResolverChain, myIsBuildSrcProject);
    }
    catch (ProcessCanceledException e) {
      // cancellation must propagate untouched
      throw e;
    }
    catch (RuntimeException e) {
      LOG.info("Gradle project resolve error", e);
      ExternalSystemException esException = ExceptionUtil.findCause(e, ExternalSystemException.class);
      if (esException != null && esException != e) {
        LOG.info("\nCaused by: " + esException.getOriginalReason());
      }
      throw myProjectResolverChain.getUserFriendlyError(
        myResolverContext.getBuildEnvironment(), e, myResolverContext.getProjectPath(), null);
    }
    finally {
      myCancellationMap.remove(myResolverContext.getExternalSystemTaskId(), myResolverContext.getCancellationTokenSource());
    }
  }
}

@ApiStatus.Experimental // chaining of resolver extensions complicates things and can be replaced in future
public static GradleProjectResolverExtension createProjectResolverChain() {
  return createProjectResolverChain(null, null);
}

/**
 * Builds the singly-linked chain of {@link GradleProjectResolverExtension}s (optionally
 * filtered; the base extension is always kept so the chain terminates correctly) and wraps it
 * so that errors are additionally post-processed by {@link BaseProjectImportErrorHandler}.
 */
@NotNull
private static GradleProjectResolverExtension createProjectResolverChain(@Nullable DefaultProjectResolverContext resolverContext,
                                                                         @Nullable Predicate<? super GradleProjectResolverExtension> extensionsFilter) {
  Stream<GradleProjectResolverExtension> extensions = GradleProjectResolverUtil.createProjectResolvers(resolverContext);
  if (extensionsFilter != null) {
    extensions = extensions.filter(extensionsFilter.or(BaseResolverExtension.class::isInstance));
  }

  Deque<GradleProjectResolverExtension> deque = new ArrayDeque<>();
  extensions.forEachOrdered(extension -> {
    final GradleProjectResolverExtension previous = deque.peekLast();
    if (previous != null) {
      previous.setNext(extension);
      if (previous.getNext() != extension) {
        throw new AssertionError("Illegal next resolver got, current resolver class is " + previous.getClass().getName());
      }
    }
    deque.add(extension);
  });

  GradleProjectResolverExtension firstResolver = deque.peekFirst();
  GradleProjectResolverExtension resolverExtension = firstResolver;
  assert resolverExtension != null;
  while (resolverExtension.getNext() != null) {
    resolverExtension = resolverExtension.getNext();
  }
  if (!(resolverExtension instanceof BaseResolverExtension)) {
    throw new AssertionError("Illegal last resolver got of class " + resolverExtension.getClass().getName());
  }

  GradleProjectResolverExtension chainWrapper = new AbstractProjectResolverExtension() {
    @NotNull
    @Override
    public ExternalSystemException getUserFriendlyError(@Nullable BuildEnvironment buildEnvironment,
                                                       @NotNull Throwable error,
                                                       @NotNull String projectPath,
                                                       @Nullable String buildFilePath) {
      ExternalSystemException friendlyError = super.getUserFriendlyError(buildEnvironment, error, projectPath, buildFilePath);
      return new BaseProjectImportErrorHandler()
        .checkErrorsWithoutQuickFixes(buildEnvironment, error, projectPath, buildFilePath, friendlyError);
    }
  };
  chainWrapper.setNext(firstResolver);
  return chainWrapper;
}
}
/* * Copyright 2014-present Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.provider.of.flow.impl; import com.google.common.collect.Lists; import org.onlab.packet.Ip4Address; import org.onlab.packet.Ip6Address; import org.onosproject.net.OchSignal; import org.onosproject.net.OduSignalId; import org.onosproject.net.PortNumber; import org.onosproject.net.driver.DefaultDriverData; import org.onosproject.net.driver.DefaultDriverHandler; import org.onosproject.net.driver.Driver; import org.onosproject.net.driver.DriverService; import org.onosproject.net.flow.FlowRule; import org.onosproject.net.flow.TrafficTreatment; import org.onosproject.net.flow.instructions.ExtensionTreatment; import org.onosproject.net.flow.instructions.Instruction; import org.onosproject.net.flow.instructions.Instructions; import org.onosproject.net.flow.instructions.Instructions.GroupInstruction; import org.onosproject.net.flow.instructions.Instructions.OutputInstruction; import org.onosproject.net.flow.instructions.Instructions.SetQueueInstruction; import org.onosproject.net.flow.instructions.L0ModificationInstruction; import org.onosproject.net.flow.instructions.L0ModificationInstruction.ModOchSignalInstruction; import org.onosproject.net.flow.instructions.L1ModificationInstruction; import org.onosproject.net.flow.instructions.L1ModificationInstruction.ModOduSignalIdInstruction; import org.onosproject.net.flow.instructions.L2ModificationInstruction; import 
org.onosproject.net.flow.instructions.L2ModificationInstruction.ModEtherInstruction; import org.onosproject.net.flow.instructions.L2ModificationInstruction.ModMplsBosInstruction; import org.onosproject.net.flow.instructions.L2ModificationInstruction.ModMplsHeaderInstruction; import org.onosproject.net.flow.instructions.L2ModificationInstruction.ModMplsLabelInstruction; import org.onosproject.net.flow.instructions.L2ModificationInstruction.ModTunnelIdInstruction; import org.onosproject.net.flow.instructions.L2ModificationInstruction.ModVlanHeaderInstruction; import org.onosproject.net.flow.instructions.L2ModificationInstruction.ModVlanIdInstruction; import org.onosproject.net.flow.instructions.L2ModificationInstruction.ModVlanPcpInstruction; import org.onosproject.net.flow.instructions.L3ModificationInstruction; import org.onosproject.net.flow.instructions.L3ModificationInstruction.ModArpEthInstruction; import org.onosproject.net.flow.instructions.L3ModificationInstruction.ModArpIPInstruction; import org.onosproject.net.flow.instructions.L3ModificationInstruction.ModArpOpInstruction; import org.onosproject.net.flow.instructions.L3ModificationInstruction.ModIPInstruction; import org.onosproject.net.flow.instructions.L3ModificationInstruction.ModIPv6FlowLabelInstruction; import org.onosproject.net.flow.instructions.L4ModificationInstruction; import org.onosproject.net.flow.instructions.L4ModificationInstruction.ModTransportPortInstruction; import org.onosproject.openflow.controller.ExtensionTreatmentInterpreter; import org.onosproject.provider.of.flow.util.NoMappingFoundException; import org.onosproject.provider.of.flow.util.OpenFlowValueMapper; import org.projectfloodlight.openflow.protocol.OFFactory; import org.projectfloodlight.openflow.protocol.OFFlowAdd; import org.projectfloodlight.openflow.protocol.OFFlowDeleteStrict; import org.projectfloodlight.openflow.protocol.OFFlowMod; import org.projectfloodlight.openflow.protocol.OFFlowModFlags; import 
org.projectfloodlight.openflow.protocol.action.OFAction; import org.projectfloodlight.openflow.protocol.action.OFActionGroup; import org.projectfloodlight.openflow.protocol.action.OFActionOutput; import org.projectfloodlight.openflow.protocol.action.OFActionSetQueue; import org.projectfloodlight.openflow.protocol.instruction.OFInstruction; import org.projectfloodlight.openflow.protocol.match.Match; import org.projectfloodlight.openflow.protocol.oxm.OFOxm; import org.projectfloodlight.openflow.types.ArpOpcode; import org.projectfloodlight.openflow.types.CircuitSignalID; import org.projectfloodlight.openflow.types.EthType; import org.projectfloodlight.openflow.types.IPv4Address; import org.projectfloodlight.openflow.types.IPv6Address; import org.projectfloodlight.openflow.types.IPv6FlowLabel; import org.projectfloodlight.openflow.types.MacAddress; import org.projectfloodlight.openflow.types.OFBooleanValue; import org.projectfloodlight.openflow.types.OFBufferId; import org.projectfloodlight.openflow.types.OFGroup; import org.projectfloodlight.openflow.types.OFPort; import org.projectfloodlight.openflow.types.OFVlanVidMatch; import org.projectfloodlight.openflow.types.OduSignalID; import org.projectfloodlight.openflow.types.TableId; import org.projectfloodlight.openflow.types.TransportPort; import org.projectfloodlight.openflow.types.U32; import org.projectfloodlight.openflow.types.U64; import org.projectfloodlight.openflow.types.VlanPcp; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.Optional; /** * Flow mod builder for OpenFlow 1.3+. */ public class FlowModBuilderVer13 extends FlowModBuilder { private final Logger log = LoggerFactory.getLogger(getClass()); private static final int OFPCML_NO_BUFFER = 0xffff; private final TrafficTreatment treatment; /** * Constructor for a flow mod builder for OpenFlow 1.3. 
* * @param flowRule the flow rule to transform into a flow mod * @param factory the OpenFlow factory to use to build the flow mod * @param xid the transaction ID * @param driverService the device driver service */ protected FlowModBuilderVer13(FlowRule flowRule, OFFactory factory, Optional<Long> xid, Optional<DriverService> driverService) { super(flowRule, factory, xid, driverService); this.treatment = flowRule.treatment(); } @Override public OFFlowMod buildFlowAdd() { Match match = buildMatch(); List<OFAction> deferredActions = buildActions(treatment.deferred()); List<OFAction> immediateActions = buildActions(treatment.immediate()); List<OFInstruction> instructions = Lists.newLinkedList(); if (treatment.clearedDeferred()) { instructions.add(factory().instructions().clearActions()); } if (!immediateActions.isEmpty()) { instructions.add(factory().instructions().applyActions(immediateActions)); } if (!deferredActions.isEmpty()) { instructions.add(factory().instructions().writeActions(deferredActions)); } if (treatment.tableTransition() != null) { instructions.add(buildTableGoto(treatment.tableTransition())); } if (treatment.writeMetadata() != null) { instructions.add(buildMetadata(treatment.writeMetadata())); } if (treatment.metered() != null) { instructions.add(buildMeter(treatment.metered())); } long cookie = flowRule().id().value(); OFFlowAdd fm = factory().buildFlowAdd() .setXid(xid) .setCookie(U64.of(cookie)) .setBufferId(OFBufferId.NO_BUFFER) .setInstructions(instructions) .setMatch(match) .setFlags(Collections.singleton(OFFlowModFlags.SEND_FLOW_REM)) .setPriority(flowRule().priority()) .setTableId(TableId.of(flowRule().tableId())) .setHardTimeout(flowRule().hardTimeout()) .build(); return fm; } @Override public OFFlowMod buildFlowMod() { Match match = buildMatch(); List<OFAction> deferredActions = buildActions(treatment.deferred()); List<OFAction> immediateActions = buildActions(treatment.immediate()); List<OFInstruction> instructions = Lists.newLinkedList(); 
if (!immediateActions.isEmpty()) { instructions.add(factory().instructions().applyActions(immediateActions)); } if (treatment.clearedDeferred()) { instructions.add(factory().instructions().clearActions()); } if (!deferredActions.isEmpty()) { instructions.add(factory().instructions().writeActions(deferredActions)); } if (treatment.tableTransition() != null) { instructions.add(buildTableGoto(treatment.tableTransition())); } if (treatment.writeMetadata() != null) { instructions.add(buildMetadata(treatment.writeMetadata())); } if (treatment.metered() != null) { instructions.add(buildMeter(treatment.metered())); } long cookie = flowRule().id().value(); OFFlowMod fm = factory().buildFlowModify() .setXid(xid) .setCookie(U64.of(cookie)) .setBufferId(OFBufferId.NO_BUFFER) .setInstructions(instructions) .setMatch(match) .setFlags(Collections.singleton(OFFlowModFlags.SEND_FLOW_REM)) .setPriority(flowRule().priority()) .setTableId(TableId.of(flowRule().tableId())) .setHardTimeout(flowRule().hardTimeout()) .build(); return fm; } @Override public OFFlowMod buildFlowDel() { Match match = buildMatch(); long cookie = flowRule().id().value(); OFFlowDeleteStrict fm = factory().buildFlowDeleteStrict() .setXid(xid) .setCookie(U64.of(cookie)) .setBufferId(OFBufferId.NO_BUFFER) .setMatch(match) .setFlags(Collections.singleton(OFFlowModFlags.SEND_FLOW_REM)) .setPriority(flowRule().priority()) .setTableId(TableId.of(flowRule().tableId())) .setHardTimeout(flowRule().hardTimeout()) .build(); return fm; } private List<OFAction> buildActions(List<Instruction> treatments) { if (treatment == null) { return Collections.emptyList(); } boolean tableFound = false; List<OFAction> actions = new LinkedList<>(); for (Instruction i : treatments) { switch (i.type()) { case NOACTION: return Collections.emptyList(); case L0MODIFICATION: actions.add(buildL0Modification(i)); break; case L1MODIFICATION: actions.add(buildL1Modification(i)); break; case L2MODIFICATION: actions.add(buildL2Modification(i)); break; 
case L3MODIFICATION: actions.add(buildL3Modification(i)); break; case L4MODIFICATION: actions.add(buildL4Modification(i)); break; case OUTPUT: OutputInstruction out = (OutputInstruction) i; OFActionOutput.Builder action = factory().actions().buildOutput() .setPort(OFPort.of((int) out.port().toLong())); if (out.port().equals(PortNumber.CONTROLLER)) { action.setMaxLen(OFPCML_NO_BUFFER); } actions.add(action.build()); break; case GROUP: GroupInstruction group = (GroupInstruction) i; OFActionGroup.Builder groupBuilder = factory().actions().buildGroup() .setGroup(OFGroup.of(group.groupId().id())); actions.add(groupBuilder.build()); break; case QUEUE: SetQueueInstruction queue = (SetQueueInstruction) i; OFActionSetQueue.Builder queueBuilder = factory().actions().buildSetQueue() .setQueueId(queue.queueId()); actions.add(queueBuilder.build()); break; case TABLE: //FIXME: should not occur here. tableFound = true; break; case EXTENSION: actions.add(buildExtensionAction(((Instructions.ExtensionInstructionWrapper) i) .extensionInstruction())); break; default: log.warn("Instruction type {} not yet implemented.", i.type()); } } if (tableFound && actions.isEmpty()) { // handles the case where there are no actions, but there is // a goto instruction for the next table return Collections.emptyList(); } return actions; } private OFInstruction buildTableGoto(Instructions.TableTypeTransition i) { OFInstruction instruction = factory().instructions().gotoTable( TableId.of(i.tableId())); return instruction; } private OFInstruction buildMetadata(Instructions.MetadataInstruction m) { OFInstruction instruction = factory().instructions().writeMetadata( U64.of(m.metadata()), U64.of(m.metadataMask())); return instruction; } private OFInstruction buildMeter(Instructions.MeterInstruction metered) { return factory().instructions().meter(metered.meterId().id()); } private OFAction buildL0Modification(Instruction i) { L0ModificationInstruction l0m = (L0ModificationInstruction) i; OFOxm<?> oxm = 
null; switch (l0m.subtype()) { case OCH: try { ModOchSignalInstruction modOchSignalInstruction = (ModOchSignalInstruction) l0m; OchSignal signal = modOchSignalInstruction.lambda(); byte gridType = OpenFlowValueMapper.lookupGridType(signal.gridType()); byte channelSpacing = OpenFlowValueMapper.lookupChannelSpacing(signal.channelSpacing()); oxm = factory().oxms().expOchSigId( new CircuitSignalID(gridType, channelSpacing, (short) signal.spacingMultiplier(), (short) signal.slotGranularity())); } catch (NoMappingFoundException e) { log.warn(e.getMessage()); break; } break; default: log.warn("Unimplemented action type {}.", l0m.subtype()); break; } if (oxm != null) { return factory().actions().buildSetField().setField(oxm).build(); } return null; } private OFAction buildModOchSignalInstruction(ModOchSignalInstruction instruction) { OchSignal signal = instruction.lambda(); byte gridType = OpenFlowValueMapper.lookupGridType(signal.gridType()); byte channelSpacing = OpenFlowValueMapper.lookupChannelSpacing(signal.channelSpacing()); return factory().actions().circuit(factory().oxms().expOchSigId( new CircuitSignalID(gridType, channelSpacing, (short) signal.spacingMultiplier(), (short) signal.slotGranularity()) )); } private OFAction buildL1Modification(Instruction i) { L1ModificationInstruction l1m = (L1ModificationInstruction) i; OFOxm<?> oxm = null; switch (l1m.subtype()) { case ODU_SIGID: ModOduSignalIdInstruction modOduSignalIdInstruction = (ModOduSignalIdInstruction) l1m; OduSignalId oduSignalId = modOduSignalIdInstruction.oduSignalId(); OduSignalID oduSignalID = new OduSignalID((short) oduSignalId.tributaryPortNumber(), (short) oduSignalId.tributarySlotLength(), oduSignalId.tributarySlotBitmap()); oxm = factory().oxms().expOduSigId(oduSignalID); break; default: log.warn("Unimplemented action type {}.", l1m.subtype()); break; } if (oxm != null) { return factory().actions().buildSetField().setField(oxm).build(); } return null; } private OFAction 
buildL2Modification(Instruction i) { L2ModificationInstruction l2m = (L2ModificationInstruction) i; ModEtherInstruction eth; OFOxm<?> oxm = null; switch (l2m.subtype()) { case ETH_DST: eth = (ModEtherInstruction) l2m; oxm = factory().oxms().ethDst(MacAddress.of(eth.mac().toLong())); break; case ETH_SRC: eth = (ModEtherInstruction) l2m; oxm = factory().oxms().ethSrc(MacAddress.of(eth.mac().toLong())); break; case VLAN_ID: ModVlanIdInstruction vlanId = (ModVlanIdInstruction) l2m; oxm = factory().oxms().vlanVid(OFVlanVidMatch.ofVlan(vlanId.vlanId().toShort())); break; case VLAN_PCP: ModVlanPcpInstruction vlanPcp = (ModVlanPcpInstruction) l2m; oxm = factory().oxms().vlanPcp(VlanPcp.of(vlanPcp.vlanPcp())); break; case MPLS_PUSH: ModMplsHeaderInstruction pushHeaderInstructions = (ModMplsHeaderInstruction) l2m; return factory().actions().pushMpls(EthType.of(pushHeaderInstructions .ethernetType().toShort())); case MPLS_POP: ModMplsHeaderInstruction popHeaderInstructions = (ModMplsHeaderInstruction) l2m; return factory().actions().popMpls(EthType.of(popHeaderInstructions .ethernetType().toShort())); case MPLS_LABEL: ModMplsLabelInstruction mplsLabel = (ModMplsLabelInstruction) l2m; oxm = factory().oxms().mplsLabel(U32.of(mplsLabel.label().toInt())); break; case MPLS_BOS: ModMplsBosInstruction mplsBos = (ModMplsBosInstruction) l2m; oxm = factory().oxms() .mplsBos(mplsBos.mplsBos() ? 
OFBooleanValue.TRUE : OFBooleanValue.FALSE); break; case DEC_MPLS_TTL: return factory().actions().decMplsTtl(); case VLAN_POP: return factory().actions().popVlan(); case VLAN_PUSH: ModVlanHeaderInstruction pushVlanInstruction = (ModVlanHeaderInstruction) l2m; return factory().actions().pushVlan( EthType.of(pushVlanInstruction.ethernetType().toShort())); case TUNNEL_ID: ModTunnelIdInstruction tunnelId = (ModTunnelIdInstruction) l2m; oxm = factory().oxms().tunnelId(U64.of(tunnelId.tunnelId())); break; default: log.warn("Unimplemented action type {}.", l2m.subtype()); break; } if (oxm != null) { return factory().actions().buildSetField().setField(oxm).build(); } return null; } private OFAction buildL3Modification(Instruction i) { L3ModificationInstruction l3m = (L3ModificationInstruction) i; ModIPInstruction ip; Ip4Address ip4; Ip6Address ip6; OFOxm<?> oxm = null; switch (l3m.subtype()) { case IPV4_SRC: ip = (ModIPInstruction) i; ip4 = ip.ip().getIp4Address(); oxm = factory().oxms().ipv4Src(IPv4Address.of(ip4.toInt())); break; case IPV4_DST: ip = (ModIPInstruction) i; ip4 = ip.ip().getIp4Address(); oxm = factory().oxms().ipv4Dst(IPv4Address.of(ip4.toInt())); break; case IPV6_SRC: ip = (ModIPInstruction) i; ip6 = ip.ip().getIp6Address(); oxm = factory().oxms().ipv6Src(IPv6Address.of(ip6.toOctets())); break; case IPV6_DST: ip = (ModIPInstruction) i; ip6 = ip.ip().getIp6Address(); oxm = factory().oxms().ipv6Dst(IPv6Address.of(ip6.toOctets())); break; case IPV6_FLABEL: ModIPv6FlowLabelInstruction flowLabelInstruction = (ModIPv6FlowLabelInstruction) i; int flowLabel = flowLabelInstruction.flowLabel(); oxm = factory().oxms().ipv6Flabel(IPv6FlowLabel.of(flowLabel)); break; case ARP_SPA: ModArpIPInstruction aip = (ModArpIPInstruction) i; ip4 = aip.ip().getIp4Address(); oxm = factory().oxms().arpSpa(IPv4Address.of(ip4.toInt())); break; case ARP_SHA: ModArpEthInstruction ei = (ModArpEthInstruction) i; oxm = factory().oxms().arpSha(MacAddress.of(ei.mac().toLong())); break; case 
ARP_OP: ModArpOpInstruction oi = (ModArpOpInstruction) i; oxm = factory().oxms().arpOp(ArpOpcode.of((int) oi.op())); break; case DEC_TTL: return factory().actions().decNwTtl(); case TTL_IN: return factory().actions().copyTtlIn(); case TTL_OUT: return factory().actions().copyTtlOut(); default: log.warn("Unimplemented action type {}.", l3m.subtype()); break; } if (oxm != null) { return factory().actions().buildSetField().setField(oxm).build(); } return null; } private OFAction buildL4Modification(Instruction i) { L4ModificationInstruction l4m = (L4ModificationInstruction) i; ModTransportPortInstruction tp; OFOxm<?> oxm = null; switch (l4m.subtype()) { case TCP_SRC: tp = (ModTransportPortInstruction) l4m; oxm = factory().oxms().tcpSrc(TransportPort.of(tp.port().toInt())); break; case TCP_DST: tp = (ModTransportPortInstruction) l4m; oxm = factory().oxms().tcpDst(TransportPort.of(tp.port().toInt())); break; case UDP_SRC: tp = (ModTransportPortInstruction) l4m; oxm = factory().oxms().udpSrc(TransportPort.of(tp.port().toInt())); break; case UDP_DST: tp = (ModTransportPortInstruction) l4m; oxm = factory().oxms().udpDst(TransportPort.of(tp.port().toInt())); break; default: log.warn("Unimplemented action type {}.", l4m.subtype()); break; } if (oxm != null) { return factory().actions().buildSetField().setField(oxm).build(); } return null; } private OFAction buildExtensionAction(ExtensionTreatment i) { if (!driverService.isPresent()) { log.error("No driver service present"); return null; } Driver driver = driverService.get().getDriver(deviceId); if (driver.hasBehaviour(ExtensionTreatmentInterpreter.class)) { DefaultDriverHandler handler = new DefaultDriverHandler(new DefaultDriverData(driver, deviceId)); ExtensionTreatmentInterpreter interpreter = handler.behaviour(ExtensionTreatmentInterpreter.class); return interpreter.mapInstruction(factory(), i); } return null; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.calcite.sql.validate;

import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlSelect;
import org.apache.calcite.sql.SqlWindow;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.util.SqlBasicVisitor;
import org.apache.calcite.util.Litmus;

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.List;

import static org.apache.calcite.util.Static.RESOURCE;

/**
 * Visitor which throws an exception if any component of the expression is not a
 * group expression.
 */
class AggChecker extends SqlBasicVisitor<Void> {
  //~ Instance fields --------------------------------------------------------

  // Stack of scopes; the top is the scope currently being visited. Pushed
  // when descending into a sub-expression with its own operand scope.
  private final Deque<SqlValidatorScope> scopes = new ArrayDeque<>();
  private final List<SqlNode> extraExprs;
  private final List<SqlNode> groupExprs;
  private boolean distinct;
  private SqlValidatorImpl validator;

  //~ Constructors -----------------------------------------------------------

  /**
   * Creates an AggChecker.
   *
   * @param validator  Validator
   * @param scope      Scope
   * @param extraExprs Extra expressions (besides GROUP BY) that are also
   *                   considered available
   * @param groupExprs Expressions in GROUP BY (or SELECT DISTINCT) clause,
   *                   that are therefore available
   * @param distinct   Whether aggregation checking is because of a SELECT
   *                   DISTINCT clause
   */
  AggChecker(
      SqlValidatorImpl validator,
      AggregatingScope scope,
      List<SqlNode> extraExprs,
      List<SqlNode> groupExprs,
      boolean distinct) {
    this.validator = validator;
    this.extraExprs = extraExprs;
    this.groupExprs = groupExprs;
    this.distinct = distinct;
    this.scopes.push(scope);
  }

  //~ Methods ----------------------------------------------------------------

  // Returns whether {@code expr} structurally matches one of the GROUP BY
  // expressions or one of the extra expressions (structural comparison via
  // equalsDeep; differences are tolerated per Litmus.IGNORE).
  boolean isGroupExpr(SqlNode expr) {
    for (SqlNode groupExpr : groupExprs) {
      if (groupExpr.equalsDeep(expr, Litmus.IGNORE)) {
        return true;
      }
    }

    for (SqlNode extraExpr : extraExprs) {
      if (extraExpr.equalsDeep(expr, Litmus.IGNORE)) {
        return true;
      }
    }
    return false;
  }

  // Accepts an identifier only if it is a group expression (as-is or
  // fully-qualified), a star, or resolves to a parentheses-free function
  // call; otherwise raises a validation error.
  public Void visit(SqlIdentifier id) {
    if (isGroupExpr(id) || id.isStar()) {
      // Star may validly occur in "SELECT COUNT(*) OVER w"
      return null;
    }

    // Is it a call to a parentheses-free function?
    final SqlCall call = validator.makeNullaryCall(id);
    if (call != null) {
      return call.accept(this);
    }

    // Didn't find the identifier in the group-by list as is, now find
    // it fully-qualified.
    // TODO: It would be better if we always compared fully-qualified
    // to fully-qualified.
    final SqlQualified fqId = scopes.peek().fullyQualify(id);
    if (isGroupExpr(fqId.identifier)) {
      return null;
    }
    SqlNode originalExpr = validator.getOriginal(id);
    final String exprString = originalExpr.toString();
    // Error message differs depending on whether we are checking because of
    // SELECT DISTINCT or because of GROUP BY.
    throw validator.newValidationError(originalExpr,
        distinct
            ? RESOURCE.notSelectDistinctExpr(exprString)
            : RESOURCE.notGroupExpr(exprString));
  }

  // Checks a call: aggregate calls are terminal (valid unless misused with
  // DISTINCT); OVER clauses, auxiliary group calls (e.g. TUMBLE_START) and
  // sub-queries are handled specially; otherwise operands are visited in the
  // call's own operand scope.
  public Void visit(SqlCall call) {
    final SqlValidatorScope scope = scopes.peek();
    if (call.getOperator().isAggregator()) {
      if (distinct) {
        if (scope instanceof AggregatingSelectScope) {
          SqlNodeList selectList = ((SqlSelect) scope.getNode()).getSelectList();

          // Check if this aggregation function is just an element in the select
          for (SqlNode sqlNode : selectList) {
            if (sqlNode.getKind() == SqlKind.AS) {
              // Compare against the aliased expression, not the AS wrapper.
              sqlNode = ((SqlCall) sqlNode).operand(0);
            }

            if (validator.expand(sqlNode, scope)
                .equalsDeep(call, Litmus.IGNORE)) {
              return null;
            }
          }
        }

        // Cannot use agg fun in ORDER BY clause if have SELECT DISTINCT.
        SqlNode originalExpr = validator.getOriginal(call);
        final String exprString = originalExpr.toString();
        throw validator.newValidationError(call,
            RESOURCE.notSelectDistinctExpr(exprString));
      }

      // For example, 'sum(sal)' in 'SELECT sum(sal) FROM emp GROUP
      // BY deptno'
      return null;
    }
    switch (call.getKind()) {
    case FILTER:
    case WITHIN_GROUP:
    case RESPECT_NULLS:
    case IGNORE_NULLS:
      // Only the wrapped aggregate (operand 0) needs checking; the
      // filter/ordering clauses are not group expressions.
      call.operand(0).accept(this);
      return null;
    }
    // Visit the operand in window function
    if (call.getKind() == SqlKind.OVER) {
      for (SqlNode operand : call.<SqlCall>operand(0).getOperandList()) {
        operand.accept(this);
      }
      // Check the OVER clause
      final SqlNode over = call.operand(1);
      if (over instanceof SqlCall) {
        over.accept(this);
      } else if (over instanceof SqlIdentifier) {
        // Check the corresponding SqlWindow in WINDOW clause
        final SqlWindow window =
            scope.lookupWindow(((SqlIdentifier) over).getSimple());
        window.getPartitionList().accept(this);
        window.getOrderList().accept(this);
      }
    }
    if (isGroupExpr(call)) {
      // This call matches an expression in the GROUP BY clause.
      return null;
    }

    final SqlCall groupCall =
        SqlStdOperatorTable.convertAuxiliaryToGroupCall(call);
    if (groupCall != null) {
      if (isGroupExpr(groupCall)) {
        // This call is an auxiliary function that matches a group call in the
        // GROUP BY clause.
        //
        // For example TUMBLE_START is an auxiliary of the TUMBLE
        // group function, and
        //   TUMBLE_START(rowtime, INTERVAL '1' HOUR)
        // matches
        //   TUMBLE(rowtime, INTERVAL '1' HOUR')
        return null;
      }
      throw validator.newValidationError(groupCall,
          RESOURCE.auxiliaryWithoutMatchingGroupCall(
              call.getOperator().getName(), groupCall.getOperator().getName()));
    }

    if (call.isA(SqlKind.QUERY)) {
      // Allow queries for now, even though they may contain
      // references to forbidden columns.
      return null;
    }

    // Switch to new scope.
    SqlValidatorScope newScope = scope.getOperandScope(call);
    scopes.push(newScope);

    // Visit the operands (only expressions).
    call.getOperator()
        .acceptCall(this, call, true, ArgHandlerImpl.instance());

    // Restore scope.
    scopes.pop();
    return null;
  }
}

// End AggChecker.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.sling.api.request;

import java.io.IOException;
import java.io.PrintWriter;
import java.util.Collection;
import java.util.Locale;

import javax.servlet.ServletOutputStream;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletResponse;

import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.commons.testing.sling.MockResource;
import org.apache.sling.commons.testing.sling.MockSlingHttpServletRequest;

import junit.framework.TestCase;

/**
 * Unit tests for {@link RequestUtil}: If-Modified-Since handling and
 * Accept-header parsing.
 */
public class RequestUtilTest extends TestCase {

    /**
     * handleIfModifiedSince returns true (i.e. the response was handled as
     * 304) only when the If-Modified-Since header is at or after the
     * resource's modification time; a missing header (-1) means "not handled".
     */
    public void testHandleIfModifiedSince(){
        // modified 1309268989938 < header 1309269042730 -> not modified since, handled
        assertTrue(RequestUtil.handleIfModifiedSince(getMockRequest(1309268989938L,1309269042730L),getMockResponse()));

        // modified after the header date -> must not be handled as 304
        assertFalse(RequestUtil.handleIfModifiedSince(getMockRequest(1309269042730L,1309268989938L),getMockResponse()));
        // modification time unknown (-1) -> must not be handled as 304
        assertFalse(RequestUtil.handleIfModifiedSince(getMockRequest(-1,1309268989938L),getMockResponse()));
    }

    /**
     * Builds a mock request whose resource reports the given modification
     * time and whose getDateHeader always returns the given
     * If-Modified-Since value.
     *
     * @param modificationTime modification time set on the mock resource
     * @param ifModifiedSince value returned for any date header lookup
     * @return mock Sling request wired to the mock resource
     */
    protected SlingHttpServletRequest getMockRequest(final long modificationTime, final long ifModifiedSince) {
        final String resourcePath = "foo";
        final MockSlingHttpServletRequest r = new MockSlingHttpServletRequest(resourcePath, null, null, null, null) {
            @Override
            public long getDateHeader(String name) {
                // Every date header (incl. If-Modified-Since) gets this value.
                return ifModifiedSince;
            }
        };
        final String path = "/foo/node";
        final MockResource mr = new MockResource(null, path, null) {};
        mr.getResourceMetadata().setModificationTime(modificationTime);
        r.setResource(mr);
        return r;
    }

    /**
     * parserAcceptHeader maps each token to its q-value, defaulting to 1.0
     * when q is absent or unparseable, and contains no entry for tokens not
     * present in the header.
     */
    public void testParserAcceptHeader(){
        assertEquals(RequestUtil.parserAcceptHeader("compress;q=0.5, gzip;q=1.0").get("compress"), 0.5);
        assertEquals(RequestUtil.parserAcceptHeader("compress,gzip").get("compress"),1.0);
        assertEquals(RequestUtil.parserAcceptHeader("compress").get("compress"),1.0);
        // non-numeric q-value falls back to the default of 1.0
        assertEquals(RequestUtil.parserAcceptHeader("compress;q=string,gzip;q=1.0").get("compress"), 1.0);

        assertNull(RequestUtil.parserAcceptHeader("compress;q=0.5, gzip;q=1.0").get("compres"));
    }

    /**
     * Returns a do-nothing HttpServletResponse: RequestUtil only calls status
     * and header setters on it, so all methods are inert stubs.
     *
     * @return no-op response stub
     */
    protected HttpServletResponse getMockResponse() {
        return new HttpServletResponse() {

            @Override
            public void setLocale(Locale loc) {}

            @Override
            public void setContentType(String type) {}

            @Override
            public void setContentLength(int len) {}

            @Override
            public void setCharacterEncoding(String charset) {}

            @Override
            public void setBufferSize(int size) {}

            @Override
            public void resetBuffer() {}

            @Override
            public void reset() {}

            @Override
            public boolean isCommitted() {
                return false;
            }

            @Override
            public PrintWriter getWriter() throws IOException {
                return null;
            }

            @Override
            public ServletOutputStream getOutputStream() throws IOException {
                return null;
            }

            @Override
            public Locale getLocale() {
                return null;
            }

            @Override
            public String getContentType() {
                return null;
            }

            @Override
            public String getCharacterEncoding() {
                return null;
            }

            @Override
            public int getBufferSize() {
                return 0;
            }

            @Override
            public void flushBuffer() throws IOException {}

            @Override
            @SuppressWarnings("deprecation")
            public void setStatus(int sc, String sm) {}

            @Override
            public void setStatus(int sc) {}

            @Override
            public void setIntHeader(String name, int value) {}

            @Override
            public void setHeader(String name, String value) {}

            @Override
            public void setDateHeader(String name, long date) {}

            @Override
            public void sendRedirect(String location) throws IOException {}

            @Override
            public void sendError(int sc, String msg) throws IOException {}

            @Override
            public void sendError(int sc) throws IOException {}

            @Override
            @SuppressWarnings("deprecation")
            public String encodeUrl(String url) {
                return null;
            }

            @Override
            public String encodeURL(String url) {
                return null;
            }

            @Override
            @SuppressWarnings("deprecation")
            public String encodeRedirectUrl(String url) {
                return null;
            }

            @Override
            public String encodeRedirectURL(String url) {
                return null;
            }

            @Override
            public boolean containsHeader(String name) {
                return false;
            }

            @Override
            public void addIntHeader(String name, int value) {}

            @Override
            public void addHeader(String name, String value) {}

            @Override
            public void addDateHeader(String name, long date) {}

            @Override
            public void addCookie(Cookie cookie) {}

            @Override
            public void setContentLengthLong(long len) {
                // TODO Auto-generated method stub
            }

            @Override
            public int getStatus() {
                // TODO Auto-generated method stub
                return 0;
            }

            @Override
            public String getHeader(String name) {
                // TODO Auto-generated method stub
                return null;
            }

            @Override
            public Collection<String> getHeaders(String name) {
                // TODO Auto-generated method stub
                return null;
            }

            @Override
            public Collection<String> getHeaderNames() {
                // TODO Auto-generated method stub
                return null;
            }
        };
    }
}
/* Copyright (c) 2004-2007, Dennis M. Sosnoski All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of JiBX nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.jibx.binding.model; import org.jibx.binding.classes.ClassFile; import org.jibx.binding.classes.ClassItem; import org.jibx.runtime.JiBXException; import org.jibx.util.IClass; import org.jibx.util.IClassItem; import org.jibx.util.IClassLocator; /** * Wrapper for class information. This wraps the BCEL-based class handling * implementation to support the interface defined for use with the binding * model. * * @author Dennis M. 
Sosnoski */ public class ClassWrapper implements IClass { private final IClassLocator m_locator; private final ClassFile m_class; private IClassItem[] m_fields; private IClassItem[] m_methods; /** * Constructor. * * @param loc * @param clas */ public ClassWrapper(IClassLocator loc, ClassFile clas) { m_locator = loc; m_class = clas; } /** * Build an item wrapper. This method may be overridden by subclasses to * return a specialized form of wrapper. * * @param item * @return wrapper */ protected IClassItem buildItem(ClassItem item) { return new ClassItemWrapper(this, item); } /* (non-Javadoc) * @see org.jibx.binding.model.IClass#getName() */ public String getName() { return m_class.getName(); } /* (non-Javadoc) * @see org.jibx.binding.model.IClass#getSignature() */ public String getSignature() { return m_class.getSignature(); } /* (non-Javadoc) * @see org.jibx.binding.model.IClass#getPackage() */ public String getPackage() { return m_class.getPackage(); } /* (non-Javadoc) * @see org.jibx.binding.model.IClass#getSuperClass() */ public IClass getSuperClass() { ClassFile scf = m_class.getSuperFile(); if (scf == null) { return null; } else { return new ClassWrapper(m_locator, m_class.getSuperFile()); } } /* (non-Javadoc) * @see org.jibx.binding.model.IClass#getInterfaces() */ public String[] getInterfaces() { return m_class.getInterfaces(); } /* (non-Javadoc) * @see org.jibx.binding.model.IClass#getInstanceSigs() */ public String[] getInstanceSigs() { try { return m_class.getInstanceSigs(); } catch (JiBXException e) { // TODO need to handle this differently - perhaps get all when created throw new IllegalStateException("Internal error: instance " + "signatures not found for class " + m_class.getName()); } } /* (non-Javadoc) * @see org.jibx.binding.model.IClass#isImplements(java.lang.String) */ public boolean isImplements(String sig) { try { return m_class.isImplements(sig); } catch (JiBXException e) { // TODO need to handle this differently - perhaps get all when created 
throw new IllegalStateException("Internal error: instance " + "signatures not found for class " + m_class.getName()); } } /* (non-Javadoc) * @see org.jibx.binding.model.IClass#isAbstract() */ public boolean isAbstract() { return m_class.isAbstract(); } /* (non-Javadoc) * @see org.jibx.binding.model.IClass#isInterface() */ public boolean isInterface() { return m_class.isInterface(); } /* (non-Javadoc) * @see org.jibx.binding.model.IClass#isModifiable() */ public boolean isModifiable() { return m_class.isModifiable(); } /* (non-Javadoc) * @see org.jibx.binding.model.IClass#isSuperclass(org.jibx.binding.model.IClass) */ public boolean isSuperclass(String name) { ClassFile current = m_class; while (current != null) { if (current.getName().equals(name)) { return true; } else { current = current.getSuperFile(); } } return false; } /* (non-Javadoc) * @see org.jibx.binding.model.IClass#getDirectField(java.lang.String) */ public IClassItem getDirectField(String name) { ClassItem item = m_class.getDirectField(name); if (item == null) { return null; } else { return buildItem(item); } } /* (non-Javadoc) * @see org.jibx.binding.model.IClass#getField(java.lang.String) */ public IClassItem getField(String name) { try { return buildItem(m_class.getField(name)); } catch (JiBXException e) { // TODO need to handle this differently - perhaps get all when created return null; } } /* (non-Javadoc) * @see org.jibx.binding.model.IClass#getMethod(java.lang.String, java.lang.String) */ public IClassItem getMethod(String name, String sig) { ClassItem item = m_class.getMethod(name, sig); if (item == null) { return null; } else { return buildItem(item); } } /* (non-Javadoc) * @see org.jibx.binding.model.IClass#getMethod(java.lang.String, java.lang.String[]) */ public IClassItem getMethod(String name, String[] sigs) { ClassItem item = m_class.getMethod(name, sigs); if (item == null) { return null; } else { return buildItem(item); } } /* (non-Javadoc) * @see 
org.jibx.binding.model.IClass#getInitializerMethod(java.lang.String) */ public IClassItem getInitializerMethod(String sig) { ClassItem item = m_class.getInitializerMethod(sig); if (item == null) { return null; } else { return buildItem(item); } } /* (non-Javadoc) * @see org.jibx.binding.model.IClass#getStaticMethod(java.lang.String, java.lang.String) */ public IClassItem getStaticMethod(String name, String sig) { ClassItem item = m_class.getStaticMethod(name, sig); if (item == null) { return null; } else { return buildItem(item); } } /* (non-Javadoc) * @see org.jibx.binding.model.IClass#isAccessible(org.jibx.binding.model.IClassItem) */ public boolean isAccessible(IClassItem item) { return m_class.isAccessible(((ClassItemWrapper)item).getClassItem()); } /* (non-Javadoc) * @see org.jibx.binding.model.IClass#isAssignable(org.jibx.binding.model.IClass) */ public boolean isAssignable(IClass other) { String[] sigs; try { sigs = m_class.getInstanceSigs(); } catch (JiBXException e) { throw new IllegalStateException ("Internal error: class information not available"); } String match = other.getSignature(); for (int i = 0; i < sigs.length; i++) { if (match.equals(sigs[i])) { return true; } } return false; } /* (non-Javadoc) * @see org.jibx.binding.model.IClass#getBestMethod(java.lang.String, java.lang.String, java.lang.String[]) */ public IClassItem getBestMethod(String name, String type, String[] args) { ClassItem item = m_class.getBestMethod(name, type, args); if (item == null) { return null; } else { return buildItem(item); } } /* (non-Javadoc) * @see org.jibx.binding.model.IClass#getClassFile() * TODO: eliminate this method */ public ClassFile getClassFile() { return m_class; } /* (non-Javadoc) * @see org.jibx.binding.model.IClass#loadClass() */ public Class loadClass() { String name = m_class.getName(); Class clas = ClassFile.loadClass(name); if (clas == null) { // TODO: this is a kludge try { clas = ClassUtils.class.getClassLoader().loadClass(name); } catch 
(ClassNotFoundException ex) { /* deliberately empty */ } } return clas; } /* (non-Javadoc) * @see org.jibx.binding.model.IClass#getFields() */ public IClassItem[] getFields() { if (m_fields == null) { ClassItem[] items = m_class.getFieldItems(); m_fields = new IClassItem[items.length]; for (int i = 0; i < items.length; i++) { m_fields[i] = buildItem(items[i]); } } return m_fields; } /* (non-Javadoc) * @see org.jibx.binding.model.IClass#getMethods() */ public IClassItem[] getMethods() { if (m_methods == null) { ClassItem[] items = m_class.getMethodItems(); m_methods = new IClassItem[items.length]; for (int i = 0; i < items.length; i++) { m_methods[i] = buildItem(items[i]); } } return m_methods; } /* (non-Javadoc) * @see org.jibx.binding.model.IClass#getJavaDoc() */ public String getJavaDoc() { return null; } /* (non-Javadoc) * @see org.jibx.binding.model.IClass#getLocator() */ public IClassLocator getLocator() { return m_locator; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.cassandra.distributed.test;

import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

import org.apache.cassandra.distributed.Cluster;
import org.apache.cassandra.distributed.api.ICluster;
import org.apache.cassandra.distributed.api.IInstanceConfig;
import org.apache.cassandra.distributed.api.IInvokableInstance;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.utils.concurrent.SimpleCondition;
import org.apache.cassandra.utils.progress.ProgressEventType;

import static java.util.concurrent.TimeUnit.MINUTES;
import static org.apache.cassandra.distributed.api.Feature.GOSSIP;
import static org.apache.cassandra.distributed.api.Feature.NETWORK;
import static org.apache.cassandra.distributed.shared.AssertUtils.assertRows;
import static org.apache.cassandra.distributed.test.ExecUtil.rethrow;

/**
 * In-JVM dtest exercising full repair on a three-node cluster, under several
 * compression configurations and with the {@code forceRepair} option while a
 * replica is down. A single shared cluster is created in {@link #setupCluster()}
 * and reused by every test; each run repopulates its table via {@link #populate}.
 */
public class RepairTest extends TestBaseImpl
{
    // Shared three-node cluster, created once per class (see setupCluster/closeCluster).
    private static ICluster<IInvokableInstance> cluster;

    // Insert rows with keys [start, end) directly (executeInternal, i.e. without
    // coordination) on each of the given nodes.
    private static void insert(ICluster<IInvokableInstance> cluster, String keyspace, int start, int end, int ... nodes)
    {
        String insert = String.format("INSERT INTO %s.test (k, c1, c2) VALUES (?, 'value1', 'value2');", keyspace);
        for (int i = start ; i < end ; ++i)
            for (int node : nodes)
                cluster.get(node).executeInternal(insert, Integer.toString(i));
    }

    // Verify that keys [start, end) exist locally on exactly the listed nodes and
    // on no others. NOTE(review): Arrays.binarySearch requires `nodes` to be sorted
    // ascending — all current callers pass sorted arrays.
    private static void verify(ICluster<IInvokableInstance> cluster, String keyspace, int start, int end, int ... nodes)
    {
        String query = String.format("SELECT k, c1, c2 FROM %s.test WHERE k = ?;", keyspace);
        for (int i = start ; i < end ; ++i)
        {
            for (int node = 1 ; node <= cluster.size() ; ++node)
            {
                Object[][] rows = cluster.get(node).executeInternal(query, Integer.toString(i));
                if (Arrays.binarySearch(nodes, node) >= 0)
                    assertRows(rows, new Object[] { Integer.toString(i), "value1", "value2" });
                else
                    assertRows(rows);
            }
        }
    }

    // Force a memtable flush of the keyspace on the given nodes.
    private static void flush(ICluster<IInvokableInstance> cluster, String keyspace, int ... nodes)
    {
        for (int node : nodes)
            cluster.get(node).runOnInstance(rethrow(() -> StorageService.instance.forceKeyspaceFlush(keyspace)));
    }

    // Build and start a three-node cluster with hints disabled (so only repair can
    // reconcile the deliberately-divergent replicas) and networking/gossip enabled.
    private static ICluster create(Consumer<IInstanceConfig> configModifier) throws IOException
    {
        configModifier = configModifier.andThen(
        config -> config.set("hinted_handoff_enabled", false)
                        .set("commitlog_sync_batch_window_in_ms", 5)
                        .with(NETWORK)
                        .with(GOSSIP)
        );
        return init(Cluster.build().withNodes(3).withConfig(configModifier).start());
    }

    // Run a repair of the keyspace from node 1, blocking until a COMPLETE progress
    // event is observed (up to one minute). NOTE(review): the boolean result of
    // await(...) is ignored, so a timed-out repair is not reported here — the
    // subsequent verify() is what would catch unrepaired data.
    static void repair(ICluster<IInvokableInstance> cluster, String keyspace, Map<String, String> options)
    {
        cluster.get(1).runOnInstance(rethrow(() -> {
            SimpleCondition await = new SimpleCondition();
            StorageService.instance.repair(keyspace, options, ImmutableList.of((tag, event) -> {
                if (event.getType() == ProgressEventType.COMPLETE)
                    await.signalAll();
            })).right.get();
            await.await(1L, MINUTES);
        }));
    }

    // Recreate the test table with the given compression settings and load data so
    // the replicas deliberately disagree: key 1000 is written only to nodes 1 and 2,
    // and flush points differ so mismatches span both sstables and memtables.
    static void populate(ICluster<IInvokableInstance> cluster, String keyspace, String compression) throws Exception
    {
        try
        {
            cluster.schemaChange(String.format("DROP TABLE IF EXISTS %s.test;", keyspace));
            cluster.schemaChange(String.format("CREATE TABLE %s.test (k text, c1 text, c2 text, PRIMARY KEY (k)) WITH compression = %s", keyspace, compression));

            insert(cluster, keyspace, 0, 1000, 1, 2, 3);
            flush(cluster, keyspace, 1);
            insert(cluster, keyspace, 1000, 1001, 1, 2);
            insert(cluster, keyspace, 1001, 2001, 1, 2, 3);
            flush(cluster, keyspace, 1, 2, 3);

            verify(cluster, keyspace, 0, 1000, 1, 2, 3);
            verify(cluster, keyspace, 1000, 1001, 1, 2);
            verify(cluster, keyspace, 1001, 2001, 1, 2, 3);
        }
        catch (Throwable t)
        {
            // a broken populate leaves the shared cluster unusable; tear it down
            cluster.close();
            throw t;
        }
    }

    // Populate, repair (sequential or parallel), then check key 1000 has been
    // propagated to node 3 along with everything else.
    void repair(ICluster<IInvokableInstance> cluster, boolean sequential, String compression) throws Exception
    {
        populate(cluster, KEYSPACE, compression);
        repair(cluster, KEYSPACE, ImmutableMap.of("parallelism", sequential ? "sequential" : "parallel"));
        verify(cluster, KEYSPACE, 0, 2001, 1, 2, 3);
    }

    // Populate, stop one node, then run a forced repair (which proceeds despite the
    // down replica). The caller is responsible for restarting the node.
    void shutDownNodesAndForceRepair(ICluster<IInvokableInstance> cluster, String keyspace, int downNode) throws Exception
    {
        populate(cluster, keyspace, "{'enabled': false}");
        cluster.get(downNode).shutdown().get(5, TimeUnit.SECONDS);
        repair(cluster, keyspace, ImmutableMap.of("forceRepair", "true"));
    }

    @BeforeClass
    public static void setupCluster() throws IOException
    {
        cluster = create(config -> {});
    }

    @AfterClass
    public static void closeCluster() throws Exception
    {
        if (cluster != null)
            cluster.close();
    }

    @Test
    public void testSequentialRepairWithDefaultCompression() throws Exception
    {
        repair(cluster, true, "{'class': 'org.apache.cassandra.io.compress.LZ4Compressor'}");
    }

    @Test
    public void testParallelRepairWithDefaultCompression() throws Exception
    {
        repair(cluster, false, "{'class': 'org.apache.cassandra.io.compress.LZ4Compressor'}");
    }

    @Test
    public void testSequentialRepairWithMinCompressRatio() throws Exception
    {
        repair(cluster, true, "{'class': 'org.apache.cassandra.io.compress.LZ4Compressor', 'min_compress_ratio': 4.0}");
    }

    @Test
    public void testParallelRepairWithMinCompressRatio() throws Exception
    {
        repair(cluster, false, "{'class': 'org.apache.cassandra.io.compress.LZ4Compressor', 'min_compress_ratio': 4.0}");
    }

    @Test
    public void testSequentialRepairWithoutCompression() throws Exception
    {
        repair(cluster, true, "{'enabled': false}");
    }

    @Test
    public void testParallelRepairWithoutCompression() throws Exception
    {
        repair(cluster, false, "{'enabled': false}");
    }

    @Test
    public void testForcedNormalRepairWithOneNodeDown() throws Exception
    {
        // The test uses its own keyspace with rf == 2
        String forceRepairKeyspace = "test_force_repair_keyspace";
        int rf = 2;
        cluster.schemaChange("CREATE KEYSPACE " + forceRepairKeyspace + " WITH replication = {'class': 'SimpleStrategy', 'replication_factor': " + rf + "};");

        try
        {
            shutDownNodesAndForceRepair(cluster, forceRepairKeyspace, 3); // shutdown node 3 after inserting
            DistributedRepairUtils.assertParentRepairSuccess(cluster, 1, forceRepairKeyspace, "test", row -> {
                Set<String> successfulRanges = row.getSet("successful_ranges");
                Set<String> requestedRanges = row.getSet("requested_ranges");
                Assert.assertNotNull("Found no successful ranges", successfulRanges);
                Assert.assertNotNull("Found no requested ranges", requestedRanges);
                Assert.assertEquals("Requested ranges count should equals to replication factor", rf, requestedRanges.size());
                Assert.assertTrue("Given clusterSize = 3, RF = 2 and 1 node down in the replica set, it should yield only 1 successful repaired range.",
                                  successfulRanges.size() == 1 && !successfulRanges.contains("")); // the successful ranges set should not only contain empty string
            });
        }
        finally
        {
            // bring the node 3 back up
            if (cluster.get(3).isShutdown())
                cluster.get(3).startup(cluster);
        }
    }
}
package org.jolokia.jvmagent;

/*
 * Copyright 2009-2014  Roland Huss
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.*;
import java.lang.reflect.Field;
import java.net.*;
import java.security.*;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.net.ssl.*;

import com.sun.net.httpserver.HttpServer;
import org.jolokia.Version;
import org.jolokia.jvmagent.security.KeyStoreUtil;
import org.jolokia.test.util.EnvTestUtil;
import org.jolokia.util.Base64Util;
import org.testng.annotations.Test;

import static org.testng.Assert.*;

/**
 * End-to-end tests for {@link JolokiaServer}: each test starts a real server
 * (HTTP or HTTPS), performs a request against its URL, and checks the version
 * response. The HTTPS tests temporarily replace the JVM-wide default
 * {@link HostnameVerifier} and {@link SSLSocketFactory} and restore them afterwards.
 *
 * @author roland
 * @author nevenr
 * @since 31.08.11
 */
public class JolokiaServerTest {

    // Plain-HTTP roundtrips across the supported executor configurations
    // (null == default configuration).
    @Test
    public void http() throws Exception {
        String configs[] = {
            null,
            "executor=fixed,threadNr=5",
            "executor=cached",
            "executor=single",
            "executor=fixed,threadNr=5,threadNamePrefix=JolokiaServerTestExecutorFixed",
            "executor=cached,threadNamePrefix=JolokiaServerTestExecutorFixedCached",
            "executor=single,threadNamePrefix=JolokiaServerTestExecutorFixedSingle",
            "executor=fixed,threadNamePrefix=jolokia-,threadNr=5",
        };
        for (String c : configs) {
            roundtrip(c, true);
        }
    }

    // A request without credentials against a user/password-protected server
    // must be rejected with HTTP 401.
    @Test(expectedExceptions = IOException.class,expectedExceptionsMessageRegExp = ".*401.*")
    public void httpWithAuthenticationRejected() throws Exception {
        Map config = new HashMap();
        config.put("user", "roland");
        config.put("password", "s!cr!t");
        config.put("port", "0");
        roundtrip(config, true);
    }

    // port=0 lets the server choose a free port itself.
    @Test
    public void serverPicksThePort() throws Exception {
        roundtrip("host=localhost,port=0", true);
    }

    // SSL Checks ========================================================================================

    /*
       Test Scenarios
       ==============

       - 1 no client auth:
         - 11 https only (no certs)
         - 12 with keystore
         - 13 with PEM server cert
           - 131 without CA validation
           - 132 with CA validation (positive)
       - 2 with client auth:
         - 21 self-signed client cert --> fail
         - 22 properly signed client cert --> ok
         - 23 with 'extended key usage check'
           - 231 with extended key usage == client --> ok
           - 232 with extended key usage == server --> fail
           - 233 with no extended key usage:
             - 2331 with 'extendedClientCheck' options == true --> fail
             - 2332 with 'extendedClientCheck' option == false --> ok
         - 24 with 'clientPrincipal' given
           - 241 matching clientPrincipal --> ok
           - 241 non-matching clientPrincipal --> fail
         - 25 no CA given to verify against --> fail
         - 26 with clientPrincipal and basic auth
     */

    @Test
    public void t_11_https_only() throws Exception {
        httpsRoundtrip("agentId=test", false);
    }

    @Test
    public void t_12_with_keystore() throws Exception {
        httpsRoundtrip("keystore=" + getResourcePath("/keystore") + ",keystorePassword=jetty7", false);
    }

    // serverCert without the matching serverKey is a configuration error.
    @Test(expectedExceptions = IllegalArgumentException.class, expectedExceptionsMessageRegExp = ".*without.*key.*")
    public void serverCertWithoutKey() throws Exception {
        httpsRoundtrip("serverCert=" + getCertPath("server/cert.pem"), false);
    }

    @Test
    public void t_131_pem_without_ca() throws Exception {
        httpsRoundtrip("serverCert=" + getCertPath("server/cert.pem") + "," +
                       "serverKey=" + getCertPath("server/key.pem"), false);
    }

    @Test
    public void t_132_pem_with_ca() throws Exception {
        httpsRoundtrip(getFullCertSetup(), true);
    }

    @Test(expectedExceptions = IOException.class)
    public void t_21_self_signed_client_cert_fail() throws Exception {
        httpsRoundtrip("useSslClientAuthentication=true," + getFullCertSetup(),
                       true, "client/self-signed-with-key-usage");
    }

    @Test
    public void t_22_signed_client_cert() throws Exception {
        // default is no extended client check
        httpsRoundtrip("useSslClientAuthentication=true," + getFullCertSetup(),
                       true, "client/without-key-usage");
    }

    @Test
    public void t_231_with_extended_client_key_usage() throws Exception {
        httpsRoundtrip("useSslClientAuthentication=true,extendedClientCheck=true," + getFullCertSetup(),
                       true, "client/with-key-usage");
    }

    @Test(expectedExceptions = IOException.class)
    public void t_232_with_wrong_extended_client_key_usage() throws Exception {
        httpsRoundtrip("useSslClientAuthentication=true,extendedClientCheck=true," + getFullCertSetup(),
                       true, "client/with-wrong-key-usage");
    }

    @Test(expectedExceptions = IOException.class, expectedExceptionsMessageRegExp = ".*403.*")
    public void t_2331_without_extended_client_key_usage() throws Exception {
        httpsRoundtrip("useSslClientAuthentication=true,extendedClientCheck=true," + getFullCertSetup(),
                       true, "client/without-key-usage");
    }

    @Test
    public void t_2332_without_extended_client_key_usage_allowed() throws Exception {
        httpsRoundtrip("useSslClientAuthentication=true,extendedClientCheck=false," + getFullCertSetup(),
                       true, "client/with-key-usage");
    }

    @Test(expectedExceptions = IOException.class)
    public void t_2333_with_wrong_extended_client_key_usage_allowed() throws Exception {
        httpsRoundtrip("useSslClientAuthentication=true,extendedClientCheck=false," + getFullCertSetup(),
                       true, "client/with-wrong-key-usage");
    }

    // The '\\,' / '\\=' escapes below keep the DN inside a single agent-config option.
    @Test
    public void t_241_with_client_principal() throws Exception {
        httpsRoundtrip("useSslClientAuthentication=true,clientPrincipal=O\\=jolokia.org\\,CN\\=Client signed with client key usage," + getFullCertSetup(),
                       true, "client/with-key-usage");
    }

    @Test(expectedExceptions = IOException.class, expectedExceptionsMessageRegExp = ".*403.*")
    public void t_242_with_wrong_client_principal() throws Exception {
        httpsRoundtrip("useSslClientAuthentication=true,clientPrincipal=O=microsoft.com," + getFullCertSetup(),
                       true, "client/with-key-usage");
    }

    @Test(expectedExceptions = IllegalArgumentException.class, expectedExceptionsMessageRegExp = ".*no CA.*")
    public void t_25_no_ca_given() throws Exception {
        httpsRoundtrip("useSslClientAuthentication=true," +
                       "serverCert=" + getCertPath("server/cert.pem") + "," +
                       "serverKey=" + getCertPath("server/key.pem"),
                       true, "client/with-key-usage");
    }

    @Test
    public void t_261_with_client_principal() throws Exception {
        httpsRoundtrip("authMode=basic,user=admin,password=password,useSslClientAuthentication=true,clientPrincipal=O\\=jolokia.org\\,CN\\=Client signed with client key usage," + getFullCertSetup(),
                       true, "client/with-key-usage");
    }

    @Test(expectedExceptions = IOException.class, expectedExceptionsMessageRegExp = ".*401.*")
    public void t_262_with_wrong_client_principal() throws Exception {
        httpsRoundtrip("authMode=basic,user=admin,password=password,useSslClientAuthentication=true,clientPrincipal=O=microsoft.com," + getFullCertSetup(),
                       true, "client/with-key-usage");
    }

    @Test
    public void t_263_with_basic_auth() throws Exception {
        httpsRoundtrip("authMode=basic,user=admin,password=password,useSslClientAuthentication=true,clientPrincipal=O=microsoft.com," + getFullCertSetup(),
                       true, "client/with-key-usage", "admin:password");
    }

    @Test(expectedExceptions = IOException.class, expectedExceptionsMessageRegExp = ".*401.*")
    public void t_264_with_wrong_basic_auth() throws Exception {
        httpsRoundtrip("authMode=basic,user=admin,password=password,useSslClientAuthentication=true,clientPrincipal=O=microsoft.com," + getFullCertSetup(),
                       true, "client/with-key-usage", "admin:wrong");
    }

    // NOTE(review): duplicate scenario number with the method above (both t_264_*);
    // consider renaming to keep the numbering scheme consistent.
    @Test(expectedExceptions = IOException.class)
    public void t_264_with_basic_auth_and_wrong_client_cert() throws Exception {
        httpsRoundtrip("authMode=basic,user=admin,password=password,useSslClientAuthentication=true,clientPrincipal=O=microsoft.com," + getFullCertSetup(),
                       true, "client/self-signed-with-key-usage", "admin:wrong");
    }

    // ==================================================================================================

    // Server cert + key + CA cert in agent-config syntax, shared by most HTTPS tests.
    private String getFullCertSetup() {
        return "serverCert=" + getCertPath("server/cert.pem") + "," +
               "serverKey=" + getCertPath("server/key.pem") + "," +
               "caCert=" + getCertPath("ca/cert.pem");
    }

    @Test
    public void sslWithAdditionalHttpsSettings() throws Exception {
        httpsRoundtrip("keystore=" + getResourcePath("/keystore") + ",keystorePassword=jetty7" +
                       ",config=" + getResourcePath("/agent-test-additionalHttpsConf.properties"), false);
    }

    // Exercise the protocol/cipher-suite restrictions from the external config file:
    // every supported cipher suite is tried per protocol; a handshake must succeed
    // at least once iff the protocol is enabled in the config.
    @Test
    public void sslWithSpecialHttpsSettings() throws Exception {
        JvmAgentConfig config = new JvmAgentConfig(
            prepareConfigString("host=localhost,port=" + EnvTestUtil.getFreePort() + ",protocol=https,"
                                + getFullCertSetup()
                                + ",config=" + getResourcePath("/agent-test-specialHttpsSettings.properties")));
        JolokiaServer server = new JolokiaServer(config, false);
        server.start();

        // Skipping hostname verification because the cert doesn't have a SAN of localhost
        HostnameVerifier verifier = createHostnameVerifier();
        HostnameVerifier oldVerifier = HttpsURLConnection.getDefaultHostnameVerifier();
        SSLSocketFactory oldSslSocketFactory = HttpsURLConnection.getDefaultSSLSocketFactory();

        List<String> cipherSuites = Arrays.asList(config.getSSLCipherSuites());
        List<String> protocols = Arrays.asList(config.getSSLProtocols());

        final String[] protocolCandidates;
        if ("IBM Corporation".equals(System.getProperty("java.vendor"))) {
            /* IBM's VM is technically capable to use SSL but due to POODLE it has been
               disabled by default for quite a while and throws an exception if an attempt
               is made to use it. Take note that this can lead to a bit of confusion as
               the cipher suites all are prefixed with SSL_ on J9 (compared to TLS_ on
               OpenJDK/Oracle). */
            protocolCandidates = new String[]{"TLSv1", "TLSv1.1", "TLSv1.2", "TLSv1.3"};
        } else {
            protocolCandidates = new String[]{"TLSv1.2"};
            // Readd 1.3 when everywhere available:
            // protocolCandidates = new String[]{"TLSv1.2", "TLSv1.3"};
        }

        for (String protocol : protocolCandidates) {
            // Make sure at least one connection for this protocol succeeds (if expected to)
            boolean connectionSucceeded = false;
            for (String cipherSuite : oldSslSocketFactory.getSupportedCipherSuites()) {
                // only attempt suites the server was configured with
                if (!cipherSuites.contains(cipherSuite)) continue;
                try {
                    TrustManager tms[] = getTrustManagers(true);
                    SSLContext sc = SSLContext.getInstance(protocol);
                    sc.init(new KeyManager[0], tms, new java.security.SecureRandom());

                    HttpsURLConnection.setDefaultHostnameVerifier(verifier);
                    // FakeSSLSocketFactory pins the client to exactly this protocol + suite
                    HttpsURLConnection.setDefaultSSLSocketFactory(
                        new FakeSSLSocketFactory(sc.getSocketFactory(), new String[]{protocol}, new String[]{cipherSuite}));

                    URL url = new URL(server.getUrl());
                    String resp = EnvTestUtil.readToString(url.openStream());
                    assertTrue(
                        resp.matches(".*type.*version.*" + Version.getAgentVersion() + ".*"));
                    if (!protocols.contains(protocol) || !cipherSuites.contains(cipherSuite)) {
                        fail(String.format("Expected SSLHandshakeException with the %s protocol and %s cipher suite", protocol, cipherSuite));
                    }
                    connectionSucceeded = true;
                } catch (javax.net.ssl.SSLHandshakeException e) {
                    // We make sure at least one connection with this protocol succeeds if expected
                    // down below
                } finally {
                    // always restore the JVM-wide defaults
                    HttpsURLConnection.setDefaultHostnameVerifier(oldVerifier);
                    HttpsURLConnection.setDefaultSSLSocketFactory(oldSslSocketFactory);
                }
            }
            if (protocols.contains(protocol) && !connectionSucceeded) {
                fail("Expected at least one connection to succeed on " + protocol);
            }
        }
        server.stop();
    }

    // 'user' without 'password' must be rejected during config parsing.
    @Test(expectedExceptions = IllegalArgumentException.class,expectedExceptionsMessageRegExp = ".*password.*")
    public void invalidConfig() throws IOException, InterruptedException {
        JvmAgentConfig cfg = new JvmAgentConfig("user=roland,port=" + EnvTestUtil.getFreePort());
        Thread.sleep(1000);
        new JolokiaServer(cfg, false);
    }

    // When a pre-built HttpServer is handed in, the server must not create its own
    // (verified via reflection: the private 'httpServer' field stays null).
    @Test
    public void customHttpServer() throws IOException, NoSuchFieldException, IllegalAccessException {
        HttpServer httpServer = HttpServer.create();
        JvmAgentConfig cfg = new JvmAgentConfig("");
        JolokiaServer server = new JolokiaServer(httpServer, cfg, false);
        Field field = JolokiaServer.class.getDeclaredField("httpServer");
        field.setAccessible(true);
        assertNull(field.get(server));
        server.start();
        server.stop();
    }

    // ==================================================================

    private String getCertPath(String pCert) {
        return getResourcePath("/certs/" + pCert);
    }

    // Resolve a classpath resource to a filesystem path (works only when running
    // from an exploded classpath, not from within a jar).
    // NOTE(review): single-arg URLDecoder.decode is deprecated (platform charset);
    // the two-arg UTF-8 variant would be preferable — confirm before changing.
    private String getResourcePath(String relativeResourcePath) {
        URL ksURL = this.getClass().getResource(relativeResourcePath);
        if (ksURL != null && "file".equalsIgnoreCase(ksURL.getProtocol())) {
            return URLDecoder.decode(ksURL.getPath());
        }
        throw new IllegalStateException(ksURL + " is not a file URL");
    }

    private void roundtrip(Map<String,String> pConfig, boolean pDoRequest) throws Exception {
        checkServer(new JvmAgentConfig(pConfig), pDoRequest);
    }

    private void roundtrip(String pConfig, boolean pDoRequest) throws Exception {
        JvmAgentConfig config = new JvmAgentConfig(prepareConfigString(pConfig));
        checkServer(config, pDoRequest);
    }

    private void httpsRoundtrip(String pConfig, boolean pValidateCa) throws Exception {
        httpsRoundtrip(pConfig, pValidateCa, "client/with-key-usage");
    }

    private void httpsRoundtrip(String pConfig, boolean pValidateCa, String clientCert) throws Exception {
        JvmAgentConfig config = new JvmAgentConfig(
            prepareConfigString("host=localhost,port=" + EnvTestUtil.getFreePort() + ",protocol=https," + pConfig));
        checkServer(config, true, createHostnameVerifier(), pValidateCa, clientCert);
    }

    private void httpsRoundtrip(String pConfig, boolean pValidateCa, String clientCert, String pUserPassword) throws Exception {
        JvmAgentConfig config = new JvmAgentConfig(
            prepareConfigString("host=localhost,port=" + EnvTestUtil.getFreePort() + ",protocol=https," + pConfig));
        checkServer(config, true, createHostnameVerifier(), pValidateCa, clientCert, pUserPassword);
    }

    // Accept-everything verifier: the test certs have no SAN for localhost.
    private HostnameVerifier createHostnameVerifier() {
        return new HostnameVerifier() {
            @Override
            public boolean verify(String host, SSLSession sslSession) {
                return true;
            }
        };
    }

    // Append host=localhost and, unless the caller already chose one, a free port.
    private String prepareConfigString(String pConfig) throws IOException {
        String c = pConfig != null ? pConfig + "," : "";
        boolean portSpecified = c.contains("port=");
        c = c + "host=localhost,";
        if (!portSpecified) {
            int port = EnvTestUtil.getFreePort();
            c = c + "port=" + port;
        }
        return c;
    }

    private void checkServer(JvmAgentConfig pConfig, boolean pDoRequest) throws Exception {
        checkServer(pConfig, pDoRequest, null, false, null);
    }

    // Either trust only the test CA (pValidateCa) or trust everything.
    private TrustManager[] getTrustManagers(final boolean pValidateCa)
        throws KeyStoreException, CertificateException, NoSuchAlgorithmException, IOException {
        if (!pValidateCa) {
            return new TrustManager[] { getAllowAllTrustManager() };
        } else {
            KeyStore keystore = KeyStore.getInstance("JKS");
            keystore.load(null);
            KeyStoreUtil.updateWithCaPem(keystore, new File(getCertPath("ca/cert.pem")));
            TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
            tmf.init(keystore);
            return tmf.getTrustManagers();
        }
    }

    private TrustManager getAllowAllTrustManager() {
        return new X509TrustManager() {
            public X509Certificate[] getAcceptedIssuers() {
                return null;
            }

            public void checkClientTrusted(X509Certificate[] certs, String authType) {
                System.out.println(certs);
            }

            public void checkServerTrusted(X509Certificate[] certs, String authType) {
                System.out.println(certs);
            }
        };
    }

    private void checkServer(JvmAgentConfig pConfig, boolean pDoRequest, HostnameVerifier pVerifier,
                             boolean pValidateCa, String pClientCert) throws Exception {
        checkServer(pConfig, pDoRequest, pVerifier, pValidateCa, pClientCert, null);
    }

    // Start the server, optionally set up client-side SSL (trust + client cert) and
    // basic-auth header, request the agent URL, and verify the version response.
    // JVM-global SSL defaults are restored in the finally block.
    private void checkServer(JvmAgentConfig pConfig, boolean pDoRequest, HostnameVerifier pVerifier,
                             boolean pValidateCa, String pClientCert, String pUserPassword) throws Exception {
        JolokiaServer server = new JolokiaServer(pConfig, false);
        server.start();
        //Thread.sleep(2000);
        HostnameVerifier oldVerifier = HttpsURLConnection.getDefaultHostnameVerifier();
        SSLSocketFactory oldSslSocketFactory = HttpsURLConnection.getDefaultSSLSocketFactory();
        try {
            if (pDoRequest) {
                if (pVerifier != null) {
                    HttpsURLConnection.setDefaultHostnameVerifier(pVerifier);
                }
                TrustManager tms[] = null;
                KeyManager kms[] = null;
                SSLContext sc = SSLContext.getInstance("SSL");
                tms = getTrustManagers(pValidateCa);
                if (pClientCert != null) {
                    // load the client certificate from the PKCS12 bundle on the test classpath
                    KeyStore ks = KeyStore.getInstance("PKCS12");
                    InputStream fis = getClass().getResourceAsStream("/certs/" + pClientCert + "/cert.p12");
                    ks.load(fis, "1234".toCharArray());
                    KeyManagerFactory kmf;
                    if ("IBM Corporation".equals(System.getProperty("java.vendor"))) {
                        kmf = KeyManagerFactory.getInstance("IBMX509");
                    } else {
                        kmf = KeyManagerFactory.getInstance("SunX509");
                    }
                    kmf.init(ks, "1234".toCharArray());
                    kms = kmf.getKeyManagers() ;
                }
                sc.init(kms, tms, new java.security.SecureRandom());
                HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory());
            }
            URL url = new URL(server.getUrl());
            URLConnection uc = url.openConnection();
            if (pUserPassword != null) {
                uc.setRequestProperty("Authorization", "Basic " + Base64Util.encode(pUserPassword.getBytes()));
            }
            uc.connect();
            String resp = EnvTestUtil.readToString(uc.getInputStream());
            assertTrue(resp.matches(".*type.*version.*" + Version.getAgentVersion() + ".*"));
        } finally {
            server.stop();
            try {
                Thread.sleep(10);
            } catch (InterruptedException e) {

            }
            HttpsURLConnection.setDefaultHostnameVerifier(oldVerifier);
            HttpsURLConnection.setDefaultSSLSocketFactory(oldSslSocketFactory);
        }
    }

    // FakeSSLSocketFactory wraps a normal SSLSocketFactory so it can set the explicit SSL / TLS
    // protocol version(s) and cipher suite(s)
    private static class FakeSSLSocketFactory extends SSLSocketFactory {

        private String[] cipherSuites;
        private String[] protocols;
        private SSLSocketFactory socketFactory;

        public FakeSSLSocketFactory(SSLSocketFactory socketFactory, String[] protocols, String[] cipherSuites) {
            super();
            this.socketFactory = socketFactory;
            this.protocols = protocols;
            this.cipherSuites = cipherSuites;
        }

        public Socket createSocket(InetAddress host, int port) throws IOException {
            return wrapSocket((SSLSocket)socketFactory.createSocket(host, port));
        }

        public Socket createSocket(Socket s, String host, int port, boolean autoClose) throws IOException {
            return wrapSocket((SSLSocket)socketFactory.createSocket(s, host, port, autoClose));
        }

        public Socket createSocket(InetAddress address, int port, InetAddress localAddress, int localPort) throws IOException {
            return wrapSocket((SSLSocket)socketFactory.createSocket(address, port, localAddress, localPort));
        }

        public Socket createSocket(String host, int port) throws IOException {
            return wrapSocket((SSLSocket)socketFactory.createSocket(host, port));
        }

        public Socket createSocket(String host, int port, InetAddress localHost, int localPort) throws IOException {
            return wrapSocket((SSLSocket)socketFactory.createSocket(host, port, localHost, localPort));
        }

        public String[] getDefaultCipherSuites() {
            return socketFactory.getDefaultCipherSuites();
        }

        public String[] getSupportedCipherSuites() {
            return socketFactory.getSupportedCipherSuites();
        }

        // restrict every created socket to the pinned protocol(s) and suite(s)
        private Socket wrapSocket(SSLSocket sslSocket) {
            sslSocket.setEnabledProtocols(this.protocols);
            sslSocket.setEnabledCipherSuites(this.cipherSuites);
            return sslSocket;
        }
    }
}
/*-
 * See the file LICENSE for redistribution information.
 *
 * Copyright (c) 2002-2010 Oracle. All rights reserved.
 *
 * $Id: SecondaryTest.java,v 1.59 2010/01/04 15:51:07 cwl Exp $
 */

package com.sleepycat.je.test;

import java.util.Arrays;
import java.util.List;

import junit.framework.Test;

import com.sleepycat.je.Cursor;
import com.sleepycat.je.Database;
import com.sleepycat.je.DatabaseConfig;
import com.sleepycat.je.DatabaseEntry;
import com.sleepycat.je.DatabaseException;
import com.sleepycat.je.EnvironmentConfig;
import com.sleepycat.je.LockConflictException;
import com.sleepycat.je.LockMode;
import com.sleepycat.je.OperationStatus;
import com.sleepycat.je.SecondaryConfig;
import com.sleepycat.je.SecondaryCursor;
import com.sleepycat.je.SecondaryDatabase;
import com.sleepycat.je.SecondaryKeyCreator;
import com.sleepycat.je.Transaction;
import com.sleepycat.je.UniqueConstraintException;
import com.sleepycat.je.config.EnvironmentParams;
import com.sleepycat.je.junit.JUnitThread;
import com.sleepycat.je.util.TestUtils;

/**
 * Tests secondary-database maintenance: that primary writes/deletes are
 * reflected in associated secondaries, and that cursor and database
 * operations behave per the SecondaryDatabase/SecondaryCursor contracts.
 */
public class SecondaryTest extends MultiKeyTxnTestCase {

    // Number of primary records inserted by most tests.
    private static final int NUM_RECS = 5;
    // Secondary key = primary key + KEY_OFFSET (see the test key creator).
    private static final int KEY_OFFSET = 100;

    // Helper thread used by deadlock-style tests; interrupted in tearDown.
    private JUnitThread junitThread;

    protected static EnvironmentConfig envConfig = TestUtils.initEnvConfig();
    static {
        envConfig.setConfigParam(EnvironmentParams.ENV_CHECK_LEAKS.getName(), "false");
        envConfig.setConfigParam(EnvironmentParams.NODE_MAX.getName(), "6");
        envConfig.setTxnNoSync(Boolean.getBoolean(TestUtils.NO_SYNC));
        envConfig.setLockTimeout(1); // to speed up intentional deadlocks
        envConfig.setAllowCreate(true);
    }

    static protected Class<?> testClass = SecondaryTest.class;

    public static Test suite() {
        return multiKeyTxnTestSuite(testClass, envConfig, null);
    }

    @Override
    public void tearDown() throws Exception {
        super.tearDown();
        // Make sure any helper thread is fully stopped before the next test.
        if (junitThread != null) {
            while (junitThread.isAlive()) {
                junitThread.interrupt();
                Thread.yield();
            }
            junitThread = null;
        }
    }

    /**
     * Exercises every put/delete path on the primary (and delete on the
     * secondary) and verifies the secondary index stays consistent.
     */
    public void testPutAndDelete() throws
        DatabaseException {

        SecondaryDatabase secDb = initDb();
        Database priDb = secDb.getPrimaryDatabase();
        DatabaseEntry data = new DatabaseEntry();
        DatabaseEntry key = new DatabaseEntry();
        OperationStatus status;
        Transaction txn = txnBegin();

        /* Database.put() */
        status = priDb.put(txn, entry(1), entry(2));
        assertSame(OperationStatus.SUCCESS, status);
        status = secDb.get(txn, entry(102), key, data, LockMode.DEFAULT);
        assertSame(OperationStatus.SUCCESS, status);
        assertDataEquals(entry(1), key);
        assertDataEquals(entry(2), data);

        /* Database.putNoOverwrite() */
        status = priDb.putNoOverwrite(txn, entry(1), entry(1));
        assertSame(OperationStatus.KEYEXIST, status);
        status = secDb.get(txn, entry(102), key, data, LockMode.DEFAULT);
        assertSame(OperationStatus.SUCCESS, status);
        assertDataEquals(entry(1), key);
        assertDataEquals(entry(2), data);

        /* Database.put() overwrite: old secondary key 102 must be replaced by 103. */
        status = priDb.put(txn, entry(1), entry(3));
        assertSame(OperationStatus.SUCCESS, status);
        status = secDb.get(txn, entry(102), key, data, LockMode.DEFAULT);
        assertSame(OperationStatus.NOTFOUND, status);
        status = secDb.get(txn, entry(103), key, data, LockMode.DEFAULT);
        assertSame(OperationStatus.SUCCESS, status);
        assertDataEquals(entry(1), key);
        assertDataEquals(entry(3), data);

        /* Database.delete() */
        status = priDb.delete(txn, entry(1));
        assertSame(OperationStatus.SUCCESS, status);
        status = priDb.delete(txn, entry(1));
        assertSame(OperationStatus.NOTFOUND, status);
        status = secDb.get(txn, entry(103), key, data, LockMode.DEFAULT);
        assertSame(OperationStatus.NOTFOUND, status);

        /* SecondaryDatabase.delete(): deleting via the secondary removes the primary record too. */
        status = priDb.put(txn, entry(1), entry(1));
        assertSame(OperationStatus.SUCCESS, status);
        status = priDb.put(txn, entry(2), entry(1));
        assertSame(OperationStatus.SUCCESS, status);
        status = secDb.get(txn, entry(101), key, data, LockMode.DEFAULT);
        assertSame(OperationStatus.SUCCESS, status);
        assertDataEquals(entry(1), key);
        assertDataEquals(entry(1), data);
        status = secDb.delete(txn, entry(101));
        assertSame(OperationStatus.SUCCESS, status);
        status = secDb.delete(txn, entry(101));
        assertSame(OperationStatus.NOTFOUND, status);
        status = secDb.get(txn, entry(101), key, data, LockMode.DEFAULT);
        assertSame(OperationStatus.NOTFOUND, status);
        status = priDb.get(txn, entry(1), data, LockMode.DEFAULT);
        assertSame(OperationStatus.NOTFOUND, status);
        status = priDb.get(txn, entry(2), data, LockMode.DEFAULT);
        assertSame(OperationStatus.NOTFOUND, status);

        /*
         * Database.putNoDupData() cannot be called since the primary cannot be
         * configured for duplicates.
         */

        /* Primary and secondary are empty now. */

        /* Get a txn for a cursor. */
        txnCommit(txn);
        txn = txnBeginCursor();

        Cursor priCursor = null;
        SecondaryCursor secCursor = null;
        try {
            priCursor = priDb.openCursor(txn, null);
            secCursor = secDb.openSecondaryCursor(txn, null);

            /* Cursor.putNoOverwrite() */
            status = priCursor.putNoOverwrite(entry(1), entry(2));
            assertSame(OperationStatus.SUCCESS, status);
            status = secCursor.getSearchKey(entry(102), key, data, LockMode.DEFAULT);
            assertSame(OperationStatus.SUCCESS, status);
            assertDataEquals(entry(1), key);
            assertDataEquals(entry(2), data);

            /* Cursor.putCurrent() */
            status = priCursor.putCurrent(entry(3));
            assertSame(OperationStatus.SUCCESS, status);
            status = secCursor.getSearchKey(entry(102), key, data, LockMode.DEFAULT);
            assertSame(OperationStatus.NOTFOUND, status);
            status = secCursor.getSearchKey(entry(103), key, data, LockMode.DEFAULT);
            assertSame(OperationStatus.SUCCESS, status);
            assertDataEquals(entry(1), key);
            assertDataEquals(entry(3), data);

            /* Cursor.delete() */
            status = priCursor.delete();
            assertSame(OperationStatus.SUCCESS, status);
            status = priCursor.delete();
            assertSame(OperationStatus.KEYEMPTY, status);
            status = secCursor.getSearchKey(entry(103), key, data, LockMode.DEFAULT);
            assertSame(OperationStatus.NOTFOUND, status);
            status = priCursor.getSearchKey(entry(1), data, LockMode.DEFAULT);
            assertSame(OperationStatus.NOTFOUND, status);

            /* Cursor.put() */
            status = priCursor.put(entry(1), entry(4));
            assertSame(OperationStatus.SUCCESS, status);
            status = secCursor.getSearchKey(entry(104), key, data, LockMode.DEFAULT);
            assertSame(OperationStatus.SUCCESS, status);
            assertDataEquals(entry(1), key);
            assertDataEquals(entry(4), data);

            /* SecondaryCursor.delete(): also removes the primary record. */
            status = secCursor.delete();
            assertSame(OperationStatus.SUCCESS, status);
            status = secCursor.delete();
            assertSame(OperationStatus.KEYEMPTY, status);
            status = secCursor.getCurrent(new DatabaseEntry(), key, data, LockMode.DEFAULT);
            assertSame(OperationStatus.KEYEMPTY, status);
            status = secCursor.getSearchKey(entry(104), key, data, LockMode.DEFAULT);
            assertSame(OperationStatus.NOTFOUND, status);
            status = priCursor.getSearchKey(entry(1), data, LockMode.DEFAULT);
            assertSame(OperationStatus.NOTFOUND, status);

            /*
             * Cursor.putNoDupData() cannot be called since the primary cannot
             * be configured for duplicates.
             */

            /* Primary and secondary are empty now. */
        } finally {
            if (secCursor != null) {
                secCursor.close();
            }
            if (priCursor != null) {
                priCursor.close();
            }
        }
        txnCommit(txn);
        secDb.close();
        priDb.close();
    }

    /**
     * Verifies that puts with partial DatabaseEntry data are correctly merged
     * with the existing record before the secondary key is extracted.
     */
    public void testPartialDataPut() throws DatabaseException {
        SecondaryDatabase secDb = initDb();
        Database priDb = secDb.getPrimaryDatabase();
        DatabaseEntry data = new DatabaseEntry();
        DatabaseEntry key = new DatabaseEntry();
        OperationStatus status;
        Transaction txn = txnBegin();

        /* Database.putNoOverwrite() */
        status = priDb.putNoOverwrite(txn, entry(1), partialEntry(0, 1));
        assertSame(OperationStatus.SUCCESS, status);
        status = secDb.get(txn, entry(101), key, data, LockMode.DEFAULT);
        assertSame(OperationStatus.SUCCESS, status);
        assertDataEquals(entry(1), key);
        assertDataEquals(entry(1), data);

        /* Database.put() with a partial entry updates the secondary key. */
        status = priDb.put(txn, entry(1), partialEntry(1, 2));
        assertSame(OperationStatus.SUCCESS, status);
        status = secDb.get(txn, entry(102), key, data, LockMode.DEFAULT);
        assertSame(OperationStatus.SUCCESS, status);
        assertDataEquals(entry(1), key);
        assertDataEquals(entry(2), data);
        status = priDb.put(txn, entry(1), partialEntry(2, 3));
        assertSame(OperationStatus.SUCCESS, status);
        status = secDb.get(txn, entry(102), key, data, LockMode.DEFAULT);
        assertSame(OperationStatus.NOTFOUND, status);
        status = secDb.get(txn, entry(103), key, data, LockMode.DEFAULT);
        assertSame(OperationStatus.SUCCESS, status);
        assertDataEquals(entry(1), key);
        assertDataEquals(entry(3), data);

        /* Get a txn for a cursor. */
        txnCommit(txn);
        txn = txnBeginCursor();

        Cursor priCursor = null;
        SecondaryCursor secCursor = null;
        try {
            priCursor = priDb.openCursor(txn, null);
            secCursor = secDb.openSecondaryCursor(txn, null);

            /* Cursor.put() */
            status = priCursor.put(entry(1), partialEntry(3, 2));
            assertSame(OperationStatus.SUCCESS, status);
            status = secCursor.getSearchKey(entry(102), key, data, LockMode.DEFAULT);
            assertSame(OperationStatus.SUCCESS, status);
            assertDataEquals(entry(1), key);
            assertDataEquals(entry(2), data);

            /* Cursor.putCurrent() */
            status = priCursor.putCurrent(partialEntry(2, 3));
            assertSame(OperationStatus.SUCCESS, status);
            status = secCursor.getSearchKey(entry(102), key, data, LockMode.DEFAULT);
            assertSame(OperationStatus.NOTFOUND, status);
            status = secCursor.getSearchKey(entry(103), key, data, LockMode.DEFAULT);
            assertSame(OperationStatus.SUCCESS, status);
            assertDataEquals(entry(1), key);
            assertDataEquals(entry(3), data);
        } finally {
            if (secCursor != null) {
                secCursor.close();
            }
            if (priCursor != null) {
                priCursor.close();
            }
        }
        txnCommit(txn);
        secDb.close();
        priDb.close();
    }

    /**
     * Exercises every read method on SecondaryDatabase and SecondaryCursor,
     * checking returned keys/data and that the primary record is locked
     * (assertPriLocked) after each positioning operation.
     */
    public void testGet() throws DatabaseException {
        SecondaryDatabase secDb = initDb();
        Database priDb = secDb.getPrimaryDatabase();
        DatabaseEntry data = new DatabaseEntry();
        DatabaseEntry key = new DatabaseEntry();
        DatabaseEntry secKey = new DatabaseEntry();
        OperationStatus status;
        Transaction txn = txnBegin();

        /*
         * For parameters that do not require initialization with a non-null
         * data array, we set them to null to make sure this works. [#12121]
         */

        /* Add one record for each key with one data/duplicate. */
        for (int i = 0; i < NUM_RECS; i += 1) {
            status = priDb.put(txn, entry(i), entry(i));
            assertSame(OperationStatus.SUCCESS, status);
        }

        /* SecondaryDatabase.get() */
        for (int i = 0; i < NUM_RECS; i += 1) {
            data.setData(null);
            status = secDb.get(txn, entry(i + KEY_OFFSET), key, data, LockMode.DEFAULT);
            assertSame(OperationStatus.SUCCESS, status);
            assertDataEquals(entry(i), key);
            assertDataEquals(entry(i), data);
        }
        data.setData(null);
        status = secDb.get(txn, entry(NUM_RECS + KEY_OFFSET), key, data, LockMode.DEFAULT);
        assertSame(OperationStatus.NOTFOUND, status);

        /* SecondaryDatabase.getSearchBoth() */
        for (int i = 0; i < NUM_RECS; i += 1) {
            data.setData(null);
            status = secDb.getSearchBoth(txn, entry(i + KEY_OFFSET), entry(i), data, LockMode.DEFAULT);
            assertSame(OperationStatus.SUCCESS, status);
            assertDataEquals(entry(i), data);
        }
        data.setData(null);
        status = secDb.getSearchBoth(txn, entry(NUM_RECS + KEY_OFFSET), entry(NUM_RECS), data, LockMode.DEFAULT);
        assertSame(OperationStatus.NOTFOUND, status);

        /* Get a cursor txn. */
        txnCommit(txn);
        txn = txnBeginCursor();

        SecondaryCursor cursor = secDb.openSecondaryCursor(txn, null);
        try {
            /* SecondaryCursor.getFirst()/getNext() */
            secKey.setData(null);
            key.setData(null);
            data.setData(null);
            status = cursor.getFirst(secKey, key, data, LockMode.DEFAULT);
            for (int i = 0; i < NUM_RECS; i += 1) {
                assertSame(OperationStatus.SUCCESS, status);
                assertDataEquals(entry(i + KEY_OFFSET), secKey);
                assertDataEquals(entry(i), key);
                assertDataEquals(entry(i), data);
                assertPriLocked(priDb, key);
                secKey.setData(null);
                key.setData(null);
                data.setData(null);
                status = cursor.getNext(secKey, key, data, LockMode.DEFAULT);
            }
            assertSame(OperationStatus.NOTFOUND, status);

            /* SecondaryCursor.getCurrent() (last) */
            secKey.setData(null);
            key.setData(null);
            data.setData(null);
            status = cursor.getCurrent(secKey, key, data, LockMode.DEFAULT);
            assertSame(OperationStatus.SUCCESS, status);
            assertDataEquals(entry(NUM_RECS - 1 + KEY_OFFSET), secKey);
            assertDataEquals(entry(NUM_RECS - 1), key);
            assertDataEquals(entry(NUM_RECS - 1), data);
            assertPriLocked(priDb, key);

            /* SecondaryCursor.getLast()/getPrev() */
            secKey.setData(null);
            key.setData(null);
            data.setData(null);
            status = cursor.getLast(secKey, key, data, LockMode.DEFAULT);
            for (int i = NUM_RECS - 1; i >= 0; i -= 1) {
                assertSame(OperationStatus.SUCCESS, status);
                assertDataEquals(entry(i + KEY_OFFSET), secKey);
                assertDataEquals(entry(i), key);
                assertDataEquals(entry(i), data);
                assertPriLocked(priDb, key);
                secKey.setData(null);
                key.setData(null);
                data.setData(null);
                status = cursor.getPrev(secKey, key, data, LockMode.DEFAULT);
            }
            assertSame(OperationStatus.NOTFOUND, status);

            /* SecondaryCursor.getCurrent() (first) */
            secKey.setData(null);
            key.setData(null);
            data.setData(null);
            status = cursor.getCurrent(secKey, key, data, LockMode.DEFAULT);
            assertSame(OperationStatus.SUCCESS, status);
            assertDataEquals(entry(0 + KEY_OFFSET), secKey);
            assertDataEquals(entry(0), key);
            assertDataEquals(entry(0), data);
            assertPriLocked(priDb, key);

            /* SecondaryCursor.getSearchKey() */
            key.setData(null);
            data.setData(null);
            status = cursor.getSearchKey(entry(KEY_OFFSET - 1), key, data, LockMode.DEFAULT);
            assertSame(OperationStatus.NOTFOUND, status);
            for (int i = 0; i < NUM_RECS; i += 1) {
                key.setData(null);
                data.setData(null);
                status = cursor.getSearchKey(entry(i + KEY_OFFSET), key, data, LockMode.DEFAULT);
                assertSame(OperationStatus.SUCCESS, status);
                assertDataEquals(entry(i), key);
                assertDataEquals(entry(i), data);
                assertPriLocked(priDb, key);
            }
            key.setData(null);
            data.setData(null);
            status = cursor.getSearchKey(entry(NUM_RECS + KEY_OFFSET), key, data, LockMode.DEFAULT);
            assertSame(OperationStatus.NOTFOUND, status);

            /* SecondaryCursor.getSearchBoth() */
            data.setData(null);
            // NOTE(review): heading says getSearchBoth() but this first probe calls
            // getSearchKey(secKey, pKey, data) — possibly intended to be getSearchBoth;
            // behavior kept as-is, confirm against the original test intent.
            status = cursor.getSearchKey(entry(KEY_OFFSET - 1), entry(0), data, LockMode.DEFAULT);
            assertSame(OperationStatus.NOTFOUND, status);
            for (int i = 0; i < NUM_RECS; i += 1) {
                data.setData(null);
                status = cursor.getSearchBoth(entry(i + KEY_OFFSET), entry(i), data, LockMode.DEFAULT);
                assertSame(OperationStatus.SUCCESS, status);
                assertDataEquals(entry(i), data);
                assertPriLocked(priDb, entry(i));
            }
            data.setData(null);
            status = cursor.getSearchBoth(entry(NUM_RECS + KEY_OFFSET), entry(NUM_RECS), data, LockMode.DEFAULT);
            assertSame(OperationStatus.NOTFOUND, status);

            /* SecondaryCursor.getSearchKeyRange() */
            key.setData(null);
            data.setData(null);
            status = cursor.getSearchKeyRange(entry(KEY_OFFSET - 1), key, data, LockMode.DEFAULT);
            assertSame(OperationStatus.SUCCESS, status);
            assertDataEquals(entry(0), key);
            assertDataEquals(entry(0), data);
            assertPriLocked(priDb, key);
            for (int i = 0; i < NUM_RECS; i += 1) {
                key.setData(null);
                data.setData(null);
                status = cursor.getSearchKeyRange(entry(i + KEY_OFFSET), key, data, LockMode.DEFAULT);
                assertSame(OperationStatus.SUCCESS, status);
                assertDataEquals(entry(i), key);
                assertDataEquals(entry(i), data);
                assertPriLocked(priDb, key);
            }
            key.setData(null);
            data.setData(null);
            status = cursor.getSearchKeyRange(entry(NUM_RECS + KEY_OFFSET), key, data, LockMode.DEFAULT);
            assertSame(OperationStatus.NOTFOUND, status);

            /* SecondaryCursor.getSearchBothRange() */
            data.setData(null);
            status = cursor.getSearchBothRange(entry(1 + KEY_OFFSET), entry(1), data, LockMode.DEFAULT);
            assertSame(OperationStatus.SUCCESS, status);
            assertDataEquals(entry(1), data);
            assertPriLocked(priDb, entry(1));
            for (int i = 0; i < NUM_RECS; i += 1) {
                data.setData(null);
                status = cursor.getSearchBothRange(entry(i + KEY_OFFSET), entry(i), data, LockMode.DEFAULT);
                assertSame(OperationStatus.SUCCESS, status);
                assertDataEquals(entry(i), data);
                assertPriLocked(priDb, entry(i));
            }
            data.setData(null);
            status = cursor.getSearchBothRange(entry(NUM_RECS + KEY_OFFSET), entry(NUM_RECS), data, LockMode.DEFAULT);
            assertSame(OperationStatus.NOTFOUND, status);

            /* Add one duplicate for each key. */
            Cursor priCursor = priDb.openCursor(txn, null);
            try {
                for (int i = 0; i < NUM_RECS; i += 1) {
                    status = priCursor.put(entry(i + KEY_OFFSET), entry(i));
                    assertSame(OperationStatus.SUCCESS, status);
                }
            } finally {
                priCursor.close();
            }

            /* SecondaryCursor.getNextDup() */
            secKey.setData(null);
            key.setData(null);
            data.setData(null);
            status = cursor.getFirst(secKey, key, data, LockMode.DEFAULT);
            for (int i = 0; i < NUM_RECS; i += 1) {
                assertSame(OperationStatus.SUCCESS, status);
                assertDataEquals(entry(i + KEY_OFFSET), secKey);
                assertDataEquals(entry(i), key);
                assertDataEquals(entry(i), data);
                assertPriLocked(priDb, key, data);
                secKey.setData(null);
                key.setData(null);
                data.setData(null);
                status = cursor.getNextDup(secKey, key, data, LockMode.DEFAULT);
                assertSame(OperationStatus.SUCCESS, status);
                assertDataEquals(entry(i + KEY_OFFSET), secKey);
                assertDataEquals(entry(i + KEY_OFFSET), key);
                assertDataEquals(entry(i), data);
                assertPriLocked(priDb, key, data);
                secKey.setData(null);
                key.setData(null);
                data.setData(null);
                status = cursor.getNextDup(secKey, key, data, LockMode.DEFAULT);
                assertSame(OperationStatus.NOTFOUND, status);
                secKey.setData(null);
                key.setData(null);
                data.setData(null);
                status = cursor.getNext(secKey, key, data, LockMode.DEFAULT);
            }
            assertSame(OperationStatus.NOTFOUND, status);

            /* SecondaryCursor.getNextNoDup() */
            secKey.setData(null);
            key.setData(null);
            data.setData(null);
            status = cursor.getFirst(secKey, key, data, LockMode.DEFAULT);
            for (int i = 0; i < NUM_RECS; i += 1) {
                assertSame(OperationStatus.SUCCESS, status);
                assertDataEquals(entry(i + KEY_OFFSET), secKey);
                assertDataEquals(entry(i), key);
                assertDataEquals(entry(i), data);
                assertPriLocked(priDb, key, data);
                secKey.setData(null);
                key.setData(null);
                data.setData(null);
                status = cursor.getNextNoDup(secKey, key, data, LockMode.DEFAULT);
            }
            assertSame(OperationStatus.NOTFOUND, status);

            /* SecondaryCursor.getPrevDup() */
            secKey.setData(null);
            key.setData(null);
            data.setData(null);
            status = cursor.getLast(secKey, key, data, LockMode.DEFAULT);
            for (int i = NUM_RECS - 1; i >= 0; i -= 1) {
                assertSame(OperationStatus.SUCCESS, status);
                assertDataEquals(entry(i + KEY_OFFSET), secKey);
                assertDataEquals(entry(i + KEY_OFFSET), key);
                assertDataEquals(entry(i), data);
                assertPriLocked(priDb, key, data);
                secKey.setData(null);
                key.setData(null);
                data.setData(null);
                status = cursor.getPrevDup(secKey, key, data, LockMode.DEFAULT);
                assertSame(OperationStatus.SUCCESS, status);
                assertDataEquals(entry(i + KEY_OFFSET), secKey);
                assertDataEquals(entry(i), key);
                assertDataEquals(entry(i), data);
                assertPriLocked(priDb, key, data);
                secKey.setData(null);
                key.setData(null);
                data.setData(null);
                status = cursor.getPrevDup(secKey, key, data, LockMode.DEFAULT);
                assertSame(OperationStatus.NOTFOUND, status);
                secKey.setData(null);
                key.setData(null);
                data.setData(null);
                status = cursor.getPrev(secKey, key, data, LockMode.DEFAULT);
            }
            assertSame(OperationStatus.NOTFOUND, status);

            /* SecondaryCursor.getPrevNoDup() */
            secKey.setData(null);
            key.setData(null);
            data.setData(null);
            status = cursor.getLast(secKey, key, data, LockMode.DEFAULT);
            for (int i = NUM_RECS - 1; i >= 0; i -= 1) {
                assertSame(OperationStatus.SUCCESS, status);
                assertDataEquals(entry(i + KEY_OFFSET), secKey);
                assertDataEquals(entry(i + KEY_OFFSET), key);
                assertDataEquals(entry(i), data);
                assertPriLocked(priDb, key, data);
                secKey.setData(null);
                key.setData(null);
                data.setData(null);
                status = cursor.getPrevNoDup(secKey, key, data, LockMode.DEFAULT);
            }
            assertSame(OperationStatus.NOTFOUND, status);
        } finally {
            cursor.close();
        }
        txnCommit(txn);
        secDb.close();
        priDb.close();
    }

    /**
     * Verifies that primary writes propagate only to currently-open
     * associated secondaries, and that closing the primary first
     * disassociates both secondaries.
     */
    public void testOpenAndClose() throws DatabaseException {
        Database priDb = openDatabase(false, "testDB", false);

        /* Open two secondaries as regular databases and as secondaries. */
        Database secDbDetached = openDatabase(true, "testSecDB", false);
        SecondaryDatabase secDb = openSecondary(priDb, true, "testSecDB", false, false);
        Database secDb2Detached = openDatabase(true, "testSecDB2", false);
        SecondaryDatabase secDb2 = openSecondary(priDb, true, "testSecDB2", false, false);
        assertEquals(priDb.getSecondaryDatabases(), Arrays.asList(new SecondaryDatabase[] {secDb, secDb2}));

        Transaction txn = txnBegin();

        /* Check that primary writes to both secondaries. */
        checkSecondaryUpdate(txn, priDb, 1, secDbDetached, true, secDb2Detached, true);

        /* New txn before closing database. */
        txnCommit(txn);
        txn = txnBegin();

        /* Close 2nd secondary. */
        secDb2.close();
        assertEquals(priDb.getSecondaryDatabases(), Arrays.asList(new SecondaryDatabase[] {secDb }));

        /* Check that primary writes to 1st secondary only. */
        checkSecondaryUpdate(txn, priDb, 2, secDbDetached, true, secDb2Detached, false);

        /* New txn before closing database. */
        txnCommit(txn);
        txn = txnBegin();

        /* Close 1st secondary. */
        secDb.close();
        assertEquals(0, priDb.getSecondaryDatabases().size());

        /* Check that primary writes to no secondaries. */
        checkSecondaryUpdate(txn, priDb, 3, secDbDetached, false, secDb2Detached, false);

        /* Open the two secondaries again. */
        secDb = openSecondary(priDb, true, "testSecDB", false, false);
        secDb2 = openSecondary(priDb, true, "testSecDB2", false, false);
        assertEquals(priDb.getSecondaryDatabases(), Arrays.asList(new SecondaryDatabase[] {secDb, secDb2}));

        /* Check that primary writes to both secondaries. */
        checkSecondaryUpdate(txn, priDb, 4, secDbDetached, true, secDb2Detached, true);

        /* Close the primary first to disassociate secondaries. */
        txnCommit(txn);
        priDb.close();
        assertNull(secDb.getPrimaryDatabase());
        assertNull(secDb2.getPrimaryDatabase());
        secDb2.close();
        secDb.close();

        secDb2Detached.close();
        secDbDetached.close();
    }

    /**
     * Check that primary put() writes to each secondary that is open.
     */
    private void checkSecondaryUpdate(Transaction txn, Database priDb, int val,
                                      Database secDb, boolean expectSecDbVal,
                                      Database secDb2, boolean expectSecDb2Val)
        throws DatabaseException {

        OperationStatus status;
        DatabaseEntry data = new DatabaseEntry();
        int secVal = KEY_OFFSET + val;

        status = priDb.put(txn, entry(val), entry(val));
        assertSame(OperationStatus.SUCCESS, status);

        status = secDb.get(txn, entry(secVal), data, LockMode.DEFAULT);
        assertSame(expectSecDbVal ? OperationStatus.SUCCESS : OperationStatus.NOTFOUND, status);

        status = secDb2.get(txn, entry(secVal), data, LockMode.DEFAULT);
        assertSame(expectSecDb2Val ? OperationStatus.SUCCESS : OperationStatus.NOTFOUND, status);

        /* Clean up so the next call starts from an empty primary. */
        status = priDb.delete(txn, entry(val));
        assertSame(OperationStatus.SUCCESS, status);
    }

    /** Verifies that a secondary can be opened and read without a key creator when read-only. */
    public void testReadOnly() throws DatabaseException {
        SecondaryDatabase secDb = initDb();
        Database priDb = secDb.getPrimaryDatabase();
        OperationStatus status;
        Transaction txn = txnBegin();

        for (int i = 0; i < NUM_RECS; i += 1) {
            status = priDb.put(txn, entry(i), entry(i));
            assertSame(OperationStatus.SUCCESS, status);
        }

        /*
         * Secondaries can be opened without a key creator if the primary is
         * read only. openSecondary will specify a null key creator if the
         * readOnly param is false.
         */
        Database readOnlyPriDb = openDatabase(false, "testDB", true);
        SecondaryDatabase readOnlySecDb = openSecondary(readOnlyPriDb, true, "testSecDB", false, true);
        assertNull(readOnlySecDb.getSecondaryConfig().getKeyCreator());
        verifyRecords(txn, readOnlySecDb, NUM_RECS, true);

        txnCommit(txn);
        readOnlySecDb.close();
        readOnlyPriDb.close();
        secDb.close();
        priDb.close();
    }

    /** Verifies automatic population of a secondary from existing primary records. */
    public void testPopulate() throws DatabaseException {
        Database priDb = openDatabase(false, "testDB", false);
        Transaction txn = txnBegin();

        /* Test population of newly created secondary database. */
        for (int i = 0; i < NUM_RECS; i += 1) {
            assertSame(OperationStatus.SUCCESS, priDb.put(txn, entry(i), entry(i)));
        }
        txnCommit(txn);

        SecondaryDatabase secDb = openSecondary(priDb, true, "testSecDB", true, false);
        txn = txnBegin();
        verifyRecords(txn, secDb, NUM_RECS, true);
        txnCommit(txn);

        /*
         * Clear secondary and perform populate again, to test the case where
         * an existing database is opened, and therefore a write txn will only
         * be created in order to populate it
         */
        Database secDbDetached = openDatabase(true, "testSecDB", false);
        secDb.close();
        txn = txnBegin();
        for (int i = 0; i < NUM_RECS; i += 1) {
            assertSame(OperationStatus.SUCCESS, secDbDetached.delete(txn, entry(i + KEY_OFFSET)));
        }
        verifyRecords(txn, secDbDetached, 0, true);
        txnCommit(txn);
        secDb = openSecondary(priDb, true, "testSecDB", true, false);
        txn = txnBegin();
        verifyRecords(txn, secDb, NUM_RECS, true);
        verifyRecords(txn, secDbDetached, NUM_RECS, true);
        txnCommit(txn);
        secDbDetached.close();
        secDb.close();
        priDb.close();
    }

    /** Verifies that truncating primary and secondary leaves both empty and reusable. */
    public void testTruncate() throws DatabaseException {
        SecondaryDatabase secDb = initDb();
        Database priDb = secDb.getPrimaryDatabase();
        Transaction txn = txnBegin();

        for (int i = 0; i < NUM_RECS; i += 1) {
            priDb.put(txn, entry(i), entry(i));
        }
        verifyRecords(txn, priDb, NUM_RECS, false);
        verifyRecords(txn, secDb, NUM_RECS, true);
        txnCommit(txn);
        secDb.close();
        priDb.close();

        txn = txnBegin();
        assertEquals(NUM_RECS, env.truncateDatabase(txn, "testDB", true));
        assertEquals(NUM_RECS, env.truncateDatabase(txn, "testSecDB", true));
        txnCommit(txn);

        secDb = initDb();
        priDb = secDb.getPrimaryDatabase();

        txn = txnBegin();
        verifyRecords(txn, priDb, 0, false);
        verifyRecords(txn, secDb, 0, true);
        txnCommit(txn);
        secDb.close();
        priDb.close();
    }

    /**
     * Scans the database and asserts it contains exactly numRecs sequential
     * records; for a secondary, keys are expected to carry the KEY_OFFSET.
     */
    private void verifyRecords(Transaction txn, Database db, int numRecs, boolean isSecondary)
        throws DatabaseException {

        /* We're only reading, so txn may be null. */
        Cursor cursor = db.openCursor(txn, null);
        try {
            DatabaseEntry data = new DatabaseEntry();
            DatabaseEntry key = new DatabaseEntry();
            OperationStatus status;
            int count = 0;
            status = cursor.getFirst(key, data, LockMode.DEFAULT);
            while (status == OperationStatus.SUCCESS) {
                assertDataEquals(entry(count), data);
                if (isSecondary) {
                    assertDataEquals(entry(count + KEY_OFFSET), key);
                } else {
                    assertDataEquals(entry(count), key);
                }
                count += 1;
                status = cursor.getNext(key, data, LockMode.DEFAULT);
            }
            assertEquals(numRecs, count);
        } finally {
            cursor.close();
        }
    }

    /**
     * Verifies UniqueConstraintException behavior for a secondary opened
     * without duplicates: inserting a second primary record with the same
     * secondary key must fail and leave both databases unchanged.
     */
    public void testUniqueSecondaryKey() throws DatabaseException {
        Database priDb = openDatabase(false, "testDB", false);
        SecondaryDatabase secDb = openSecondary(priDb, false, "testSecDB", false, false);
        DatabaseEntry key;
        DatabaseEntry data;
        DatabaseEntry pkey = new DatabaseEntry();
        Transaction txn;

        /* Put {0, 0} */
        txn = txnBegin();
        key = entry(0);
        data = entry(0);
        priDb.put(txn, key, data);
        assertEquals(OperationStatus.SUCCESS,
                     secDb.get(txn, entry(0 + KEY_OFFSET), pkey, data, null));
        assertEquals(0, TestUtils.getTestVal(pkey.getData()));
        assertEquals(0, TestUtils.getTestVal(data.getData()));
        txnCommit(txn);

        /* Put {1, 1} */
        txn = txnBegin();
        key = entry(1);
        data = entry(1);
        priDb.put(txn, key, data);
        assertEquals(OperationStatus.SUCCESS,
                     secDb.get(txn, entry(1 + KEY_OFFSET), pkey, data, null));
        txnCommit(txn);
        assertEquals(1, TestUtils.getTestVal(pkey.getData()));
        assertEquals(1, TestUtils.getTestVal(data.getData()));

        /* Put {2, 0} */
        txn = txnBegin();
        key = entry(2);
        data = entry(0);
        try {
            priDb.put(txn, key, data);
            /* Expect exception because secondary key must be unique. */
            fail();
        } catch (UniqueConstraintException e) {
            txnAbort(txn);
            /* Ensure that primary record was not inserted. */
            assertEquals(OperationStatus.NOTFOUND, secDb.get(null, key, data, null));
            /* Ensure that secondary record has not changed. */
            assertEquals(OperationStatus.SUCCESS,
                         secDb.get(null, entry(0 + KEY_OFFSET), pkey, data, null));
            assertEquals(0, TestUtils.getTestVal(pkey.getData()));
            assertEquals(0, TestUtils.getTestVal(data.getData()));
        }

        /* Overwrite {1, 1}: re-putting the same secondary key for the same primary is allowed. */
        txn = txnBegin();
        key = entry(1);
        data = entry(1);
        priDb.put(txn, key, data);
        assertEquals(OperationStatus.SUCCESS,
                     secDb.get(txn, entry(1 + KEY_OFFSET), pkey, data, null));
        assertEquals(1, TestUtils.getTestVal(pkey.getData()));
        assertEquals(1, TestUtils.getTestVal(data.getData()));
        txnCommit(txn);

        /* Modify secondary key to {1, 3} */
        txn = txnBegin();
        key = entry(1);
        data = entry(3);
        priDb.put(txn, key, data);
        assertEquals(OperationStatus.SUCCESS,
                     secDb.get(txn, entry(3 + KEY_OFFSET), pkey, data, null));
        assertEquals(1, TestUtils.getTestVal(pkey.getData()));
        assertEquals(3, TestUtils.getTestVal(data.getData()));
        txnCommit(txn);

        secDb.close();
        priDb.close();
    }

    /**
     * Verifies that write operations on a secondary (and other disallowed
     * configurations) throw the expected exceptions.
     */
    public void testOperationsNotAllowed() throws DatabaseException {
        SecondaryDatabase secDb = initDb();
        Database priDb = secDb.getPrimaryDatabase();
        Transaction txn = txnBegin();

        /* Open secondary without a key creator. */
        try {
            env.openSecondaryDatabase(txn, "xxx", priDb, null);
            fail();
        } catch (IllegalArgumentException expected) { }
        try {
            env.openSecondaryDatabase(txn, "xxx", priDb, new SecondaryConfig());
            fail();
        } catch (IllegalArgumentException expected) { }

        /* Open secondary with both single and multi key creators. */
        SecondaryConfig config = new SecondaryConfig();
        config.setKeyCreator(new MyKeyCreator());
        config.setMultiKeyCreator (new SimpleMultiKeyCreator(new MyKeyCreator()));
        try {
            env.openSecondaryDatabase(txn, "xxx", priDb, config);
            fail();
        } catch (IllegalArgumentException expected) { }

        /* Database operations. */
        DatabaseEntry key = entry(1);
        DatabaseEntry data = entry(2);
        try {
            secDb.getSearchBoth(txn, key, data, LockMode.DEFAULT);
            fail();
        } catch (UnsupportedOperationException expected) { }
        try {
            secDb.put(txn, key, data);
            fail();
        } catch (UnsupportedOperationException expected) { }
        try {
            secDb.putNoOverwrite(txn, key, data);
            fail();
        } catch (UnsupportedOperationException expected) { }
        try {
            secDb.putNoDupData(txn, key, data);
            fail();
        } catch (UnsupportedOperationException expected) { }
        try {
            secDb.join(new Cursor[0], null);
            fail();
        } catch (UnsupportedOperationException expected) { }

        /* Cursor operations. */
        txnCommit(txn);
        txn = txnBeginCursor();

        SecondaryCursor cursor = null;
        try {
            cursor = secDb.openSecondaryCursor(txn, null);
            try {
                cursor.getSearchBoth(key, data, LockMode.DEFAULT);
                fail();
            } catch (UnsupportedOperationException expected) { }
            try {
                cursor.getSearchBothRange(key, data, LockMode.DEFAULT);
                fail();
            } catch (UnsupportedOperationException expected) { }
            try {
                cursor.putCurrent(data);
                fail();
            } catch (UnsupportedOperationException expected) { }
            try {
                cursor.put(key, data);
                fail();
            } catch (UnsupportedOperationException expected) { }
            try {
                cursor.putNoOverwrite(key, data);
                fail();
            } catch (UnsupportedOperationException expected) { }
            try {
                cursor.putNoDupData(key, data);
                fail();
            } catch (UnsupportedOperationException expected) { }
        } finally {
            if (cursor != null) {
                cursor.close();
            }
        }
        txnCommit(txn);
        secDb.close();
        priDb.close();

        /* Primary with duplicates. */
        priDb = openDatabase(true, "testDBWithDups", false);
        try {
            openSecondary(priDb, true, "testSecDB", false, false);
            fail();
        } catch (IllegalArgumentException expected) {}
        priDb.close();

        /* Single secondary with two primaries.*/
        Database pri1 = openDatabase(false, "pri1", false);
        Database pri2 = openDatabase(false, "pri2", false);
        Database sec1 = openSecondary(pri1, false, "sec", false, false);
        try {
            openSecondary(pri2, false, "sec", false, false);
            fail();
        } catch (IllegalArgumentException expected) {}
        sec1.close();
        pri1.close();
        pri2.close();
    }

    /**
     * Test that null can be passed for the LockMode to all get methods.
     */
    public void testNullLockMode() throws DatabaseException {
        SecondaryDatabase secDb = initDb();
        Database priDb = secDb.getPrimaryDatabase();
        Transaction txn = txnBegin();
        DatabaseEntry key = entry(0);
        DatabaseEntry data = entry(0);
        DatabaseEntry secKey = entry(KEY_OFFSET);
        DatabaseEntry found = new DatabaseEntry();
        DatabaseEntry found2 = new DatabaseEntry();
        DatabaseEntry found3 = new DatabaseEntry();

        assertEquals(OperationStatus.SUCCESS, priDb.put(txn, key, data));
        assertEquals(OperationStatus.SUCCESS, priDb.put(txn, entry(1), data));
        assertEquals(OperationStatus.SUCCESS, priDb.put(txn, entry(2), entry(2)));

        /* Database operations. */
        assertEquals(OperationStatus.SUCCESS, priDb.get(txn, key, found, null));
        assertEquals(OperationStatus.SUCCESS, priDb.getSearchBoth(txn, key, data, null));
        assertEquals(OperationStatus.SUCCESS, secDb.get(txn, secKey, found, null));
        assertEquals(OperationStatus.SUCCESS, secDb.get(txn, secKey, found, found2, null));
        assertEquals(OperationStatus.SUCCESS, secDb.getSearchBoth(txn, secKey, key, found, null));

        /* Cursor operations. */
        txnCommit(txn);
        txn = txnBeginCursor();

        Cursor cursor = priDb.openCursor(txn, null);
        SecondaryCursor secCursor = secDb.openSecondaryCursor(txn, null);

        assertEquals(OperationStatus.SUCCESS, cursor.getSearchKey(key, found, null));
        assertEquals(OperationStatus.SUCCESS, cursor.getSearchBoth(key, data, null));
        assertEquals(OperationStatus.SUCCESS, cursor.getSearchKeyRange(key, found, null));
        assertEquals(OperationStatus.SUCCESS, cursor.getSearchBothRange(key, data, null));
        assertEquals(OperationStatus.SUCCESS, cursor.getFirst(found, found2, null));
        assertEquals(OperationStatus.SUCCESS, cursor.getNext(found, found2, null));
        assertEquals(OperationStatus.SUCCESS, cursor.getPrev(found, found2, null));
        assertEquals(OperationStatus.NOTFOUND, cursor.getNextDup(found, found2, null));
        assertEquals(OperationStatus.NOTFOUND, cursor.getPrevDup(found, found2, null));
        assertEquals(OperationStatus.SUCCESS, cursor.getNextNoDup(found, found2, null));
        assertEquals(OperationStatus.SUCCESS, cursor.getPrevNoDup(found, found2, null));
        assertEquals(OperationStatus.SUCCESS, cursor.getLast(found, found2, null));

        assertEquals(OperationStatus.SUCCESS, secCursor.getSearchKey(secKey, found, null));
        assertEquals(OperationStatus.SUCCESS, secCursor.getSearchKeyRange(secKey, found, null));
        assertEquals(OperationStatus.SUCCESS, secCursor.getFirst(found, found2, null));
        assertEquals(OperationStatus.SUCCESS, secCursor.getNext(found, found2, null));
        assertEquals(OperationStatus.SUCCESS, secCursor.getPrev(found, found2, null));
        assertEquals(OperationStatus.SUCCESS, secCursor.getNextDup(found, found2, null));
        assertEquals(OperationStatus.SUCCESS, secCursor.getPrevDup(found, found2, null));
        assertEquals(OperationStatus.SUCCESS, secCursor.getNextNoDup(found, found2, null));
        assertEquals(OperationStatus.SUCCESS, secCursor.getPrevNoDup(found, found2, null));
        assertEquals(OperationStatus.SUCCESS, secCursor.getLast(found, found2, null));
        assertEquals(OperationStatus.SUCCESS, secCursor.getSearchKey(secKey, found, found2, null));
        assertEquals(OperationStatus.SUCCESS, secCursor.getSearchBoth(secKey, data, found, null));
        assertEquals(OperationStatus.SUCCESS, secCursor.getSearchKeyRange(secKey, found, found2, null));
        assertEquals(OperationStatus.SUCCESS, secCursor.getSearchBothRange(secKey, data, found, null));
        assertEquals(OperationStatus.SUCCESS, secCursor.getFirst(found, found2, found3, null));
        assertEquals(OperationStatus.SUCCESS, secCursor.getNext(found, found2, found3, null));
        assertEquals(OperationStatus.SUCCESS, secCursor.getPrev(found, found2, found3, null));
        assertEquals(OperationStatus.SUCCESS, secCursor.getNextDup(found, found2, found3, null));
        assertEquals(OperationStatus.SUCCESS, secCursor.getPrevDup(found, found2, found3, null));
        assertEquals(OperationStatus.SUCCESS, secCursor.getNextNoDup(found, found2, found3, null));
        assertEquals(OperationStatus.SUCCESS, secCursor.getPrevNoDup(found, found2, found3, null));
        assertEquals(OperationStatus.SUCCESS, secCursor.getLast(found, found2, found3, null));

        secCursor.close();
        cursor.close();
        txnCommit(txn);
        secDb.close();
        priDb.close();
        closeEnv();
        env = null;
    }

    /**
     * Test that an exception is thrown when a cursor is used in the wrong
     * state.  No put or get is allowed in the closed state, and certain gets
     * and puts are not allowed in the uninitialized state.
     */
    public void testCursorState() throws DatabaseException {
        SecondaryDatabase secDb = initDb();
        Database priDb = secDb.getPrimaryDatabase();
        Transaction txn = txnBegin();
        DatabaseEntry key = entry(0);
        DatabaseEntry data = entry(0);
        DatabaseEntry secKey = entry(KEY_OFFSET);
        DatabaseEntry found = new DatabaseEntry();
        DatabaseEntry found2 = new DatabaseEntry();

        assertEquals(OperationStatus.SUCCESS, priDb.put(txn, key, data));

        txnCommit(txn);
        txn = txnBeginCursor();
        Cursor cursor = priDb.openCursor(txn, null);
        SecondaryCursor secCursor = secDb.openSecondaryCursor(txn, null);

        /* Check the uninitialized state for certain operations.
*/ try { cursor.count(); fail(); } catch (IllegalStateException expected) {} try { cursor.delete(); fail(); } catch (IllegalStateException expected) {} try { cursor.putCurrent(data); fail(); } catch (IllegalStateException expected) {} try { cursor.getCurrent(key, data, null); fail(); } catch (IllegalStateException expected) {} try { cursor.getNextDup(found, found2, null); fail(); } catch (IllegalStateException expected) {} try { cursor.getPrevDup(found, found2, null); fail(); } catch (IllegalStateException expected) {} try { secCursor.count(); fail(); } catch (IllegalStateException expected) {} try { secCursor.delete(); fail(); } catch (IllegalStateException expected) {} try { secCursor.getCurrent(key, data, null); fail(); } catch (IllegalStateException expected) {} try { secCursor.getNextDup(found, found2, null); fail(); } catch (IllegalStateException expected) {} try { secCursor.getPrevDup(found, found2, null); fail(); } catch (IllegalStateException expected) {} /* Cursor.dup works whether initialized or not. */ { Cursor c2 = secCursor.dup(false); c2.close(); c2 = secCursor.dup(true); c2.close(); c2 = secCursor.dup(false); c2.close(); c2 = secCursor.dup(true); c2.close(); } /* Initialize, then close, then check all operations. */ assertEquals(OperationStatus.SUCCESS, cursor.getSearchKey(key, found, null)); assertEquals(OperationStatus.SUCCESS, secCursor.getSearchKey(secKey, found, null)); /* Cursor.dup works whether initialized or not. */ { Cursor c2 = cursor.dup(false); c2.close(); c2 = cursor.dup(true); c2.close(); c2 = secCursor.dup(false); c2.close(); c2 = secCursor.dup(true); c2.close(); } /* Close, then check all operations. 
*/ secCursor.close(); cursor.close(); try { cursor.close(); } catch (RuntimeException expected) { fail("Caught IllegalStateException while re-closing a Cursor."); } try { cursor.count(); fail(); } catch (IllegalStateException expected) {} try { cursor.delete(); fail(); } catch (IllegalStateException expected) {} try { cursor.put(key, data); fail(); } catch (IllegalStateException expected) {} try { cursor.putNoOverwrite(key, data); fail(); } catch (IllegalStateException expected) {} try { cursor.putNoDupData(key, data); fail(); } catch (IllegalStateException expected) {} try { cursor.putCurrent(data); fail(); } catch (IllegalStateException expected) {} try { cursor.getCurrent(key, data, null); fail(); } catch (IllegalStateException expected) {} try { cursor.getSearchKey(key, found, null); fail(); } catch (IllegalStateException expected) {} try { cursor.getSearchBoth(key, data, null); fail(); } catch (IllegalStateException expected) {} try { cursor.getSearchKeyRange(key, found, null); fail(); } catch (IllegalStateException expected) {} try { cursor.getSearchBothRange(key, data, null); fail(); } catch (IllegalStateException expected) {} try { cursor.getFirst(found, found2, null); fail(); } catch (IllegalStateException expected) {} try { cursor.getNext(found, found2, null); fail(); } catch (IllegalStateException expected) {} try { cursor.getPrev(found, found2, null); fail(); } catch (IllegalStateException expected) {} try { cursor.getNextDup(found, found2, null); fail(); } catch (IllegalStateException expected) {} try { cursor.getPrevDup(found, found2, null); fail(); } catch (IllegalStateException expected) {} try { cursor.getNextNoDup(found, found2, null); fail(); } catch (IllegalStateException expected) {} try { cursor.getPrevNoDup(found, found2, null); fail(); } catch (IllegalStateException expected) {} try { cursor.getLast(found, found2, null); fail(); } catch (IllegalStateException expected) {} try { secCursor.close(); } catch (RuntimeException e) { fail("Caught 
exception while re-closing a SecondaryCursor."); } try { secCursor.count(); fail(); } catch (IllegalStateException expected) {} try { secCursor.delete(); fail(); } catch (IllegalStateException expected) {} try { secCursor.getCurrent(key, data, null); fail(); } catch (IllegalStateException expected) {} try { secCursor.getSearchKey(secKey, found, null); fail(); } catch (IllegalStateException expected) {} try { secCursor.getSearchKeyRange(secKey, found, null); fail(); } catch (IllegalStateException expected) {} try { secCursor.getFirst(found, found2, null); fail(); } catch (IllegalStateException expected) {} try { secCursor.getNext(found, found2, null); fail(); } catch (IllegalStateException expected) {} try { secCursor.getPrev(found, found2, null); fail(); } catch (IllegalStateException expected) {} try { secCursor.getNextDup(found, found2, null); fail(); } catch (IllegalStateException expected) {} try { secCursor.getPrevDup(found, found2, null); fail(); } catch (IllegalStateException expected) {} try { secCursor.getNextNoDup(found, found2, null); fail(); } catch (IllegalStateException expected) {} try { secCursor.getPrevNoDup(found, found2, null); fail(); } catch (IllegalStateException expected) {} try { secCursor.getLast(found, found2, null); fail(); } catch (IllegalStateException expected) {} txnCommit(txn); secDb.close(); priDb.close(); closeEnv(); // env.close(); env = null; } /** * [#14966] */ public void testDirtyReadPartialGet() throws DatabaseException { SecondaryDatabase secDb = initDb(); Database priDb = secDb.getPrimaryDatabase(); DatabaseEntry data = new DatabaseEntry(); DatabaseEntry key = new DatabaseEntry(); DatabaseEntry secKey = new DatabaseEntry(); OperationStatus status; /* Put a record */ Transaction txn = txnBegin(); status = priDb.put(txn, entry(0), entry(0)); assertSame(OperationStatus.SUCCESS, status); /* Regular get */ status = secDb.get(txn, entry(0 + KEY_OFFSET), key, data, LockMode.DEFAULT); assertSame(OperationStatus.SUCCESS, status); 
assertDataEquals(entry(0), key); assertDataEquals(entry(0), data); /* Dirty read returning no data */ data.setPartial(0, 0, true); status = secDb.get(txn, entry(0 + KEY_OFFSET), key, data, LockMode.READ_UNCOMMITTED); assertSame(OperationStatus.SUCCESS, status); assertDataEquals(entry(0), key); assertEquals(0, data.getData().length); assertEquals(0, data.getSize()); /* Dirty read returning partial data */ data.setPartial(0, 1, true); status = secDb.get(txn, entry(0 + KEY_OFFSET), key, data, LockMode.READ_UNCOMMITTED); assertSame(OperationStatus.SUCCESS, status); assertDataEquals(entry(0), key); assertEquals(1, data.getData().length); assertEquals(1, data.getSize()); txnCommit(txn); secDb.close(); priDb.close(); } /** * Open environment, primary and secondary db */ private SecondaryDatabase initDb() throws DatabaseException { Database priDb = openDatabase(false, "testDB", false); SecondaryDatabase secDb = openSecondary(priDb, true, "testSecDB", false, false); return secDb; } private Database openDatabase(boolean allowDuplicates, String name, boolean readOnly) throws DatabaseException { DatabaseConfig dbConfig = new DatabaseConfig(); dbConfig.setTransactional(isTransactional); dbConfig.setAllowCreate(true); dbConfig.setSortedDuplicates(allowDuplicates); dbConfig.setReadOnly(readOnly); Transaction txn = txnBegin(); Database priDb; try { priDb = env.openDatabase(txn, name, dbConfig); } finally { txnCommit(txn); } assertNotNull(priDb); return priDb; } private SecondaryDatabase openSecondary(Database priDb, boolean allowDuplicates, String dbName, boolean allowPopulate, boolean readOnly) throws DatabaseException { List secListBefore = priDb.getSecondaryDatabases(); SecondaryConfig dbConfig = new SecondaryConfig(); dbConfig.setTransactional(isTransactional); dbConfig.setAllowCreate(true); dbConfig.setSortedDuplicates(allowDuplicates); dbConfig.setReadOnly(readOnly); dbConfig.setAllowPopulate(allowPopulate); if (!readOnly) { if (useMultiKey) { dbConfig.setMultiKeyCreator 
(new SimpleMultiKeyCreator(new MyKeyCreator())); } else { dbConfig.setKeyCreator(new MyKeyCreator()); } } Transaction txn = txnBegin(); SecondaryDatabase secDb; try { secDb = env.openSecondaryDatabase(txn, dbName, priDb, dbConfig); } finally { txnCommit(txn); } assertNotNull(secDb); /* Check configuration. */ assertSame(priDb, secDb.getPrimaryDatabase()); SecondaryConfig config2 = secDb.getSecondaryConfig(); assertEquals(allowPopulate, config2.getAllowPopulate()); assertEquals(dbConfig.getKeyCreator(), config2.getKeyCreator()); /* Make sure the new secondary is added to the primary's list. */ List secListAfter = priDb.getSecondaryDatabases(); assertTrue(secListAfter.remove(secDb)); assertEquals(secListBefore, secListAfter); return secDb; } private DatabaseEntry entry(int val) { return new DatabaseEntry(TestUtils.getTestArray(val)); } /** * Creates a partial entry for changing oldVal to newVal. */ private DatabaseEntry partialEntry(final int oldVal, final int newVal) { final DatabaseEntry oldEntry = new DatabaseEntry(TestUtils.getTestArray(oldVal)); final DatabaseEntry newEntry = new DatabaseEntry(TestUtils.getTestArray(newVal)); final int size = oldEntry.getSize(); assertEquals(size, newEntry.getSize()); int begOff; for (begOff = 0; begOff < size; begOff += 1) { if (oldEntry.getData()[begOff] != newEntry.getData()[begOff]) { break; } } int endOff; for (endOff = size - 1; endOff >= begOff; endOff += 1) { if (oldEntry.getData()[endOff] != newEntry.getData()[endOff]) { break; } } final int partialSize = endOff - begOff + 1; final byte[] newData = new byte[partialSize]; System.arraycopy(newEntry.getData(), begOff, newData, 0, partialSize); newEntry.setData(newData); newEntry.setPartial(begOff, partialSize, true); return newEntry; } private void assertDataEquals(DatabaseEntry e1, DatabaseEntry e2) { assertTrue(e1.equals(e2)); } private void assertPriLocked(Database priDb, DatabaseEntry key) { assertPriLocked(priDb, key, null); } /** * Checks that the given key (or both 
key and data if data is non-null) is * locked in the primary database. The primary record should be locked * whenever a secondary cursor is positioned to point to that primary * record. [#15573] */ private void assertPriLocked(final Database priDb, final DatabaseEntry key, final DatabaseEntry data) { /* * Whether the record is locked transactionally or not in the current * thread, we should not be able to write lock the record * non-transactionally in another thread. */ final StringBuffer error = new StringBuffer(); junitThread = new JUnitThread("primary-locker") { @Override public void testBody() { Cursor cursor = priDb.openCursor(null, null); try { if (data != null) { cursor.getSearchBoth(key, data, LockMode.RMW); } else { DatabaseEntry myData = new DatabaseEntry(); cursor.getSearchKey(key, myData, LockMode.RMW); } error.append("Expected LockConflictException"); } catch (Exception expected) { assertTrue(expected instanceof LockConflictException); } finally { cursor.close(); } } }; junitThread.start(); Throwable t = null; try { junitThread.finishTest(); } catch (Throwable e) { t = e; } finally { junitThread = null; } if (t != null) { t.printStackTrace(); fail(t.toString()); } if (error.length() > 0) { fail(error.toString()); } } private static class MyKeyCreator implements SecondaryKeyCreator { public boolean createSecondaryKey(SecondaryDatabase secondary, DatabaseEntry key, DatabaseEntry data, DatabaseEntry result) { result.setData( TestUtils.getTestArray( TestUtils.getTestVal(data.getData()) + KEY_OFFSET)); return true; } } }
package com.weinyc.sa.app.model;

import static com.weinyc.sa.common.util.JSONUtils.expectOne;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;

import net.sf.json.JSONArray;
import net.sf.json.JSONObject;

import com.weinyc.sa.core.viewer.bean.W2UIColumnBean;
import com.weinyc.sa.common.constants.Constants;
import com.weinyc.sa.core.model.AbstractModel;

/**
 * JPA entity mapped to the {@code categories} table.  Represents a node in a
 * category tree: {@link #parentId} references the parent row (-1 for roots)
 * and {@link #level} is the 1-based depth.  The {@code parent}/{@code
 * children} links are transient, in-memory only, and must be wired up by the
 * caller (see {@link #addChild}).
 *
 * @author ronghai
 */
@Entity(name="categories")
public class Category extends AbstractModel implements Serializable {

    private static final long serialVersionUID = 1L;

    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    @Column(name = "id")
    private Long id;

    @Column(name = "name")
    private String name;

    @Column(name = "code", nullable=true)
    private String code;

    // Nullable in the database; getters below defend against a null load.
    @Column(name = "level")
    private Integer level = 1;

    // -1 denotes "no parent" (root category).
    @Column(name = "parent_id")
    private Long parentId = -1L;

    @Column(name = "disabled")
    private Integer disabled;

    @Column(name = "add_time", nullable=true)
    @Temporal(TemporalType.TIMESTAMP)
    private Date addTime;

    @Column(name = "update_time", nullable=true)
    @Temporal(TemporalType.TIMESTAMP)
    private Date updateTime;

    @Column(name = "note", nullable=true)
    private String note;

    // In-memory tree links; never persisted or serialized.
    private transient Category parent;
    private transient List<Category> children;

    // Dirty flag used by the editing UI; never persisted or serialized.
    private transient boolean changed;

    public Category() {
    }

    public Category(Long id) {
        this.id = id;
    }

    @Override
    public Long getId() {
        return id;
    }

    @Override
    public void setId(Long id) {
        this.id = id;
    }

    @Override
    public boolean isDisabled() {
        return disabled != null && disabled == DISABLED_YES;
    }

    /**
     * Maps the boolean onto the persisted DISABLED_YES/DISABLED_NO codes.
     *
     * @param disabled whether this category is disabled
     */
    @Override
    public void setDisabled(boolean disabled) {
        this.disabled = disabled ? DISABLED_YES : DISABLED_NO;
    }

    public Date getAddTime() {
        return addTime;
    }

    public void setAddTime(Date addTime) {
        this.addTime = addTime;
    }

    public Date getUpdateTime() {
        return updateTime;
    }

    public void setUpdateTime(Date updateTime) {
        this.updateTime = updateTime;
    }

    public String getNote() {
        return note;
    }

    public void setNote(String note) {
        this.note = note;
    }

    @Override
    public int hashCode() {
        int hash = 0;
        hash += (id != null ? id.hashCode() : 0);
        return hash;
    }

    @Override
    public boolean equals(Object object) {
        // TODO: Warning - this method won't work in the case the id fields are not set
        if (!(object instanceof Category)) {
            return false;
        }
        Category other = (Category) object;
        if ((this.id == null && other.id != null)
                || (this.id != null && !this.id.equals(other.id))) {
            return false;
        }
        return true;
    }

    @Override
    public String toString() {
        return this.getClass().getName()+"[ id=" + id + " ]";
    }

    @Override
    public boolean isChanged() {
        return changed;
    }

    @Override
    public void setChanged(boolean changed) {
        this.changed = changed;
    }

    /** Column metadata consumed by the w2ui grid on the admin screens. */
    public static final JSONArray COLUMNS;
    static {
        COLUMNS = new JSONArray();
        COLUMNS.add(new W2UIColumnBean("recid", "ID", "20%", true ,"int").toJson());
        COLUMNS.add(new W2UIColumnBean("name", "Name", "20%", true, "text",
                JSONObject.fromObject("{ type: 'text' }")).toJson());
        COLUMNS.add(new W2UIColumnBean("code", "Code", "20%", true, "text",
                JSONObject.fromObject("{ type: 'text' }")).toJson());
        W2UIColumnBean col = new W2UIColumnBean("parentId", "Parent", "20%",
                Constants.SAJS_PREFIX+".render_parent", true, null,
                JSONObject.fromObject("{ type: 'select', items:'"+Constants.SAJS_PREFIX
                        +".categories()' , renderDrop:'"+Constants.SAJS_PREFIX+".renderDrop' }"));
        col.setSearchable(false);
        COLUMNS.add(col.toJson());
        COLUMNS.add(new W2UIColumnBean("note", "Note", "120px", true, "text",
                JSONObject.fromObject("{ type: 'text' }")).toJson());
    }

    /**
     * Serializes the persistent fields (plus the grid's "recid") to JSON.
     * The transient tree links are deliberately not included.
     */
    @Override
    public Object toJson(){
        JSONObject map = new JSONObject();
        map.put("recid", this.getRecid());
        map.put("id", this.id);
        map.put("name", this.name);
        map.put("code", this.code);
        map.put("level", this.level);
        map.put("parentId", this.parentId);
        map.put("note", this.note);
        return map;
    }

    /**
     * Builds a Category from grid JSON, aliasing "recid" to "id" when only
     * the former is present.
     */
    public static Category fromJson(JSONObject json){
        expectOne(json, "recid", "name", "code", "level", "parentId", "note");
        expectOne(json, "id");
        if(json.has("recid") && !json.has("id")){
            json.put("id", json.get("recid"));
        }
        return Category.fromJson(json, Category.class);
    }

    // Lazily built, cached reflection metadata for this entity.
    // NOTE(review): lazy init is not thread-safe; assumed to be first touched
    // from a single thread — confirm before relying on concurrent access.
    private static ModelMeta<Category> modelMeta;

    @SuppressWarnings("unchecked")
    @Override
    public ModelMeta<Category> modelMeta(){
        return _getModelMeta();
    }

    public static ModelMeta<Category> _getModelMeta(){
        if(modelMeta == null){
            modelMeta = new ModelMeta<>(Category.class);
        }
        return modelMeta;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getCode() {
        return code;
    }

    public void setCode(String code) {
        this.code = code;
    }

    /**
     * Returns the 1-based tree depth.  BUG FIX: the original auto-unboxed the
     * nullable {@code level} field and threw NullPointerException when the
     * database column was null; now falls back to the class default of 1.
     */
    public int getLevel() {
        return level == null ? 1 : level;
    }

    public void setLevel(int level) {
        this.level = level;
    }

    public Long getParentId() {
        return parentId;
    }

    public void setParentId(Long parentId) {
        this.parentId = parentId;
    }

    public Category getParent() {
        return parent;
    }

    public void setParent(Category parent) {
        this.parent = parent;
    }

    /** May return null when no children have been attached. */
    public List<Category> getChildren() {
        return children;
    }

    public void setChildren(List<Category> children) {
        this.children = children;
    }

    /** Appends a child link, lazily creating the children list. */
    public void addChild(Category cat) {
        if(this.children == null){
            this.children = new ArrayList<>();
        }
        this.children.add(cat);
    }

    /** True when this is a root-level category (null level counts as level 1). */
    public boolean isLevelOne(){
        return getLevel() == 1;
    }
}
package org.hisp.dhis.reporting.dataset.action;

/*
 * Copyright (c) 2004-2017, University of Oslo
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 * Neither the name of the HISP project nor the names of its contributors may
 * be used to endorse or promote products derived from this software without
 * specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

import static org.hisp.dhis.period.PeriodType.getAvailablePeriodTypes;
import static org.hisp.dhis.period.PeriodType.getPeriodFromIsoString;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.hisp.dhis.dataelement.CategoryOptionGroupSet;
import org.hisp.dhis.dataelement.DataElementCategoryCombo;
import org.hisp.dhis.dataelement.DataElementCategoryService;
import org.hisp.dhis.dataset.DataSet;
import org.hisp.dhis.dataset.DataSetService;
import org.hisp.dhis.organisationunit.OrganisationUnitGroupService;
import org.hisp.dhis.organisationunit.OrganisationUnitGroupSet;
import org.hisp.dhis.organisationunit.OrganisationUnitService;
import org.hisp.dhis.oust.manager.SelectionTreeManager;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.PeriodType;
import org.joda.time.DateTime;

import com.opensymphony.xwork2.Action;

/**
 * Populates the option lists (data sets, period types, category combos,
 * group sets) for the data set report form, and, when a data set / period /
 * org unit triple is supplied, prepares the state needed to render the report.
 *
 * @author Lars Helge Overland
 */
public class GetDataSetReportOptionsAction
    implements Action
{
    // -------------------------------------------------------------------------
    // Dependencies
    // -------------------------------------------------------------------------

    private DataSetService dataSetService;

    public void setDataSetService( DataSetService dataSetService )
    {
        this.dataSetService = dataSetService;
    }

    private OrganisationUnitService organisationUnitService;

    public void setOrganisationUnitService( OrganisationUnitService organisationUnitService )
    {
        this.organisationUnitService = organisationUnitService;
    }

    private OrganisationUnitGroupService organisationUnitGroupService;

    public void setOrganisationUnitGroupService( OrganisationUnitGroupService organisationUnitGroupService )
    {
        this.organisationUnitGroupService = organisationUnitGroupService;
    }

    private DataElementCategoryService categoryService;

    public void setCategoryService( DataElementCategoryService categoryService )
    {
        this.categoryService = categoryService;
    }

    private SelectionTreeManager selectionTreeManager;

    public void setSelectionTreeManager( SelectionTreeManager selectionTreeManager )
    {
        this.selectionTreeManager = selectionTreeManager;
    }

    // -------------------------------------------------------------------------
    // Input
    // -------------------------------------------------------------------------

    private String ds; // data set identifier

    public String getDs()
    {
        return ds;
    }

    public void setDs( String ds )
    {
        this.ds = ds;
    }

    private String pe; // ISO period string

    public String getPe()
    {
        return pe;
    }

    public void setPe( String pe )
    {
        this.pe = pe;
    }

    private String ou; // organisation unit identifier

    public void setOu( String ou )
    {
        this.ou = ou;
    }

    // -------------------------------------------------------------------------
    // Output
    // -------------------------------------------------------------------------

    private List<DataSet> dataSets;

    public List<DataSet> getDataSets()
    {
        return dataSets;
    }

    private List<PeriodType> periodTypes;

    public List<PeriodType> getPeriodTypes()
    {
        return periodTypes;
    }

    private boolean render;

    public boolean isRender()
    {
        return render;
    }

    private int offset;

    public int getOffset()
    {
        return offset;
    }

    private PeriodType periodType;

    public PeriodType getPeriodType()
    {
        return periodType;
    }

    private DataElementCategoryCombo defaultCategoryCombo;

    public DataElementCategoryCombo getDefaultCategoryCombo()
    {
        return defaultCategoryCombo;
    }

    private List<DataElementCategoryCombo> categoryCombos;

    public List<DataElementCategoryCombo> getCategoryCombos()
    {
        return categoryCombos;
    }

    private List<CategoryOptionGroupSet> categoryOptionGroupSets;

    public List<CategoryOptionGroupSet> getCategoryOptionGroupSets()
    {
        return categoryOptionGroupSets;
    }

    private List<OrganisationUnitGroupSet> organisationUnitGroupSets;

    public List<OrganisationUnitGroupSet> getOrganisationUnitGroupSets()
    {
        return organisationUnitGroupSets;
    }

    // -------------------------------------------------------------------------
    // Action implementation
    // -------------------------------------------------------------------------

    @Override
    public String execute()
    {
        periodTypes = getAvailablePeriodTypes();

        // The report is rendered only when all three parameters are present.
        render = ( ds != null && pe != null && ou != null );

        // BUG FIX (efficiency): the original parsed the ISO period twice,
        // once in the condition and once in the body; parse it once.
        Period period = pe != null ? getPeriodFromIsoString( pe ) : null;

        if ( period != null )
        {
            // Year offset of the selected period relative to the current year,
            // used by the client-side period selector.
            offset = new DateTime( period.getStartDate() ).getYear() - new DateTime().getYear();

            periodType = period.getPeriodType();

            // NOTE(review): 'ou' may still be null here; this relies on
            // getOrganisationUnit( null ) returning null — confirm.
            selectionTreeManager.setSelectedOrganisationUnit( organisationUnitService.getOrganisationUnit( ou ) ); //TODO set unit state in client instead
        }

        defaultCategoryCombo = categoryService.getDefaultDataElementCategoryCombo();

        dataSets = new ArrayList<>( dataSetService.getAllDataSets() );
        categoryCombos = new ArrayList<>( categoryService.getAttributeCategoryCombos() );
        categoryOptionGroupSets = new ArrayList<>( categoryService.getAllCategoryOptionGroupSets() );
        organisationUnitGroupSets = new ArrayList<>( organisationUnitGroupService.getAllOrganisationUnitGroupSets() );

        Collections.sort( dataSets );
        Collections.sort( categoryCombos );
        Collections.sort( categoryOptionGroupSets );
        Collections.sort( organisationUnitGroupSets );

        return SUCCESS;
    }
}
// Copyright (c) 2003-2014, Jodd Team (jodd.org). All Rights Reserved.

package jodd.db.oom;

import jodd.db.DbHsqldbTestCase;
import jodd.db.DbSession;
import jodd.db.DbThreadSession;
import jodd.db.oom.sqlgen.DbEntitySql;
import jodd.db.oom.tst.Boy;
import jodd.db.oom.tst.Girl2;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.util.Iterator;
import java.util.List;
import java.util.Set;

import static jodd.db.oom.sqlgen.DbSqlBuilder.sql;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Tests the entity-cache behavior of {@link DbOomQuery} over a fixed
 * Girl2/Boy fixture (3 girls; 3 boys, two of them linked to girl #2,
 * one to girl #1; girl #3 has no boy).
 * <p>
 * Two query templates are exercised: an inner join ({@code TSQL}) and a
 * left join ({@code TSQL_LEFT}) which additionally yields one row with a
 * {@code null} Boy. The tests verify, via {@code ==} identity asserts,
 * that with {@code cacheEntities(true)} repeated rows of the same Girl2
 * resolve to the SAME cached instance, and that entity-aware mode
 * collapses duplicate rows and (with the "g.boys" hint) wires each
 * girl's boys collection.
 */
@SuppressWarnings("SimplifiableJUnitAssertion")
public class EntityCacheTest extends DbHsqldbTestCase {

	// Inner join: only girls that have at least one boy appear (ids 1 and 2);
	// ordered by girl id DESC, so girl #2 produces two consecutive rows.
	public static final String TSQL = "select $C{g.id, g.name, g.speciality}, $C{b.*} from " +
			"$T{Girl2 g} join $T{Boy b} on $g.id = $b.girlId " +
			"order by $g.id desc";

	// Left join: girl #3 ("Emma") also appears, paired with a null Boy.
	public static final String TSQL_LEFT = "select $C{g.id, g.name, g.speciality}, $C{b.*} from " +
			"$T{Girl2 g} left join $T{Boy b} on $g.id = $b.girlId " +
			"order by $g.id desc";

	// Thread-bound session; opened in setUp(), closed in tearDown().
	DbSession dbSession;

	/**
	 * Registers the two entities and inserts the shared fixture:
	 * girls 1-3 and boys 1-3 (boys "Johny" and "Marco" belong to girl #2,
	 * "Hugo" to girl #1). Each insert is asserted to affect exactly one row.
	 */
	@Override
	@Before
	public void setUp() throws Exception {
		super.setUp();

		DbOomManager.resetAll();
		DbOomManager dbOom = DbOomManager.getInstance();
		dbOom.registerEntity(Girl2.class);
		dbOom.registerEntity(Boy.class);

		dbSession = new DbThreadSession(cp);

		assertEquals(1, DbEntitySql.insert(new Girl2(1, "Anna", "seduction")).query().executeUpdateAndClose());
		assertEquals(1, DbEntitySql.insert(new Girl2(2, "Sandra", "spying")).query().executeUpdateAndClose());
		assertEquals(1, DbEntitySql.insert(new Girl2(3, "Emma", "nothing")).query().executeUpdateAndClose());
		assertEquals(1, DbEntitySql.insert(new Boy(1, "Johny", 2)).query().executeUpdateAndClose());
		assertEquals(1, DbEntitySql.insert(new Boy(2, "Marco", 2)).query().executeUpdateAndClose());
		assertEquals(1, DbEntitySql.insert(new Boy(3, "Hugo", 1)).query().executeUpdateAndClose());
	}

	/** Closes the thread session opened in {@link #setUp()}. */
	@After
	public void tearDown() throws Exception {
		dbSession.closeSession();
		super.tearDown();
	}

	/**
	 * Cache on, no hints: one Object[] per JOIN row (3 rows). The two rows
	 * for girl #2 yield the SAME Girl2 instance (cache hit, {@code ==}),
	 * while all three Boy instances are distinct. Boys are NOT injected
	 * into the girls (no hints), so getBoys() stays null.
	 */
	@Test
	public void testMapRows2Types_useCache_noHints() {
		DbOomQuery q = new DbOomQuery(sql(TSQL));

		List<Object[]> result = q.cacheEntities(true).listAndClose(Girl2.class, Boy.class);

		assertEquals(3, result.size());

		Girl2 girl1 = (Girl2) result.get(0)[0];
		Girl2 girl2 = (Girl2) result.get(1)[0];
		Girl2 girl3 = (Girl2) result.get(2)[0];

		assertTrue(girl1.equals(girl2));
		assertTrue(girl1 == girl2);		// same cached instance, not just equal
		assertFalse(girl3 == girl1);

		Boy boy1 = (Boy) result.get(0)[1];
		Boy boy2 = (Boy) result.get(1)[1];
		Boy boy3 = (Boy) result.get(2)[1];

		assertTrue(boy1.id != boy2.id);
		assertFalse(boy1 == boy2);
		assertFalse(boy2 == boy3);

		assertNull(girl1.getBoys());
		assertNull(girl3.getBoys());
	}

	/**
	 * Same as above but with the LEFT join: 4 rows, the first being
	 * "Emma" (girl #3, DESC order) paired with a null Boy.
	 */
	@Test
	public void testMapRows2Types_useCache_noHints_LEFT() {
		DbOomQuery q = new DbOomQuery(sql(TSQL_LEFT));

		List<Object[]> result = q.cacheEntities(true).listAndClose(Girl2.class, Boy.class);

		assertEquals(4, result.size());

		Girl2 girl0 = (Girl2) result.get(0)[0];
		Girl2 girl1 = (Girl2) result.get(1)[0];
		Girl2 girl2 = (Girl2) result.get(2)[0];
		Girl2 girl3 = (Girl2) result.get(3)[0];

		assertEquals("Emma", girl0.name);	// boyless girl surfaces first (id DESC)

		assertTrue(girl1.equals(girl2));
		assertTrue(girl1 == girl2);
		assertFalse(girl3 == girl1);

		Boy boy0 = (Boy) result.get(0)[1];
		Boy boy1 = (Boy) result.get(1)[1];
		Boy boy2 = (Boy) result.get(2)[1];
		Boy boy3 = (Boy) result.get(3)[1];

		assertNull(boy0);	// left join produced no boy for Emma

		assertTrue(boy1.id != boy2.id);
		assertFalse(boy1 == boy2);
		assertFalse(boy2 == boy3);

		assertNull(girl1.getBoys());
		assertNull(girl3.getBoys());
	}

	/**
	 * Cache on + hints ("g", "g.boys"): result flattens to one Girl2 per row
	 * (still 3 rows — duplicates NOT collapsed without entity-aware mode),
	 * but boys ARE injected into each girl's collection via the hint.
	 */
	@Test
	public void testMapRows2Types_useCache_useHints_1perRow() {
		DbOomQuery q = new DbOomQuery(sql(TSQL));

		List<Girl2> result2 = q.withHints("g", "g.boys").cacheEntities(true).listAndClose(Girl2.class, Boy.class);

		assertEquals(3, result2.size());

		Girl2 girl1 = result2.get(0);
		Girl2 girl2 = result2.get(1);
		Girl2 girl3 = result2.get(2);

		assertTrue(girl1.equals(girl2));
		assertTrue(girl1 == girl2);
		assertFalse(girl3 == girl1);

		assertNotNull(girl1.getBoys());
		assertEquals(2, girl1.getBoys().size());
		assertNotNull(girl3.getBoys());
		assertEquals(1, girl3.getBoys().size());
		assertEquals("Hugo", girl3.getBoys().get(0).name);
	}

	/**
	 * LEFT-join variant of the hinted test: 4 rows; the boyless girl keeps
	 * a null boys collection (the null Boy is not injected).
	 */
	@Test
	public void testMapRows2Types_useCache_useHints_1perRow_LEFT() {
		DbOomQuery q = new DbOomQuery(sql(TSQL_LEFT));

		List<Girl2> result2 = q.withHints("g", "g.boys").cacheEntities(true).listAndClose(Girl2.class, Boy.class);

		assertEquals(4, result2.size());

		Girl2 girl0 = result2.get(0);
		Girl2 girl1 = result2.get(1);
		Girl2 girl2 = result2.get(2);
		Girl2 girl3 = result2.get(3);

		assertTrue(girl1.equals(girl2));
		assertTrue(girl1 == girl2);
		assertFalse(girl3 == girl1);

		assertNull(girl0.getBoys());
		assertNotNull(girl1.getBoys());
		assertEquals(2, girl1.getBoys().size());
		assertNotNull(girl3.getBoys());
		assertEquals(1, girl3.getBoys().size());
		assertEquals("Hugo", girl3.getBoys().get(0).name);
	}

	/**
	 * Entity-aware mode collapses duplicate rows: the 3 JOIN rows become
	 * 2 distinct girls, each with the right boys wired in.
	 */
	@Test
	public void testMapRows2Types_entityAware() {
		DbOomQuery q = new DbOomQuery(sql(TSQL));

		List<Girl2> result2 = q.withHints("g", "g.boys").entityAwareMode(true).listAndClose(Girl2.class, Boy.class);

		assertEquals(2, result2.size());

		Girl2 girl1 = result2.get(0);
		Girl2 girl3 = result2.get(1);

		assertNotNull(girl1.getBoys());
		assertEquals(2, girl1.getBoys().size());
		assertNotNull(girl3.getBoys());
		assertEquals(1, girl3.getBoys().size());
	}

	/**
	 * Entity-aware over the LEFT join: 3 distinct girls; the boyless one
	 * keeps a null boys collection.
	 */
	@Test
	public void testMapRows2Types_entityAware_LEFT() {
		DbOomQuery q = new DbOomQuery(sql(TSQL_LEFT));

		List<Girl2> result2 = q.withHints("g", "g.boys").entityAwareMode(true).listAndClose(Girl2.class, Boy.class);

		assertEquals(3, result2.size());

		Girl2 girl0 = result2.get(0);
		Girl2 girl1 = result2.get(1);
		Girl2 girl3 = result2.get(2);

		assertNull(girl0.getBoys());
		assertNotNull(girl1.getBoys());
		assertEquals(2, girl1.getBoys().size());
		assertNotNull(girl3.getBoys());
		assertEquals(1, girl3.getBoys().size());
	}

	/**
	 * Entity-aware with a max-rows limit of 1: the cap applies to
	 * collapsed entities, and the first girl still gets BOTH her boys.
	 */
	@Test
	public void testMapRows2Types_entityAware_List() {
		DbOomQuery q = new DbOomQuery(sql(TSQL));

		List<Girl2> result2 = q.withHints("g", "g.boys").entityAwareMode(true).listAndClose(1, Girl2.class, Boy.class);

		assertEquals(1, result2.size());

		Girl2 girl1 = result2.get(0);

		assertNotNull(girl1.getBoys());
		assertEquals(2, girl1.getBoys().size());
	}

	/** LEFT-join variant of the limited list: limit 2 → boyless girl + girl #2. */
	@Test
	public void testMapRows2Types_entityAware_List_LEFT() {
		DbOomQuery q = new DbOomQuery(sql(TSQL_LEFT));

		List<Girl2> result2 = q.withHints("g", "g.boys").entityAwareMode(true).listAndClose(2, Girl2.class, Boy.class);

		assertEquals(2, result2.size());

		Girl2 girl0 = result2.get(0);
		Girl2 girl1 = result2.get(1);

		assertNull(girl0.getBoys());
		assertNotNull(girl1.getBoys());
		assertEquals(2, girl1.getBoys().size());
	}

	/** Entity-aware into a Set: 2 distinct girls, boys wired per girl id. */
	@Test
	public void testMapRows2Types_entityAware_Set() {
		DbOomQuery q = new DbOomQuery(sql(TSQL));

		Set<Girl2> set1 = q.withHints("g", "g.boys").entityAwareMode(true).listSetAndClose(Girl2.class, Boy.class);

		assertEquals(2, set1.size());

		for (Girl2 girl : set1) {
			if (girl.id.equals(1)) {
				assertEquals(1, girl.getBoys().size());
			}
			if (girl.id.equals(2)) {
				assertEquals(2, girl.getBoys().size());
			}
		}
	}

	/** Set over the LEFT join: 3 girls; girl #3 has a null boys collection. */
	@Test
	public void testMapRows2Types_entityAware_Set_LEFT() {
		DbOomQuery q = new DbOomQuery(sql(TSQL_LEFT));

		Set<Girl2> set1 = q.withHints("g", "g.boys").entityAwareMode(true).listSetAndClose(Girl2.class, Boy.class);

		assertEquals(3, set1.size());

		for (Girl2 girl : set1) {
			if (girl.id.equals(1)) {
				assertEquals(1, girl.getBoys().size());
			}
			if (girl.id.equals(2)) {
				assertEquals(2, girl.getBoys().size());
			}
			if (girl.id.equals(3)) {
				assertNull(girl.getBoys());
			}
		}
	}

	/**
	 * Set with max 1: only the first collapsed entity (girl #2, DESC order)
	 * survives, carrying both boys; any other id fails the test.
	 */
	@Test
	public void testMapRows2Types_entityAware_Max() {
		DbOomQuery q = new DbOomQuery(sql(TSQL));

		Set<Girl2> set1 = q.withHints("g", "g.boys").entityAwareMode(true).listSetAndClose(1, Girl2.class, Boy.class);

		assertEquals(1, set1.size());

		for (Girl2 girl : set1) {
			if (girl.id.equals(2)) {
				assertEquals(2, girl.getBoys().size());
			} else {
				fail();
			}
		}
	}

	/** LEFT-join variant with max 2: exactly girls #3 (no boys) and #2 (two boys). */
	@Test
	public void testMapRows2Types_entityAware_Max_LEFT() {
		DbOomQuery q = new DbOomQuery(sql(TSQL_LEFT));

		Set<Girl2> set1 = q.withHints("g", "g.boys").entityAwareMode(true).listSetAndClose(2, Girl2.class, Boy.class);

		assertEquals(2, set1.size());

		for (Girl2 girl : set1) {
			if (girl.id.equals(3)) {
				assertNull(girl.getBoys());
			} else if (girl.id.equals(2)) {
				assertEquals(2, girl.getBoys().size());
			} else {
				fail();
			}
		}
	}

	/**
	 * Entity-aware iteration: repeated hasNext() calls are idempotent;
	 * each next() returns a fully assembled girl with her boys attached.
	 */
	@Test
	public void testMapRows2Types_entityAware_Iterator() {
		DbOomQuery q = new DbOomQuery(sql(TSQL));

		Iterator<Girl2> iterator = q.withHints("g", "g.boys").entityAwareMode(true).iterateAndClose(Girl2.class, Boy.class);

		assertTrue(iterator.hasNext());
		assertTrue(iterator.hasNext());		// hasNext() must not advance the cursor
		assertTrue(iterator.hasNext());

		Girl2 girl1 = iterator.next();

		assertNotNull(girl1.getBoys());
		assertEquals(2, girl1.getBoys().size());

		assertTrue(iterator.hasNext());

		Girl2 girl3 = iterator.next();

		assertNotNull(girl3.getBoys());
		assertEquals(1, girl3.getBoys().size());

		assertFalse(iterator.hasNext());
	}

	/** Iterator over the LEFT join: boyless girl first, then the two with boys. */
	@Test
	public void testMapRows2Types_entityAware_Iterator_LEFT() {
		DbOomQuery q = new DbOomQuery(sql(TSQL_LEFT));

		Iterator<Girl2> iterator = q.withHints("g", "g.boys").entityAwareMode(true).iterateAndClose(Girl2.class, Boy.class);

		assertTrue(iterator.hasNext());

		Girl2 girl0 = iterator.next();
		assertNull(girl0.getBoys());

		Girl2 girl1 = iterator.next();

		assertNotNull(girl1.getBoys());
		assertEquals(2, girl1.getBoys().size());

		assertTrue(iterator.hasNext());

		Girl2 girl3 = iterator.next();

		assertNotNull(girl3.getBoys());
		assertEquals(1, girl3.getBoys().size());

		assertFalse(iterator.hasNext());
	}

	/** find() in entity-aware mode returns the first collapsed girl with her boys. */
	@Test
	public void testMapRows2Types_entityAware_Find() {
		DbOomQuery q = new DbOomQuery(sql(TSQL));

		Girl2 girl1 = q.withHints("g", "g.boys").entityAwareMode(true).findAndClose(Girl2.class, Boy.class);

		assertNotNull(girl1.getBoys());
		assertEquals(2, girl1.getBoys().size());
	}

	/** LEFT-join find(): first row is the boyless girl, so getBoys() is null. */
	@Test
	public void testMapRows2Types_entityAware_Find_LEFT() {
		DbOomQuery q = new DbOomQuery(sql(TSQL_LEFT));

		Girl2 girl0 = q.withHints("g", "g.boys").entityAwareMode(true).findAndClose(Girl2.class, Boy.class);

		assertNull(girl0.getBoys());
	}

}
/* * Copyright 2012-2018 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot; import java.lang.reflect.Constructor; import java.security.AccessControlException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.beans.BeanUtils; import org.springframework.beans.CachedIntrospectionResults; import org.springframework.beans.factory.groovy.GroovyBeanDefinitionReader; import org.springframework.beans.factory.support.BeanDefinitionRegistry; import org.springframework.beans.factory.support.BeanNameGenerator; import org.springframework.beans.factory.xml.XmlBeanDefinitionReader; import org.springframework.boot.Banner.Mode; import org.springframework.boot.context.properties.bind.Bindable; import org.springframework.boot.context.properties.bind.Binder; import org.springframework.boot.context.properties.source.ConfigurationPropertySources; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextInitializer; import org.springframework.context.ApplicationListener; import org.springframework.context.ConfigurableApplicationContext; 
import org.springframework.context.annotation.AnnotatedBeanDefinitionReader; import org.springframework.context.annotation.AnnotationConfigApplicationContext; import org.springframework.context.annotation.AnnotationConfigUtils; import org.springframework.context.annotation.ClassPathBeanDefinitionScanner; import org.springframework.context.support.AbstractApplicationContext; import org.springframework.context.support.GenericApplicationContext; import org.springframework.core.GenericTypeResolver; import org.springframework.core.annotation.AnnotationAwareOrderComparator; import org.springframework.core.env.CommandLinePropertySource; import org.springframework.core.env.CompositePropertySource; import org.springframework.core.env.ConfigurableEnvironment; import org.springframework.core.env.Environment; import org.springframework.core.env.MapPropertySource; import org.springframework.core.env.MutablePropertySources; import org.springframework.core.env.PropertySource; import org.springframework.core.env.SimpleCommandLinePropertySource; import org.springframework.core.env.StandardEnvironment; import org.springframework.core.io.DefaultResourceLoader; import org.springframework.core.io.ResourceLoader; import org.springframework.core.io.support.SpringFactoriesLoader; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; import org.springframework.util.CollectionUtils; import org.springframework.util.ObjectUtils; import org.springframework.util.ReflectionUtils; import org.springframework.util.StopWatch; import org.springframework.util.StringUtils; import org.springframework.web.context.WebApplicationContext; import org.springframework.web.context.support.StandardServletEnvironment; /** * Class that can be used to bootstrap and launch a Spring application from a Java main * method. 
By default class will perform the following steps to bootstrap your * application: * * <ul> * <li>Create an appropriate {@link ApplicationContext} instance (depending on your * classpath)</li> * <li>Register a {@link CommandLinePropertySource} to expose command line arguments as * Spring properties</li> * <li>Refresh the application context, loading all singleton beans</li> * <li>Trigger any {@link CommandLineRunner} beans</li> * </ul> * * In most circumstances the static {@link #run(Class, String[])} method can be called * directly from your {@literal main} method to bootstrap your application: * * <pre class="code"> * &#064;Configuration * &#064;EnableAutoConfiguration * public class MyApplication { * * // ... Bean definitions * * public static void main(String[] args) throws Exception { * SpringApplication.run(MyApplication.class, args); * } * } * </pre> * * <p> * For more advanced configuration a {@link SpringApplication} instance can be created and * customized before being run: * * <pre class="code"> * public static void main(String[] args) throws Exception { * SpringApplication application = new SpringApplication(MyApplication.class); * // ... customize application settings here * application.run(args) * } * </pre> * * {@link SpringApplication}s can read beans from a variety of different sources. It is * generally recommended that a single {@code @Configuration} class is used to bootstrap * your application, however, you may also set {@link #getSources() sources} from: * <ul> * <li>The fully qualified class name to be loaded by * {@link AnnotatedBeanDefinitionReader}</li> * <li>The location of an XML resource to be loaded by {@link XmlBeanDefinitionReader}, or * a groovy script to be loaded by {@link GroovyBeanDefinitionReader}</li> * <li>The name of a package to be scanned by {@link ClassPathBeanDefinitionScanner}</li> * </ul> * * Configuration properties are also bound to the {@link SpringApplication}. 
This makes it * possible to set {@link SpringApplication} properties dynamically, like additional * sources ("spring.main.sources" - a CSV list) the flag to indicate a web environment * ("spring.main.web-application-type=none") or the flag to switch off the banner * ("spring.main.banner-mode=off"). * * @author Phillip Webb * @author Dave Syer * @author Andy Wilkinson * @author Christian Dupuis * @author Stephane Nicoll * @author Jeremy Rickard * @author Craig Burke * @author Michael Simons * @author Madhura Bhave * @author Brian Clozel * @author Ethan Rubinson * @see #run(Class, String[]) * @see #run(Class[], String[]) * @see #SpringApplication(Class...) */ public class SpringApplication { /** * The class name of application context that will be used by default for non-web * environments. */ public static final String DEFAULT_CONTEXT_CLASS = "org.springframework.context." + "annotation.AnnotationConfigApplicationContext"; /** * The class name of application context that will be used by default for web * environments. */ public static final String DEFAULT_WEB_CONTEXT_CLASS = "org.springframework.boot." + "web.servlet.context.AnnotationConfigServletWebServerApplicationContext"; private static final String[] WEB_ENVIRONMENT_CLASSES = { "javax.servlet.Servlet", "org.springframework.web.context.ConfigurableWebApplicationContext" }; /** * The class name of application context that will be used by default for reactive web * environments. */ public static final String DEFAULT_REACTIVE_WEB_CONTEXT_CLASS = "org.springframework." + "boot.web.reactive.context.AnnotationConfigReactiveWebServerApplicationContext"; private static final String REACTIVE_WEB_ENVIRONMENT_CLASS = "org.springframework." + "web.reactive.DispatcherHandler"; private static final String MVC_WEB_ENVIRONMENT_CLASS = "org.springframework." + "web.servlet.DispatcherServlet"; /** * Default banner location. 
*/ public static final String BANNER_LOCATION_PROPERTY_VALUE = SpringApplicationBannerPrinter.DEFAULT_BANNER_LOCATION; /** * Banner location property key. */ public static final String BANNER_LOCATION_PROPERTY = SpringApplicationBannerPrinter.BANNER_LOCATION_PROPERTY; private static final String SYSTEM_PROPERTY_JAVA_AWT_HEADLESS = "java.awt.headless"; private static final Log logger = LogFactory.getLog(SpringApplication.class); private Set<Class<?>> primarySources; private Set<String> sources = new LinkedHashSet<>(); private Class<?> mainApplicationClass; private Banner.Mode bannerMode = Banner.Mode.CONSOLE; private boolean logStartupInfo = true; private boolean addCommandLineProperties = true; private Banner banner; private ResourceLoader resourceLoader; private BeanNameGenerator beanNameGenerator; private ConfigurableEnvironment environment; private Class<? extends ConfigurableApplicationContext> applicationContextClass; private WebApplicationType webApplicationType; private boolean headless = true; private boolean registerShutdownHook = true; private List<ApplicationContextInitializer<?>> initializers; private List<ApplicationListener<?>> listeners; private Map<String, Object> defaultProperties; private Set<String> additionalProfiles = new HashSet<>(); /** * Create a new {@link SpringApplication} instance. The application context will load * beans from the specified primary sources (see {@link SpringApplication class-level} * documentation for details. The instance can be customized before calling * {@link #run(String...)}. * @param primarySources the primary bean sources * @see #run(Class, String[]) * @see #SpringApplication(ResourceLoader, Class...) * @see #setSources(Set) */ public SpringApplication(Class<?>... primarySources) { this(null, primarySources); } /** * Create a new {@link SpringApplication} instance. The application context will load * beans from the specified primary sources (see {@link SpringApplication class-level} * documentation for details. 
The instance can be customized before calling * {@link #run(String...)}. * @param resourceLoader the resource loader to use * @param primarySources the primary bean sources * @see #run(Class, String[]) * @see #setSources(Set) */ @SuppressWarnings({ "unchecked", "rawtypes" }) public SpringApplication(ResourceLoader resourceLoader, Class<?>... primarySources) { this.resourceLoader = resourceLoader; Assert.notNull(primarySources, "PrimarySources must not be null"); this.primarySources = new LinkedHashSet<>(Arrays.asList(primarySources)); this.webApplicationType = deduceWebApplicationType(); setInitializers((Collection) getSpringFactoriesInstances( ApplicationContextInitializer.class)); setListeners((Collection) getSpringFactoriesInstances(ApplicationListener.class)); this.mainApplicationClass = deduceMainApplicationClass(); } private WebApplicationType deduceWebApplicationType() { if (ClassUtils.isPresent(REACTIVE_WEB_ENVIRONMENT_CLASS, null) && !ClassUtils.isPresent(MVC_WEB_ENVIRONMENT_CLASS, null)) { return WebApplicationType.REACTIVE; } for (String className : WEB_ENVIRONMENT_CLASSES) { if (!ClassUtils.isPresent(className, null)) { return WebApplicationType.NONE; } } return WebApplicationType.SERVLET; } private Class<?> deduceMainApplicationClass() { try { StackTraceElement[] stackTrace = new RuntimeException().getStackTrace(); for (StackTraceElement stackTraceElement : stackTrace) { if ("main".equals(stackTraceElement.getMethodName())) { return Class.forName(stackTraceElement.getClassName()); } } } catch (ClassNotFoundException ex) { // Swallow and continue } return null; } /** * Run the Spring application, creating and refreshing a new * {@link ApplicationContext}. * @param args the application arguments (usually passed from a Java main method) * @return a running {@link ApplicationContext} */ public ConfigurableApplicationContext run(String... 
args) { StopWatch stopWatch = new StopWatch(); stopWatch.start(); ConfigurableApplicationContext context = null; Collection<SpringBootExceptionReporter> exceptionReporters = new ArrayList<>(); configureHeadlessProperty(); SpringApplicationRunListeners listeners = getRunListeners(args); listeners.starting(); try { ApplicationArguments applicationArguments = new DefaultApplicationArguments( args); ConfigurableEnvironment environment = prepareEnvironment(listeners, applicationArguments); configureIgnoreBeanInfo(environment); Banner printedBanner = printBanner(environment); context = createApplicationContext(); exceptionReporters = getSpringFactoriesInstances( SpringBootExceptionReporter.class, new Class[] { ConfigurableApplicationContext.class }, context); prepareContext(context, environment, listeners, applicationArguments, printedBanner); refreshContext(context); afterRefresh(context, applicationArguments); stopWatch.stop(); if (this.logStartupInfo) { new StartupInfoLogger(this.mainApplicationClass) .logStarted(getApplicationLog(), stopWatch); } listeners.started(context); callRunners(context, applicationArguments); } catch (Throwable ex) { handleRunFailure(context, listeners, exceptionReporters, ex); throw new IllegalStateException(ex); } listeners.running(context); return context; } private ConfigurableEnvironment prepareEnvironment( SpringApplicationRunListeners listeners, ApplicationArguments applicationArguments) { // Create and configure the environment ConfigurableEnvironment environment = getOrCreateEnvironment(); configureEnvironment(environment, applicationArguments.getSourceArgs()); listeners.environmentPrepared(environment); bindToSpringApplication(environment); if (this.webApplicationType == WebApplicationType.NONE) { environment = new EnvironmentConverter(getClassLoader()) .convertToStandardEnvironmentIfNecessary(environment); } ConfigurationPropertySources.attach(environment); return environment; } private void 
prepareContext(ConfigurableApplicationContext context, ConfigurableEnvironment environment, SpringApplicationRunListeners listeners, ApplicationArguments applicationArguments, Banner printedBanner) { context.setEnvironment(environment); postProcessApplicationContext(context); applyInitializers(context); listeners.contextPrepared(context); if (this.logStartupInfo) { logStartupInfo(context.getParent() == null); logStartupProfileInfo(context); } // Add boot specific singleton beans context.getBeanFactory().registerSingleton("springApplicationArguments", applicationArguments); if (printedBanner != null) { context.getBeanFactory().registerSingleton("springBootBanner", printedBanner); } // Load the sources Set<Object> sources = getAllSources(); Assert.notEmpty(sources, "Sources must not be empty"); load(context, sources.toArray(new Object[sources.size()])); listeners.contextLoaded(context); } private void refreshContext(ConfigurableApplicationContext context) { refresh(context); if (this.registerShutdownHook) { try { context.registerShutdownHook(); } catch (AccessControlException ex) { // Not allowed in some environments. } } } private void configureHeadlessProperty() { System.setProperty(SYSTEM_PROPERTY_JAVA_AWT_HEADLESS, System.getProperty( SYSTEM_PROPERTY_JAVA_AWT_HEADLESS, Boolean.toString(this.headless))); } private SpringApplicationRunListeners getRunListeners(String[] args) { Class<?>[] types = new Class<?>[] { SpringApplication.class, String[].class }; return new SpringApplicationRunListeners(logger, getSpringFactoriesInstances( SpringApplicationRunListener.class, types, this, args)); } private <T> Collection<T> getSpringFactoriesInstances(Class<T> type) { return getSpringFactoriesInstances(type, new Class<?>[] {}); } private <T> Collection<T> getSpringFactoriesInstances(Class<T> type, Class<?>[] parameterTypes, Object... 
args) { ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); // Use names and ensure unique to protect against duplicates Set<String> names = new LinkedHashSet<>( SpringFactoriesLoader.loadFactoryNames(type, classLoader)); List<T> instances = createSpringFactoriesInstances(type, parameterTypes, classLoader, args, names); AnnotationAwareOrderComparator.sort(instances); return instances; } @SuppressWarnings("unchecked") private <T> List<T> createSpringFactoriesInstances(Class<T> type, Class<?>[] parameterTypes, ClassLoader classLoader, Object[] args, Set<String> names) { List<T> instances = new ArrayList<>(names.size()); for (String name : names) { try { Class<?> instanceClass = ClassUtils.forName(name, classLoader); Assert.isAssignable(type, instanceClass); Constructor<?> constructor = instanceClass .getDeclaredConstructor(parameterTypes); T instance = (T) BeanUtils.instantiateClass(constructor, args); instances.add(instance); } catch (Throwable ex) { throw new IllegalArgumentException( "Cannot instantiate " + type + " : " + name, ex); } } return instances; } private ConfigurableEnvironment getOrCreateEnvironment() { if (this.environment != null) { return this.environment; } if (this.webApplicationType == WebApplicationType.SERVLET) { return new StandardServletEnvironment(); } return new StandardEnvironment(); } /** * Template method delegating to * {@link #configurePropertySources(ConfigurableEnvironment, String[])} and * {@link #configureProfiles(ConfigurableEnvironment, String[])} in that order. * Override this method for complete control over Environment customization, or one of * the above for fine-grained control over property sources or profiles, respectively. 
* @param environment this application's environment * @param args arguments passed to the {@code run} method * @see #configureProfiles(ConfigurableEnvironment, String[]) * @see #configurePropertySources(ConfigurableEnvironment, String[]) */ protected void configureEnvironment(ConfigurableEnvironment environment, String[] args) { configurePropertySources(environment, args); configureProfiles(environment, args); } /** * Add, remove or re-order any {@link PropertySource}s in this application's * environment. * @param environment this application's environment * @param args arguments passed to the {@code run} method * @see #configureEnvironment(ConfigurableEnvironment, String[]) */ protected void configurePropertySources(ConfigurableEnvironment environment, String[] args) { MutablePropertySources sources = environment.getPropertySources(); if (this.defaultProperties != null && !this.defaultProperties.isEmpty()) { sources.addLast( new MapPropertySource("defaultProperties", this.defaultProperties)); } if (this.addCommandLineProperties && args.length > 0) { String name = CommandLinePropertySource.COMMAND_LINE_PROPERTY_SOURCE_NAME; if (sources.contains(name)) { PropertySource<?> source = sources.get(name); CompositePropertySource composite = new CompositePropertySource(name); composite.addPropertySource(new SimpleCommandLinePropertySource( "springApplicationCommandLineArgs", args)); composite.addPropertySource(source); sources.replace(name, composite); } else { sources.addFirst(new SimpleCommandLinePropertySource(args)); } } } /** * Configure which profiles are active (or active by default) for this application * environment. Additional profiles may be activated during configuration file * processing via the {@code spring.profiles.active} property. 
* @param environment this application's environment * @param args arguments passed to the {@code run} method * @see #configureEnvironment(ConfigurableEnvironment, String[]) * @see org.springframework.boot.context.config.ConfigFileApplicationListener */ protected void configureProfiles(ConfigurableEnvironment environment, String[] args) { environment.getActiveProfiles(); // ensure they are initialized // But these ones should go first (last wins in a property key clash) Set<String> profiles = new LinkedHashSet<>(this.additionalProfiles); profiles.addAll(Arrays.asList(environment.getActiveProfiles())); environment.setActiveProfiles(profiles.toArray(new String[profiles.size()])); } private void configureIgnoreBeanInfo(ConfigurableEnvironment environment) { if (System.getProperty( CachedIntrospectionResults.IGNORE_BEANINFO_PROPERTY_NAME) == null) { Boolean ignore = environment.getProperty("spring.beaninfo.ignore", Boolean.class, Boolean.TRUE); System.setProperty(CachedIntrospectionResults.IGNORE_BEANINFO_PROPERTY_NAME, ignore.toString()); } } /** * Bind the environment to the {@link SpringApplication}. * @param environment the environment to bind */ protected void bindToSpringApplication(ConfigurableEnvironment environment) { try { Binder.get(environment).bind("spring.main", Bindable.ofInstance(this)); } catch (Exception ex) { throw new IllegalStateException("Cannot bind to SpringApplication", ex); } } private Banner printBanner(ConfigurableEnvironment environment) { if (this.bannerMode == Banner.Mode.OFF) { return null; } ResourceLoader resourceLoader = this.resourceLoader != null ? 
this.resourceLoader : new DefaultResourceLoader(getClassLoader()); SpringApplicationBannerPrinter bannerPrinter = new SpringApplicationBannerPrinter( resourceLoader, this.banner); if (this.bannerMode == Mode.LOG) { return bannerPrinter.print(environment, this.mainApplicationClass, logger); } return bannerPrinter.print(environment, this.mainApplicationClass, System.out); } /** * Strategy method used to create the {@link ApplicationContext}. By default this * method will respect any explicitly set application context or application context * class before falling back to a suitable default. * @return the application context (not yet refreshed) * @see #setApplicationContextClass(Class) */ protected ConfigurableApplicationContext createApplicationContext() { Class<?> contextClass = this.applicationContextClass; if (contextClass == null) { try { switch (this.webApplicationType) { case SERVLET: contextClass = Class.forName(DEFAULT_WEB_CONTEXT_CLASS); break; case REACTIVE: contextClass = Class.forName(DEFAULT_REACTIVE_WEB_CONTEXT_CLASS); break; default: contextClass = Class.forName(DEFAULT_CONTEXT_CLASS); } } catch (ClassNotFoundException ex) { throw new IllegalStateException( "Unable create a default ApplicationContext, " + "please specify an ApplicationContextClass", ex); } } return (ConfigurableApplicationContext) BeanUtils.instantiateClass(contextClass); } /** * Apply any relevant post processing the {@link ApplicationContext}. Subclasses can * apply additional processing as required. 
* @param context the application context */ protected void postProcessApplicationContext(ConfigurableApplicationContext context) { if (this.beanNameGenerator != null) { context.getBeanFactory().registerSingleton( AnnotationConfigUtils.CONFIGURATION_BEAN_NAME_GENERATOR, this.beanNameGenerator); } if (this.resourceLoader != null) { if (context instanceof GenericApplicationContext) { ((GenericApplicationContext) context) .setResourceLoader(this.resourceLoader); } if (context instanceof DefaultResourceLoader) { ((DefaultResourceLoader) context) .setClassLoader(this.resourceLoader.getClassLoader()); } } } /** * Apply any {@link ApplicationContextInitializer}s to the context before it is * refreshed. * @param context the configured ApplicationContext (not refreshed yet) * @see ConfigurableApplicationContext#refresh() */ @SuppressWarnings({ "rawtypes", "unchecked" }) protected void applyInitializers(ConfigurableApplicationContext context) { for (ApplicationContextInitializer initializer : getInitializers()) { Class<?> requiredType = GenericTypeResolver.resolveTypeArgument( initializer.getClass(), ApplicationContextInitializer.class); Assert.isInstanceOf(requiredType, context, "Unable to call initializer."); initializer.initialize(context); } } /** * Called to log startup information, subclasses may override to add additional * logging. * @param isRoot true if this application is the root of a context hierarchy */ protected void logStartupInfo(boolean isRoot) { if (isRoot) { new StartupInfoLogger(this.mainApplicationClass) .logStarting(getApplicationLog()); } } /** * Called to log active profile information. 
* @param context the application context */ protected void logStartupProfileInfo(ConfigurableApplicationContext context) { Log log = getApplicationLog(); if (log.isInfoEnabled()) { String[] activeProfiles = context.getEnvironment().getActiveProfiles(); if (ObjectUtils.isEmpty(activeProfiles)) { String[] defaultProfiles = context.getEnvironment().getDefaultProfiles(); log.info("No active profile set, falling back to default profiles: " + StringUtils.arrayToCommaDelimitedString(defaultProfiles)); } else { log.info("The following profiles are active: " + StringUtils.arrayToCommaDelimitedString(activeProfiles)); } } } /** * Returns the {@link Log} for the application. By default will be deduced. * @return the application log */ protected Log getApplicationLog() { if (this.mainApplicationClass == null) { return logger; } return LogFactory.getLog(this.mainApplicationClass); } /** * Load beans into the application context. * @param context the context to load beans into * @param sources the sources to load */ protected void load(ApplicationContext context, Object[] sources) { if (logger.isDebugEnabled()) { logger.debug( "Loading source " + StringUtils.arrayToCommaDelimitedString(sources)); } BeanDefinitionLoader loader = createBeanDefinitionLoader( getBeanDefinitionRegistry(context), sources); if (this.beanNameGenerator != null) { loader.setBeanNameGenerator(this.beanNameGenerator); } if (this.resourceLoader != null) { loader.setResourceLoader(this.resourceLoader); } if (this.environment != null) { loader.setEnvironment(this.environment); } loader.load(); } /** * The ResourceLoader that will be used in the ApplicationContext. 
* @return the resourceLoader the resource loader that will be used in the * ApplicationContext (or null if the default) */ public ResourceLoader getResourceLoader() { return this.resourceLoader; } /** * Either the ClassLoader that will be used in the ApplicationContext (if * {@link #setResourceLoader(ResourceLoader) resourceLoader} is set, or the context * class loader (if not null), or the loader of the Spring {@link ClassUtils} class. * @return a ClassLoader (never null) */ public ClassLoader getClassLoader() { if (this.resourceLoader != null) { return this.resourceLoader.getClassLoader(); } return ClassUtils.getDefaultClassLoader(); } /** * Get the bean definition registry. * @param context the application context * @return the BeanDefinitionRegistry if it can be determined */ private BeanDefinitionRegistry getBeanDefinitionRegistry(ApplicationContext context) { if (context instanceof BeanDefinitionRegistry) { return (BeanDefinitionRegistry) context; } if (context instanceof AbstractApplicationContext) { return (BeanDefinitionRegistry) ((AbstractApplicationContext) context) .getBeanFactory(); } throw new IllegalStateException("Could not locate BeanDefinitionRegistry"); } /** * Factory method used to create the {@link BeanDefinitionLoader}. * @param registry the bean definition registry * @param sources the sources to load * @return the {@link BeanDefinitionLoader} that will be used to load beans */ protected BeanDefinitionLoader createBeanDefinitionLoader( BeanDefinitionRegistry registry, Object[] sources) { return new BeanDefinitionLoader(registry, sources); } /** * Refresh the underlying {@link ApplicationContext}. * @param applicationContext the application context to refresh */ protected void refresh(ApplicationContext applicationContext) { Assert.isInstanceOf(AbstractApplicationContext.class, applicationContext); ((AbstractApplicationContext) applicationContext).refresh(); } /** * Called after the context has been refreshed. 
* @param context the application context
* @param args the application arguments
*/
protected void afterRefresh(ConfigurableApplicationContext context,
		ApplicationArguments args) {
}

// Looks up every ApplicationRunner and CommandLineRunner bean in the refreshed
// context and invokes them in AnnotationAwareOrderComparator order. The
// LinkedHashSet de-duplicates a bean that implements both interfaces while
// keeping the sorted order.
private void callRunners(ApplicationContext context, ApplicationArguments args) {
	List<Object> runners = new ArrayList<>();
	runners.addAll(context.getBeansOfType(ApplicationRunner.class).values());
	runners.addAll(context.getBeansOfType(CommandLineRunner.class).values());
	AnnotationAwareOrderComparator.sort(runners);
	for (Object runner : new LinkedHashSet<>(runners)) {
		if (runner instanceof ApplicationRunner) {
			callRunner((ApplicationRunner) runner, args);
		}
		if (runner instanceof CommandLineRunner) {
			callRunner((CommandLineRunner) runner, args);
		}
	}
}

// Runs a single ApplicationRunner; any failure is wrapped with the cause
// preserved so run() can report it.
private void callRunner(ApplicationRunner runner, ApplicationArguments args) {
	try {
		(runner).run(args);
	}
	catch (Exception ex) {
		throw new IllegalStateException("Failed to execute ApplicationRunner", ex);
	}
}

// Runs a single CommandLineRunner with the raw (unparsed) source arguments.
private void callRunner(CommandLineRunner runner, ApplicationArguments args) {
	try {
		(runner).run(args.getSourceArgs());
	}
	catch (Exception ex) {
		throw new IllegalStateException("Failed to execute CommandLineRunner", ex);
	}
}

// Central failure path for run(): records the exit code, notifies listeners,
// reports the failure, closes the context, then rethrows the original
// exception as a runtime exception.
private void handleRunFailure(ConfigurableApplicationContext context,
		SpringApplicationRunListeners listeners,
		Collection<SpringBootExceptionReporter> exceptionReporters,
		Throwable exception) {
	try {
		try {
			handleExitCode(context, exception);
			listeners.failed(context, exception);
		}
		finally {
			// Report and close even when a listener itself throws.
			reportFailure(exceptionReporters, exception);
			if (context != null) {
				context.close();
			}
		}
	}
	catch (Exception ex) {
		logger.warn("Unable to close ApplicationContext", ex);
	}
	ReflectionUtils.rethrowRuntimeException(exception);
}

// Gives each SpringBootExceptionReporter a chance to handle the failure; the
// first reporter that claims it short-circuits the default error logging.
private void reportFailure(Collection<SpringBootExceptionReporter> exceptionReporters,
		Throwable failure) {
	try {
		for (SpringBootExceptionReporter reporter : exceptionReporters) {
			if (reporter.reportException(failure)) {
				registerLoggedException(failure);
				return;
			}
		}
	}
	catch (Throwable ex) {
		// Continue
with normal handling of the original failure
	}
	if (logger.isErrorEnabled()) {
		logger.error("Application run failed", failure);
		registerLoggedException(failure);
	}
}

/**
 * Register that the given exception has been logged. By default, if the running in
 * the main thread, this method will suppress additional printing of the stacktrace.
 * @param exception the exception that was logged
 */
protected void registerLoggedException(Throwable exception) {
	SpringBootExceptionHandler handler = getSpringBootExceptionHandler();
	if (handler != null) {
		handler.registerLoggedException(exception);
	}
}

// Derives an exit code from the failure and, when non-zero, publishes an
// ExitCodeEvent and records the code with the (main-thread only) handler.
private void handleExitCode(ConfigurableApplicationContext context,
		Throwable exception) {
	int exitCode = getExitCodeFromException(context, exception);
	if (exitCode != 0) {
		if (context != null) {
			context.publishEvent(new ExitCodeEvent(context, exitCode));
		}
		SpringBootExceptionHandler handler = getSpringBootExceptionHandler();
		if (handler != null) {
			handler.registerExitCode(exitCode);
		}
	}
}

// Mapped ExitCodeExceptionMapper beans take precedence; falls back to any
// ExitCodeGenerator found in the exception's cause chain.
private int getExitCodeFromException(ConfigurableApplicationContext context,
		Throwable exception) {
	int exitCode = getExitCodeFromMappedException(context, exception);
	if (exitCode == 0) {
		exitCode = getExitCodeFromExitCodeGeneratorException(exception);
	}
	return exitCode;
}

// Asks ExitCodeExceptionMapper beans for a code; returns 0 (no opinion) when
// the context is missing/inactive or no mapper matches.
private int getExitCodeFromMappedException(ConfigurableApplicationContext context,
		Throwable exception) {
	if (context == null || !context.isActive()) {
		return 0;
	}
	ExitCodeGenerators generators = new ExitCodeGenerators();
	Collection<ExitCodeExceptionMapper> beans = context
			.getBeansOfType(ExitCodeExceptionMapper.class).values();
	generators.addAll(exception, beans);
	return generators.getExitCode();
}

// Walks the cause chain looking for an exception that is itself an
// ExitCodeGenerator; returns 0 when none is found.
private int getExitCodeFromExitCodeGeneratorException(Throwable exception) {
	if (exception == null) {
		return 0;
	}
	if (exception instanceof ExitCodeGenerator) {
		return ((ExitCodeGenerator) exception).getExitCode();
	}
	return getExitCodeFromExitCodeGeneratorException(exception.getCause());
}

// Only the main (or devtools "restartedMain") thread gets a handler; other
// threads return null so no exit-code/stacktrace handling applies to them.
SpringBootExceptionHandler getSpringBootExceptionHandler() {
	if (isMainThread(Thread.currentThread())) {
		return SpringBootExceptionHandler.forCurrentThread();
	}
	return null;
}

// Matches by both thread name and thread-group name to avoid treating an
// arbitrary user thread that happens to be called "main" as the main thread.
private boolean isMainThread(Thread currentThread) {
	return ("main".equals(currentThread.getName())
			|| "restartedMain".equals(currentThread.getName()))
			&& "main".equals(currentThread.getThreadGroup().getName());
}

/**
 * Returns the main application class that has been deduced or explicitly configured.
 * @return the main application class or {@code null}
 */
public Class<?> getMainApplicationClass() {
	return this.mainApplicationClass;
}

/**
 * Set a specific main application class that will be used as a log source and to
 * obtain version information. By default the main application class will be deduced.
 * Can be set to {@code null} if there is no explicit application class.
 * @param mainApplicationClass the mainApplicationClass to set or {@code null}
 */
public void setMainApplicationClass(Class<?> mainApplicationClass) {
	this.mainApplicationClass = mainApplicationClass;
}

/**
 * Returns whether this {@link SpringApplication} is running within a web environment.
 * @return {@code true} if running within a web environment, otherwise {@code false}.
 * @see #setWebEnvironment(boolean)
 * @deprecated since 2.0.0 in favor of {@link #getWebApplicationType()}
 */
@Deprecated
public boolean isWebEnvironment() {
	return this.webApplicationType == WebApplicationType.SERVLET;
}

/**
 * Returns the type of web application that is being run.
 * @return the type of web application
 * @since 2.0.0
 */
public WebApplicationType getWebApplicationType() {
	return this.webApplicationType;
}

/**
 * Sets if this application is running within a web environment. If not specified will
 * attempt to deduce the environment based on the classpath.
* @param webEnvironment if the application is running in a web environment * @deprecated since 2.0.0 in favor of * {@link #setWebApplicationType(WebApplicationType)} */ @Deprecated public void setWebEnvironment(boolean webEnvironment) { this.webApplicationType = webEnvironment ? WebApplicationType.SERVLET : WebApplicationType.NONE; } /** * Sets the type of web application to be run. If not explicitly set the type of web * application will be deduced based on the classpath. * @param webApplicationType the web application type * @since 2.0.0 */ public void setWebApplicationType(WebApplicationType webApplicationType) { Assert.notNull(webApplicationType, "WebApplicationType must not be null"); this.webApplicationType = webApplicationType; } /** * Sets if the application is headless and should not instantiate AWT. Defaults to * {@code true} to prevent java icons appearing. * @param headless if the application is headless */ public void setHeadless(boolean headless) { this.headless = headless; } /** * Sets if the created {@link ApplicationContext} should have a shutdown hook * registered. Defaults to {@code true} to ensure that JVM shutdowns are handled * gracefully. * @param registerShutdownHook if the shutdown hook should be registered */ public void setRegisterShutdownHook(boolean registerShutdownHook) { this.registerShutdownHook = registerShutdownHook; } /** * Sets the {@link Banner} instance which will be used to print the banner when no * static banner file is provided. * @param banner The Banner instance to use */ public void setBanner(Banner banner) { this.banner = banner; } /** * Sets the mode used to display the banner when the application runs. Defaults to * {@code Banner.Mode.CONSOLE}. * @param bannerMode the mode used to display the banner */ public void setBannerMode(Banner.Mode bannerMode) { this.bannerMode = bannerMode; } /** * Sets if the application information should be logged when the application starts. * Defaults to {@code true}. 
* @param logStartupInfo if startup info should be logged. */ public void setLogStartupInfo(boolean logStartupInfo) { this.logStartupInfo = logStartupInfo; } /** * Sets if a {@link CommandLinePropertySource} should be added to the application * context in order to expose arguments. Defaults to {@code true}. * @param addCommandLineProperties if command line arguments should be exposed */ public void setAddCommandLineProperties(boolean addCommandLineProperties) { this.addCommandLineProperties = addCommandLineProperties; } /** * Set default environment properties which will be used in addition to those in the * existing {@link Environment}. * @param defaultProperties the additional properties to set */ public void setDefaultProperties(Map<String, Object> defaultProperties) { this.defaultProperties = defaultProperties; } /** * Convenient alternative to {@link #setDefaultProperties(Map)}. * @param defaultProperties some {@link Properties} */ public void setDefaultProperties(Properties defaultProperties) { this.defaultProperties = new HashMap<>(); for (Object key : Collections.list(defaultProperties.propertyNames())) { this.defaultProperties.put((String) key, defaultProperties.get(key)); } } /** * Set additional profile values to use (on top of those set in system or command line * properties). * @param profiles the additional profiles to set */ public void setAdditionalProfiles(String... profiles) { this.additionalProfiles = new LinkedHashSet<>(Arrays.asList(profiles)); } /** * Sets the bean name generator that should be used when generating bean names. * @param beanNameGenerator the bean name generator */ public void setBeanNameGenerator(BeanNameGenerator beanNameGenerator) { this.beanNameGenerator = beanNameGenerator; } /** * Sets the underlying environment that should be used with the created application * context. 
* @param environment the environment */ public void setEnvironment(ConfigurableEnvironment environment) { this.environment = environment; } /** * Add additional items to the primary sources that will be added to an * ApplicationContext when {@link #run(String...)} is called. * <p> * The sources here are added to those that were set in the constructor. Most users * should consider using {@link #getSources()}/{@link #setSources(Set)} rather than * calling this method. * @param additionalPrimarySources the additional primary sources to add * @see #SpringApplication(Class...) * @see #getSources() * @see #setSources(Set) * @see #getAllSources() */ public void addPrimarySources(Collection<Class<?>> additionalPrimarySources) { this.primarySources.addAll(additionalPrimarySources); } /** * Returns a mutable set of the sources that will be added to an ApplicationContext * when {@link #run(String...)} is called. * <p> * Sources set here will be used in addition to any primary sources set in the * constructor. * @return the application sources. * @see #SpringApplication(Class...) * @see #getAllSources() */ public Set<String> getSources() { return this.sources; } /** * Set additional sources that will be used to create an ApplicationContext. A source * can be: a class name, package name, or an XML resource location. * <p> * Sources set here will be used in addition to any primary sources set in the * constructor. * @param sources the application sources to set * @see #SpringApplication(Class...) * @see #getAllSources() */ public void setSources(Set<String> sources) { Assert.notNull(sources, "Sources must not be null"); this.sources = new LinkedHashSet<>(sources); } /** * Return an immutable set of all the sources that will be added to an * ApplicationContext when {@link #run(String...)} is called. This method combines any * primary sources specified in the constructor with any additional ones that have * been {@link #setSources(Set) explicitly set}. 
* @return an immutable set of all sources */ public Set<Object> getAllSources() { Set<Object> allSources = new LinkedHashSet<>(); if (!CollectionUtils.isEmpty(this.primarySources)) { allSources.addAll(this.primarySources); } if (!CollectionUtils.isEmpty(this.sources)) { allSources.addAll(this.sources); } return Collections.unmodifiableSet(allSources); } /** * Sets the {@link ResourceLoader} that should be used when loading resources. * @param resourceLoader the resource loader */ public void setResourceLoader(ResourceLoader resourceLoader) { Assert.notNull(resourceLoader, "ResourceLoader must not be null"); this.resourceLoader = resourceLoader; } /** * Sets the type of Spring {@link ApplicationContext} that will be created. If not * specified defaults to {@link #DEFAULT_WEB_CONTEXT_CLASS} for web based applications * or {@link AnnotationConfigApplicationContext} for non web based applications. * @param applicationContextClass the context class to set */ public void setApplicationContextClass( Class<? extends ConfigurableApplicationContext> applicationContextClass) { this.applicationContextClass = applicationContextClass; if (!isWebApplicationContext(applicationContextClass)) { this.webApplicationType = WebApplicationType.NONE; } } private boolean isWebApplicationContext(Class<?> applicationContextClass) { try { return WebApplicationContext.class.isAssignableFrom(applicationContextClass); } catch (NoClassDefFoundError ex) { return false; } } /** * Sets the {@link ApplicationContextInitializer} that will be applied to the Spring * {@link ApplicationContext}. * @param initializers the initializers to set */ public void setInitializers( Collection<? extends ApplicationContextInitializer<?>> initializers) { this.initializers = new ArrayList<>(); this.initializers.addAll(initializers); } /** * Add {@link ApplicationContextInitializer}s to be applied to the Spring * {@link ApplicationContext}. 
* @param initializers the initializers to add */ public void addInitializers(ApplicationContextInitializer<?>... initializers) { this.initializers.addAll(Arrays.asList(initializers)); } /** * Returns read-only ordered Set of the {@link ApplicationContextInitializer}s that * will be applied to the Spring {@link ApplicationContext}. * @return the initializers */ public Set<ApplicationContextInitializer<?>> getInitializers() { return asUnmodifiableOrderedSet(this.initializers); } /** * Sets the {@link ApplicationListener}s that will be applied to the SpringApplication * and registered with the {@link ApplicationContext}. * @param listeners the listeners to set */ public void setListeners(Collection<? extends ApplicationListener<?>> listeners) { this.listeners = new ArrayList<>(); this.listeners.addAll(listeners); } /** * Add {@link ApplicationListener}s to be applied to the SpringApplication and * registered with the {@link ApplicationContext}. * @param listeners the listeners to add */ public void addListeners(ApplicationListener<?>... listeners) { this.listeners.addAll(Arrays.asList(listeners)); } /** * Returns read-only ordered Set of the {@link ApplicationListener}s that will be * applied to the SpringApplication and registered with the {@link ApplicationContext} * . * @return the listeners */ public Set<ApplicationListener<?>> getListeners() { return asUnmodifiableOrderedSet(this.listeners); } /** * Static helper that can be used to run a {@link SpringApplication} from the * specified source using default settings. * @param primarySource the primary source to load * @param args the application arguments (usually passed from a Java main method) * @return the running {@link ApplicationContext} */ public static ConfigurableApplicationContext run(Class<?> primarySource, String... 
args) { return run(new Class<?>[] { primarySource }, args); } /** * Static helper that can be used to run a {@link SpringApplication} from the * specified sources using default settings and user supplied arguments. * @param primarySources the primary sources to load * @param args the application arguments (usually passed from a Java main method) * @return the running {@link ApplicationContext} */ public static ConfigurableApplicationContext run(Class<?>[] primarySources, String[] args) { return new SpringApplication(primarySources).run(args); } /** * A basic main that can be used to launch an application. This method is useful when * application sources are defined via a {@literal --spring.main.sources} command line * argument. * <p> * Most developers will want to define their own main method and call the * {@link #run(Class, String...) run} method instead. * @param args command line arguments * @throws Exception if the application cannot be started * @see SpringApplication#run(Class[], String[]) * @see SpringApplication#run(Class, String...) */ public static void main(String[] args) throws Exception { SpringApplication.run(new Class<?>[0], args); } /** * Static helper that can be used to exit a {@link SpringApplication} and obtain a * code indicating success (0) or otherwise. Does not throw exceptions but should * print stack traces of any encountered. Applies the specified * {@link ExitCodeGenerator} in addition to any Spring beans that implement * {@link ExitCodeGenerator}. In the case of multiple exit codes the highest value * will be used (or if all values are negative, the lowest value will be used) * @param context the context to close if possible * @param exitCodeGenerators exist code generators * @return the outcome (0 if successful) */ public static int exit(ApplicationContext context, ExitCodeGenerator... 
exitCodeGenerators) {
	Assert.notNull(context, "Context must not be null");
	int exitCode = 0;
	try {
		try {
			ExitCodeGenerators generators = new ExitCodeGenerators();
			Collection<ExitCodeGenerator> beans = context
					.getBeansOfType(ExitCodeGenerator.class).values();
			// Explicitly supplied generators are consulted together with any
			// ExitCodeGenerator beans found in the context.
			generators.addAll(exitCodeGenerators);
			generators.addAll(beans);
			exitCode = generators.getExitCode();
			if (exitCode != 0) {
				context.publishEvent(new ExitCodeEvent(context, exitCode));
			}
		}
		finally {
			// Always attempt to close the context, even if computing the exit
			// code or publishing the event failed.
			close(context);
		}
	}
	catch (Exception ex) {
		// Contract (see javadoc): exit() never throws; print the trace and
		// report a non-zero outcome instead.
		ex.printStackTrace();
		exitCode = (exitCode == 0 ? 1 : exitCode);
	}
	return exitCode;
}

// Closes the context when it is configurable; plain ApplicationContexts
// expose no close() and are left untouched.
private static void close(ApplicationContext context) {
	if (context instanceof ConfigurableApplicationContext) {
		ConfigurableApplicationContext closable = (ConfigurableApplicationContext) context;
		closable.close();
	}
}

// Sorts the elements with AnnotationAwareOrderComparator and returns them as
// an ordered Set. Wrapped with Collections.unmodifiableSet so the "read-only"
// contract documented on getInitializers()/getListeners() actually holds
// (previously a mutable LinkedHashSet escaped to callers).
private static <E> Set<E> asUnmodifiableOrderedSet(Collection<E> elements) {
	List<E> list = new ArrayList<>();
	list.addAll(elements);
	list.sort(AnnotationAwareOrderComparator.INSTANCE);
	return Collections.unmodifiableSet(new LinkedHashSet<>(list));
}

}
/*
 * Copyright 2012 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package org.uberfire.annotations.processors;

import java.io.FileNotFoundException;
import java.util.List;

import javax.tools.Diagnostic;
import javax.tools.JavaFileObject;

import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Tests for Screen related class generation.
 * Each test compiles a fixture source under
 * org/uberfire/annotations/processors/ with WorkbenchScreenProcessor attached
 * and inspects the resulting diagnostics and (optionally) the generated code
 * captured through the GenerationCompleteCallback.
 */
public class WorkbenchScreenProcessorTest extends AbstractProcessorTest {

    // A class without the @WorkbenchScreen annotation must compile cleanly and
    // must not trigger any code generation (callback never fires).
    @Test
    public void testNoWorkbenchScreenAnnotation() throws FileNotFoundException {
        final Result result = new Result();
        final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile(
                new WorkbenchScreenProcessor( new GenerationCompleteCallback() {

                    @Override
                    public void generationComplete( final String code ) {
                        result.setActualCode( code );
                    }
                } ),
                "org/uberfire/annotations/processors/WorkbenchScreenTest1" );
        assertSuccessfulCompilation( diagnostics );
        assertNull( result.getActualCode() );
    }

    // A @WorkbenchScreen providing neither a @WorkbenchPartView method nor an
    // IsWidget supertype must fail compilation with an explanatory error.
    @Test
    public void testWorkbenchScreenMissingViewAnnotation() {
        final Result result = new Result();
        final List<Diagnostic<?
extends JavaFileObject>> diagnostics = compile( new WorkbenchScreenProcessor( new GenerationCompleteCallback() { @Override public void generationComplete( final String code ) { result.setActualCode( code ); } } ), "org/uberfire/annotations/processors/WorkbenchScreenTest2" ); assertFailedCompilation( diagnostics ); assertCompilationError( diagnostics, "org.uberfire.annotations.processors.WorkbenchScreenTest2Activity: The WorkbenchScreen must either extend IsWidget or provide a @WorkbenchPartView annotated method to return a com.google.gwt.user.client.ui.IsWidget." ); assertNull( result.getActualCode() ); } @Test public void testWorkbenchScreenHasViewAnnotationMissingTitleAnnotation() { final Result result = new Result(); final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile( new WorkbenchScreenProcessor( new GenerationCompleteCallback() { @Override public void generationComplete( final String code ) { result.setActualCode( code ); } } ), "org/uberfire/annotations/processors/WorkbenchScreenTest3" ); assertFailedCompilation( diagnostics ); assertCompilationError( diagnostics, "org.uberfire.annotations.processors.WorkbenchScreenTest3Activity: The WorkbenchScreen must provide a @WorkbenchPartTitle annotated method to return a java.lang.String." ); assertNull( result.getActualCode() ); } @Test public void testWorkbenchScreenMissingViewAnnotationHasTitleAnnotation() { final Result result = new Result(); final List<Diagnostic<? 
extends JavaFileObject>> diagnostics = compile( new WorkbenchScreenProcessor( new GenerationCompleteCallback() { @Override public void generationComplete( final String code ) { result.setActualCode( code ); } } ), "org/uberfire/annotations/processors/WorkbenchScreenTest4" ); assertFailedCompilation( diagnostics ); assertCompilationError( diagnostics, "org.uberfire.annotations.processors.WorkbenchScreenTest4Activity: The WorkbenchScreen must either extend IsWidget or provide a @WorkbenchPartView annotated method to return a com.google.gwt.user.client.ui.IsWidget." ); assertNull( result.getActualCode() ); } @Test public void testWorkbenchScreenHasViewAnnotationAndHasTitleAnnotation() throws FileNotFoundException { final String pathCompilationUnit = "org/uberfire/annotations/processors/WorkbenchScreenTest5"; final String pathExpectedResult = "org/uberfire/annotations/processors/expected/WorkbenchScreenTest5.expected"; final Result result = new Result(); result.setExpectedCode( getExpectedSourceCode( pathExpectedResult ) ); final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile( new WorkbenchScreenProcessor( new GenerationCompleteCallback() { @Override public void generationComplete( final String code ) { result.setActualCode( code ); } } ), pathCompilationUnit ); assertSuccessfulCompilation( diagnostics ); assertNotNull( result.getActualCode() ); assertNotNull( result.getExpectedCode() ); assertEquals( result.getActualCode(), result.getExpectedCode() ); } @Test public void testWorkbenchScreenExtendsIsWidget() throws FileNotFoundException { final String pathCompilationUnit = "org/uberfire/annotations/processors/WorkbenchScreenTest6"; final String pathExpectedResult = "org/uberfire/annotations/processors/expected/WorkbenchScreenTest6.expected"; final Result result = new Result(); result.setExpectedCode( getExpectedSourceCode( pathExpectedResult ) ); final List<Diagnostic<? 
extends JavaFileObject>> diagnostics = compile( new WorkbenchScreenProcessor( new GenerationCompleteCallback() { @Override public void generationComplete( String code ) { result.setActualCode( code ); } } ), pathCompilationUnit ); assertSuccessfulCompilation( diagnostics ); assertNotNull( result.getActualCode() ); assertNotNull( result.getExpectedCode() ); assertEquals( result.getActualCode(), result.getExpectedCode() ); } @Test public void testWorkbenchScreenHasViewAnnotationAndExtendsIsWidget() throws FileNotFoundException { final String pathCompilationUnit = "org/uberfire/annotations/processors/WorkbenchScreenTest7"; final String pathExpectedResult = "org/uberfire/annotations/processors/expected/WorkbenchScreenTest7.expected"; final Result result = new Result(); result.setExpectedCode( getExpectedSourceCode( pathExpectedResult ) ); final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile( new WorkbenchScreenProcessor( new GenerationCompleteCallback() { @Override public void generationComplete( final String code ) { result.setActualCode( code ); } } ), pathCompilationUnit ); assertSuccessfulCompilation( diagnostics ); assertCompilationWarning( diagnostics, "The WorkbenchScreen both extends com.google.gwt.user.client.ui.IsWidget and provides a @WorkbenchPartView annotated method. The annotated method will take precedence." ); assertNotNull( result.getActualCode() ); assertNotNull( result.getExpectedCode() ); assertEquals( result.getActualCode(), result.getExpectedCode() ); } @Test public void testWorkbenchScreenAllAnnotations() throws FileNotFoundException { final String pathCompilationUnit = "org/uberfire/annotations/processors/WorkbenchScreenTest8"; final String pathExpectedResult = "org/uberfire/annotations/processors/expected/WorkbenchScreenTest8.expected"; final Result result = new Result(); result.setExpectedCode( getExpectedSourceCode( pathExpectedResult ) ); final List<Diagnostic<? 
extends JavaFileObject>> diagnostics = compile( new WorkbenchScreenProcessor( new GenerationCompleteCallback() {
        // NOTE(review): the lines above complete a test method whose signature precedes
        // this chunk; the tokens continue its List<Diagnostic<? ...>> declaration.
        @Override
        public void generationComplete( final String code ) {
            // Capture the processor's generated source so it can be diffed below.
            result.setActualCode( code );
        }
    } ), pathCompilationUnit );
    assertSuccessfulCompilation( diagnostics );
    assertNotNull( result.getActualCode() );
    assertNotNull( result.getExpectedCode() );
    assertEquals( result.getActualCode(), result.getExpectedCode() );
    }

    // The tests below all follow one pattern: compile the fixture class named by
    // pathCompilationUnit through WorkbenchScreenProcessor, capture the generated
    // activity source via the callback, and compare it to the .expected resource.
    // Only the fixtures (and, where noted, the expected diagnostics) differ.

    // @WorkbenchMenu method with the correct return type: generation succeeds.
    @Test
    public void testWorkbenchScreenWorkbenchMenuAnnotationCorrectReturnType() throws FileNotFoundException {
        final String pathCompilationUnit = "org/uberfire/annotations/processors/WorkbenchScreenTest9";
        final String pathExpectedResult = "org/uberfire/annotations/processors/expected/WorkbenchScreenTest9.expected";
        final Result result = new Result();
        result.setExpectedCode( getExpectedSourceCode( pathExpectedResult ) );
        final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile( new WorkbenchScreenProcessor( new GenerationCompleteCallback() {
            @Override
            public void generationComplete( final String code ) {
                result.setActualCode( code );
            }
        } ), pathCompilationUnit );
        assertSuccessfulCompilation( diagnostics );
        assertNotNull( result.getActualCode() );
        assertNotNull( result.getExpectedCode() );
        assertEquals( result.getActualCode(), result.getExpectedCode() );
    }

    // @WorkbenchMenu method with a wrong return type: still compiles successfully and
    // generation matches the expectation (the bad method is presumably ignored — the
    // .expected fixture defines the exact outcome).
    @Test
    public void testWorkbenchScreenWorkbenchMenuAnnotationWrongReturnType() throws FileNotFoundException {
        final String pathCompilationUnit = "org/uberfire/annotations/processors/WorkbenchScreenTest10";
        final String pathExpectedResult = "org/uberfire/annotations/processors/expected/WorkbenchScreenTest10.expected";
        final Result result = new Result();
        result.setExpectedCode( getExpectedSourceCode( pathExpectedResult ) );
        final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile( new WorkbenchScreenProcessor( new GenerationCompleteCallback() {
            @Override
            public void generationComplete( final String code ) {
                result.setActualCode( code );
            }
        } ), pathCompilationUnit );
        assertSuccessfulCompilation( diagnostics );
        assertNotNull( result.getActualCode() );
        assertNotNull( result.getExpectedCode() );
        assertEquals( result.getActualCode(), result.getExpectedCode() );
    }

    // @OnStartup method taking no parameters.
    @Test
    public void testWorkbenchScreenOnStart0Parameter() throws FileNotFoundException {
        final String pathCompilationUnit = "org/uberfire/annotations/processors/WorkbenchScreenTest11";
        final String pathExpectedResult = "org/uberfire/annotations/processors/expected/WorkbenchScreenTest11.expected";
        final Result result = new Result();
        result.setExpectedCode( getExpectedSourceCode( pathExpectedResult ) );
        final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile( new WorkbenchScreenProcessor( new GenerationCompleteCallback() {
            @Override
            public void generationComplete( final String code ) {
                result.setActualCode( code );
            }
        } ), pathCompilationUnit );
        assertSuccessfulCompilation( diagnostics );
        assertNotNull( result.getActualCode() );
        assertNotNull( result.getExpectedCode() );
        assertEquals( result.getActualCode(), result.getExpectedCode() );
    }

    // @OnStartup method taking one parameter.
    @Test
    public void testWorkbenchScreenOnStart1Parameter() throws FileNotFoundException {
        final String pathCompilationUnit = "org/uberfire/annotations/processors/WorkbenchScreenTest12";
        final String pathExpectedResult = "org/uberfire/annotations/processors/expected/WorkbenchScreenTest12.expected";
        final Result result = new Result();
        result.setExpectedCode( getExpectedSourceCode( pathExpectedResult ) );
        final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile( new WorkbenchScreenProcessor( new GenerationCompleteCallback() {
            @Override
            public void generationComplete( final String code ) {
                result.setActualCode( code );
            }
        } ), pathCompilationUnit );
        assertSuccessfulCompilation( diagnostics );
        assertNotNull( result.getActualCode() );
        assertNotNull( result.getExpectedCode() );
        assertEquals( result.getActualCode(), result.getExpectedCode() );
    }

    // Both @OnStartup() and @OnStartup(Place) present: generation succeeds but the
    // processor must emit a specific warning that @OnStartup(Place) wins.
    @Test
    public void testWorkbenchScreenOnStartMultipleMethods() throws FileNotFoundException {
        final String pathCompilationUnit = "org/uberfire/annotations/processors/WorkbenchScreenTest13";
        final String pathExpectedResult = "org/uberfire/annotations/processors/expected/WorkbenchScreenTest13.expected";
        final Result result = new Result();
        result.setExpectedCode( getExpectedSourceCode( pathExpectedResult ) );
        final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile( new WorkbenchScreenProcessor( new GenerationCompleteCallback() {
            @Override
            public void generationComplete( final String code ) {
                result.setActualCode( code );
            }
        } ), pathCompilationUnit );
        assertSuccessfulCompilation( diagnostics );
        assertCompilationWarning( diagnostics, "The WorkbenchScreen has methods for both @OnStartup() and @OnStartup(Place). Method @OnStartup(Place) will take precedence." );
        assertNotNull( result.getActualCode() );
        assertNotNull( result.getExpectedCode() );
        assertEquals( result.getActualCode(), result.getExpectedCode() );
    }

    // @WorkbenchToolBar method with the correct return type.
    @Test
    public void testWorkbenchScreenWorkbenchToolBarAnnotationCorrectReturnType() throws FileNotFoundException {
        final String pathCompilationUnit = "org/uberfire/annotations/processors/WorkbenchScreenTest14";
        final String pathExpectedResult = "org/uberfire/annotations/processors/expected/WorkbenchScreenTest14.expected";
        final Result result = new Result();
        result.setExpectedCode( getExpectedSourceCode( pathExpectedResult ) );
        final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile( new WorkbenchScreenProcessor( new GenerationCompleteCallback() {
            @Override
            public void generationComplete( final String code ) {
                result.setActualCode( code );
            }
        } ), pathCompilationUnit );
        assertSuccessfulCompilation( diagnostics );
        assertNotNull( result.getActualCode() );
        assertNotNull( result.getExpectedCode() );
        assertEquals( result.getActualCode(), result.getExpectedCode() );
    }

    // @WorkbenchToolBar method with a wrong return type: same success-path expectation
    // as the menu variant above.
    @Test
    public void testWorkbenchScreenWorkbenchToolBarAnnotationWrongReturnType() throws FileNotFoundException {
        final String pathCompilationUnit = "org/uberfire/annotations/processors/WorkbenchScreenTest15";
        final String pathExpectedResult = "org/uberfire/annotations/processors/expected/WorkbenchScreenTest15.expected";
        final Result result = new Result();
        result.setExpectedCode( getExpectedSourceCode( pathExpectedResult ) );
        final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile( new WorkbenchScreenProcessor( new GenerationCompleteCallback() {
            @Override
            public void generationComplete( final String code ) {
                result.setActualCode( code );
            }
        } ), pathCompilationUnit );
        assertSuccessfulCompilation( diagnostics );
        assertNotNull( result.getActualCode() );
        assertNotNull( result.getExpectedCode() );
        assertEquals( result.getActualCode(), result.getExpectedCode() );
    }

    // A title *widget* alone is not enough: without a @WorkbenchPartTitle String method
    // the processor must raise a compilation error and generate nothing.
    @Test
    public void testWorkbenchScreenHasTitleWidget() throws FileNotFoundException {
        final String pathCompilationUnit = "org/uberfire/annotations/processors/WorkbenchScreenTest16";
        final Result result = new Result();
        final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile( new WorkbenchScreenProcessor( new GenerationCompleteCallback() {
            @Override
            public void generationComplete( final String code ) {
                result.setActualCode( code );
            }
        } ), pathCompilationUnit );
        assertCompilationError( diagnostics, "org.uberfire.annotations.processors.WorkbenchScreenTest16Activity: The WorkbenchScreen must provide a @WorkbenchPartTitle annotated method to return a java.lang.String." );
        assertNull( result.getActualCode() );
    }

    // Both a String title and a title widget: generation succeeds.
    @Test
    public void testWorkbenchScreenHasTitleAndTitleWidget() throws FileNotFoundException {
        final String pathCompilationUnit = "org/uberfire/annotations/processors/WorkbenchScreenTest17";
        final String pathExpectedResult = "org/uberfire/annotations/processors/expected/WorkbenchScreenTest17.expected";
        final Result result = new Result();
        result.setExpectedCode( getExpectedSourceCode( pathExpectedResult ) );
        final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile( new WorkbenchScreenProcessor( new GenerationCompleteCallback() {
            @Override
            public void generationComplete( final String code ) {
                result.setActualCode( code );
            }
        } ), pathCompilationUnit );
        assertSuccessfulCompilation( diagnostics );
        assertNotNull( result.getActualCode() );
        assertNotNull( result.getExpectedCode() );
        assertEquals( result.getActualCode(), result.getExpectedCode() );
    }

    // Preferred-width / preferred-height permutations (positive and negative values);
    // each fixture's .expected resource pins the generated output.
    @Test
    public void testWorkbenchScreenHasTitleAndTitleWidgetWithPreferredWidth() throws FileNotFoundException {
        final String pathCompilationUnit = "org/uberfire/annotations/processors/WorkbenchScreenTest18";
        final String pathExpectedResult = "org/uberfire/annotations/processors/expected/WorkbenchScreenTest18.expected";
        final Result result = new Result();
        result.setExpectedCode( getExpectedSourceCode( pathExpectedResult ) );
        final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile( new WorkbenchScreenProcessor( new GenerationCompleteCallback() {
            @Override
            public void generationComplete( final String code ) {
                result.setActualCode( code );
            }
        } ), pathCompilationUnit );
        assertSuccessfulCompilation( diagnostics );
        assertNotNull( result.getActualCode() );
        assertNotNull( result.getExpectedCode() );
        assertEquals( result.getActualCode(), result.getExpectedCode() );
    }

    @Test
    public void testWorkbenchScreenHasTitleAndTitleWidgetWithNegativePreferredWidth() throws FileNotFoundException {
        final String pathCompilationUnit = "org/uberfire/annotations/processors/WorkbenchScreenTest19";
        final String pathExpectedResult = "org/uberfire/annotations/processors/expected/WorkbenchScreenTest19.expected";
        final Result result = new Result();
        result.setExpectedCode( getExpectedSourceCode( pathExpectedResult ) );
        final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile( new WorkbenchScreenProcessor( new GenerationCompleteCallback() {
            @Override
            public void generationComplete( final String code ) {
                result.setActualCode( code );
            }
        } ), pathCompilationUnit );
        assertSuccessfulCompilation( diagnostics );
        assertNotNull( result.getActualCode() );
        assertNotNull( result.getExpectedCode() );
        assertEquals( result.getActualCode(), result.getExpectedCode() );
    }

    @Test
    public void testWorkbenchScreenHasTitleAndTitleWidgetWithPreferredHeight() throws FileNotFoundException {
        final String pathCompilationUnit = "org/uberfire/annotations/processors/WorkbenchScreenTest20";
        final String pathExpectedResult = "org/uberfire/annotations/processors/expected/WorkbenchScreenTest20.expected";
        final Result result = new Result();
        result.setExpectedCode( getExpectedSourceCode( pathExpectedResult ) );
        final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile( new WorkbenchScreenProcessor( new GenerationCompleteCallback() {
            @Override
            public void generationComplete( final String code ) {
                result.setActualCode( code );
            }
        } ), pathCompilationUnit );
        assertSuccessfulCompilation( diagnostics );
        assertNotNull( result.getActualCode() );
        assertNotNull( result.getExpectedCode() );
        assertEquals( result.getActualCode(), result.getExpectedCode() );
    }

    @Test
    public void testWorkbenchScreenHasTitleAndTitleWidgetWithNegativePreferredHeight() throws FileNotFoundException {
        final String pathCompilationUnit = "org/uberfire/annotations/processors/WorkbenchScreenTest21";
        final String pathExpectedResult = "org/uberfire/annotations/processors/expected/WorkbenchScreenTest21.expected";
        final Result result = new Result();
        result.setExpectedCode( getExpectedSourceCode( pathExpectedResult ) );
        final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile( new WorkbenchScreenProcessor( new GenerationCompleteCallback() {
            @Override
            public void generationComplete( final String code ) {
                result.setActualCode( code );
            }
        } ), pathCompilationUnit );
        assertSuccessfulCompilation( diagnostics );
        assertNotNull( result.getActualCode() );
        assertNotNull( result.getExpectedCode() );
        assertEquals( result.getActualCode(), result.getExpectedCode() );
    }

    @Test
    public void testWorkbenchScreenHasTitleAndTitleWidgetWithPreferredWidthAndHeight() throws FileNotFoundException {
        final String pathCompilationUnit = "org/uberfire/annotations/processors/WorkbenchScreenTest22";
        final String pathExpectedResult = "org/uberfire/annotations/processors/expected/WorkbenchScreenTest22.expected";
        final Result result = new Result();
        result.setExpectedCode( getExpectedSourceCode( pathExpectedResult ) );
        final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile( new WorkbenchScreenProcessor( new GenerationCompleteCallback() {
            @Override
            public void generationComplete( final String code ) {
                result.setActualCode( code );
            }
        } ), pathCompilationUnit );
        assertSuccessfulCompilation( diagnostics );
        assertNotNull( result.getActualCode() );
        assertNotNull( result.getExpectedCode() );
        assertEquals( result.getActualCode(), result.getExpectedCode() );
    }

    @Test
    public void testWorkbenchScreenHasTitleAndTitleWidgetWithNegativePreferredWidthAndHeight() throws FileNotFoundException {
        final String pathCompilationUnit = "org/uberfire/annotations/processors/WorkbenchScreenTest23";
        final String pathExpectedResult = "org/uberfire/annotations/processors/expected/WorkbenchScreenTest23.expected";
        final Result result = new Result();
        result.setExpectedCode( getExpectedSourceCode( pathExpectedResult ) );
        final List<Diagnostic<? extends JavaFileObject>> diagnostics = compile( new WorkbenchScreenProcessor( new GenerationCompleteCallback() {
            @Override
            public void generationComplete( final String code ) {
                result.setActualCode( code );
            }
        } ), pathCompilationUnit );
        assertSuccessfulCompilation( diagnostics );
        assertNotNull( result.getActualCode() );
        assertNotNull( result.getExpectedCode() );
        assertEquals( result.getActualCode(), result.getExpectedCode() );
    }
}
package com.u2ware.springfield.repository;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeanWrapper;
import org.springframework.beans.PropertyAccessorFactory;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.data.domain.Sort;
import org.springframework.data.repository.query.parser.Part;
import org.springframework.data.repository.query.parser.Part.Type;
import org.springframework.data.repository.query.parser.PartTree;
import org.springframework.data.repository.query.parser.PartTree.OrPart;
import org.springframework.util.ClassUtils;
import org.springframework.util.StringUtils;

import com.mysema.query.types.Predicate;
import com.mysema.query.types.expr.BooleanExpression;
import com.mysema.query.types.path.BooleanPath;
import com.mysema.query.types.path.ComparablePath;
import com.mysema.query.types.path.PathBuilder;
import com.mysema.query.types.path.StringPath;

/**
 * Builds a Querydsl {@link Predicate} (and an optional {@link Sort}) from a plain
 * parameter bean: a Spring Data query-method name is derived from the bean (its short
 * class name, or an explicit {@code @QueryMethod} value), parsed with {@link PartTree},
 * and every parsed {@link Part} is bound against the bean's property values.
 * Properties whose value is {@code null} contribute no criterion at all.
 */
public class QueryMethodPredicate {

    private static final Logger logger = LoggerFactory.getLogger(QueryMethodPredicate.class);

    // Resulting predicate; may be null when no part produced a criterion.
    protected final Predicate predicate;
    // Sort order extracted from the derived method name (e.g. "...OrderByNameAsc").
    protected final Sort sort;

    /**
     * @param entityClass the entity type the derived query method is parsed against
     * @param builder     Querydsl path builder rooted at the entity
     * @param param       parameter bean whose readable properties supply the values
     */
    public QueryMethodPredicate(Class<?> entityClass, PathBuilder<?> builder, Object param){
        String queryMethodName = quessQueryMethodName(param);
        PartTree partTree = new PartTree(queryMethodName, entityClass);
        BeanWrapper paramWrapper = PropertyAccessorFactory.forBeanPropertyAccess(param);
        this.predicate = createPredicate(partTree, builder, paramWrapper);
        this.sort = partTree.getSort();
    }

    public Predicate getPredicate(){
        return predicate;
    }

    public Sort getSort(){
        return sort;
    }

    /**
     * Concatenates the derived sort with the given extra sorts, in order.
     * Returns null only when the derived sort is null and no sorts are supplied.
     */
    public Sort getSort(Sort... sorts){
        Sort result = sort;
        for(Sort s : sorts){
            result = result != null ? result.and(s) : s;
        }
        return result;
    }

    /**
     * Derives the query-method name for the given bean.
     * Precedence: non-blank {@code @QueryMethod} value > bean short name; any result
     * not starting with "find" (case-insensitive) is replaced wholesale by "findBy".
     * NOTE(review): the replacement discards the derived name entirely, presumably
     * yielding an unfiltered query — confirm this fallback is intended.
     * ("quess" looks like a typo for "guess", but the name is part of the protected
     * API and is kept as-is.)
     */
    protected String quessQueryMethodName(Object query){
        Class<?> beanClass = query.getClass();
        String queryMethodName = ClassUtils.getShortNameAsProperty(beanClass);
        QueryMethod queryMethod = AnnotationUtils.findAnnotation(beanClass, QueryMethod.class);
        if(queryMethod != null && StringUtils.hasText(queryMethod.value())){
            queryMethodName = queryMethod.value();
        }
        if(! queryMethodName.toLowerCase().startsWith("find")){
            queryMethodName = "findBy";
        }
        logger.info(query.getClass() +" quessQueryMethodName is "+queryMethodName);
        return queryMethodName;
    }

    /////////////////////////////////////////////////////
    // Predicate assembly
    /////////////////////////////////////////////////////

    /**
     * ANDs the criteria within each OrPart, then ORs the OrParts together.
     * Parts that produce no criterion (null property value or unsupported keyword)
     * are skipped; the whole result may therefore be null.
     * NOTE(review): if every part of an OrPart is skipped, a null {@code criteria}
     * is passed to {@link #or} — relies on BooleanExpression tolerating that; confirm.
     */
    private BooleanExpression createPredicate(PartTree tree, PathBuilder<?> builder, BeanWrapper paramWrapper) {
        BooleanExpression base = null;
        for (OrPart node : tree) {
            //logger.debug("OrPart : "+node.getClass());
            BooleanExpression criteria = null;
            for (Part part : node) {
                //logger.debug("Part "+part.getClass());
                if(part.getProperty() != null){
                    BooleanExpression newCriteria = create(part, builder, paramWrapper);
                    if(newCriteria != null){
                        //logger.debug("ok....");
                        criteria = criteria == null ? newCriteria : and(criteria, newCriteria, builder, paramWrapper);
                    }
                }
            }
            base = base == null ? criteria : or(base, criteria, builder, paramWrapper);
        }
        //logger.debug("base "+base);
        return base;
    }

    // Combination hooks; the extra builder/wrapper parameters are unused here but
    // presumably kept so subclasses can override with context — TODO confirm.
    private BooleanExpression and(BooleanExpression base, BooleanExpression criteria, PathBuilder<?> builder, BeanWrapper paramWrapper) {
        return base.and(criteria);
    }
    private BooleanExpression or(BooleanExpression base, BooleanExpression criteria, PathBuilder<?> builder, BeanWrapper paramWrapper) {
        return base.or(criteria);
    }

    /**
     * Maps a single {@link Part} to a Querydsl expression, reading the bound value(s)
     * from the parameter bean. Returns null when the value is null or when the keyword
     * has an intentionally empty branch (AFTER, BEFORE, IN, NOT_IN) — those parts are
     * silently ignored rather than rejected. Only a keyword outside this list throws.
     */
    @SuppressWarnings("unchecked")
    private BooleanExpression create(Part part, PathBuilder<?> builder, BeanWrapper paramWrapper){
        Type type = part.getType();
        String name = part.getProperty().getSegment();
        if(Type.BETWEEN.equals(type)){
            // BETWEEN reads two synthetic bean properties: <name>Min and <name>Max.
            Comparable<?> min = (Comparable<?>)paramWrapper.getPropertyValue(name+"Min");
            Comparable<?> max = (Comparable<?>)paramWrapper.getPropertyValue(name+"Max");
            if(min != null && max != null){
                logger.info("set query parameter : "+name+"Min");
                logger.info("set query parameter : "+name+"Max");
                ComparablePath<Comparable<?>> property = builder.getComparable(name, paramWrapper.getPropertyType(name+"Min"));
                return property.between(min, max);
            }
        }else if(Type.AFTER.equals(type)){
            // intentionally unsupported: falls through to return null
        }else if(Type.GREATER_THAN.equals(type)){
            Comparable<?> greaterThan = (Comparable<?>)paramWrapper.getPropertyValue(name);
            if(greaterThan != null){
                logger.info("set query parameter : "+name);
                ComparablePath<Comparable<?>> property = builder.getComparable(name, paramWrapper.getPropertyType(name));
                return property.gt(greaterThan);
            }
        }else if(Type.GREATER_THAN_EQUAL.equals(type)){
            Comparable<?> greaterThan = (Comparable<?>)paramWrapper.getPropertyValue(name);
            if(greaterThan != null){
                logger.info("set query parameter : "+name);
                ComparablePath<Comparable<?>> property = builder.getComparable(name, paramWrapper.getPropertyType(name));
                return property.goe(greaterThan);
            }
        }else if(Type.BEFORE.equals(type)){
            // intentionally unsupported: falls through to return null
        }else if(Type.LESS_THAN.equals(type)){
            Comparable<?> lessThan = (Comparable<?>)paramWrapper.getPropertyValue(name);
            if(lessThan != null){
                logger.info("set query parameter : "+name);
                ComparablePath<Comparable<?>> property = builder.getComparable(name, paramWrapper.getPropertyType(name));
                return property.lt(lessThan);
            }
        }else if(Type.LESS_THAN_EQUAL.equals(type)){
            Comparable<?> lessThan = (Comparable<?>)paramWrapper.getPropertyValue(name);
            if(lessThan != null){
                logger.info("set query parameter : "+name);
                ComparablePath<Comparable<?>> property = builder.getComparable(name, paramWrapper.getPropertyType(name));
                return property.loe(lessThan);
            }
        }else if(Type.IS_NULL.equals(type)){
            // Null checks need no bean value.
            PathBuilder<?> property = builder.get(name, paramWrapper.getPropertyType(name));
            return property.isNull();
        }else if(Type.IS_NOT_NULL.equals(type)){
            PathBuilder<?> property = builder.get(name, paramWrapper.getPropertyType(name));
            return property.isNotNull();
        }else if(Type.NOT_IN.equals(type)){
            // intentionally unsupported: falls through to return null
        }else if(Type.IN.equals(type)){
            // intentionally unsupported: falls through to return null
        }else if(Type.STARTING_WITH.equals(type)){
            Object likeValue = paramWrapper.getPropertyValue(name);
            if(likeValue != null){
                logger.info("set query parameter : "+name);
                StringPath property = builder.getString(name);
                return property.like(likeValue.toString()+"%");
            }
        }else if(Type.ENDING_WITH.equals(type)){
            Object likeValue = paramWrapper.getPropertyValue(name);
            if(likeValue != null){
                logger.info("set query parameter : "+name);
                StringPath property = builder.getString(name);
                return property.like("%"+likeValue.toString());
            }
        }else if(Type.CONTAINING.equals(type)){
            Object likeValue = paramWrapper.getPropertyValue(name);
            if(likeValue != null){
                logger.info("set query parameter : "+name);
                StringPath property = builder.getString(name);
                return property.like("%"+likeValue.toString()+"%");
            }
        }else if(Type.LIKE.equals(type)){
            // LIKE passes the raw value through; the caller supplies any wildcards.
            Object likeValue = paramWrapper.getPropertyValue(name);
            if(likeValue != null){
                logger.info("set query parameter : "+name);
                StringPath property = builder.getString(name);
                return property.like(likeValue.toString());
            }
        }else if(Type.NOT_LIKE.equals(type)){
            Object likeValue = paramWrapper.getPropertyValue(name);
            if(likeValue != null){
                logger.info("set query parameter : "+name);
                StringPath property = builder.getString(name);
                return property.notLike(likeValue.toString());
            }
        }else if(Type.TRUE.equals(type)){
            // NOTE(review): the property's actual boolean value is ignored — any
            // non-null value yields isTrue(). Confirm this is intended.
            Object trueValue = paramWrapper.getPropertyValue(name);
            if(trueValue != null){
                logger.info("set query parameter : "+name);
                BooleanPath property = builder.getBoolean(name);
                return property.isTrue();
            }
        }else if(Type.FALSE.equals(type)){
            // NOTE(review): same as TRUE above — value ignored, isFalse() emitted.
            Object trueValue = paramWrapper.getPropertyValue(name);
            if(trueValue != null){
                logger.info("set query parameter : "+name);
                BooleanPath property = builder.getBoolean(name);
                return property.isFalse();
            }
        }else if(Type.SIMPLE_PROPERTY.equals(type)){
            Object equalValue = paramWrapper.getPropertyValue(name);
            if(equalValue != null){
                PathBuilder<Object> property = builder.get(name, paramWrapper.getPropertyType(name));
                logger.info("set query parameter : "+name);
                return property.eq(equalValue);
            }
        }else if(Type.NEGATING_SIMPLE_PROPERTY.equals(type)){
            Object equalValue = paramWrapper.getPropertyValue(name);
            if(equalValue != null){
                PathBuilder<Object> property = builder.get(name, paramWrapper.getPropertyType(name));
                logger.info("set query parameter : "+name);
                return property.eq(equalValue).not();
            }
        }else{
            throw new IllegalArgumentException("Unsupported keyword " + part.getType());
        }
        return null;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.core.config.impl;

import java.io.File;

import org.apache.activemq.artemis.ArtemisConstants;
import org.apache.activemq.artemis.api.config.ActiveMQDefaultConfiguration;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.core.config.Configuration;
import org.apache.activemq.artemis.core.config.ha.LiveOnlyPolicyConfiguration;
import org.apache.activemq.artemis.core.server.JournalType;
import org.apache.activemq.artemis.tests.util.ActiveMQTestBase;
import org.apache.activemq.artemis.utils.RandomUtil;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

/**
 * Exercises {@link Configuration} accessors: default values, round-tripping of
 * randomized set/get pairs, interceptor lists, and (beyond this chunk) serialization.
 */
public class ConfigurationImplTest extends ActiveMQTestBase {

   // Instance under test; presumably initialized by a @Before method beyond this
   // chunk — TODO confirm.
   protected Configuration conf;

   // Every getter must report the corresponding ActiveMQDefaultConfiguration /
   // ArtemisConstants default on a freshly created configuration.
   @Test
   public void testDefaults() {
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultScheduledThreadPoolMaxSize(), conf.getScheduledThreadPoolMaxSize());
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultSecurityInvalidationInterval(), conf.getSecurityInvalidationInterval());
      Assert.assertEquals(ActiveMQDefaultConfiguration.isDefaultSecurityEnabled(), conf.isSecurityEnabled());
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultBindingsDirectory(), conf.getBindingsDirectory());
      Assert.assertEquals(ActiveMQDefaultConfiguration.isDefaultCreateBindingsDir(), conf.isCreateBindingsDir());
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultJournalDir(), conf.getJournalDirectory());
      Assert.assertEquals(ActiveMQDefaultConfiguration.isDefaultCreateJournalDir(), conf.isCreateJournalDir());
      Assert.assertEquals(ConfigurationImpl.DEFAULT_JOURNAL_TYPE, conf.getJournalType());
      Assert.assertEquals(ActiveMQDefaultConfiguration.isDefaultJournalSyncTransactional(), conf.isJournalSyncTransactional());
      Assert.assertEquals(ActiveMQDefaultConfiguration.isDefaultJournalSyncNonTransactional(), conf.isJournalSyncNonTransactional());
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultJournalFileSize(), conf.getJournalFileSize());
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultJournalMinFiles(), conf.getJournalMinFiles());
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultJournalMaxIoAio(), conf.getJournalMaxIO_AIO());
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultJournalMaxIoNio(), conf.getJournalMaxIO_NIO());
      Assert.assertEquals(ActiveMQDefaultConfiguration.isDefaultWildcardRoutingEnabled(), conf.isWildcardRoutingEnabled());
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultTransactionTimeout(), conf.getTransactionTimeout());
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultMessageExpiryScanPeriod(), conf.getMessageExpiryScanPeriod()); // OK
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultMessageExpiryThreadPriority(), conf.getMessageExpiryThreadPriority()); // OK
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultTransactionTimeoutScanPeriod(), conf.getTransactionTimeoutScanPeriod()); // OK
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultManagementAddress(), conf.getManagementAddress()); // OK
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultManagementNotificationAddress(), conf.getManagementNotificationAddress()); // OK
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultClusterUser(), conf.getClusterUser()); // OK
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultClusterPassword(), conf.getClusterPassword()); // OK
      Assert.assertEquals(ActiveMQDefaultConfiguration.isDefaultPersistenceEnabled(), conf.isPersistenceEnabled());
      Assert.assertEquals(ActiveMQDefaultConfiguration.isDefaultPersistDeliveryCountBeforeDelivery(), conf.isPersistDeliveryCountBeforeDelivery());
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultFileDeployerScanPeriod(), conf.getFileDeployerScanPeriod());
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultThreadPoolMaxSize(), conf.getThreadPoolMaxSize());
      Assert.assertEquals(ActiveMQDefaultConfiguration.isDefaultJmxManagementEnabled(), conf.isJMXManagementEnabled());
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultConnectionTtlOverride(), conf.getConnectionTTLOverride());
      Assert.assertEquals(ActiveMQDefaultConfiguration.isDefaultAsyncConnectionExecutionEnabled(), conf.isAsyncConnectionExecutionEnabled());
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultPagingDir(), conf.getPagingDirectory());
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultLargeMessagesDir(), conf.getLargeMessagesDirectory());
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultJournalCompactPercentage(), conf.getJournalCompactPercentage());
      Assert.assertEquals(ArtemisConstants.DEFAULT_JOURNAL_BUFFER_TIMEOUT_AIO, conf.getJournalBufferTimeout_AIO());
      Assert.assertEquals(ArtemisConstants.DEFAULT_JOURNAL_BUFFER_TIMEOUT_NIO, conf.getJournalBufferTimeout_NIO());
      Assert.assertEquals(ArtemisConstants.DEFAULT_JOURNAL_BUFFER_SIZE_AIO, conf.getJournalBufferSize_AIO());
      Assert.assertEquals(ArtemisConstants.DEFAULT_JOURNAL_BUFFER_SIZE_NIO, conf.getJournalBufferSize_NIO());
      Assert.assertEquals(ActiveMQDefaultConfiguration.isDefaultJournalLogWriteRate(), conf.isLogJournalWriteRate());
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultJournalPerfBlastPages(), conf.getJournalPerfBlastPages());
      Assert.assertEquals(ActiveMQDefaultConfiguration.isDefaultMessageCounterEnabled(), conf.isMessageCounterEnabled());
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultMessageCounterMaxDayHistory(), conf.getMessageCounterMaxDayHistory());
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultMessageCounterSamplePeriod(), conf.getMessageCounterSamplePeriod());
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultIdCacheSize(), conf.getIDCacheSize());
      Assert.assertEquals(ActiveMQDefaultConfiguration.isDefaultPersistIdCache(), conf.isPersistIDCache());
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultServerDumpInterval(), conf.getServerDumpInterval());
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultMemoryWarningThreshold(), conf.getMemoryWarningThreshold());
      Assert.assertEquals(ActiveMQDefaultConfiguration.getDefaultMemoryMeasureInterval(), conf.getMemoryMeasureInterval());
   }

   // Round-trips each attribute through its setter/getter pair with random values,
   // repeated 100 times to cover a spread of random inputs.
   @Test
   public void testSetGetAttributes() throws Exception {
      for (int j = 0; j < 100; j++) {
         int i = RandomUtil.randomInt();
         conf.setScheduledThreadPoolMaxSize(i);
         Assert.assertEquals(i, conf.getScheduledThreadPoolMaxSize());
         long l = RandomUtil.randomLong();
         conf.setSecurityInvalidationInterval(l);
         Assert.assertEquals(l, conf.getSecurityInvalidationInterval());
         boolean b = RandomUtil.randomBoolean();
         conf.setSecurityEnabled(b);
         Assert.assertEquals(b, conf.isSecurityEnabled());
         String s = RandomUtil.randomString();
         conf.setBindingsDirectory(s);
         Assert.assertEquals(s, conf.getBindingsDirectory());
         b = RandomUtil.randomBoolean();
         conf.setCreateBindingsDir(b);
         Assert.assertEquals(b, conf.isCreateBindingsDir());
         s = RandomUtil.randomString();
         conf.setJournalDirectory(s);
         Assert.assertEquals(s, conf.getJournalDirectory());
         b = RandomUtil.randomBoolean();
         conf.setCreateJournalDir(b);
         Assert.assertEquals(b, conf.isCreateJournalDir());
         i = RandomUtil.randomInt() % 2;
         JournalType journal = i == 0 ? JournalType.ASYNCIO : JournalType.NIO;
         conf.setJournalType(journal);
         Assert.assertEquals(journal, conf.getJournalType());
         b = RandomUtil.randomBoolean();
         conf.setJournalSyncTransactional(b);
         Assert.assertEquals(b, conf.isJournalSyncTransactional());
         b = RandomUtil.randomBoolean();
         conf.setJournalSyncNonTransactional(b);
         Assert.assertEquals(b, conf.isJournalSyncNonTransactional());
         i = RandomUtil.randomInt();
         conf.setJournalFileSize(i);
         Assert.assertEquals(i, conf.getJournalFileSize());
         i = RandomUtil.randomInt();
         conf.setJournalMinFiles(i);
         Assert.assertEquals(i, conf.getJournalMinFiles());
         i = RandomUtil.randomInt();
         conf.setJournalMaxIO_AIO(i);
         Assert.assertEquals(i, conf.getJournalMaxIO_AIO());
         i = RandomUtil.randomInt();
         conf.setJournalMaxIO_NIO(i);
         Assert.assertEquals(i, conf.getJournalMaxIO_NIO());
         s = RandomUtil.randomString();
         conf.setManagementAddress(new SimpleString(s));
         Assert.assertEquals(s, conf.getManagementAddress().toString());
         i = RandomUtil.randomInt();
         conf.setMessageExpiryThreadPriority(i);
         Assert.assertEquals(i, conf.getMessageExpiryThreadPriority());
         l = RandomUtil.randomLong();
         conf.setMessageExpiryScanPeriod(l);
         Assert.assertEquals(l, conf.getMessageExpiryScanPeriod());
         b = RandomUtil.randomBoolean();
         conf.setPersistDeliveryCountBeforeDelivery(b);
         Assert.assertEquals(b, conf.isPersistDeliveryCountBeforeDelivery());
         b = RandomUtil.randomBoolean();
         conf.setEnabledAsyncConnectionExecution(b);
         Assert.assertEquals(b, conf.isAsyncConnectionExecutionEnabled());
         b = RandomUtil.randomBoolean();
         conf.setPersistenceEnabled(b);
         Assert.assertEquals(b, conf.isPersistenceEnabled());
         b = RandomUtil.randomBoolean();
         conf.setJMXManagementEnabled(b);
         Assert.assertEquals(b, conf.isJMXManagementEnabled());
         l = RandomUtil.randomLong();
         conf.setFileDeployerScanPeriod(l);
         Assert.assertEquals(l, conf.getFileDeployerScanPeriod());
         l = RandomUtil.randomLong();
         conf.setConnectionTTLOverride(l);
         Assert.assertEquals(l, conf.getConnectionTTLOverride());
         i = RandomUtil.randomInt();
         conf.setThreadPoolMaxSize(i);
         Assert.assertEquals(i, conf.getThreadPoolMaxSize());
         SimpleString ss = RandomUtil.randomSimpleString();
         conf.setManagementNotificationAddress(ss);
         Assert.assertEquals(ss, conf.getManagementNotificationAddress());
         s = RandomUtil.randomString();
         conf.setClusterUser(s);
         Assert.assertEquals(s, conf.getClusterUser());
         i = RandomUtil.randomInt();
         conf.setIDCacheSize(i);
         Assert.assertEquals(i, conf.getIDCacheSize());
         b = RandomUtil.randomBoolean();
         conf.setPersistIDCache(b);
         Assert.assertEquals(b, conf.isPersistIDCache());
         i = RandomUtil.randomInt();
         conf.setJournalCompactMinFiles(i);
         Assert.assertEquals(i, conf.getJournalCompactMinFiles());
         i = RandomUtil.randomInt();
         conf.setJournalCompactPercentage(i);
         Assert.assertEquals(i, conf.getJournalCompactPercentage());
         i = RandomUtil.randomInt();
         conf.setJournalBufferSize_AIO(i);
         Assert.assertEquals(i, conf.getJournalBufferSize_AIO());
         i = RandomUtil.randomInt();
         conf.setJournalBufferTimeout_AIO(i);
         Assert.assertEquals(i, conf.getJournalBufferTimeout_AIO());
         i = RandomUtil.randomInt();
         conf.setJournalBufferSize_NIO(i);
         Assert.assertEquals(i, conf.getJournalBufferSize_NIO());
         i = RandomUtil.randomInt();
         conf.setJournalBufferTimeout_NIO(i);
         Assert.assertEquals(i, conf.getJournalBufferTimeout_NIO());
         b = RandomUtil.randomBoolean();
         conf.setLogJournalWriteRate(b);
         Assert.assertEquals(b, conf.isLogJournalWriteRate());
         i = RandomUtil.randomInt();
         conf.setJournalPerfBlastPages(i);
         Assert.assertEquals(i, conf.getJournalPerfBlastPages());
         l = RandomUtil.randomLong();
         conf.setServerDumpInterval(l);
         Assert.assertEquals(l, conf.getServerDumpInterval());
         s = RandomUtil.randomString();
         conf.setPagingDirectory(s);
         Assert.assertEquals(s, conf.getPagingDirectory());
         s = RandomUtil.randomString();
         conf.setLargeMessagesDirectory(s);
         Assert.assertEquals(s, conf.getLargeMessagesDirectory());
         b = RandomUtil.randomBoolean();
         conf.setWildcardRoutingEnabled(b);
         Assert.assertEquals(b, conf.isWildcardRoutingEnabled());
         l = RandomUtil.randomLong();
         conf.setTransactionTimeout(l);
         Assert.assertEquals(l, conf.getTransactionTimeout());
         b = RandomUtil.randomBoolean();
         conf.setMessageCounterEnabled(b);
         Assert.assertEquals(b, conf.isMessageCounterEnabled());
         l = RandomUtil.randomPositiveLong();
         conf.setMessageCounterSamplePeriod(l);
         Assert.assertEquals(l, conf.getMessageCounterSamplePeriod());
         i = RandomUtil.randomInt();
         conf.setMessageCounterMaxDayHistory(i);
         Assert.assertEquals(i, conf.getMessageCounterMaxDayHistory());
         l = RandomUtil.randomLong();
         conf.setTransactionTimeoutScanPeriod(l);
         Assert.assertEquals(l, conf.getTransactionTimeoutScanPeriod());
         s = RandomUtil.randomString();
         conf.setClusterPassword(s);
         Assert.assertEquals(s, conf.getClusterPassword());
      }
   }

   // Incoming interceptor class names are stored and queried by exact string.
   @Test
   public void testGetSetInterceptors() {
      final String name1 = "uqwyuqywuy";
      final String name2 = "yugyugyguyg";
      conf.getIncomingInterceptorClassNames().add(name1);
      conf.getIncomingInterceptorClassNames().add(name2);
      Assert.assertTrue(conf.getIncomingInterceptorClassNames().contains(name1));
      Assert.assertTrue(conf.getIncomingInterceptorClassNames().contains(name2));
      Assert.assertFalse(conf.getIncomingInterceptorClassNames().contains("iijij"));
   }

   // Populates the configuration with random values before (beyond this chunk,
   // presumably) serializing it and comparing; this chunk ends mid-method.
   @Test
   public void testSerialize() throws Exception {
      boolean b = RandomUtil.randomBoolean();
      conf.setHAPolicyConfiguration(new LiveOnlyPolicyConfiguration());
      int i = RandomUtil.randomInt();
      conf.setScheduledThreadPoolMaxSize(i);
      Assert.assertEquals(i, conf.getScheduledThreadPoolMaxSize());
      long l = RandomUtil.randomLong();
      conf.setSecurityInvalidationInterval(l);
      Assert.assertEquals(l, conf.getSecurityInvalidationInterval());
      b = RandomUtil.randomBoolean();
      conf.setSecurityEnabled(b);
      Assert.assertEquals(b, conf.isSecurityEnabled());
      String s = RandomUtil.randomString();
      conf.setBindingsDirectory(s);
      Assert.assertEquals(s, conf.getBindingsDirectory());
      b = RandomUtil.randomBoolean();
      conf.setCreateBindingsDir(b);
      Assert.assertEquals(b, conf.isCreateBindingsDir());
      s = RandomUtil.randomString();
      conf.setJournalDirectory(s);
      Assert.assertEquals(s, conf.getJournalDirectory());
      b = RandomUtil.randomBoolean();
      conf.setCreateJournalDir(b);
      Assert.assertEquals(b, conf.isCreateJournalDir());
      i = RandomUtil.randomInt() % 2;
      JournalType journal = i == 0 ? JournalType.ASYNCIO : JournalType.NIO;
      conf.setJournalType(journal);
      Assert.assertEquals(journal, conf.getJournalType());
      b = RandomUtil.randomBoolean();
      conf.setJournalSyncTransactional(b);
      Assert.assertEquals(b, conf.isJournalSyncTransactional());
      b = RandomUtil.randomBoolean();
      conf.setJournalSyncNonTransactional(b);
      Assert.assertEquals(b, conf.isJournalSyncNonTransactional());
      i = RandomUtil.randomInt();
      conf.setJournalFileSize(i);
      Assert.assertEquals(i, conf.getJournalFileSize());
      i = RandomUtil.randomInt();
      conf.setJournalMinFiles(i);
      Assert.assertEquals(i, conf.getJournalMinFiles());
      i = RandomUtil.randomInt();
      conf.setJournalMaxIO_AIO(i);
      Assert.assertEquals(i, conf.getJournalMaxIO_AIO());
      i = RandomUtil.randomInt();
      conf.setJournalMaxIO_NIO(i);
      Assert.assertEquals(i, conf.getJournalMaxIO_NIO());
      s = RandomUtil.randomString();
      conf.setManagementAddress(new SimpleString(s));
      Assert.assertEquals(s, conf.getManagementAddress().toString());
      i = RandomUtil.randomInt();
      conf.setMessageExpiryThreadPriority(i);
      Assert.assertEquals(i, conf.getMessageExpiryThreadPriority());
      l = RandomUtil.randomLong();
      conf.setMessageExpiryScanPeriod(l);
      Assert.assertEquals(l, conf.getMessageExpiryScanPeriod());
      b = RandomUtil.randomBoolean();
      conf.setPersistDeliveryCountBeforeDelivery(b);
      Assert.assertEquals(b, conf.isPersistDeliveryCountBeforeDelivery());
      b = RandomUtil.randomBoolean();
      conf.setEnabledAsyncConnectionExecution(b);
      Assert.assertEquals(b, conf.isAsyncConnectionExecutionEnabled());
      b = RandomUtil.randomBoolean();
      conf.setPersistenceEnabled(b);
      Assert.assertEquals(b, conf.isPersistenceEnabled());
      b = RandomUtil.randomBoolean();
      conf.setJMXManagementEnabled(b);
      Assert.assertEquals(b, conf.isJMXManagementEnabled());
      l = RandomUtil.randomLong();
      conf.setFileDeployerScanPeriod(l);
      Assert.assertEquals(l, conf.getFileDeployerScanPeriod());
      l = RandomUtil.randomLong();
      conf.setConnectionTTLOverride(l);
      Assert.assertEquals(l, conf.getConnectionTTLOverride());
      i = RandomUtil.randomInt();
      conf.setThreadPoolMaxSize(i);
      Assert.assertEquals(i, conf.getThreadPoolMaxSize());
      SimpleString ss = RandomUtil.randomSimpleString();
      conf.setManagementNotificationAddress(ss);
      Assert.assertEquals(ss, conf.getManagementNotificationAddress());
      s = RandomUtil.randomString();
      conf.setClusterUser(s);
      Assert.assertEquals(s, conf.getClusterUser());
      i = RandomUtil.randomInt();
      conf.setIDCacheSize(i);
      Assert.assertEquals(i, conf.getIDCacheSize());
      b = RandomUtil.randomBoolean();
      conf.setPersistIDCache(b);
      Assert.assertEquals(b, conf.isPersistIDCache());
      i = RandomUtil.randomInt();
      conf.setJournalCompactMinFiles(i);
      Assert.assertEquals(i, conf.getJournalCompactMinFiles());
      i = RandomUtil.randomInt();
      conf.setJournalCompactPercentage(i);
      Assert.assertEquals(i, conf.getJournalCompactPercentage());
      i = RandomUtil.randomInt();
      conf.setJournalBufferSize_AIO(i);
      Assert.assertEquals(i, conf.getJournalBufferSize_AIO());
      i = RandomUtil.randomInt();
      conf.setJournalBufferTimeout_AIO(i);
      Assert.assertEquals(i, conf.getJournalBufferTimeout_AIO());
      i = RandomUtil.randomInt();
      conf.setJournalBufferSize_NIO(i);
      Assert.assertEquals(i, conf.getJournalBufferSize_NIO());
      i = RandomUtil.randomInt();
      conf.setJournalBufferTimeout_NIO(i);
      Assert.assertEquals(i, conf.getJournalBufferTimeout_NIO());
      b = RandomUtil.randomBoolean();
      conf.setLogJournalWriteRate(b);
      Assert.assertEquals(b, conf.isLogJournalWriteRate());
      i = RandomUtil.randomInt();
      conf.setJournalPerfBlastPages(i);
      Assert.assertEquals(i, conf.getJournalPerfBlastPages());
      l
= RandomUtil.randomLong(); conf.setServerDumpInterval(l); Assert.assertEquals(l, conf.getServerDumpInterval()); s = RandomUtil.randomString(); conf.setPagingDirectory(s); Assert.assertEquals(s, conf.getPagingDirectory()); s = RandomUtil.randomString(); conf.setLargeMessagesDirectory(s); Assert.assertEquals(s, conf.getLargeMessagesDirectory()); b = RandomUtil.randomBoolean(); conf.setWildcardRoutingEnabled(b); Assert.assertEquals(b, conf.isWildcardRoutingEnabled()); l = RandomUtil.randomLong(); conf.setTransactionTimeout(l); Assert.assertEquals(l, conf.getTransactionTimeout()); b = RandomUtil.randomBoolean(); conf.setMessageCounterEnabled(b); Assert.assertEquals(b, conf.isMessageCounterEnabled()); l = RandomUtil.randomPositiveLong(); conf.setMessageCounterSamplePeriod(l); Assert.assertEquals(l, conf.getMessageCounterSamplePeriod()); i = RandomUtil.randomInt(); conf.setMessageCounterMaxDayHistory(i); Assert.assertEquals(i, conf.getMessageCounterMaxDayHistory()); l = RandomUtil.randomLong(); conf.setTransactionTimeoutScanPeriod(l); Assert.assertEquals(l, conf.getTransactionTimeoutScanPeriod()); s = RandomUtil.randomString(); conf.setClusterPassword(s); Assert.assertEquals(s, conf.getClusterPassword()); // This will use serialization to perform a deep copy of the object Configuration conf2 = conf.copy(); Assert.assertTrue(conf.equals(conf2)); } @Test public void testResolvePath() throws Throwable { // Validate that the resolve method will work even with artemis.instance doesn't exist String oldProperty = System.getProperty("artemis.instance"); try { System.setProperty("artemis.instance", "/tmp/" + RandomUtil.randomString()); ConfigurationImpl configuration = new ConfigurationImpl(); configuration.setJournalDirectory("./data-journal"); File journalLocation = configuration.getJournalLocation(); Assert.assertFalse("This path shouldn't resolve to a real folder", journalLocation.exists()); } finally { if (oldProperty == null) { System.clearProperty("artemis.instance"); } 
else { System.setProperty("artemis.instance", oldProperty); } } } @Test public void testAbsolutePath() throws Throwable { // Validate that the resolve method will work even with artemis.instance doesn't exist String oldProperty = System.getProperty("artemis.instance"); File tempFolder = null; try { System.setProperty("artemis.instance", "/tmp/" + RandomUtil.randomString()); tempFolder = File.createTempFile("journal-folder", ""); tempFolder.delete(); tempFolder = new File(tempFolder.getAbsolutePath()); tempFolder.mkdirs(); System.out.println("TempFolder = " + tempFolder.getAbsolutePath()); ConfigurationImpl configuration = new ConfigurationImpl(); configuration.setJournalDirectory(tempFolder.getAbsolutePath()); File journalLocation = configuration.getJournalLocation(); Assert.assertTrue(journalLocation.exists()); } finally { if (oldProperty == null) { System.clearProperty("artemis.instance"); } else { System.setProperty("artemis.instance", oldProperty); } if (tempFolder != null) { tempFolder.delete(); } } } @Override @Before public void setUp() throws Exception { super.setUp(); conf = createConfiguration(); } protected Configuration createConfiguration() throws Exception { return new ConfigurationImpl(); } }
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.microsoft.azure.spring.autoconfigure.aad;

import com.nimbusds.jose.jwk.source.RemoteJWKSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.DeprecatedConfigurationProperty;
import org.springframework.validation.annotation.Validated;

import javax.annotation.PostConstruct;
import javax.validation.constraints.NotEmpty;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.TimeUnit;

/**
 * Configuration properties for Azure Active Directory Authentication.
 *
 * <p>Bound from the {@code azure.activedirectory.*} property namespace. All JWKSet
 * timeouts/limits default to the Nimbus {@link RemoteJWKSet} defaults.
 */
@Validated
@ConfigurationProperties("azure.activedirectory")
public class AADAuthenticationProperties {

    private static final Logger LOGGER = LoggerFactory.getLogger(AADAuthenticationProperties.class);

    private static final String DEFAULT_SERVICE_ENVIRONMENT = "global";

    private static final long DEFAULT_JWK_SET_CACHE_LIFESPAN = TimeUnit.MINUTES.toMillis(5);

    /**
     * Default UserGroup configuration.
     */
    private UserGroupProperties userGroup = new UserGroupProperties();

    /**
     * Azure service environment/region name, e.g., cn, global
     */
    private String environment = DEFAULT_SERVICE_ENVIRONMENT;

    /**
     * Registered application ID in Azure AD.
     * Must be configured when OAuth2 authentication is done in front end
     */
    private String clientId;

    /**
     * API Access Key of the registered application.
     * Must be configured when OAuth2 authentication is done in front end
     */
    private String clientSecret;

    /**
     * App ID URI which might be used in the <code>"aud"</code> claim of an <code>id_token</code>.
     */
    private String appIdUri;

    /**
     * Connection Timeout for the JWKSet Remote URL call.
     */
    private int jwtConnectTimeout = RemoteJWKSet.DEFAULT_HTTP_CONNECT_TIMEOUT; /* milliseconds */

    /**
     * Read Timeout for the JWKSet Remote URL call.
     */
    private int jwtReadTimeout = RemoteJWKSet.DEFAULT_HTTP_READ_TIMEOUT; /* milliseconds */

    /**
     * Size limit in Bytes of the JWKSet Remote URL call.
     */
    private int jwtSizeLimit = RemoteJWKSet.DEFAULT_HTTP_SIZE_LIMIT; /* bytes */

    /**
     * The lifespan of the cached JWK set before it expires, default is 5 minutes.
     */
    private long jwkSetCacheLifespan = DEFAULT_JWK_SET_CACHE_LIFESPAN;

    /**
     * Azure Tenant ID.
     */
    private String tenantId;

    /**
     * If Telemetry events should be published to Azure AD.
     */
    private boolean allowTelemetry = true;

    /**
     * If <code>true</code> activates the stateless auth filter {@link AADAppRoleStatelessAuthenticationFilter}.
     * The default is <code>false</code> which activates {@link AADAuthenticationFilter}.
     */
    private Boolean sessionStateless = false;

    /**
     * Legacy accessor kept for backwards compatibility; delegates to
     * {@code azure.activedirectory.user-group.allowed-groups}.
     */
    @DeprecatedConfigurationProperty(
        reason = "Configuration moved to UserGroup class to keep UserGroup properties together",
        replacement = "azure.activedirectory.user-group.allowed-groups")
    public List<String> getActiveDirectoryGroups() {
        return userGroup.getAllowedGroups();
    }

    /**
     * Properties dedicated to changing the behavior of how the groups are mapped from the Azure AD response. Depending
     * on the graph API used the object will not be the same.
     */
    public static class UserGroupProperties {

        /**
         * Expected UserGroups that an authority will be granted to if found in the response from the MemberOf Graph
         * API Call.
         */
        private List<String> allowedGroups = new ArrayList<>();

        /**
         * Key of the JSON Node to get from the Azure AD response object that will be checked to contain the {@code
         * azure.activedirectory.user-group.value} to signify that this node is a valid {@code UserGroup}.
         */
        @NotEmpty
        private String key = "objectType";

        /**
         * Value of the JSON Node identified by the {@code azure.activedirectory.user-group.key} to validate the JSON
         * Node is a UserGroup.
         */
        @NotEmpty
        private String value = Constants.OBJECT_TYPE_GROUP;

        /**
         * Key of the JSON Node containing the Azure Object ID for the {@code UserGroup}.
         */
        @NotEmpty
        private String objectIDKey = "objectId";

        public List<String> getAllowedGroups() {
            return allowedGroups;
        }

        public void setAllowedGroups(List<String> allowedGroups) {
            this.allowedGroups = allowedGroups;
        }

        public String getKey() {
            return key;
        }

        public void setKey(String key) {
            this.key = key;
        }

        public String getValue() {
            return value;
        }

        public void setValue(String value) {
            this.value = value;
        }

        public String getObjectIDKey() {
            return objectIDKey;
        }

        public void setObjectIDKey(String objectIDKey) {
            this.objectIDKey = objectIDKey;
        }

        @Override
        public String toString() {
            return "UserGroupProperties{"
                + "allowedGroups=" + allowedGroups
                + ", key='" + key + '\''
                + ", value='" + value + '\''
                + ", objectIDKey='" + objectIDKey + '\''
                + '}';
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            UserGroupProperties that = (UserGroupProperties) o;
            return Objects.equals(allowedGroups, that.allowedGroups)
                && Objects.equals(key, that.key)
                && Objects.equals(value, that.value)
                && Objects.equals(objectIDKey, that.objectIDKey);
        }

        @Override
        public int hashCode() {
            return Objects.hash(allowedGroups, key, value, objectIDKey);
        }
    }

    /**
     * Validates at least one of the user group properties are populated.
     *
     * @throws IllegalStateException when session-based auth is configured but no allowed groups are set
     */
    @PostConstruct
    public void validateUserGroupProperties() {
        // FIX: null-safe check — sessionStateless is a Boolean and a caller may set it to null,
        // which previously caused an NPE on auto-unboxing here.
        if (Boolean.TRUE.equals(this.sessionStateless)) {
            if (!this.getUserGroup().getAllowedGroups().isEmpty()) {
                // FIX: corrected 'sessionSateless' typo in the warning message.
                LOGGER.warn("Group names are not supported if you set 'sessionStateless' to 'true'.");
            }
        } else if (this.getUserGroup().getAllowedGroups().isEmpty()) {
            throw new IllegalStateException("One of the User Group Properties must be populated. "
                + "Please populate azure.activedirectory.user-group.allowed-groups");
        }
    }

    public UserGroupProperties getUserGroup() {
        return userGroup;
    }

    public void setUserGroup(UserGroupProperties userGroup) {
        this.userGroup = userGroup;
    }

    public String getEnvironment() {
        return environment;
    }

    public void setEnvironment(String environment) {
        this.environment = environment;
    }

    public String getClientId() {
        return clientId;
    }

    public void setClientId(String clientId) {
        this.clientId = clientId;
    }

    public String getClientSecret() {
        return clientSecret;
    }

    public void setClientSecret(String clientSecret) {
        this.clientSecret = clientSecret;
    }

    @Deprecated
    public void setActiveDirectoryGroups(List<String> activeDirectoryGroups) {
        this.userGroup.setAllowedGroups(activeDirectoryGroups);
    }

    public String getAppIdUri() {
        return appIdUri;
    }

    public void setAppIdUri(String appIdUri) {
        this.appIdUri = appIdUri;
    }

    public int getJwtConnectTimeout() {
        return jwtConnectTimeout;
    }

    public void setJwtConnectTimeout(int jwtConnectTimeout) {
        this.jwtConnectTimeout = jwtConnectTimeout;
    }

    public int getJwtReadTimeout() {
        return jwtReadTimeout;
    }

    public void setJwtReadTimeout(int jwtReadTimeout) {
        this.jwtReadTimeout = jwtReadTimeout;
    }

    public int getJwtSizeLimit() {
        return jwtSizeLimit;
    }

    public void setJwtSizeLimit(int jwtSizeLimit) {
        this.jwtSizeLimit = jwtSizeLimit;
    }

    public long getJwkSetCacheLifespan() {
        return jwkSetCacheLifespan;
    }

    public void setJwkSetCacheLifespan(long jwkSetCacheLifespan) {
        this.jwkSetCacheLifespan = jwkSetCacheLifespan;
    }

    public String getTenantId() {
        return tenantId;
    }

    public void setTenantId(String tenantId) {
        this.tenantId = tenantId;
    }

    public boolean isAllowTelemetry() {
        return allowTelemetry;
    }

    public void setAllowTelemetry(boolean allowTelemetry) {
        this.allowTelemetry = allowTelemetry;
    }

    public Boolean getSessionStateless() {
        return sessionStateless;
    }

    public void setSessionStateless(Boolean sessionStateless) {
        this.sessionStateless = sessionStateless;
    }
}
/*

Derby - Class org.apache.derbyTesting.functionTests.tests.upgradeTests.BasicSetup

Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements.  See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License.  You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

*/
package org.apache.derbyTesting.functionTests.tests.upgradeTests;

import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

// NOTE(review): this GemFireXD import is unused here and looks like a fork artifact;
// left in place because other build targets may rely on it — confirm before removing.
import com.pivotal.gemfirexd.internal.iapi.services.io.DerbyIOException;

import org.apache.derbyTesting.junit.JDBC;
import org.apache.derbyTesting.junit.TestConfiguration;

import junit.framework.Test;
import junit.framework.TestSuite;

/**
 * Basic fixtures and setup for the upgrade test, not
 * tied to any specific release.
 */
public class BasicSetup extends UpgradeChange {

    public static Test suite() {
        TestSuite suite = new TestSuite("Upgrade basic setup");
        suite.addTestSuite(BasicSetup.class);
        return suite;
    }

    public BasicSetup(String name) {
        super(name);
    }

    /**
     * Simple test of the old version from the meta data.
     */
    public void testOldVersion() throws SQLException {
        switch (getPhase()) {
        case PH_CREATE:
        case PH_POST_SOFT_UPGRADE:
            // In these phases the old engine is booted, so the driver and
            // database versions must both report the old release numbers.
            DatabaseMetaData dmd = getConnection().getMetaData();
            assertEquals("Old major (driver): ",
                    getOldMajor(), dmd.getDriverMajorVersion());
            assertEquals("Old minor (driver): ",
                    getOldMinor(), dmd.getDriverMinorVersion());
            assertEquals("Old major (database): ",
                    getOldMajor(), dmd.getDatabaseMajorVersion());
            assertEquals("Old minor (database): ",
                    getOldMinor(), dmd.getDatabaseMinorVersion());
            break;
        }
    }

    /**
     * Test general DML. Just execute some INSERT/UPDATE/DELETE
     * statements in all phases to see that generally the database works.
     * @throws SQLException
     */
    public void testDML() throws SQLException {
        final int phase = getPhase();

        Statement s = createStatement();

        switch (phase) {
        case PH_CREATE:
            // Tables are created once and re-used by every later phase.
            s.executeUpdate("CREATE TABLE PHASE" +
                    "(id INT NOT NULL, ok INT)");
            s.executeUpdate("CREATE TABLE TABLE1" +
                    "(id INT NOT NULL PRIMARY KEY, name varchar(200))");
            break;
        case PH_SOFT_UPGRADE:
            break;
        case PH_POST_SOFT_UPGRADE:
            break;
        case PH_HARD_UPGRADE:
            break;
        }
        s.close();

        PreparedStatement ps = prepareStatement(
                "INSERT INTO PHASE(id) VALUES (?)");
        ps.setInt(1, phase);
        ps.executeUpdate();
        ps.close();

        // Phase-specific id range (phase * 100) keeps rows from different
        // phases from colliding on the primary key.
        ps = prepareStatement("INSERT INTO TABLE1 VALUES (?, ?)");
        for (int i = 1; i < 20; i++) {
            ps.setInt(1, i + (phase * 100));
            ps.setString(2, "p" + phase + "i" + i);
            ps.executeUpdate();
        }
        ps.close();

        ps = prepareStatement("UPDATE TABLE1 set name = name || 'U' " +
                " where id = ?");
        for (int i = 1; i < 20; i+=3) {
            ps.setInt(1, i + (phase * 100));
            ps.executeUpdate();
        }
        ps.close();

        ps = prepareStatement("DELETE FROM TABLE1 where id = ?");
        for (int i = 1; i < 20; i+=4) {
            ps.setInt(1, i + (phase * 100));
            ps.executeUpdate();
        }
        ps.close();
        commit();
    }

    /**
     * Make sure table created in soft upgrade mode can be
     * accessed after shutdown. DERBY-2931
     * @throws SQLException
     */
    public void testCreateTable() throws SQLException {
        Statement stmt = createStatement();
        try {
            stmt.executeUpdate("DROP table t");
        } catch (SQLException se) {
            // ignore table does not exist error on
            // on drop table.
            assertSQLState("42Y55",se );
        }
        stmt.executeUpdate("CREATE TABLE T (I INT)");
        TestConfiguration.getCurrent().shutdownDatabase();
        stmt = createStatement();
        ResultSet rs = stmt.executeQuery("SELECT * from t");
        JDBC.assertEmpty(rs);
        rs.close();
    }

    /**
     * Test table with index can be read after
     * shutdown DERBY-2931
     * @throws SQLException
     */
    public void testIndex() throws SQLException {
        Statement stmt = createStatement();
        try {
            stmt.executeUpdate("DROP table ti");
        } catch (SQLException se) {
            // ignore table does not exist error on
            // on drop table.
            assertSQLState("42Y55",se );
        }
        stmt.executeUpdate("CREATE TABLE TI (I INT primary key not null)");
        stmt.executeUpdate("INSERT INTO TI values(1)");
        stmt.executeUpdate("INSERT INTO TI values(2)");
        stmt.executeUpdate("INSERT INTO TI values(3)");
        TestConfiguration.getCurrent().shutdownDatabase();
        stmt = createStatement();
        ResultSet rs = stmt.executeQuery("SELECT * from TI ORDER BY I");
        JDBC.assertFullResultSet(rs, new String[][] {{"1"},{"2"},{"3"}});
        rs.close();
    }

    /**
     * Ensure that after hard upgrade (with the old version)
     * we can no longer connect to the database.
     */
    public void noConnectionAfterHardUpgrade() {
        switch (getPhase()) {
        case PH_POST_HARD_UPGRADE:
            try {
                getConnection();
                // FIX: previously a successful connection fell through silently,
                // letting the fixture pass even though the old engine managed to
                // boot a hard-upgraded database.
                fail("expected the old version to fail to connect after hard upgrade");
            } catch (SQLException e) {
                // Check the innermost of the nested exceptions
                SQLException sqle = getLastSQLException(e);
                String sqlState = sqle.getSQLState();
                // while beta, XSLAP is expected, if not beta, XSLAN
                if (!(sqlState.equals("XSLAP")) && !(sqlState.equals("XSLAN")))
                    fail("expected an error indicating no connection");
            }
            break;
        }
    }
}
/*! ******************************************************************************
 *
 * Pentaho Data Integration
 *
 * Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package org.pentaho.di.trans.steps.jsonoutput;

import java.io.BufferedOutputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;

import org.apache.commons.vfs2.FileObject;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.ResultFile;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.row.RowDataUtil;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStep;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;

/**
 * Converts input rows to one or more XML files.
 *
 * @author Matt
 * @since 14-jan-2006
 */
public class JsonOutput extends BaseStep implements StepInterface {
  private static Class<?> PKG = JsonOutput.class; // for i18n purposes, needed by Translator2!!

  private JsonOutputMeta meta;
  private JsonOutputData data;

  /** Strategy for turning one input row into JSON (compatibility vs. fixed layout). */
  private interface CompatibilityFactory {
    public void execute( Object[] row ) throws KettleException;
  }

  /**
   * Copies one input field into a JSON object, converting according to the
   * field's Kettle value type. Shared by both compatibility strategies.
   */
  @SuppressWarnings( "unchecked" )
  private void putFieldValue( JSONObject jo, JsonOutputField outputField, ValueMetaInterface v,
      Object[] row, int fieldIndex ) throws KettleException {
    switch ( v.getType() ) {
      case ValueMeta.TYPE_BOOLEAN:
        jo.put( outputField.getElementName(), data.inputRowMeta.getBoolean( row, fieldIndex ) );
        break;
      case ValueMeta.TYPE_INTEGER:
        jo.put( outputField.getElementName(), data.inputRowMeta.getInteger( row, fieldIndex ) );
        break;
      case ValueMeta.TYPE_NUMBER:
        jo.put( outputField.getElementName(), data.inputRowMeta.getNumber( row, fieldIndex ) );
        break;
      case ValueMeta.TYPE_BIGNUMBER:
        jo.put( outputField.getElementName(), data.inputRowMeta.getBigNumber( row, fieldIndex ) );
        break;
      default:
        jo.put( outputField.getElementName(), data.inputRowMeta.getString( row, fieldIndex ) );
        break;
    }
  }

  /** Emits the accumulated bloc when the configured row count is reached. */
  private void outputBlocIfFull( Object[] row ) throws KettleException {
    if ( data.nrRowsInBloc > 0 && data.nrRow % data.nrRowsInBloc == 0 ) {
      outPutRow( row );
    }
  }

  /**
   * Legacy behavior: each field becomes its OWN single-entry JSON object
   * appended to the array (one array element per field, not per row).
   */
  @SuppressWarnings( "unchecked" )
  private class CompatibilityMode implements CompatibilityFactory {
    public void execute( Object[] row ) throws KettleException {

      for ( int i = 0; i < data.nrFields; i++ ) {
        JsonOutputField outputField = meta.getOutputFields()[i];
        ValueMetaInterface v = data.inputRowMeta.getValueMeta( data.fieldIndexes[i] );

        // Create a new object with specified fields
        JSONObject jo = new JSONObject();
        putFieldValue( jo, outputField, v, row, data.fieldIndexes[i] );
        data.ja.add( jo );
      }

      data.nrRow++;
      outputBlocIfFull( row );
    }
  }

  /**
   * Fixed mode: all fields of a row go into ONE JSON object, which becomes a
   * single element of the array.
   */
  @SuppressWarnings( "unchecked" )
  private class FixedMode implements CompatibilityFactory {
    public void execute( Object[] row ) throws KettleException {

      // Create a new object with specified fields
      JSONObject jo = new JSONObject();

      for ( int i = 0; i < data.nrFields; i++ ) {
        JsonOutputField outputField = meta.getOutputFields()[i];
        ValueMetaInterface v = data.inputRowMeta.getValueMeta( data.fieldIndexes[i] );
        putFieldValue( jo, outputField, v, row, data.fieldIndexes[i] );
      }
      data.ja.add( jo );

      data.nrRow++;
      outputBlocIfFull( row );
    }
  }

  private CompatibilityFactory compatibilityFactory;

  public JsonOutput( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta,
      Trans trans ) {
    super( stepMeta, stepDataInterface, copyNr, transMeta, trans );

    // Here we decide whether or not to build the structure in
    // compatible mode or fixed mode
    JsonOutputMeta jsonOutputMeta = (JsonOutputMeta) ( stepMeta.getStepMetaInterface() );

    if ( jsonOutputMeta.isCompatibilityMode() ) {
      compatibilityFactory = new CompatibilityMode();
    } else {
      compatibilityFactory = new FixedMode();
    }
  }

  public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {

    meta = (JsonOutputMeta) smi;
    data = (JsonOutputData) sdi;

    Object[] r = getRow(); // This also waits for a row to be finished.
    if ( r == null ) {
      // no more input to be expected...
      if ( !data.rowsAreSafe ) {
        // Let's output the remaining unsafe data
        outPutRow( r );
      }

      setOutputDone();
      return false;
    }

    if ( first ) {
      first = false;
      data.inputRowMeta = getInputRowMeta();
      data.inputRowMetaSize = data.inputRowMeta.size();
      if ( data.outputValue ) {
        data.outputRowMeta = data.inputRowMeta.clone();
        meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore );
      }

      // Cache the field name indexes
      //
      data.nrFields = meta.getOutputFields().length;
      data.fieldIndexes = new int[data.nrFields];
      for ( int i = 0; i < data.nrFields; i++ ) {
        data.fieldIndexes[i] = data.inputRowMeta.indexOfValue( meta.getOutputFields()[i].getFieldName() );
        if ( data.fieldIndexes[i] < 0 ) {
          throw new KettleException( BaseMessages.getString( PKG, "JsonOutput.Exception.FieldNotFound" ) );
        }
        JsonOutputField field = meta.getOutputFields()[i];
        field.setElementName( environmentSubstitute( field.getElementName() ) );
      }
    }

    data.rowsAreSafe = false;
    compatibilityFactory.execute( r );

    if ( data.writeToFile && !data.outputValue ) {
      putRow( data.inputRowMeta, r ); // in case we want it go further...
      incrementLinesOutput();
    }
    return true;
  }

  /**
   * Serializes the accumulated JSON array under the bloc name and pushes the
   * result downstream (as a field value) and/or to the output file.
   */
  @SuppressWarnings( "unchecked" )
  private void outPutRow( Object[] rowData ) throws KettleStepException {
    // We can now output an object
    data.jg = new JSONObject();
    data.jg.put( data.realBlocName, data.ja );
    String value = data.jg.toJSONString();

    if ( data.outputValue && data.outputRowMeta != null ) {
      Object[] outputRowData = RowDataUtil.addValueData( rowData, data.inputRowMetaSize, value );
      incrementLinesOutput();
      putRow( data.outputRowMeta, outputRowData );
    }

    if ( data.writeToFile ) {
      // Open a file
      if ( !openNewFile() ) {
        throw new KettleStepException( BaseMessages.getString( PKG, "JsonOutput.Error.OpenNewFile", buildFilename() ) );
      }
      // Write data to file
      try {
        data.writer.write( value );
      } catch ( Exception e ) {
        throw new KettleStepException( BaseMessages.getString( PKG, "JsonOutput.Error.Writing" ), e );
      }
      // Close file
      closeFile();
    }
    // Data are safe
    data.rowsAreSafe = true;
    data.ja = new JSONArray();
  }

  public boolean init( StepMetaInterface smi, StepDataInterface sdi ) {
    meta = (JsonOutputMeta) smi;
    data = (JsonOutputData) sdi;
    if ( super.init( smi, sdi ) ) {

      data.writeToFile = ( meta.getOperationType() != JsonOutputMeta.OPERATION_TYPE_OUTPUT_VALUE );
      data.outputValue = ( meta.getOperationType() != JsonOutputMeta.OPERATION_TYPE_WRITE_TO_FILE );

      if ( data.outputValue ) {
        // We need to have output field name
        if ( Const.isEmpty( environmentSubstitute( meta.getOutputValue() ) ) ) {
          logError( BaseMessages.getString( PKG, "JsonOutput.Error.MissingOutputFieldName" ) );
          stopAll();
          setErrors( 1 );
          return false;
        }
      }
      if ( data.writeToFile ) {
        // We need to have output field name
        if ( !meta.isServletOutput() && Const.isEmpty( meta.getFileName() ) ) {
          logError( BaseMessages.getString( PKG, "JsonOutput.Error.MissingTargetFilename" ) );
          stopAll();
          setErrors( 1 );
          return false;
        }
        if ( !meta.isDoNotOpenNewFileInit() ) {
          if ( !openNewFile() ) {
            logError( BaseMessages.getString( PKG, "JsonOutput.Error.OpenNewFile", buildFilename() ) );
            stopAll();
            setErrors( 1 );
            return false;
          }
        }
      }
      data.realBlocName = Const.NVL( environmentSubstitute( meta.getJsonBloc() ), "" );
      data.nrRowsInBloc = Const.toInt( environmentSubstitute( meta.getNrRowsInBloc() ), 0 );
      return true;
    }

    return false;
  }

  public void dispose( StepMetaInterface smi, StepDataInterface sdi ) {
    meta = (JsonOutputMeta) smi;
    data = (JsonOutputData) sdi;
    // Drop references so the accumulated JSON can be garbage collected.
    data.ja = null;
    data.jg = null;

    closeFile();

    super.dispose( smi, sdi );
  }

  private void createParentFolder( String filename ) throws KettleStepException {
    if ( !meta.isCreateParentFolder() ) {
      return;
    }
    // Check for parent folder
    FileObject parentfolder = null;
    try {
      // Get parent folder
      parentfolder = KettleVFS.getFileObject( filename, getTransMeta() ).getParent();
      if ( !parentfolder.exists() ) {
        if ( log.isDebug() ) {
          logDebug( BaseMessages.getString( PKG, "JsonOutput.Error.ParentFolderNotExist", parentfolder.getName() ) );
        }
        parentfolder.createFolder();
        if ( log.isDebug() ) {
          logDebug( BaseMessages.getString( PKG, "JsonOutput.Log.ParentFolderCreated" ) );
        }
      }
    } catch ( Exception e ) {
      // FIX: when getFileObject() itself fails, parentfolder is still null and the
      // old code NPE'd on parentfolder.getName(), masking the real error. Fall back
      // to the requested filename and keep the original exception as the cause.
      String folderName = ( parentfolder == null ) ? filename : parentfolder.getName().toString();
      throw new KettleStepException( BaseMessages.getString( PKG, "JsonOutput.Error.ErrorCreatingParentFolder",
        folderName ), e );
    } finally {
      if ( parentfolder != null ) {
        try {
          parentfolder.close();
        } catch ( Exception ex ) { /* Ignore */
        }
      }
    }
  }

  public boolean openNewFile() {
    if ( data.writer != null ) {
      return true;
    }
    boolean retval = false;

    try {

      if ( meta.isServletOutput() ) {
        data.writer = getTrans().getServletPrintWriter();
      } else {
        String filename = buildFilename();
        createParentFolder( filename );
        if ( meta.AddToResult() ) {
          // Add this to the result file names...
          ResultFile resultFile =
            new ResultFile(
              ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject( filename, getTransMeta() ), getTransMeta()
                .getName(), getStepname() );
          resultFile.setComment( BaseMessages.getString( PKG, "JsonOutput.ResultFilenames.Comment" ) );
          addResultFile( resultFile );
        }

        OutputStream outputStream;
        OutputStream fos = KettleVFS.getOutputStream( filename, getTransMeta(), meta.isFileAppended() );
        outputStream = fos;

        if ( !Const.isEmpty( meta.getEncoding() ) ) {
          data.writer =
            new OutputStreamWriter( new BufferedOutputStream( outputStream, 5000 ), environmentSubstitute( meta
              .getEncoding() ) );
        } else {
          data.writer = new OutputStreamWriter( new BufferedOutputStream( outputStream, 5000 ) );
        }

        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "JsonOutput.FileOpened", filename ) );
        }

        data.splitnr++;
      }

      retval = true;

    } catch ( Exception e ) {
      logError( BaseMessages.getString( PKG, "JsonOutput.Error.OpeningFile", e.toString() ) );
    }

    return retval;
  }

  public String buildFilename() {
    return meta.buildFilename( environmentSubstitute( meta.getFileName() ), getCopy(), data.splitnr );
  }

  private boolean closeFile() {
    if ( data.writer == null ) {
      return true;
    }
    boolean retval = false;

    try {
      data.writer.close();
      data.writer = null;
      retval = true;
    } catch ( Exception e ) {
      logError( BaseMessages.getString( PKG, "JsonOutput.Error.ClosingFile", e.toString() ) );
      setErrors( 1 );
      retval = false;
    }

    return retval;
  }
}
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.java;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration.LabelConverter;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration.LabelListConverter;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration.LabelMapConverter;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration.StrictDepsConverter;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration.StrictDepsMode;
import com.google.devtools.build.lib.analysis.config.FragmentOptions;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.rules.java.JavaConfiguration.JavaClasspathMode;
import com.google.devtools.build.lib.rules.java.JavaConfiguration.JavaOptimizationMode;
import com.google.devtools.build.lib.rules.java.JavaConfiguration.OneVersionEnforcementLevel;
import com.google.devtools.common.options.EnumConverter;
import com.google.devtools.common.options.Option;
import com.google.devtools.common.options.OptionDocumentationCategory;
import com.google.devtools.common.options.OptionEffectTag;
import com.google.devtools.common.options.OptionMetadataTag;
import com.google.devtools.common.options.TriState;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Command-line options for building Java targets
 */
public class JavaOptions extends FragmentOptions {
  /** Converter for the --java_classpath option. */
  public static class JavaClasspathModeConverter extends EnumConverter<JavaClasspathMode> {
    public JavaClasspathModeConverter() {
      super(JavaClasspathMode.class, "Java classpath reduction strategy");
    }
  }

  /**
   * Converter for the --java_optimization_mode option.
   */
  public static class JavaOptimizationModeConverter extends EnumConverter<JavaOptimizationMode> {
    public JavaOptimizationModeConverter() {
      super(JavaOptimizationMode.class, "Java optimization strategy");
    }
  }

  // Javadoc fixed: the original said "--java_optimization_mode", copy-pasted
  // from the converter above.
  /** Converter for the --experimental_one_version_enforcement option. */
  public static class OneVersionEnforcementLevelConverter
      extends EnumConverter<OneVersionEnforcementLevel> {
    public OneVersionEnforcementLevelConverter() {
      super(OneVersionEnforcementLevel.class, "Enforcement level for Java One Version violations");
    }
  }

  @Option(
    name = "javabase",
    defaultValue = "@bazel_tools//tools/jdk:jdk",
    category = "version",
    converter = LabelConverter.class,
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help =
        "JAVABASE used for the JDK invoked by Blaze. This is the "
            + "java_runtime_suite which will be used to execute "
            + "external Java commands."
  )
  public Label javaBase;

  @Option(
    name = "java_toolchain",
    defaultValue = "@bazel_tools//tools/jdk:toolchain",
    category = "version",
    converter = LabelConverter.class,
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help = "The name of the toolchain rule for Java."
  )
  public Label javaToolchain;

  @Option(
    name = "host_java_toolchain",
    defaultValue = "@bazel_tools//tools/jdk:toolchain",
    category = "version",
    converter = LabelConverter.class,
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help = "The Java toolchain used to build tools that are executed during a build."
  )
  public Label hostJavaToolchain;

  @Option(
    name = "host_javabase",
    defaultValue = "@bazel_tools//tools/jdk:jdk",
    converter = LabelConverter.class,
    category = "version",
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help =
        "JAVABASE used for the host JDK. This is the java_runtime_suite which is used to execute "
            + "tools during a build."
  )
  public Label hostJavaBase;

  @Option(
    name = "javacopt",
    allowMultiple = true,
    defaultValue = "",
    category = "flags",
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help = "Additional options to pass to javac."
  )
  public List<String> javacOpts;

  @Option(
    name = "jvmopt",
    allowMultiple = true,
    defaultValue = "",
    category = "flags",
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help =
        "Additional options to pass to the Java VM. These options will get added to the "
            + "VM startup options of each java_binary target."
  )
  public List<String> jvmOpts;

  @Option(
    name = "use_ijars",
    defaultValue = "true",
    category = "strategy",
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help =
        "If enabled, this option causes Java compilation to use interface jars. "
            + "This will result in faster incremental compilation, "
            + "but error messages can be different."
  )
  public boolean useIjars;

  @Deprecated
  @Option(
    name = "use_src_ijars",
    defaultValue = "false",
    documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help = "No-op. Kept here for backwards compatibility."
  )
  public boolean useSourceIjars;

  @Option(
    name = "java_header_compilation",
    defaultValue = "true",
    category = "semantics",
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help = "Compile ijars directly from source.",
    oldName = "experimental_java_header_compilation"
  )
  public boolean headerCompilation;

  // TODO(cushon): delete flag after removing from global .blazerc
  @Deprecated
  @Option(
    name = "experimental_optimize_header_compilation_annotation_processing",
    defaultValue = "false",
    documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help = "This flag is a noop and scheduled for removal."
  )
  public boolean optimizeHeaderCompilationAnnotationProcessing;

  @Option(
    name = "java_deps",
    defaultValue = "true",
    category = "strategy",
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help = "Generate dependency information (for now, compile-time classpath) per Java target."
  )
  public boolean javaDeps;

  @Option(
    name = "java_classpath",
    allowMultiple = false,
    defaultValue = "javabuilder",
    converter = JavaClasspathModeConverter.class,
    category = "semantics",
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help = "Enables reduced classpaths for Java compilations.",
    oldName = "experimental_java_classpath"
  )
  public JavaClasspathMode javaClasspath;

  // Expansion-only option: it carries no value itself (type Void), it just
  // expands into the flags listed below.
  @Option(
    name = "java_debug",
    defaultValue = "null",
    category = "testing",
    expansion = {
      "--test_arg=--wrapper_script_flag=--debug",
      "--test_output=streamed",
      "--test_strategy=exclusive",
      "--test_timeout=9999",
      "--nocache_test_results"
    },
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help =
        "Causes the Java virtual machine of a java test to wait for a connection from a "
            + "JDWP-compliant debugger (such as jdb) before starting the test. Implies "
            + "-test_output=streamed."
  )
  public Void javaTestDebug;

  @Option(
    name = "strict_java_deps",
    allowMultiple = false,
    defaultValue = "default",
    converter = StrictDepsConverter.class,
    category = "semantics",
    documentationCategory = OptionDocumentationCategory.INPUT_STRICTNESS,
    effectTags = {OptionEffectTag.BUILD_FILE_SEMANTICS, OptionEffectTag.EAGERNESS_TO_EXIT},
    help =
        "If true, checks that a Java target explicitly declares all directly used "
            + "targets as dependencies.",
    oldName = "strict_android_deps"
  )
  public StrictDepsMode strictJavaDeps;

  // TODO(bazel-team): This flag should ideally default to true (and eventually removed). We have
  // been accidentally supplying JUnit and Hamcrest deps to java_test targets indirectly via the
  // BazelTestRunner, and setting this flag to true fixes that behaviour.
  @Option(
    name = "explicit_java_test_deps",
    defaultValue = "false",
    category = "semantics",
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help =
        "Explicitly specify a dependency to JUnit or Hamcrest in a java_test instead of "
            + " accidentally obtaining from the TestRunner's deps. Only works for bazel right now."
  )
  public boolean explicitJavaTestDeps;

  @Option(
    name = "experimental_testrunner",
    defaultValue = "false",
    documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help =
        "Use the experimental test runner in bazel which runs the tests under a separate "
            + "classloader. We must set the --explicit_java_test_deps flag with this to ensure "
            + "the test targets have their dependencies right."
  )
  public boolean experimentalTestRunner;

  @Option(
    name = "javabuilder_top",
    defaultValue = "null",
    documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help = "No-op. Kept here for backwards compatibility."
  )
  public String javaBuilderTop;

  @Option(
    name = "singlejar_top",
    defaultValue = "null",
    documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help = "No-op. Kept here for backwards compatibility."
  )
  public String singleJarTop;

  @Option(
    name = "genclass_top",
    defaultValue = "null",
    documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help = "No-op. Kept here for backwards compatibility."
  )
  public String genClassTop;

  @Option(
    name = "ijar_top",
    defaultValue = "null",
    documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help = "No-op. Kept here for backwards compatibility."
  )
  public String iJarTop;

  @Option(
    name = "java_langtools",
    defaultValue = "null",
    documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help = "No-op. Kept here for backwards compatibility."
  )
  public String javaLangtoolsJar;

  @Option(
    name = "javac_bootclasspath",
    defaultValue = "null",
    documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help = "No-op. Kept here for backwards compatibility."
  )
  public String javacBootclasspath;

  @Option(
    name = "javac_extdir",
    defaultValue = "null",
    documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help = "No-op. Kept here for backwards compatibility."
  )
  public String javacExtdir;

  @Option(
    name = "host_java_launcher",
    defaultValue = "null",
    converter = LabelConverter.class,
    category = "semantics",
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help = "The Java launcher used by tools that are executed during a build."
  )
  public Label hostJavaLauncher;

  @Option(
    name = "java_launcher",
    defaultValue = "null",
    converter = LabelConverter.class,
    category = "semantics",
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help =
        "The Java launcher to use when building Java binaries. "
            + "The \"launcher\" attribute overrides this flag. "
  )
  public Label javaLauncher;

  @Option(
    name = "proguard_top",
    defaultValue = "null",
    category = "version",
    converter = LabelConverter.class,
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help =
        "Specifies which version of ProGuard to use for code removal when building a Java "
            + "binary."
  )
  public Label proguard;

  @Option(
    name = "extra_proguard_specs",
    allowMultiple = true,
    defaultValue = "", // Ignored
    converter = LabelConverter.class,
    documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help =
        "Additional Proguard specs that will be used for all Proguard invocations. Note that "
            + "using this option only has an effect when Proguard is used anyway."
  )
  public List<Label> extraProguardSpecs;

  /**
   * Comma-separated list of Mnemonic=label pairs of optimizers to run in the given order, treating
   * {@code Proguard} specially by substituting in the relevant Proguard binary automatically. All
   * optimizers must understand the same flags as Proguard.
   */
  @Option(
    name = "experimental_bytecode_optimizers",
    defaultValue = "Proguard",
    converter = LabelMapConverter.class,
    documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help = "Do not use."
  )
  public Map<String, Label> bytecodeOptimizers;

  @Option(
    name = "translations",
    defaultValue = "auto",
    category = "semantics",
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help =
        "Translate Java messages; bundle all translations into the jar "
            + "for each affected rule."
  )
  public TriState bundleTranslations;

  @Option(
    name = "message_translations",
    defaultValue = "",
    category = "semantics",
    allowMultiple = true,
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help = "The message translations used for translating messages in Java targets."
  )
  public List<String> translationTargets;

  @Option(
    name = "check_constraint",
    allowMultiple = true,
    defaultValue = "",
    category = "checking",
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help = "Check the listed constraint."
  )
  public List<String> checkedConstraints;

  @Option(
    name = "experimental_disable_jvm",
    defaultValue = "false",
    documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help = "Disables the Jvm configuration entirely."
  )
  public boolean disableJvm;

  @Option(
    name = "java_optimization_mode",
    defaultValue = "legacy",
    converter = JavaOptimizationModeConverter.class,
    documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help = "Applies desired link-time optimizations to Java binaries and tests."
  )
  public JavaOptimizationMode javaOptimizationMode;

  @Option(
    name = "legacy_bazel_java_test",
    defaultValue = "false",
    documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help = "Use the legacy mode of Bazel for java_test."
  )
  public boolean legacyBazelJavaTest;

  @Option(
    name = "strict_deps_java_protos",
    defaultValue = "false",
    documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
    effectTags = {OptionEffectTag.BUILD_FILE_SEMANTICS, OptionEffectTag.EAGERNESS_TO_EXIT},
    help =
        "When 'strict-deps' is on, .java files that depend on classes not declared in their rule's "
            + "'deps' fail to build. In other words, it's forbidden to depend on classes obtained "
            + "transitively. When true, Java protos are strict regardless of their 'strict_deps' "
            + "attribute."
  )
  public boolean strictDepsJavaProtos;

  @Option(
    name = "experimental_java_header_compilation_disable_javac_fallback",
    defaultValue = "false",
    documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help =
        "If --java_header_compilation is set, report diagnostics from turbine instead of falling "
            + " back to javac. Diagnostics will be produced more quickly, but may be less helpful."
  )
  public boolean headerCompilationDisableJavacFallback;

  @Option(
    name = "experimental_one_version_enforcement",
    defaultValue = "OFF",
    converter = OneVersionEnforcementLevelConverter.class,
    documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help =
        "When enabled, enforce that a java_binary rule can't contain more than one version "
            + "of the same class file on the classpath. This enforcement can break the build, or "
            + "can just result in warnings."
  )
  public OneVersionEnforcementLevel enforceOneVersion;

  @Option(
    name = "one_version_enforcement_on_java_tests",
    defaultValue = "true",
    documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help =
        "When enabled, and with experimental_one_version_enforcement set to a non-NONE value,"
            + " enforce one version on java_test targets. This flag can be disabled to improve"
            + " incremental test performance at the expense of missing potential one version"
            + " violations."
  )
  public boolean enforceOneVersionOnJavaTests;

  @Option(
    name = "experimental_allow_runtime_deps_on_neverlink",
    defaultValue = "true",
    documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
    effectTags = { OptionEffectTag.BUILD_FILE_SEMANTICS },
    metadataTags = { OptionMetadataTag.EXPERIMENTAL },
    help =
        "Flag to help transition from allowing to disallowing runtime_deps on neverlink"
            + " Java archives. The depot needs to be cleaned up to roll this out by default."
  )
  public boolean allowRuntimeDepsOnNeverLink;

  @Option(
    name = "jplPropagateCcLinkParamsStore",
    defaultValue = "false",
    category = "rollout",
    documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
    effectTags = {OptionEffectTag.AFFECTS_OUTPUTS, OptionEffectTag.LOADING_AND_ANALYSIS},
    metadataTags = {OptionMetadataTag.INCOMPATIBLE_CHANGE},
    help = "Roll-out flag for making java_proto_library propagate CcLinkParamsStore. DO NOT USE."
  )
  public boolean jplPropagateCcLinkParamsStore;

  @Option(
    name = "experimental_enable_jvm_configuration_make_variables",
    defaultValue = "true",
    documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
    effectTags = {OptionEffectTag.UNKNOWN},
    metadataTags = {OptionMetadataTag.INCOMPATIBLE_CHANGE},
    help =
        "If enabled, the Java configuration fragment supplies the JAVA and JAVABASE "
            + "Make variables. This option is used in the migration to remove them in favor of "
            + "requiring an explicit dependency on the Java runtime for rules that use them."
  )
  public boolean enableMakeVariables;

  // Plugins are built using the host config. To avoid cycles we just don't propagate
  // this option to the host config. If one day we decide to use plugins when building
  // host tools, we can improve this by (for example) creating a compiler configuration that is
  // used only for building plugins.
  @Option(
    name = "plugin",
    converter = LabelListConverter.class,
    allowMultiple = true,
    defaultValue = "",
    category = "flags",
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.UNKNOWN},
    help = "Plugins to use in the build. Currently works with java_plugin."
  )
  public List<Label> pluginList;

  /**
   * Derives the host-configuration variant of these options: host toolchain/javabase/launcher
   * are substituted in, and build-performance-relevant flags are carried over.
   */
  @Override
  public FragmentOptions getHost() {
    JavaOptions host = (JavaOptions) getDefault();

    host.javaBase = hostJavaBase;
    host.jvmOpts = ImmutableList.of("-XX:ErrorFile=/dev/stderr");

    host.javacOpts = javacOpts;
    host.javaToolchain = hostJavaToolchain;

    host.javaLauncher = hostJavaLauncher;

    // Java builds often contain complicated code generators for which
    // incremental build performance is important.
    host.useIjars = useIjars;
    host.headerCompilation = headerCompilation;
    host.headerCompilationDisableJavacFallback = headerCompilationDisableJavacFallback;

    host.javaDeps = javaDeps;
    host.javaClasspath = javaClasspath;

    host.strictJavaDeps = strictJavaDeps;

    host.enforceOneVersion = enforceOneVersion;
    // java_test targets can be used as a host tool, Ex: as a validating tool on a genrule.
    host.enforceOneVersionOnJavaTests = enforceOneVersionOnJavaTests;
    host.allowRuntimeDepsOnNeverLink = allowRuntimeDepsOnNeverLink;

    host.jplPropagateCcLinkParamsStore = jplPropagateCcLinkParamsStore;

    host.enableMakeVariables = enableMakeVariables;

    return host;
  }

  /** Supplies the JDK / JAVA_TOOLCHAIN defaults-package labels derived from these options. */
  @Override
  public Map<String, Set<Label>> getDefaultsLabels(BuildConfiguration.Options commonOptions) {
    Map<String, Set<Label>> result = new HashMap<>();
    result.put("JDK", ImmutableSet.of(javaBase, hostJavaBase));
    result.put("JAVA_TOOLCHAIN", ImmutableSet.of(javaToolchain));

    return result;
  }
}
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.testFramework;

import com.intellij.ide.highlighter.ModuleFileType;
import com.intellij.ide.highlighter.ProjectFileType;
import com.intellij.ide.startup.impl.StartupManagerImpl;
import com.intellij.idea.IdeaLogger;
import com.intellij.idea.IdeaTestApplication;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.DataProvider;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.command.impl.UndoManagerImpl;
import com.intellij.openapi.command.undo.UndoManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.event.DocumentListener;
import com.intellij.openapi.fileTypes.FileTypeManager;
import com.intellij.openapi.fileTypes.impl.FileTypeManagerImpl;
import com.intellij.openapi.module.EmptyModuleType;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.module.ModuleType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.project.ex.ProjectManagerEx;
import com.intellij.openapi.project.impl.ProjectManagerImpl;
import com.intellij.openapi.project.impl.TooManyProjectLeakedException;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.ModuleRootModificationUtil;
import com.intellij.openapi.startup.StartupManager;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.EmptyRunnable;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.impl.local.LocalFileSystemImpl;
import com.intellij.openapi.vfs.newvfs.impl.VirtualDirectoryImpl;
import com.intellij.openapi.vfs.newvfs.persistent.PersistentFS;
import com.intellij.openapi.vfs.newvfs.persistent.PersistentFSImpl;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.psi.codeStyle.CodeStyleSchemes;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.impl.DocumentCommitThread;
import com.intellij.psi.impl.PsiManagerEx;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageManagerImpl;
import com.intellij.util.PlatformUtils;
import com.intellij.util.indexing.IndexableSetContributor;
import com.intellij.util.indexing.IndexedRootsProvider;
import com.intellij.util.ui.UIUtil;
import junit.framework.TestCase;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.InvocationTargetException;
import java.net.URL;
import java.nio.charset.Charset;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;

/**
 * Base fixture for platform-level tests: boots a shared {@link IdeaTestApplication},
 * creates a throw-away project/module per test, and tears everything down while
 * collecting leaks and leftover errors.
 *
 * @author yole
 */
public abstract class PlatformTestCase extends UsefulTestCase implements DataProvider {
  public static final String TEST_DIR_PREFIX = "idea_test_";

  // Shared test application, initialized once per JVM (see initApplication()).
  protected static IdeaTestApplication ourApplication;
  protected ProjectManagerEx myProjectManager;
  protected Project myProject;
  protected Module myModule;
  // Files scheduled for deletion in tearDown(); static so leaked files from a
  // previous test are still cleaned up.
  protected static final Collection<File> myFilesToDelete = new HashSet<File>();
  protected boolean myAssertionsInTestDetected;
  protected static final Logger LOG = Logger.getInstance("#com.intellij.testFramework.PlatformTestCase");
  public static Thread ourTestThread;
  // Tracks the currently running test so a missing tearDown() super call can be detected.
  private static TestCase ourTestCase = null;
  public static final long DEFAULT_TEST_TIME = 300L;
  public static long ourTestTime = DEFAULT_TEST_TIME;
  private EditorListenerTracker myEditorListenerTracker;
  private ThreadTracker myThreadTracker;
  protected static boolean ourPlatformPrefixInitialized;
  private static Set<VirtualFile> ourEternallyLivingFilesCache;

  static {
    // Route platform logging through the test logger for the whole JVM.
    Logger.setFactory(TestLoggerFactory.class);
  }

  protected static long getTimeRequired() {
    return DEFAULT_TEST_TIME;
  }

  /**
   * If a temp directory is reused from some previous test run, there might be cached children in its VFS.
   * Ensure they're removed
   */
  public static void synchronizeTempDirVfs(VirtualFile tempDir) {
    tempDir.getChildren();
    tempDir.refresh(false, true);
  }

  @Nullable
  protected String getApplicationConfigDirPath() throws Exception {
    return null;
  }

  protected void initApplication() throws Exception {
    boolean firstTime = ourApplication == null;
    autodetectPlatformPrefix();
    ourApplication = IdeaTestApplication.getInstance(getApplicationConfigDirPath());
    ourApplication.setDataProvider(this);

    if (firstTime) {
      // Only wipe persisted VFS content once, on first application boot.
      cleanPersistedVFSContent();
    }
  }

  // Candidate platform prefixes probed (in order) when ApplicationInfo.xml is absent.
  private static final String[] PREFIX_CANDIDATES = {
    "AppCode", "CLion", "CidrCommon",
    "Python", "PyCharmCore", "Ruby", "UltimateLangXml", "Idea", "PlatformLangXml" };

  /**
   * @deprecated calling this method is no longer necessary
   */
  public static void autodetectPlatformPrefix() {
    doAutodetectPlatformPrefix();
  }

  public static void doAutodetectPlatformPrefix() {
    if (ourPlatformPrefixInitialized) {
      return;
    }
    // When the IDEA application descriptor is on the classpath, no prefix is needed;
    // otherwise probe the known plugin descriptors to pick the platform prefix.
    URL resource = PlatformTestCase.class.getClassLoader().getResource("idea/ApplicationInfo.xml");
    if (resource == null) {
      for (String candidate : PREFIX_CANDIDATES) {
        resource = PlatformTestCase.class.getClassLoader().getResource("META-INF/" + candidate + "Plugin.xml");
        if (resource != null) {
          setPlatformPrefix(candidate);
          break;
        }
      }
    }
  }

  private static void cleanPersistedVFSContent() {
    ((PersistentFSImpl)PersistentFS.getInstance()).cleanPersistedContents();
  }

  @Override
  protected CodeStyleSettings getCurrentCodeStyleSettings() {
    if (CodeStyleSchemes.getInstance().getCurrentScheme() == null) return new CodeStyleSettings();
    return CodeStyleSettingsManager.getSettings(getProject());
  }

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    if (ourTestCase != null) {
      String message = "Previous test " + ourTestCase + " hasn't called tearDown(). Probably overridden without super call.";
      ourTestCase = null;
      fail(message);
    }
    IdeaLogger.ourErrorsOccurred = null;
    LOG.info(getClass().getName() + ".setUp()");

    initApplication();

    myEditorListenerTracker = new EditorListenerTracker();
    myThreadTracker = new ThreadTracker();

    setUpProject();

    storeSettings();
    ourTestCase = this;
    if (myProject != null) {
      ProjectManagerEx.getInstanceEx().openTestProject(myProject);
      CodeStyleSettingsManager.getInstance(myProject).setTemporarySettings(new CodeStyleSettings());
      InjectedLanguageManagerImpl.pushInjectors(getProject());
    }

    DocumentCommitThread.getInstance().clearQueue();
    UIUtil.dispatchAllInvocationEvents();
  }

  public Project getProject() {
    return myProject;
  }

  public final PsiManager getPsiManager() {
    return PsiManager.getInstance(myProject);
  }

  public Module getModule() {
    return myModule;
  }

  protected void setUpProject() throws Exception {
    myProjectManager = ProjectManagerEx.getInstanceEx();
    assertNotNull("Cannot instantiate ProjectManager component", myProjectManager);

    File projectFile = getIprFile();

    myProject = doCreateProject(projectFile);
    myProjectManager.openTestProject(myProject);
    LocalFileSystem.getInstance().refreshIoFiles(myFilesToDelete);
    // NOTE(review): openTestProject is invoked twice here (before and after the
    // refresh) — looks redundant; confirm whether this is intentional.
    myProjectManager.openTestProject(myProject);

    setUpModule();

    setUpJdk();

    LightPlatformTestCase.clearUncommittedDocuments(getProject());

    runStartupActivities();
    ((FileTypeManagerImpl)FileTypeManager.getInstance()).drainReDetectQueue();
  }

  protected Project doCreateProject(File projectFile) throws Exception {
    return createProject(projectFile, getClass().getName() + "." + getName());
  }

  @NotNull
  public static Project createProject(File projectFile, String creationPlace) {
    try {
      Project project =
        ProjectManagerEx.getInstanceEx().newProject(FileUtil.getNameWithoutExtension(projectFile), projectFile.getPath(), false, false);
      assert project != null;

      // Remember where the project was created so leaks can be attributed (see getCreationPlace()).
      project.putUserData(CREATION_PLACE, creationPlace);
      return project;
    }
    catch (TooManyProjectLeakedException e) {
      StringBuilder leakers = new StringBuilder();
      leakers.append("Too many projects leaked: \n");
      for (Project project : e.getLeakedProjects()) {
        String presentableString = getCreationPlace(project);
        leakers.append(presentableString);
        leakers.append("\n");
      }

      fail(leakers.toString());
      return null;
    }
  }

  @NotNull
  public static String getCreationPlace(@NotNull Project project) {
    String place = project.getUserData(CREATION_PLACE);
    Object base;
    try {
      base = project.isDisposed() ? "" : project.getBaseDir();
    }
    catch (Exception e) {
      base = " (" + e + " while getting base dir)";
    }
    return project + (place != null ? place : "") + base;
  }

  protected void runStartupActivities() {
    final StartupManagerImpl startupManager = (StartupManagerImpl)StartupManager.getInstance(myProject);
    startupManager.runStartupActivities();
    startupManager.startCacheUpdate();
    startupManager.runPostStartupActivities();
  }

  protected File getIprFile() throws IOException {
    File tempFile = FileUtil.createTempFile(getName() + "_", ProjectFileType.DOT_DEFAULT_EXTENSION);
    myFilesToDelete.add(tempFile);
    return tempFile;
  }

  protected void setUpModule() {
    new WriteCommandAction.Simple(getProject()) {
      @Override
      protected void run() throws Throwable {
        myModule = createMainModule();
      }
    }.execute().throwException();
  }

  protected Module createMainModule() throws IOException {
    return createModule(myProject.getName());
  }

  protected Module createModule(@NonNls final String moduleName) {
    return doCreateRealModule(moduleName);
  }

  protected Module doCreateRealModule(final String moduleName) {
    return doCreateRealModuleIn(moduleName, myProject, getModuleType());
  }

  protected static Module doCreateRealModuleIn(String moduleName, final Project project, final ModuleType moduleType) {
    final VirtualFile baseDir = project.getBaseDir();
    assertNotNull(baseDir);
    final File moduleFile = new File(FileUtil.toSystemDependentName(baseDir.getPath()),
                                     moduleName + ModuleFileType.DOT_DEFAULT_EXTENSION);
    FileUtil.createIfDoesntExist(moduleFile);
    myFilesToDelete.add(moduleFile);
    return new WriteAction<Module>() {
      @Override
      protected void run(@NotNull Result<Module> result) throws Throwable {
        VirtualFile virtualFile = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(moduleFile);
        assertNotNull(virtualFile);
        Module module = ModuleManager.getInstance(project).newModule(virtualFile.getPath(), moduleType.getId());
        module.getModuleFile();
        result.setResult(module);
      }
    }.execute().getResultObject();
  }

  protected ModuleType getModuleType() {
    return EmptyModuleType.getInstance();
  }

  public static void cleanupApplicationCaches(Project project) {
    if (project != null && !project.isDisposed()) {
      UndoManagerImpl globalInstance = (UndoManagerImpl)UndoManager.getGlobalInstance();
      if (globalInstance != null) {
        globalInstance.dropHistoryInTests();
      }
      ((UndoManagerImpl)UndoManager.getInstance(project)).dropHistoryInTests();

      ((PsiManagerEx)PsiManager.getInstance(project)).getFileManager().cleanupForNextTest();
    }

    ProjectManagerImpl projectManager = (ProjectManagerImpl)ProjectManager.getInstance();
    if (projectManager.isDefaultProjectInitialized()) {
      Project defaultProject = projectManager.getDefaultProject();
      ((PsiManagerEx)PsiManager.getInstance(defaultProject)).getFileManager().cleanupForNextTest();
    }

    LocalFileSystemImpl localFileSystem = (LocalFileSystemImpl)LocalFileSystem.getInstance();
    if (localFileSystem != null) {
      localFileSystem.cleanupForNextTest();
    }
  }

  // Lazily computed set of VFS files (index roots and their subtrees/ancestors)
  // that are expected to stay alive across tests.
  private static Set<VirtualFile> eternallyLivingFiles() {
    if (ourEternallyLivingFilesCache != null) {
      return ourEternallyLivingFilesCache;
    }

    Set<VirtualFile> survivors = new HashSet<VirtualFile>();

    for (IndexedRootsProvider provider : IndexedRootsProvider.EP_NAME.getExtensions()) {
      for (VirtualFile file : IndexableSetContributor.getRootsToIndex(provider)) {
        registerSurvivor(survivors, file);
      }
    }

    ourEternallyLivingFilesCache = survivors;
    return survivors;
  }

  public static void addSurvivingFiles(@NotNull Collection<VirtualFile> files) {
    for (VirtualFile each : files) {
      registerSurvivor(eternallyLivingFiles(), each);
    }
  }

  private static void registerSurvivor(Set<VirtualFile> survivors, VirtualFile file) {
    addSubTree(file, survivors);
    // Walk up to the root; stop as soon as an ancestor is already registered.
    while (file != null && survivors.add(file)) {
      file = file.getParent();
    }
  }

  private static void addSubTree(VirtualFile root, Set<VirtualFile> to) {
    if (root instanceof VirtualDirectoryImpl) {
      // Only cached children are visited — this must not force VFS loading.
      for (VirtualFile child : ((VirtualDirectoryImpl)root).getCachedChildren()) {
        if (child instanceof VirtualDirectoryImpl) {
          to.add(child);
          addSubTree(child, to);
        }
      }
    }
  }

  // NOTE: each teardown step is wrapped so that every failure is collected into
  // 'result' and later steps still run. (Method continues past the end of this chunk.)
  @Override
  protected void tearDown() throws Exception {
    CompositeException result = new CompositeException();
    if (myProject != null) {
      try {
        LightPlatformTestCase.doTearDown(getProject(), ourApplication, false);
      }
      catch (Throwable e) {
        result.add(e);
      }
    }

    try {
      CompositeException damage = checkForSettingsDamage();
      result.add(damage);
    }
    catch (Throwable e) {
      result.add(e);
    }
    try {
      Project project = getProject();
      disposeProject(result);

      if (project != null) {
        try {
          InjectedLanguageManagerImpl.checkInjectorsAreDisposed(project);
        }
        catch (AssertionError e) {
          result.add(e);
        }
      }
      try {
        for (final File fileToDelete : myFilesToDelete) {
          delete(fileToDelete);
        }
        LocalFileSystem.getInstance().refreshIoFiles(myFilesToDelete);
      }
      catch (Throwable e) {
        result.add(e);
      }

      if (!myAssertionsInTestDetected) {
        if (IdeaLogger.ourErrorsOccurred != null) {
          result.add(IdeaLogger.ourErrorsOccurred);
        }
      }

      try {
        super.tearDown();
      }
      catch (Throwable e) {
        result.add(e);
      }

      try {
        if (myEditorListenerTracker != null) {
          myEditorListenerTracker.checkListenersLeak();
        }
      }
      catch (AssertionError error)
{ result.add(error); } try { if (myThreadTracker != null) { myThreadTracker.checkLeak(); } } catch (AssertionError error) { result.add(error); } try { LightPlatformTestCase.checkEditorsReleased(); } catch (Throwable error) { result.add(error); } } finally { myProjectManager = null; myProject = null; myModule = null; myFilesToDelete.clear(); myEditorListenerTracker = null; myThreadTracker = null; ourTestCase = null; } if (!result.isEmpty()) throw result; } private void disposeProject(@NotNull CompositeException result) /* throws nothing */ { try { DocumentCommitThread.getInstance().clearQueue(); // sometimes SwingUtilities maybe confused about EDT at this point if (SwingUtilities.isEventDispatchThread()) { UIUtil.dispatchAllInvocationEvents(); } } catch (Exception e) { result.add(e); } try { if (myProject != null) { ApplicationManager.getApplication().runWriteAction(new Runnable() { @Override public void run() { ProjectManagerEx projectManager = ProjectManagerEx.getInstanceEx(); if (projectManager instanceof ProjectManagerImpl) { Collection<Project> projectsStillOpen = projectManager.closeTestProject(myProject); if (!projectsStillOpen.isEmpty()) { Project project = projectsStillOpen.iterator().next(); String message = "Test project is not disposed: " + project + ";\n created in: " + getCreationPlace(project); try { projectManager.closeAndDispose(project); } catch (Exception e) { // ignore, we already have something to throw } throw new AssertionError(message); } } Disposer.dispose(myProject); } }); } } catch (Exception e) { result.add(e); } finally { if (myProject != null) { try { PsiDocumentManager documentManager = myProject.getComponent(PsiDocumentManager.class, null); if (documentManager != null) { EditorFactory.getInstance().getEventMulticaster().removeDocumentListener((DocumentListener)documentManager); } } catch (Exception ignored) { } myProject = null; } } } protected void resetAllFields() { resetClassFields(getClass()); } @Override protected final <T 
extends Disposable> T disposeOnTearDown(T disposable) { Disposer.register(myProject, disposable); return disposable; } private void resetClassFields(final Class<?> aClass) { try { clearDeclaredFields(this, aClass); } catch (IllegalAccessException e) { LOG.error(e); } if (aClass == PlatformTestCase.class) return; resetClassFields(aClass.getSuperclass()); } private String getFullName() { return getClass().getName() + "." + getName(); } private void delete(File file) { boolean b = FileUtil.delete(file); if (!b && file.exists() && !myAssertionsInTestDetected) { fail("Can't delete " + file.getAbsolutePath() + " in " + getFullName()); } } protected void setUpJdk() { //final ProjectJdkEx jdk = ProjectJdkUtil.getDefaultJdk("java 1.4"); final Sdk jdk = getTestProjectJdk(); // ProjectJdkImpl jdk = ProjectJdkTable.getInstance().addJdk(defaultJdk); Module[] modules = ModuleManager.getInstance(myProject).getModules(); for (Module module : modules) { ModuleRootModificationUtil.setModuleSdk(module, jdk); } } @Nullable protected Sdk getTestProjectJdk() { return null; } @Override public void runBare() throws Throwable { if (!shouldRunTest()) return; replaceIdeEventQueueSafely(); try { runBareImpl(); } finally { try { SwingUtilities.invokeAndWait(new Runnable() { @Override public void run() { cleanupApplicationCaches(getProject()); resetAllFields(); } }); } catch (Throwable e) { // Ignore } } } private void runBareImpl() throws Throwable { final Throwable[] throwables = new Throwable[1]; Runnable runnable = new Runnable() { @Override public void run() { ourTestThread = Thread.currentThread(); ourTestTime = getTimeRequired(); try { try { setUp(); } catch (Throwable e) { CompositeException result = new CompositeException(e); try { tearDown(); } catch (Throwable th) { result.add(th); } throw result; } try { myAssertionsInTestDetected = true; runTest(); myAssertionsInTestDetected = false; } catch (Throwable e) { throwables[0] = e; throw e; } finally { tearDown(); } } catch (Throwable 
throwable) { if (throwables[0] == null) { // report tearDown() problems if only no exceptions thrown from runTest() throwables[0] = throwable; } } finally { ourTestThread = null; } } }; runBareRunnable(runnable); if (IdeaLogger.ourErrorsOccurred != null) { throw IdeaLogger.ourErrorsOccurred; } if (throwables[0] != null) { throw throwables[0]; } // just to make sure all deferred Runnable's to finish waitForAllLaters(); if (IdeaLogger.ourErrorsOccurred != null) { throw IdeaLogger.ourErrorsOccurred; } /* if (++LEAK_WALKS % 1 == 0) { LeakHunter.checkLeak(ApplicationManager.getApplication(), ProjectImpl.class, new Processor<ProjectImpl>() { @Override public boolean process(ProjectImpl project) { return !project.isDefault() && !LightPlatformTestCase.isLight(project); } }); } */ } private static int LEAK_WALKS; private static void waitForAllLaters() throws InterruptedException, InvocationTargetException { for (int i = 0; i < 3; i++) { SwingUtilities.invokeAndWait(EmptyRunnable.getInstance()); } } protected boolean isRunInEdt() { return true; } protected void runBareRunnable(Runnable runnable) throws Throwable { if (isRunInEdt()) { SwingUtilities.invokeAndWait(runnable); } else { runnable.run(); } } protected boolean isRunInWriteAction() { return true; } @Override protected void invokeTestRunnable(@NotNull final Runnable runnable) throws Exception { final Exception[] e = new Exception[1]; Runnable runnable1 = new Runnable() { @Override public void run() { try { if (ApplicationManager.getApplication().isDispatchThread() && isRunInWriteAction()) { ApplicationManager.getApplication().runWriteAction(runnable); } else { runnable.run(); } } catch (Exception e1) { e[0] = e1; } } }; if (annotatedWith(WrapInCommand.class)) { CommandProcessor.getInstance().executeCommand(myProject, runnable1, "", null); } else { runnable1.run(); } if (e[0] != null) { throw e[0]; } } @Override public Object getData(String dataId) { return myProject == null ? 
null : new TestDataProvider(myProject).getData(dataId); } public static File createTempDir(@NonNls final String prefix) throws IOException { return createTempDir(prefix, true); } public static File createTempDir(@NonNls final String prefix, final boolean refresh) throws IOException { final File tempDirectory = FileUtilRt.createTempDirectory(TEST_DIR_PREFIX + prefix, null, false); myFilesToDelete.add(tempDirectory); if (refresh) { getVirtualFile(tempDirectory); } return tempDirectory; } @Nullable protected static VirtualFile getVirtualFile(final File file) { return LocalFileSystem.getInstance().refreshAndFindFileByIoFile(file); } protected File createTempDirectory() throws IOException { return createTempDir(getTestName(true)); } protected File createTempDirectory(final boolean refresh) throws IOException { return createTempDir(getTestName(true), refresh); } protected File createTempFile(String name, @Nullable String text) throws IOException { File directory = createTempDirectory(); File file = new File(directory, name); if (!file.createNewFile()) { throw new IOException("Can't create " + file); } if (text != null) { FileUtil.writeToFile(file, text); } return file; } public static void setContentOnDisk(@NotNull File file, byte[] bom, @NotNull String content, @NotNull Charset charset) throws IOException { FileOutputStream stream = new FileOutputStream(file); if (bom != null) { stream.write(bom); } OutputStreamWriter writer = new OutputStreamWriter(stream, charset); try { writer.write(content); } finally { writer.close(); } } public static VirtualFile createTempFile(@NonNls @NotNull String ext, @Nullable byte[] bom, @NonNls @NotNull String content, @NotNull Charset charset) throws IOException { File temp = FileUtil.createTempFile("copy", "." 
+ ext); setContentOnDisk(temp, bom, content, charset); myFilesToDelete.add(temp); final VirtualFile file = getVirtualFile(temp); assert file != null : temp; return file; } @Nullable protected PsiFile getPsiFile(final Document document) { return PsiDocumentManager.getInstance(getProject()).getPsiFile(document); } /** * @deprecated calling this method is no longer necessary */ public static void initPlatformLangPrefix() { initPlatformPrefix(IDEA_MARKER_CLASS, "PlatformLangXml"); } /** * This is the main point to set up your platform prefix. This allows you to use some sub-set of * core plugin descriptors to make initialization faster (e.g. for running tests in classpath of the module where the test is located). * It is calculated by some marker class presence in classpath. * Note that it applies NEGATIVE logic for detection: prefix will be set if only marker class * is NOT present in classpath. * Also, only the very FIRST call to this method will take effect. * * @param classToTest marker class qualified name e.g. {@link #IDEA_MARKER_CLASS}. * @param prefix platform prefix to be set up if marker class not found in classpath. 
* @deprecated calling this method is no longer necessary */ public static void initPlatformPrefix(String classToTest, String prefix) { if (!ourPlatformPrefixInitialized) { ourPlatformPrefixInitialized = true; boolean isUltimate = true; try { PlatformTestCase.class.getClassLoader().loadClass(classToTest); } catch (ClassNotFoundException e) { isUltimate = false; } if (!isUltimate) { setPlatformPrefix(prefix); } } } private static void setPlatformPrefix(String prefix) { System.setProperty(PlatformUtils.PLATFORM_PREFIX_KEY, prefix); ourPlatformPrefixInitialized = true; } @Retention(RetentionPolicy.RUNTIME) @Target({ElementType.METHOD, ElementType.TYPE}) public @interface WrapInCommand { } protected static VirtualFile createChildData(@NotNull final VirtualFile dir, @NotNull @NonNls final String name) { return new WriteAction<VirtualFile>() { @Override protected void run(@NotNull Result<VirtualFile> result) throws Throwable { result.setResult(dir.createChildData(null, name)); } }.execute().throwException().getResultObject(); } protected static VirtualFile createChildDirectory(@NotNull final VirtualFile dir, @NotNull @NonNls final String name) { return new WriteAction<VirtualFile>() { @Override protected void run(@NotNull Result<VirtualFile> result) throws Throwable { result.setResult(dir.createChildDirectory(null, name)); } }.execute().throwException().getResultObject(); } protected static void delete(@NotNull final VirtualFile file) { ApplicationManager.getApplication().runWriteAction(new Runnable() { @Override public void run() { try { file.delete(null); } catch (IOException e) { fail(); } } }); } protected static void rename(@NotNull final VirtualFile vFile1, @NotNull final String newName) { new WriteCommandAction.Simple(null) { @Override protected void run() throws Throwable { vFile1.rename(this, newName); } }.execute().throwException(); } protected static void move(@NotNull final VirtualFile vFile1, @NotNull final VirtualFile newFile) { new 
WriteCommandAction.Simple(null) { @Override protected void run() throws Throwable { vFile1.move(this, newFile); } }.execute().throwException(); } protected static VirtualFile copy(@NotNull final VirtualFile file, @NotNull final VirtualFile newParent, @NotNull final String copyName) { final VirtualFile[] copy = new VirtualFile[1]; new WriteCommandAction.Simple(null) { @Override protected void run() throws Throwable { copy[0] = file.copy(this, newParent, copyName); } }.execute().throwException(); return copy[0]; } public static void setFileText(@NotNull final VirtualFile file, @NotNull final String text) throws IOException { new WriteAction() { @Override protected void run(@NotNull Result result) throws Throwable { VfsUtil.saveText(file, text); } }.execute().throwException(); } public static void setBinaryContent(final VirtualFile file, final byte[] content) { new WriteAction() { @Override protected void run(@NotNull Result result) throws Throwable { file.setBinaryContent(content); } }.execute().throwException(); } }
/** * <copyright> * * Copyright (c) 2010 SAP AG. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Reiner Hille-Doering (SAP AG) - initial API and implementation and/or initial documentation * * </copyright> */ package org.eclipse.bpmn2.impl; import com.google.gwt.user.client.rpc.GwtTransient; import org.eclipse.bpmn2.Bpmn2Package; import org.eclipse.bpmn2.ItemDefinition; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.InternalEObject; import org.eclipse.emf.ecore.impl.ENotificationImpl; /** * <!-- begin-user-doc --> * An implementation of the model object '<em><b>Error</b></em>'. * <!-- end-user-doc --> * <p> * The following features are implemented: * </p> * <ul> * <li>{@link org.eclipse.bpmn2.impl.ErrorImpl#getErrorCode <em>Error Code</em>}</li> * <li>{@link org.eclipse.bpmn2.impl.ErrorImpl#getName <em>Name</em>}</li> * <li>{@link org.eclipse.bpmn2.impl.ErrorImpl#getStructureRef <em>Structure Ref</em>}</li> * </ul> * * @generated */ public class ErrorImpl extends RootElementImpl implements org.eclipse.bpmn2.Error { /** * The default value of the '{@link #getErrorCode() <em>Error Code</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getErrorCode() * @generated * @ordered */ protected static final String ERROR_CODE_EDEFAULT = null; /** * The cached value of the '{@link #getErrorCode() <em>Error Code</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getErrorCode() * @generated * @ordered */ @GwtTransient protected String errorCode = ERROR_CODE_EDEFAULT; /** * The default value of the '{@link #getName() <em>Name</em>}' attribute. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getName() * @generated * @ordered */ protected static final String NAME_EDEFAULT = null; /** * The cached value of the '{@link #getName() <em>Name</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getName() * @generated * @ordered */ @GwtTransient protected String name = NAME_EDEFAULT; /** * The cached value of the '{@link #getStructureRef() <em>Structure Ref</em>}' reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getStructureRef() * @generated * @ordered */ @GwtTransient protected ItemDefinition structureRef; /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected ErrorImpl() { super(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EClass eStaticClass() { return Bpmn2Package.Literals.ERROR; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public String getErrorCode() { return errorCode; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void setErrorCode(String newErrorCode) { String oldErrorCode = errorCode; errorCode = newErrorCode; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, Bpmn2Package.ERROR__ERROR_CODE, oldErrorCode, errorCode)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public String getName() { return name; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void setName(String newName) { String oldName = name; name = newName; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, Bpmn2Package.ERROR__NAME, oldName, name)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public ItemDefinition getStructureRef() { if (structureRef != null && structureRef.eIsProxy()) { InternalEObject oldStructureRef = (InternalEObject) structureRef; 
structureRef = (ItemDefinition) eResolveProxy(oldStructureRef); if (structureRef != oldStructureRef) { if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.RESOLVE, Bpmn2Package.ERROR__STRUCTURE_REF, oldStructureRef, structureRef)); } } return structureRef; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public ItemDefinition basicGetStructureRef() { return structureRef; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void setStructureRef(ItemDefinition newStructureRef) { ItemDefinition oldStructureRef = structureRef; structureRef = newStructureRef; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, Bpmn2Package.ERROR__STRUCTURE_REF, oldStructureRef, structureRef)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public Object eGet(int featureID, boolean resolve, boolean coreType) { switch (featureID) { case Bpmn2Package.ERROR__ERROR_CODE: return getErrorCode(); case Bpmn2Package.ERROR__NAME: return getName(); case Bpmn2Package.ERROR__STRUCTURE_REF: if (resolve) return getStructureRef(); return basicGetStructureRef(); } return super.eGet(featureID, resolve, coreType); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eSet(int featureID, Object newValue) { switch (featureID) { case Bpmn2Package.ERROR__ERROR_CODE: setErrorCode((String) newValue); return; case Bpmn2Package.ERROR__NAME: setName((String) newValue); return; case Bpmn2Package.ERROR__STRUCTURE_REF: setStructureRef((ItemDefinition) newValue); return; } super.eSet(featureID, newValue); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eUnset(int featureID) { switch (featureID) { case Bpmn2Package.ERROR__ERROR_CODE: setErrorCode(ERROR_CODE_EDEFAULT); return; case Bpmn2Package.ERROR__NAME: setName(NAME_EDEFAULT); return; case Bpmn2Package.ERROR__STRUCTURE_REF: 
setStructureRef((ItemDefinition) null); return; } super.eUnset(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public boolean eIsSet(int featureID) { switch (featureID) { case Bpmn2Package.ERROR__ERROR_CODE: return ERROR_CODE_EDEFAULT == null ? errorCode != null : !ERROR_CODE_EDEFAULT.equals(errorCode); case Bpmn2Package.ERROR__NAME: return NAME_EDEFAULT == null ? name != null : !NAME_EDEFAULT.equals(name); case Bpmn2Package.ERROR__STRUCTURE_REF: return structureRef != null; } return super.eIsSet(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public String toString() { if (eIsProxy()) return super.toString(); StringBuilder result = new StringBuilder(super.toString()); result.append(" (errorCode: "); result.append(errorCode); result.append(", name: "); result.append(name); result.append(')'); return result.toString(); } } //ErrorImpl
package de.bitshares_munich.smartcoinswallet;

import android.Manifest;
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.Color;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.support.v4.app.ActivityCompat;
import android.util.Log;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.EncodeHintType;
import com.google.zxing.MultiFormatWriter;
import com.google.zxing.WriterException;
import com.google.zxing.common.BitMatrix;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import butterknife.Bind;
import butterknife.ButterKnife;
import butterknife.OnClick;
import de.bitshares_munich.models.TransactionSmartCoin;
import de.bitshares_munich.utils.Application;
import de.bitshares_munich.utils.IWebService;
import de.bitshares_munich.utils.ServiceGenerator;
import de.bitsharesmunich.graphenej.Invoice;
import de.bitsharesmunich.graphenej.LineItem;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;

/**
 * Created by Syed Muhammad Muzzammil on 5/16/16.
 *
 * Screen that renders a payment-request QR code for the current account and
 * lets the user share it. The target account, amount and currency arrive as
 * Intent extras; the QR payload is built from a graphenej {@link Invoice}.
 */
public class ReceiveActivity extends BaseActivity {

    // Storage Permissions
    private static final int REQUEST_EXTERNAL_STORAGE = 1;
    private static final String[] PERMISSIONS_STORAGE = {
            Manifest.permission.READ_EXTERNAL_STORAGE,
            Manifest.permission.WRITE_EXTERNAL_STORAGE
    };

    @Bind(R.id.username)
    TextView tvUsername;
    @Bind(R.id.notfound)
    TextView notfound;
    @Bind(R.id.qrimage)
    ImageView qrimage;
    @Bind(R.id.tvBlockNumberHead_rcv_screen_activity)
    TextView tvBlockNumberHead;
    @Bind(R.id.tvAppVersion_rcv_screen_activity)
    TextView tvAppVersion;
    @Bind(R.id.ivSocketConnected_rcv_screen_activity)
    ImageView ivSocketConnected;

    ProgressDialog progressDialog;
    String price = "";
    String currency = "";
    String to = "";
    String account_id = "";
    String orderId = "";
    Call<TransactionSmartCoin[]> transactionSmartcoinService;
    private String TAG = this.getClass().getName();

    // Handler driving the once-per-second connection-status refresh. Kept as a
    // field so pending callbacks can be removed in onDestroy (otherwise the
    // self-reposting Runnable leaks this Activity forever).
    private final Handler blockHeadHandler = new Handler();

    /**
     * Prompts the user for external-storage permission when it has not been
     * granted yet; no-op otherwise. Needed before sharing the QR image file.
     */
    public static void verifyStoragePermissions(Activity activity) {
        // Check if we have write permission
        int permission = ActivityCompat.checkSelfPermission(activity,
                Manifest.permission.WRITE_EXTERNAL_STORAGE);
        if (permission != PackageManager.PERMISSION_GRANTED) {
            // We don't have permission so prompt the user
            ActivityCompat.requestPermissions(activity, PERMISSIONS_STORAGE,
                    REQUEST_EXTERNAL_STORAGE);
        }
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.receive_activity);
        ButterKnife.bind(this);
        setBackButton(true);
        setTitle(getResources().getString(R.string.rcv_screen_name));
        progressDialog = new ProgressDialog(this);
        orderId = UUID.randomUUID().toString();

        // Pull the payment-request parameters out of the launching Intent.
        Intent intent = getIntent();
        if (intent.hasExtra(getString(R.string.to))) {
            to = intent.getStringExtra(getString(R.string.to));
            String concate = this.getString(R.string.pay_to) + " : " + to;
            tvUsername.setText(concate);
        }
        if (intent.hasExtra(getString(R.string.account_id))) {
            account_id = intent.getStringExtra(getString(R.string.account_id));
        }
        if (intent.hasExtra(getString(R.string.price))) {
            price = intent.getStringExtra(getString(R.string.price));
        } else {
            price = "0";
        }
        if (intent.hasExtra(getString(R.string.currency))) {
            currency = intent.getStringExtra(getString(R.string.currency));
        } else {
            currency = "BTS";
        }

        if (price.isEmpty()) {
            notfound.setText(getString(R.string.no_amount_requested));
        } else {
            String concate = this.getString(R.string.amount) + ": " + price + " " + currency
                    + " " + this.getString(R.string.requested);
            notfound.setText(concate);
        }

        tvAppVersion.setText("v" + BuildConfig.VERSION_NAME + getString(R.string.beta));
        updateBlockNumberHead();
    }

    @Override
    public void onWindowFocusChanged(boolean hasFocus) {
        // Rendered here (not in onCreate) because the QR bitmap is sized from
        // qrimage's measured width/height, which are 0 before first layout.
        super.onWindowFocusChanged(hasFocus);

        // FIX: the "price" extra may be empty or non-numeric; Double.valueOf
        // would throw NumberFormatException and crash the screen. Fall back to 0
        // (the same value used when no price extra is supplied at all).
        double requestedAmount;
        try {
            requestedAmount = price.isEmpty() ? 0 : Double.parseDouble(price);
        } catch (NumberFormatException e) {
            Log.e(TAG, "Non-numeric price extra: " + price);
            requestedAmount = 0;
        }

        LineItem[] items = new LineItem[]{new LineItem("transfer", 1, requestedAmount)};
        Invoice invoice = new Invoice(to, "", "", currency.replace("bit", ""), items, "", "");
        try {
            Bitmap bitmap = encodeAsBitmap(Invoice.toQrCode(invoice), "#006500");
            qrimage.setImageBitmap(bitmap);
        } catch (WriterException e) {
            Log.e(TAG, "WriterException while trying to encode QR-code data. Msg: " + e.getMessage());
        }
    }

    @OnClick(R.id.backbutton)
    void onBackButtonPressed() {
        super.onBackPressed();
    }

    /** Shares the rendered QR code (as a PNG on external storage) plus a text blurb. */
    @OnClick(R.id.sharebtn)
    public void TellaFriend() {
        verifyStoragePermissions(this);
        qrimage.buildDrawingCache();
        Bitmap bitmap = qrimage.getDrawingCache();
        File mFile = savebitmap(bitmap);
        try {
            String shareText;
            // FIX: was `price != "0"` — a reference comparison that is almost
            // always true for Intent-supplied strings; use equals() instead.
            if (!price.isEmpty() && !"0".equals(price)) {
                shareText = this.getString(R.string.please_pay) + " " + price + " " + currency
                        + " " + this.getString(R.string.to) + " " + to;
            } else {
                shareText = this.getString(R.string.please_pay) + ": " + to;
            }
            Intent sharingIntent = new Intent(Intent.ACTION_SEND);
            // NOTE(review): Uri.fromFile() triggers FileUriExposedException on
            // API 24+; migrating to FileProvider is recommended — confirm the
            // app's targetSdkVersion before relying on this.
            Uri uri = Uri.fromFile(mFile);
            sharingIntent.setData(uri);
            sharingIntent.setType("*/*");
            sharingIntent.putExtra(Intent.EXTRA_SUBJECT, shareText);
            sharingIntent.putExtra(Intent.EXTRA_TEXT, shareText);
            sharingIntent.putExtra(Intent.EXTRA_STREAM, uri);
            startActivity(Intent.createChooser(sharingIntent, this.getString(R.string.share_qr_code)));
        } catch (Exception e) {
            // FIX: was an empty catch that silently dropped share failures.
            Log.e(TAG, "Unable to share QR code", e);
        }
    }

    /**
     * Writes the bitmap to {@code <external storage>/<folder_name>/QrImage.png},
     * replacing any previous copy.
     *
     * @return the written file, or {@code null} when the write failed
     */
    private File savebitmap(Bitmap bmp) {
        String extStorageDirectory = Environment.getExternalStorageDirectory().toString()
                + File.separator + getResources().getString(R.string.folder_name);
        // FIX: the target directory may not exist yet; FileOutputStream does not
        // create intermediate directories and would throw FileNotFoundException.
        File dir = new File(extStorageDirectory);
        if (!dir.exists() && !dir.mkdirs()) {
            Log.e(TAG, "Unable to create directory " + dir.getAbsolutePath());
            return null;
        }
        File file = new File(dir, "QrImage" + ".png");
        if (file.exists()) {
            file.delete();
        }
        OutputStream outStream = null;
        try {
            outStream = new FileOutputStream(file);
            bmp.compress(Bitmap.CompressFormat.PNG, 100, outStream);
            outStream.flush();
            outStream.close();
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
        return file;
    }

    /**
     * Renders {@code str} as a QR-code bitmap sized to the qrimage view.
     *
     * @param str     payload to encode
     * @param qrColor hex color string (e.g. "#006500") for the dark modules
     * @return the bitmap, or {@code null} when the payload cannot be encoded
     */
    Bitmap encodeAsBitmap(String str, String qrColor) throws WriterException {
        BitMatrix result;
        try {
            Map<EncodeHintType, Object> hints = new HashMap<EncodeHintType, Object>();
            hints.put(EncodeHintType.MARGIN, 0);
            result = new MultiFormatWriter().encode(str, BarcodeFormat.QR_CODE,
                    qrimage.getWidth(), qrimage.getHeight(), hints);
        } catch (IllegalArgumentException iae) {
            // Unsupported format
            return null;
        }
        int w = qrimage.getWidth();
        int h = qrimage.getHeight();
        // Hoisted out of the pixel loop: parsing the same color w*h times was
        // pure waste.
        int onColor = Color.parseColor(qrColor);
        int[] pixels = new int[w * h];
        for (int y = 0; y < h; y++) {
            int offset = y * (w);
            for (int x = 0; x < w; x++) {
                pixels[offset + x] = result.get(x, y) ? onColor : Color.WHITE;
            }
        }
        Bitmap bitmap = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);
        bitmap.setPixels(pixels, 0, w, 0, 0, w, h);
        return bitmap;
    }

    /** Shows the progress dialog unless it is already visible. */
    private void showDialog(String title, String msg) {
        if (progressDialog != null) {
            if (!progressDialog.isShowing()) {
                progressDialog.setTitle(title);
                progressDialog.setMessage(msg);
                progressDialog.show();
            }
        }
    }

    /** Dismisses the progress dialog if it is currently visible. */
    private void hideDialog() {
        if (progressDialog != null) {
            if (progressDialog.isShowing()) {
                progressDialog.cancel();
            }
        }
    }

    @OnClick(R.id.ivGotoKeypad)
    void gotoKeypad() {
        Intent intent = new Intent(getApplicationContext(), RequestActivity.class);
        intent.putExtra(getString(R.string.to), to);
        intent.putExtra(getString(R.string.account_id), account_id);
        intent.putExtra(SplashActivity.KEY_ASK_FOR_PIN, false);
        startActivity(intent);
        finish();
    }

    /**
     * Starts a once-per-second refresh of the block-number header and the
     * socket-connection indicator. Callbacks are cancelled in {@link #onDestroy()}.
     */
    private void updateBlockNumberHead() {
        final Activity myActivity = this;
        final Runnable updateTask = new Runnable() {
            @Override
            public void run() {
                if (Application.isConnected()) {
                    ivSocketConnected.setImageResource(R.drawable.icon_connecting);
                    tvBlockNumberHead.setText(Application.blockHead);
                    ivSocketConnected.clearAnimation();
                } else {
                    ivSocketConnected.setImageResource(R.drawable.icon_disconnecting);
                    Animation myFadeInAnimation = AnimationUtils.loadAnimation(
                            myActivity.getApplicationContext(), R.anim.flash);
                    ivSocketConnected.startAnimation(myFadeInAnimation);
                }
                blockHeadHandler.postDelayed(this, 1000);
            }
        };
        blockHeadHandler.postDelayed(updateTask, 1000);
    }

    @Override
    protected void onDestroy() {
        // FIX: the refresh Runnable reposted itself forever, keeping the
        // destroyed Activity (and its views) reachable — an Activity leak.
        blockHeadHandler.removeCallbacksAndMessages(null);
        super.onDestroy();
    }

    @OnClick(R.id.OnClickSettings_rcv_screen_activity)
    void OnClickSettings() {
        Intent intent = new Intent(this, SettingActivity.class);
        startActivity(intent);
    }
}
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.squareup.okhttp.internal.http;

import com.squareup.okhttp.Connection;
import com.squareup.okhttp.Request;
import com.squareup.okhttp.Response;
import com.squareup.okhttp.internal.AbstractOutputStream;
import com.squareup.okhttp.internal.Util;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.CacheRequest;
import java.net.ProtocolException;
import java.net.Socket;

import static com.squareup.okhttp.internal.Util.checkOffsetAndCount;
import static com.squareup.okhttp.internal.http.StatusLine.HTTP_CONTINUE;

/**
 * An HTTP/1.1 {@link Transport}: writes request headers and bodies to
 * {@code socketOut} and parses response headers and bodies from
 * {@code socketIn}. Request bodies may be fixed-length, chunked, or
 * buffered for retry; response bodies may be fixed-length, chunked, or
 * unknown-length (see the nested stream classes below).
 */
public final class HttpTransport implements Transport {
  /**
   * The timeout to use while discarding a stream of input data. Since this is
   * used for connection reuse, this timeout should be significantly less than
   * the time it takes to establish a new connection.
   */
  private static final int DISCARD_STREAM_TIMEOUT_MILLIS = 100;

  /** Default target size, in bytes, for each chunk written by {@link ChunkedOutputStream}. */
  public static final int DEFAULT_CHUNK_LENGTH = 1024;

  private final HttpEngine httpEngine;
  private final InputStream socketIn;
  private final OutputStream socketOut;

  /**
   * This stream buffers the request headers and the request body when their
   * combined size is less than MAX_REQUEST_BUFFER_LENGTH. By combining them
   * we can save socket writes, which in turn saves a packet transmission.
   * This is socketOut if the request size is large or unknown.
   */
  private OutputStream requestOut;

  public HttpTransport(HttpEngine httpEngine, OutputStream outputStream, InputStream inputStream) {
    this.httpEngine = httpEngine;
    this.socketOut = outputStream;
    this.requestOut = outputStream;
    this.socketIn = inputStream;
  }

  /**
   * Returns an output stream for the request body. Depending on the engine's
   * buffering policy and what is known about the body length, this is either a
   * retryable in-memory buffer, a chunked encoder, or a fixed-length writer.
   *
   * @throws IllegalStateException if the body cannot be streamed (no chunked
   *     encoding and no known content length), or if a buffered body would
   *     exceed 2 GiB.
   */
  @Override public OutputStream createRequestBody(Request request) throws IOException {
    long contentLength = request.getContentLength();

    if (httpEngine.bufferRequestBody) {
      // RetryableOutputStream buffers in memory, which is capped at int-sized lengths.
      if (contentLength > Integer.MAX_VALUE) {
        throw new IllegalStateException("Use setFixedLengthStreamingMode() or "
            + "setChunkedStreamingMode() for requests larger than 2 GiB.");
      }

      if (contentLength != -1) {
        // Buffer a request body of a known length.
        writeRequestHeaders(request);
        return new RetryableOutputStream((int) contentLength);
      } else {
        // Buffer a request body of an unknown length. Don't write request
        // headers until the entire body is ready; otherwise we can't set the
        // Content-Length header correctly.
        return new RetryableOutputStream();
      }
    }

    if (request.isChunked()) {
      // Stream a request body of unknown length.
      writeRequestHeaders(request);
      return new ChunkedOutputStream(requestOut, DEFAULT_CHUNK_LENGTH);
    }

    if (contentLength != -1) {
      // Stream a request body of a known length.
      writeRequestHeaders(request);
      return new FixedLengthOutputStream(requestOut, contentLength);
    }

    throw new IllegalStateException(
        "Cannot stream a request body without chunked encoding or a known content length!");
  }

  /**
   * Flushes any buffered request bytes to the socket. After this call,
   * {@code requestOut} aliases {@code socketOut}, so later writes go straight
   * to the socket instead of the buffer.
   */
  @Override public void flushRequest() throws IOException {
    requestOut.flush();
    requestOut = socketOut;
  }

  /** Replays a previously buffered (retryable) request body onto the request stream. */
  @Override public void writeRequestBody(RetryableOutputStream requestBody) throws IOException {
    requestBody.writeToSocket(requestOut);
  }

  /**
   * Prepares the HTTP headers and sends them to the server.
   *
   * <p>For streaming requests with a body, headers must be prepared
   * <strong>before</strong> the output stream has been written to. Otherwise
   * the body would need to be buffered!
   *
   * <p>For non-streaming requests with a body, headers must be prepared
   * <strong>after</strong> the output stream has been written to and closed.
   * This ensures that the {@code Content-Length} header field receives the
   * proper value.
   */
  public void writeRequestHeaders(Request request) throws IOException {
    httpEngine.writingRequestHeaders();
    String requestLine = RequestLine.get(request,
        httpEngine.connection.getRoute().getProxy().type(),
        httpEngine.connection.getHttpMinorVersion());
    writeRequest(requestOut, request.getHeaders(), requestLine);
  }

  @Override public Response.Builder readResponseHeaders() throws IOException {
    return readResponse(socketIn);
  }

  /** Returns bytes of a request header for sending on an HTTP transport. */
  public static void writeRequest(OutputStream out, Headers headers, String requestLine)
      throws IOException {
    // Assemble the full header block in memory first so it goes out in as few
    // socket writes as possible. Headers are encoded as ISO-8859-1 per HTTP/1.1.
    StringBuilder result = new StringBuilder(256);
    result.append(requestLine).append("\r\n");
    for (int i = 0; i < headers.length(); i++) {
      result.append(headers.getFieldName(i))
          .append(": ")
          .append(headers.getValue(i))
          .append("\r\n");
    }
    result.append("\r\n");
    out.write(result.toString().getBytes("ISO-8859-1"));
  }

  /** Parses bytes of a response header from an HTTP transport. */
  public static Response.Builder readResponse(InputStream in) throws IOException {
    // Loop to skip interim "100 Continue" responses; only a non-100 status is returned.
    while (true) {
      String statusLineString = Util.readAsciiLine(in);
      StatusLine statusLine = new StatusLine(statusLineString);

      Response.Builder responseBuilder = new Response.Builder()
          .statusLine(statusLine)
          .header(SyntheticHeaders.SELECTED_TRANSPORT, "http/1.1");

      Headers.Builder headersBuilder = new Headers.Builder();
      headersBuilder.readHeaders(in);
      responseBuilder.headers(headersBuilder.build());

      if (statusLine.code() != HTTP_CONTINUE) return responseBuilder;
    }
  }

  /**
   * Returns true if the connection can be recycled for another exchange:
   * the request body was fully written, neither side asked for
   * {@code Connection: close}, the response body length was known, and any
   * remaining response bytes could be discarded quickly.
   */
  public boolean makeReusable(boolean streamCanceled, OutputStream requestBodyOut,
      InputStream responseBodyIn) {
    if (streamCanceled) {
      return false;
    }

    // We cannot reuse sockets that have incomplete output.
    if (requestBodyOut != null && !((AbstractOutputStream) requestBodyOut).isClosed()) {
      return false;
    }

    // If the request specified that the connection shouldn't be reused, don't reuse it.
    if (httpEngine.getRequest().hasConnectionClose()) {
      return false;
    }

    // If the response specified that the connection shouldn't be reused, don't reuse it.
    if (httpEngine.getResponse() != null && httpEngine.getResponse().hasConnectionClose()) {
      return false;
    }

    // An unknown-length body is terminated by the connection closing, so it can't be reused.
    if (responseBodyIn instanceof UnknownLengthHttpInputStream) {
      return false;
    }

    if (responseBodyIn != null) {
      return discardStream(httpEngine, responseBodyIn);
    }

    return true;
  }

  /**
   * Discards the response body so that the connection can be reused. This
   * needs to be done judiciously, since it delays the current request in
   * order to speed up a potential future request that may never occur.
   *
   * <p>A stream may be discarded to encourage response caching (a response
   * cannot be cached unless it is consumed completely) or to enable connection
   * reuse.
   */
  private static boolean discardStream(HttpEngine httpEngine, InputStream responseBodyIn) {
    Connection connection = httpEngine.connection;
    if (connection == null) return false;
    Socket socket = connection.getSocket();
    if (socket == null) return false;
    try {
      // Temporarily shrink the read timeout so a slow server can't stall us
      // longer than it would take to just open a fresh connection.
      int socketTimeout = socket.getSoTimeout();
      socket.setSoTimeout(DISCARD_STREAM_TIMEOUT_MILLIS);
      try {
        Util.skipAll(responseBodyIn);
        return true;
      } finally {
        socket.setSoTimeout(socketTimeout);
      }
    } catch (IOException e) {
      return false;
    }
  }

  /**
   * Returns the response body stream appropriate to the response framing:
   * empty (length 0) when there is no body, chunked, fixed-length, or
   * unknown-length as a last resort.
   */
  @Override public InputStream getTransferStream(CacheRequest cacheRequest) throws IOException {
    if (!httpEngine.hasResponseBody()) {
      return new FixedLengthInputStream(socketIn, cacheRequest, httpEngine, 0);
    }

    if (httpEngine.getResponse().isChunked()) {
      return new ChunkedInputStream(socketIn, cacheRequest, this);
    }

    if (httpEngine.getResponse().getContentLength() != -1) {
      return new FixedLengthInputStream(socketIn, cacheRequest, httpEngine,
          httpEngine.getResponse().getContentLength());
    }

    // Wrap the input stream from the connection (rather than just returning
    // "socketIn" directly here), so that we can control its use after the
    // reference escapes.
    return new UnknownLengthHttpInputStream(socketIn, cacheRequest, httpEngine);
  }

  /** An HTTP body with a fixed length known in advance. */
  private static final class FixedLengthOutputStream extends AbstractOutputStream {
    private final OutputStream socketOut;
    private long bytesRemaining;

    private FixedLengthOutputStream(OutputStream socketOut, long bytesRemaining) {
      this.socketOut = socketOut;
      this.bytesRemaining = bytesRemaining;
    }

    /**
     * Writes straight through to the socket, enforcing that no more than the
     * declared content length is ever sent.
     */
    @Override public void write(byte[] buffer, int offset, int count) throws IOException {
      checkNotClosed();
      checkOffsetAndCount(buffer.length, offset, count);
      if (count > bytesRemaining) {
        throw new ProtocolException("expected " + bytesRemaining + " bytes but received " + count);
      }
      socketOut.write(buffer, offset, count);
      bytesRemaining -= count;
    }

    @Override public void flush() throws IOException {
      if (closed) {
        return; // don't throw; this stream might have been closed on the caller's behalf
      }
      socketOut.flush();
    }

    /**
     * Marks the body complete. Fails if fewer bytes were written than the
     * declared content length.
     */
    @Override public void close() throws IOException {
      if (closed) {
        return;
      }
      closed = true;
      if (bytesRemaining > 0) {
        throw new ProtocolException("unexpected end of stream");
      }
    }
  }

  /**
   * An HTTP body with alternating chunk sizes and chunk bodies. Chunks are
   * buffered until {@code maxChunkLength} bytes are ready, at which point the
   * chunk is written and the buffer is cleared.
   */
  private static final class ChunkedOutputStream extends AbstractOutputStream {
    private static final byte[] CRLF = { '\r', '\n' };
    private static final byte[] HEX_DIGITS = {
        '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'
    };
    // The terminating zero-length chunk: "0\r\n\r\n" (no trailers are sent).
    private static final byte[] FINAL_CHUNK = new byte[] { '0', '\r', '\n', '\r', '\n' };

    /** Scratch space for up to 8 hex digits, and then a constant CRLF. */
    private final byte[] hex = { 0, 0, 0, 0, 0, 0, 0, 0, '\r', '\n' };

    private final OutputStream socketOut;
    private final int maxChunkLength;
    private final ByteArrayOutputStream bufferedChunk;

    private ChunkedOutputStream(OutputStream socketOut, int maxChunkLength) {
      this.socketOut = socketOut;
      // Reserve room for the chunk-size line and trailing CRLF within
      // maxChunkLength, so that data + framing fits the requested size.
      this.maxChunkLength = Math.max(1, dataLength(maxChunkLength));
      this.bufferedChunk = new ByteArrayOutputStream(maxChunkLength);
    }

    /**
     * Returns the amount of data that can be transmitted in a chunk whose total
     * length (data+headers) is {@code dataPlusHeaderLength}. This is presumably
     * useful to match sizes with wire-protocol packets.
     */
    private int dataLength(int dataPlusHeaderLength) {
      int headerLength = 4; // "\r\n" after the size plus another "\r\n" after the data
      for (int i = dataPlusHeaderLength - headerLength; i > 0; i >>= 4) {
        headerLength++; // one more hex digit in the chunk-size line
      }
      return dataPlusHeaderLength - headerLength;
    }

    /**
     * Buffers small writes into whole chunks; writes of at least
     * {@code maxChunkLength} (with an empty buffer) are chunked directly to
     * the socket without copying.
     */
    @Override public synchronized void write(byte[] buffer, int offset, int count)
        throws IOException {
      checkNotClosed();
      checkOffsetAndCount(buffer.length, offset, count);

      while (count > 0) {
        int numBytesWritten;

        if (bufferedChunk.size() > 0 || count < maxChunkLength) {
          // fill the buffered chunk and then maybe write that to the stream
          numBytesWritten = Math.min(count, maxChunkLength - bufferedChunk.size());
          // TODO: skip unnecessary copies from buffer->bufferedChunk?
          bufferedChunk.write(buffer, offset, numBytesWritten);
          if (bufferedChunk.size() == maxChunkLength) {
            writeBufferedChunkToSocket();
          }
        } else {
          // write a single chunk of size maxChunkLength to the stream
          numBytesWritten = maxChunkLength;
          writeHex(numBytesWritten);
          socketOut.write(buffer, offset, numBytesWritten);
          socketOut.write(CRLF);
        }

        offset += numBytesWritten;
        count -= numBytesWritten;
      }
    }

    /**
     * Equivalent to, but cheaper than writing Integer.toHexString().getBytes()
     * followed by CRLF.
     */
    private void writeHex(int i) throws IOException {
      // Fill the scratch array right-to-left; hex[8..9] already holds CRLF.
      int cursor = 8;
      do {
        hex[--cursor] = HEX_DIGITS[i & 0xf];
      } while ((i >>>= 4) != 0);
      socketOut.write(hex, cursor, hex.length - cursor);
    }

    @Override public synchronized void flush() throws IOException {
      if (closed) {
        return; // don't throw; this stream might have been closed on the caller's behalf
      }
      writeBufferedChunkToSocket();
      socketOut.flush();
    }

    /** Flushes any buffered data as a final chunk, then writes the terminating chunk. */
    @Override public synchronized void close() throws IOException {
      if (closed) {
        return;
      }
      closed = true;
      writeBufferedChunkToSocket();
      socketOut.write(FINAL_CHUNK);
    }

    /** Emits the buffered bytes as one chunk (size line, data, CRLF) and resets the buffer. */
    private void writeBufferedChunkToSocket() throws IOException {
      int size = bufferedChunk.size();
      if (size <= 0) {
        return;
      }

      writeHex(size);
      bufferedChunk.writeTo(socketOut);
      bufferedChunk.reset();
      socketOut.write(CRLF);
    }
  }

  /** An HTTP body with a fixed length specified in advance. */
  private static class FixedLengthInputStream extends AbstractHttpInputStream {
    private long bytesRemaining;

    public FixedLengthInputStream(InputStream is, CacheRequest cacheRequest, HttpEngine httpEngine,
        long length) throws IOException {
      super(is, httpEngine, cacheRequest);
      bytesRemaining = length;
      // A zero-length body is already exhausted; release the connection immediately.
      if (bytesRemaining == 0) {
        endOfInput();
      }
    }

    /**
     * Reads up to {@code count} bytes, never past the declared content length.
     * A premature server-side EOF is a protocol error.
     */
    @Override public int read(byte[] buffer, int offset, int count) throws IOException {
      checkOffsetAndCount(buffer.length, offset, count);
      checkNotClosed();
      if (bytesRemaining == 0) {
        return -1;
      }
      int read = in.read(buffer, offset, (int) Math.min(count, bytesRemaining));
      if (read == -1) {
        unexpectedEndOfInput(); // the server didn't supply the promised content length
        throw new ProtocolException("unexpected end of stream");
      }
      bytesRemaining -= read;
      cacheWrite(buffer, offset, read);
      if (bytesRemaining == 0) {
        endOfInput();
      }
      return read;
    }

    @Override public int available() throws IOException {
      checkNotClosed();
      return bytesRemaining == 0 ? 0 : (int) Math.min(in.available(), bytesRemaining);
    }

    /**
     * Closes the stream. If the body wasn't fully consumed, tries to discard
     * the remainder so the connection stays reusable; otherwise marks the
     * input as prematurely ended.
     */
    @Override public void close() throws IOException {
      if (closed) {
        return;
      }
      if (bytesRemaining != 0 && !discardStream(httpEngine, this)) {
        unexpectedEndOfInput();
      }
      closed = true;
    }
  }

  /** An HTTP body with alternating chunk sizes and chunk bodies. */
  private static class ChunkedInputStream extends AbstractHttpInputStream {
    private static final int NO_CHUNK_YET = -1;
    private final HttpTransport transport;
    private int bytesRemainingInChunk = NO_CHUNK_YET;
    private boolean hasMoreChunks = true;

    ChunkedInputStream(InputStream is, CacheRequest cacheRequest, HttpTransport transport)
        throws IOException {
      super(is, transport.httpEngine, cacheRequest);
      this.transport = transport;
    }

    /**
     * Reads from the current chunk, advancing to the next chunk header when
     * the current one is exhausted. Returns -1 once the terminating
     * zero-length chunk has been seen.
     */
    @Override public int read(byte[] buffer, int offset, int count) throws IOException {
      checkOffsetAndCount(buffer.length, offset, count);
      checkNotClosed();

      if (!hasMoreChunks) {
        return -1;
      }
      if (bytesRemainingInChunk == 0 || bytesRemainingInChunk == NO_CHUNK_YET) {
        readChunkSize();
        if (!hasMoreChunks) {
          return -1;
        }
      }
      int read = in.read(buffer, offset, Math.min(count, bytesRemainingInChunk));
      if (read == -1) {
        unexpectedEndOfInput(); // the server didn't supply the promised chunk length
        throw new IOException("unexpected end of stream");
      }
      bytesRemainingInChunk -= read;
      cacheWrite(buffer, offset, read);
      return read;
    }

    /**
     * Parses the next chunk-size line. A chunk size of zero ends the body:
     * trailing headers are read and handed to the engine, and the connection
     * is released via {@code endOfInput()}.
     */
    private void readChunkSize() throws IOException {
      // read the suffix of the previous chunk
      if (bytesRemainingInChunk != NO_CHUNK_YET) {
        Util.readAsciiLine(in);
      }
      String chunkSizeString = Util.readAsciiLine(in);
      int index = chunkSizeString.indexOf(";");
      if (index != -1) {
        // Drop any chunk extensions following the size.
        chunkSizeString = chunkSizeString.substring(0, index);
      }
      try {
        bytesRemainingInChunk = Integer.parseInt(chunkSizeString.trim(), 16);
      } catch (NumberFormatException e) {
        throw new ProtocolException("Expected a hex chunk size but was " + chunkSizeString);
      }
      if (bytesRemainingInChunk == 0) {
        hasMoreChunks = false;
        Headers trailers = new Headers.Builder()
            .readHeaders(transport.socketIn)
            .build();
        httpEngine.receiveHeaders(trailers);
        endOfInput();
      }
    }

    @Override public int available() throws IOException {
      checkNotClosed();
      if (!hasMoreChunks || bytesRemainingInChunk == NO_CHUNK_YET) {
        return 0;
      }
      return Math.min(in.available(), bytesRemainingInChunk);
    }

    /**
     * Closes the stream. If the body wasn't fully consumed, tries to discard
     * the remainder so the connection stays reusable; otherwise marks the
     * input as prematurely ended.
     */
    @Override public void close() throws IOException {
      if (closed) {
        return;
      }
      if (hasMoreChunks && !discardStream(httpEngine, this)) {
        unexpectedEndOfInput();
      }
      closed = true;
    }
  }
}
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ package org.elasticsearch.index.mapper; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableFieldType; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.NormsFieldExistsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.SetOnce; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.script.field.DocValuesField; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.lookup.LeafStoredFieldsLookup; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SourceLookup; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; 
import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.Consumer; import java.util.function.Function; import java.util.stream.Collectors; import static java.util.stream.Collectors.toList; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; /** * Base class for testing {@link Mapper}s. */ public abstract class MapperTestCase extends MapperServiceTestCase { protected abstract void minimalMapping(XContentBuilder b) throws IOException; /** * Writes the field and a sample value for it to the provided {@link XContentBuilder}. * To be overridden in case the field should not be written at all in documents, * like in the case of runtime fields. */ protected void writeField(XContentBuilder builder) throws IOException { builder.field("field"); builder.value(getSampleValueForDocument()); } /** * Returns a sample value for the field, to be used in a document */ protected abstract Object getSampleValueForDocument(); /** * Returns a sample value for the field, to be used when querying the field. Normally this is the same format as * what is indexed as part of a document, and returned by {@link #getSampleValueForDocument()}, but there * are cases where fields are queried differently frow how they are indexed e.g. token_count or runtime fields */ protected Object getSampleValueForQuery() { return getSampleValueForDocument(); } /** * This test verifies that the exists query created is the appropriate one, and aligns with the data structures * being created for a document with a value for the field. This can only be verified for the minimal mapping. 
* Field types that allow configurable doc_values or norms should write their own tests that creates the different * mappings combinations and invoke {@link #assertExistsQuery(MapperService)} to verify the behaviour. */ public final void testExistsQueryMinimalMapping() throws IOException { MapperService mapperService = createMapperService(fieldMapping(this::minimalMapping)); assertExistsQuery(mapperService); assertParseMinimalWarnings(); } protected void assertExistsQuery(MapperService mapperService) throws IOException { LuceneDocument fields = mapperService.documentMapper().parse(source(this::writeField)).rootDoc(); SearchExecutionContext searchExecutionContext = createSearchExecutionContext(mapperService); MappedFieldType fieldType = mapperService.fieldType("field"); Query query = fieldType.existsQuery(searchExecutionContext); assertExistsQuery(fieldType, query, fields); } protected void assertExistsQuery(MappedFieldType fieldType, Query query, LuceneDocument fields) { if (fieldType.hasDocValues()) { assertThat(query, instanceOf(DocValuesFieldExistsQuery.class)); DocValuesFieldExistsQuery fieldExistsQuery = (DocValuesFieldExistsQuery) query; assertEquals("field", fieldExistsQuery.getField()); assertDocValuesField(fields, "field"); assertNoFieldNamesField(fields); } else if (fieldType.getTextSearchInfo().hasNorms()) { assertThat(query, instanceOf(NormsFieldExistsQuery.class)); NormsFieldExistsQuery normsFieldExistsQuery = (NormsFieldExistsQuery) query; assertEquals("field", normsFieldExistsQuery.getField()); assertHasNorms(fields, "field"); assertNoDocValuesField(fields, "field"); assertNoFieldNamesField(fields); } else { assertThat(query, instanceOf(TermQuery.class)); TermQuery termQuery = (TermQuery) query; assertEquals(FieldNamesFieldMapper.NAME, termQuery.getTerm().field()); // we always perform a term query against _field_names, even when the field // is not added to _field_names because it is not indexed nor stored assertEquals("field", 
termQuery.getTerm().text()); assertNoDocValuesField(fields, "field"); if (fieldType.isSearchable() || fieldType.isStored()) { assertNotNull(fields.getField(FieldNamesFieldMapper.NAME)); } else { assertNoFieldNamesField(fields); } } } protected static void assertNoFieldNamesField(LuceneDocument fields) { assertNull(fields.getField(FieldNamesFieldMapper.NAME)); } protected static void assertHasNorms(LuceneDocument doc, String field) { IndexableField[] fields = doc.getFields(field); for (IndexableField indexableField : fields) { IndexableFieldType indexableFieldType = indexableField.fieldType(); if (indexableFieldType.indexOptions() != IndexOptions.NONE) { assertFalse(indexableFieldType.omitNorms()); return; } } fail("field [" + field + "] should be indexed but it isn't"); } protected static void assertDocValuesField(LuceneDocument doc, String field) { IndexableField[] fields = doc.getFields(field); for (IndexableField indexableField : fields) { if (indexableField.fieldType().docValuesType().equals(DocValuesType.NONE) == false) { return; } } fail("doc_values not present for field [" + field + "]"); } protected static void assertNoDocValuesField(LuceneDocument doc, String field) { IndexableField[] fields = doc.getFields(field); for (IndexableField indexableField : fields) { assertEquals(DocValuesType.NONE, indexableField.fieldType().docValuesType()); } } protected <T> void assertDimension(boolean isDimension, Function<T, Boolean> checker) throws IOException { MapperService mapperService = createMapperService(fieldMapping(b -> { minimalMapping(b); b.field("time_series_dimension", isDimension); })); @SuppressWarnings("unchecked") // Syntactic sugar in tests T fieldType = (T) mapperService.fieldType("field"); assertThat(checker.apply(fieldType), equalTo(isDimension)); } protected <T> void assertMetricType(String metricType, Function<T, Enum<TimeSeriesParams.MetricType>> checker) throws IOException { MapperService mapperService = createMapperService(fieldMapping(b -> { 
minimalMapping(b); b.field("time_series_metric", metricType); })); @SuppressWarnings("unchecked") // Syntactic sugar in tests T fieldType = (T) mapperService.fieldType("field"); assertThat(checker.apply(fieldType).name(), equalTo(metricType)); } public final void testEmptyName() { MapperParsingException e = expectThrows(MapperParsingException.class, () -> createMapperService(mapping(b -> { b.startObject(""); minimalMapping(b); b.endObject(); }))); assertThat(e.getMessage(), containsString("name cannot be empty string")); assertParseMinimalWarnings(); } public final void testMinimalSerializesToItself() throws IOException { XContentBuilder orig = JsonXContent.contentBuilder().startObject(); createMapperService(fieldMapping(this::minimalMapping)).documentMapper().mapping().toXContent(orig, ToXContent.EMPTY_PARAMS); orig.endObject(); XContentBuilder parsedFromOrig = JsonXContent.contentBuilder().startObject(); createMapperService(orig).documentMapper().mapping().toXContent(parsedFromOrig, ToXContent.EMPTY_PARAMS); parsedFromOrig.endObject(); assertEquals(Strings.toString(orig), Strings.toString(parsedFromOrig)); assertParseMinimalWarnings(); } // TODO make this final once we remove FieldMapperTestCase2 public void testMinimalToMaximal() throws IOException { XContentBuilder orig = JsonXContent.contentBuilder().startObject(); createMapperService(fieldMapping(this::minimalMapping)).documentMapper().mapping().toXContent(orig, INCLUDE_DEFAULTS); orig.endObject(); XContentBuilder parsedFromOrig = JsonXContent.contentBuilder().startObject(); createMapperService(orig).documentMapper().mapping().toXContent(parsedFromOrig, INCLUDE_DEFAULTS); parsedFromOrig.endObject(); assertEquals(Strings.toString(orig), Strings.toString(parsedFromOrig)); assertParseMaximalWarnings(); } protected final void assertParseMinimalWarnings() { String[] warnings = getParseMinimalWarnings(); if (warnings.length > 0) { assertWarnings(warnings); } } protected final void assertParseMaximalWarnings() { 
String[] warnings = getParseMaximalWarnings(); if (warnings.length > 0) { assertWarnings(warnings); } } protected String[] getParseMinimalWarnings() { // Most mappers don't emit any warnings return Strings.EMPTY_ARRAY; } protected String[] getParseMaximalWarnings() { // Most mappers don't emit any warnings return Strings.EMPTY_ARRAY; } /** * Override to disable testing {@code meta} in fields that don't support it. */ protected boolean supportsMeta() { return true; } protected void metaMapping(XContentBuilder b) throws IOException { minimalMapping(b); } public final void testMeta() throws IOException { assumeTrue("Field doesn't support meta", supportsMeta()); XContentBuilder mapping = fieldMapping(b -> { metaMapping(b); b.field("meta", Collections.singletonMap("foo", "bar")); }); MapperService mapperService = createMapperService(mapping); assertEquals( XContentHelper.convertToMap(BytesReference.bytes(mapping), false, mapping.contentType()).v2(), XContentHelper.convertToMap(mapperService.documentMapper().mappingSource().uncompressed(), false, mapping.contentType()).v2() ); mapping = fieldMapping(this::metaMapping); merge(mapperService, mapping); assertEquals( XContentHelper.convertToMap(BytesReference.bytes(mapping), false, mapping.contentType()).v2(), XContentHelper.convertToMap(mapperService.documentMapper().mappingSource().uncompressed(), false, mapping.contentType()).v2() ); mapping = fieldMapping(b -> { metaMapping(b); b.field("meta", Collections.singletonMap("baz", "quux")); }); merge(mapperService, mapping); assertEquals( XContentHelper.convertToMap(BytesReference.bytes(mapping), false, mapping.contentType()).v2(), XContentHelper.convertToMap(mapperService.documentMapper().mappingSource().uncompressed(), false, mapping.contentType()).v2() ); } public final void testDeprecatedBoost() throws IOException { try { createMapperService(Version.V_7_10_0, fieldMapping(b -> { minimalMapping(b); b.field("boost", 2.0); })); String[] warnings = Strings.concatStringArrays( 
getParseMinimalWarnings(), new String[] { "Parameter [boost] on field [field] is deprecated and has no effect" } ); assertWarnings(warnings); } catch (MapperParsingException e) { assertThat(e.getMessage(), anyOf(containsString("Unknown parameter [boost]"), containsString("[boost : 2.0]"))); } MapperParsingException e = expectThrows(MapperParsingException.class, () -> createMapperService(Version.V_8_0_0, fieldMapping(b -> { minimalMapping(b); b.field("boost", 2.0); }))); assertThat(e.getMessage(), anyOf(containsString("Unknown parameter [boost]"), containsString("[boost : 2.0]"))); assertParseMinimalWarnings(); } /** * Use a {@linkplain ValueFetcher} to extract values from doc values. */ protected final List<?> fetchFromDocValues(MapperService mapperService, MappedFieldType ft, DocValueFormat format, Object sourceValue) throws IOException { SetOnce<List<?>> result = new SetOnce<>(); withLuceneIndex( mapperService, iw -> { iw.addDocument(mapperService.documentMapper().parse(source(b -> b.field(ft.name(), sourceValue))).rootDoc()); }, iw -> { SearchLookup lookup = new SearchLookup(mapperService::fieldType, fieldDataLookup()); ValueFetcher valueFetcher = new DocValueFetcher(format, lookup.getForField(ft)); IndexSearcher searcher = newSearcher(iw); LeafReaderContext context = searcher.getIndexReader().leaves().get(0); lookup.source().setSegmentAndDocument(context, 0); valueFetcher.setNextReader(context); result.set(valueFetcher.fetchValues(lookup.source(), new ArrayList<>())); } ); return result.get(); } private class UpdateCheck { final XContentBuilder init; final XContentBuilder update; final Consumer<FieldMapper> check; private UpdateCheck(CheckedConsumer<XContentBuilder, IOException> update, Consumer<FieldMapper> check) throws IOException { this.init = fieldMapping(MapperTestCase.this::minimalMapping); this.update = fieldMapping(b -> { minimalMapping(b); update.accept(b); }); this.check = check; } private UpdateCheck( CheckedConsumer<XContentBuilder, IOException> 
init, CheckedConsumer<XContentBuilder, IOException> update, Consumer<FieldMapper> check ) throws IOException { this.init = fieldMapping(init); this.update = fieldMapping(update); this.check = check; } } private static class ConflictCheck { final XContentBuilder init; final XContentBuilder update; private ConflictCheck(XContentBuilder init, XContentBuilder update) { this.init = init; this.update = update; } } public class ParameterChecker { List<UpdateCheck> updateChecks = new ArrayList<>(); Map<String, ConflictCheck> conflictChecks = new HashMap<>(); /** * Register a check that a parameter can be updated, using the minimal mapping as a base * * @param update a field builder applied on top of the minimal mapping * @param check a check that the updated parameter has been applied to the FieldMapper */ public void registerUpdateCheck(CheckedConsumer<XContentBuilder, IOException> update, Consumer<FieldMapper> check) throws IOException { updateChecks.add(new UpdateCheck(update, check)); } /** * Register a check that a parameter can be updated * * @param init the initial mapping * @param update the updated mapping * @param check a check that the updated parameter has been applied to the FieldMapper */ public void registerUpdateCheck( CheckedConsumer<XContentBuilder, IOException> init, CheckedConsumer<XContentBuilder, IOException> update, Consumer<FieldMapper> check ) throws IOException { updateChecks.add(new UpdateCheck(init, update, check)); } /** * Register a check that a parameter update will cause a conflict, using the minimal mapping as a base * * @param param the parameter name, expected to appear in the error message * @param update a field builder applied on top of the minimal mapping */ public void registerConflictCheck(String param, CheckedConsumer<XContentBuilder, IOException> update) throws IOException { conflictChecks.put(param, new ConflictCheck(fieldMapping(MapperTestCase.this::minimalMapping), fieldMapping(b -> { minimalMapping(b); update.accept(b); }))); 
} /** * Register a check that a parameter update will cause a conflict * * @param param the parameter name, expected to appear in the error message * @param init the initial mapping * @param update the updated mapping */ public void registerConflictCheck(String param, XContentBuilder init, XContentBuilder update) { conflictChecks.put(param, new ConflictCheck(init, update)); } } protected abstract void registerParameters(ParameterChecker checker) throws IOException; public void testUpdates() throws IOException { ParameterChecker checker = new ParameterChecker(); registerParameters(checker); for (UpdateCheck updateCheck : checker.updateChecks) { MapperService mapperService = createMapperService(updateCheck.init); merge(mapperService, updateCheck.update); FieldMapper mapper = (FieldMapper) mapperService.documentMapper().mappers().getMapper("field"); updateCheck.check.accept(mapper); // do it again to ensure that we don't get conflicts the second time merge(mapperService, updateCheck.update); mapper = (FieldMapper) mapperService.documentMapper().mappers().getMapper("field"); updateCheck.check.accept(mapper); } for (String param : checker.conflictChecks.keySet()) { MapperService mapperService = createMapperService(checker.conflictChecks.get(param).init); // merging the same change is fine merge(mapperService, checker.conflictChecks.get(param).init); // merging the conflicting update should throw an exception Exception e = expectThrows( IllegalArgumentException.class, "No conflict when updating parameter [" + param + "]", () -> merge(mapperService, checker.conflictChecks.get(param).update) ); assertThat( e.getMessage(), anyOf(containsString("Cannot update parameter [" + param + "]"), containsString("different [" + param + "]")) ); } assertParseMaximalWarnings(); } public final void testTextSearchInfoConsistency() throws IOException { MapperService mapperService = createMapperService(fieldMapping(this::minimalMapping)); MappedFieldType fieldType = 
mapperService.fieldType("field"); if (fieldType.getTextSearchInfo() == TextSearchInfo.NONE) { expectThrows(IllegalArgumentException.class, () -> fieldType.termQuery(null, null)); } else { SearchExecutionContext searchExecutionContext = createSearchExecutionContext(mapperService); assertNotNull(fieldType.termQuery(getSampleValueForQuery(), searchExecutionContext)); } assertSearchable(fieldType); assertParseMinimalWarnings(); } protected void assertSearchable(MappedFieldType fieldType) { assertEquals(fieldType.isSearchable(), fieldType.getTextSearchInfo() != TextSearchInfo.NONE); } /** * Asserts that fetching a single value from doc values and from the native * {@link MappedFieldType#valueFetcher} produce the same results. * <p> * Generally this method covers many many random cases but rarely. So if * it fails its generally a good idea to capture its randomized * parameters into a new method so we can be sure we consistently test * any unique and interesting failure case. See the tests for * {@link DateFieldMapper} for some examples. */ public final void testFetch() throws IOException { MapperService mapperService = randomFetchTestMapper(); try { MappedFieldType ft = mapperService.fieldType("field"); assertFetch(mapperService, "field", generateRandomInputValue(ft), randomFetchTestFormat()); } finally { assertParseMinimalWarnings(); } } /** * Asserts that fetching many values from doc values and from the native * {@link MappedFieldType#valueFetcher} produce the same results. * <p> * Generally this method covers many many random cases but rarely. So if * it fails its generally a good idea to capture its randomized * parameters into a new method so we can be sure we consistently test * any unique and interesting failure case. See the tests for * {@link DateFieldMapper} for some examples. 
*/ public final void testFetchMany() throws IOException { MapperService mapperService = randomFetchTestMapper(); try { MappedFieldType ft = mapperService.fieldType("field"); int count = between(2, 10); List<Object> values = new ArrayList<>(count); while (values.size() < count) { values.add(generateRandomInputValue(ft)); } assertFetch(mapperService, "field", values, randomFetchTestFormat()); } finally { assertParseMinimalWarnings(); } } protected final MapperService randomFetchTestMapper() throws IOException { return createMapperService(mapping(b -> { b.startObject("field"); randomFetchTestFieldConfig(b); b.endObject(); })); } /** * Field configuration for {@link #testFetch} and {@link #testFetchMany}. * Default implementation delegates to {@link #minimalMapping} but can * be overridden to randomize the field type and options. */ protected void randomFetchTestFieldConfig(XContentBuilder b) throws IOException { minimalMapping(b); } /** * A random format to use when tripping in {@link #testFetch} and * {@link #testFetchMany}. */ protected String randomFetchTestFormat() { return null; } /** * Test that dimension parameter is not updateable */ protected void registerDimensionChecks(ParameterChecker checker) throws IOException { // dimension cannot be updated checker.registerConflictCheck("time_series_dimension", b -> b.field("time_series_dimension", true)); checker.registerConflictCheck("time_series_dimension", b -> b.field("time_series_dimension", false)); checker.registerConflictCheck("time_series_dimension", fieldMapping(b -> { minimalMapping(b); b.field("time_series_dimension", false); }), fieldMapping(b -> { minimalMapping(b); b.field("time_series_dimension", true); })); checker.registerConflictCheck("time_series_dimension", fieldMapping(b -> { minimalMapping(b); b.field("time_series_dimension", true); }), fieldMapping(b -> { minimalMapping(b); b.field("time_series_dimension", false); })); } /** * Create a random {@code _source} value for this field. 
Must be compatible * with {@link XContentBuilder#value(Object)} and the field's parser. */ protected abstract Object generateRandomInputValue(MappedFieldType ft); /** * Assert that fetching a value using {@link MappedFieldType#valueFetcher} * produces the same value as fetching using doc values. */ protected void assertFetch(MapperService mapperService, String field, Object value, String format) throws IOException { MappedFieldType ft = mapperService.fieldType(field); SourceToParse source = source(b -> b.field(ft.name(), value)); ValueFetcher docValueFetcher = new DocValueFetcher( ft.docValueFormat(format, null), ft.fielddataBuilder("test", () -> null).build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService()) ); SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class); when(searchExecutionContext.sourcePath(field)).thenReturn(Set.of(field)); when(searchExecutionContext.getForField(ft)).thenAnswer( inv -> { return fieldDataLookup().apply(ft, () -> { throw new UnsupportedOperationException(); }); } ); ValueFetcher nativeFetcher = ft.valueFetcher(searchExecutionContext, format); ParsedDocument doc = mapperService.documentMapper().parse(source); withLuceneIndex(mapperService, iw -> iw.addDocuments(doc.docs()), ir -> { SourceLookup sourceLookup = new SourceLookup(); sourceLookup.setSegmentAndDocument(ir.leaves().get(0), 0); docValueFetcher.setNextReader(ir.leaves().get(0)); nativeFetcher.setNextReader(ir.leaves().get(0)); List<Object> fromDocValues = docValueFetcher.fetchValues(sourceLookup, new ArrayList<>()); List<Object> fromNative = nativeFetcher.fetchValues(sourceLookup, new ArrayList<>()); /* * The native fetcher uses byte, short, etc but doc values always * uses long or double. This difference is fine because on the outside * users can't see it. 
*/ fromNative = fromNative.stream().map(o -> { if (o instanceof Integer || o instanceof Short || o instanceof Byte) { return ((Number) o).longValue(); } if (o instanceof Float) { return ((Float) o).doubleValue(); } return o; }).collect(toList()); if (dedupAfterFetch()) { fromNative = fromNative.stream().distinct().collect(Collectors.toList()); } /* * Doc values sort according to something appropriate to the field * and the native fetchers usually don't sort. We're ok with this * difference. But we have to convince the test we're ok with it. */ assertThat("fetching " + value, fromNative, containsInAnyOrder(fromDocValues.toArray())); }); } /** * A few field types (e.g. keyword fields) don't allow duplicate values, so in those cases we need to de-dup our expected values. * Field types where this is the case should overwrite this. The default is to not de-duplicate though. */ protected boolean dedupAfterFetch() { return false; } /** * @return whether or not this field type supports access to its values from a SearchLookup */ protected boolean supportsSearchLookup() { return true; } /** * Checks that field data from this field produces the same values for query-time * scripts and for index-time scripts */ public final void testIndexTimeFieldData() throws IOException { assumeTrue("Field type does not support access via search lookup", supportsSearchLookup()); MapperService mapperService = createMapperService(fieldMapping(this::minimalMapping)); assertParseMinimalWarnings(); MappedFieldType fieldType = mapperService.fieldType("field"); if (fieldType.isAggregatable() == false) { return; // No field data available, so we ignore } SourceToParse source = source(this::writeField); ParsedDocument doc = mapperService.documentMapper().parse(source); withLuceneIndex(mapperService, iw -> iw.addDocument(doc.rootDoc()), ir -> { LeafReaderContext ctx = ir.leaves().get(0); DocValuesField<?> docValuesField = fieldType.fielddataBuilder("test", () -> { throw new 
UnsupportedOperationException(); }) .build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService()) .load(ctx) .getScriptField("test"); docValuesField.setNextDocId(0); DocumentLeafReader reader = new DocumentLeafReader(doc.rootDoc(), Collections.emptyMap()); DocValuesField<?> indexData = fieldType.fielddataBuilder("test", () -> { throw new UnsupportedOperationException(); }) .build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService()) .load(reader.getContext()) .getScriptField("test"); indexData.setNextDocId(0); // compare index and search time fielddata assertThat(docValuesField.getScriptDocValues(), equalTo(indexData.getScriptDocValues())); }); } protected boolean supportsStoredFields() { return true; } protected void minimalStoreMapping(XContentBuilder b) throws IOException { minimalMapping(b); b.field("store", true); } /** * Checks that loading stored fields for this field produces the same set of values * for query time scripts and index time scripts */ public final void testIndexTimeStoredFieldsAccess() throws IOException { assumeTrue("Field type does not support stored fields", supportsStoredFields()); MapperService mapperService = createMapperService(fieldMapping(this::minimalStoreMapping)); assertParseMinimalWarnings(); MappedFieldType fieldType = mapperService.fieldType("field"); SourceToParse source = source(this::writeField); ParsedDocument doc = mapperService.documentMapper().parse(source); SearchLookup lookup = new SearchLookup(f -> fieldType, (f, s) -> { throw new UnsupportedOperationException(); }); withLuceneIndex(mapperService, iw -> iw.addDocument(doc.rootDoc()), ir -> { LeafReaderContext ctx = ir.leaves().get(0); LeafStoredFieldsLookup storedFields = lookup.getLeafSearchLookup(ctx).fields(); storedFields.setDocument(0); DocumentLeafReader reader = new DocumentLeafReader(doc.rootDoc(), Collections.emptyMap()); LeafStoredFieldsLookup indexStoredFields = lookup.getLeafSearchLookup(reader.getContext()).fields(); 
indexStoredFields.setDocument(0); // compare index and search time stored fields assertThat(storedFields.get("field").getValues(), equalTo(indexStoredFields.get("field").getValues())); }); } public final void testNullInput() throws Exception { DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); if (allowsNullValues()) { ParsedDocument doc = mapper.parse(source(b -> b.nullField("field"))); assertThat(doc.docs().get(0).getFields("field").length, equalTo(0)); assertThat(doc.docs().get(0).getFields("_field_names").length, equalTo(0)); } else { expectThrows(MapperParsingException.class, () -> mapper.parse(source(b -> b.nullField("field")))); } assertWarnings(getParseMinimalWarnings()); } protected boolean allowsNullValues() { return true; } public final void testMinimalIsInvalidInRoutingPath() throws IOException { MapperService mapper = createMapperService(fieldMapping(this::minimalMapping)); try { IndexSettings settings = createIndexSettings( Version.CURRENT, Settings.builder() .put(IndexSettings.MODE.getKey(), "time_series") .put(IndexMetadata.INDEX_ROUTING_PATH.getKey(), "field") .put(IndexSettings.TIME_SERIES_START_TIME.getKey(), "2021-04-28T00:00:00Z") .put(IndexSettings.TIME_SERIES_END_TIME.getKey(), "2021-04-29T00:00:00Z") .build() ); Exception e = expectThrows(IllegalArgumentException.class, () -> mapper.documentMapper().validate(settings, false)); assertThat(e.getMessage(), equalTo(minimalIsInvalidRoutingPathErrorMessage(mapper.mappingLookup().getMapper("field")))); } finally { assertParseMinimalWarnings(); } } protected String minimalIsInvalidRoutingPathErrorMessage(Mapper mapper) { return "All fields that match routing_path must be keywords with [time_series_dimension: true] " + "and without the [script] parameter. [" + mapper.name() + "] was [" + mapper.typeName() + "]."; } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates;

import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.AggregationDesc;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.LongWritable;

/**
 * VectorUDAFCountMerge. Vectorized implementation for COUNT aggregate on reduce-side (merge).
 *
 * <p>The input column (produced by {@link #inputExpression}) holds partial COUNT
 * values as longs; merging is therefore a SUM over those partial counts.  The
 * aggregate methods below are specialized for every combination of the batch's
 * encoding flags ({@code noNulls}, {@code isRepeating}, {@code selectedInUse})
 * so that the hot inner loops contain no per-row branching they do not need.
 */
@Description(name = "count",
    value = "_FUNC_(expr) - Returns the merged sum value of expr (vectorized, type: long)")
public class VectorUDAFCountMerge extends VectorAggregateExpression {

  private static final long serialVersionUID = 1L;

  /**
   * class for storing the current aggregate value.
   * Holds the running sum of merged partial counts for one group.
   */
  static class Aggregation implements AggregationBuffer {

    private static final long serialVersionUID = 1L;

    // Running total; only mutated via += in the iterate* methods and reset().
    transient private long value;

    @Override
    public int getVariableSize() {
      // Fixed-size buffer (a single long); variable sizing is not applicable.
      throw new UnsupportedOperationException();
    }

    @Override
    public void reset() {
      value = 0L;
    }
  }

  // Expression producing the long input column; set via constructor or setter.
  private VectorExpression inputExpression = null;
  // Reused output holder to avoid per-row allocation in evaluateOutput().
  transient private final LongWritable result;

  public VectorUDAFCountMerge(VectorExpression inputExpression) {
    this();
    this.inputExpression = inputExpression;
  }

  public VectorUDAFCountMerge() {
    super();
    result = new LongWritable(0);
  }

  /**
   * Looks up the {@link Aggregation} buffer for aggregate {@code aggregateIndex}
   * in the buffer-set of batch position {@code row}.  Note the buffer sets are
   * indexed by batch position (0..batchSize-1), not by selected row index.
   */
  private Aggregation getCurrentAggregationBuffer(
      VectorAggregationBufferRow[] aggregationBufferSets,
      int aggregateIndex,
      int row) {
    VectorAggregationBufferRow mySet = aggregationBufferSets[row];
    Aggregation myagg = (Aggregation) mySet.getAggregationBuffer(aggregateIndex);
    return myagg;
  }

  /**
   * GROUP BY path: each row of the batch may belong to a different group, so
   * every row is routed to its own aggregation buffer.  Dispatches to a
   * specialized loop based on the input vector's null/repeating/selection flags.
   */
  @Override
  public void aggregateInputSelection(
      VectorAggregationBufferRow[] aggregationBufferSets,
      int aggregateIndex,
      VectorizedRowBatch batch) throws HiveException {

    int batchSize = batch.size;

    if (batchSize == 0) {
      return;
    }

    inputExpression.evaluate(batch);

    LongColumnVector inputVector = (LongColumnVector)batch.
        cols[this.inputExpression.getOutputColumn()];
    long[] vector = inputVector.vector;

    if (inputVector.noNulls) {
      if (inputVector.isRepeating) {
        // One value repeated for the whole batch.
        iterateNoNullsRepeatingWithAggregationSelection(
            aggregationBufferSets, aggregateIndex,
            vector[0], batchSize);
      } else {
        if (batch.selectedInUse) {
          iterateNoNullsSelectionWithAggregationSelection(
              aggregationBufferSets, aggregateIndex,
              vector, batch.selected, batchSize);
        } else {
          iterateNoNullsWithAggregationSelection(
              aggregationBufferSets, aggregateIndex,
              vector, batchSize);
        }
      }
    } else {
      if (inputVector.isRepeating) {
        if (batch.selectedInUse) {
          iterateHasNullsRepeatingSelectionWithAggregationSelection(
              aggregationBufferSets, aggregateIndex,
              vector[0], batchSize, batch.selected, inputVector.isNull);
        } else {
          iterateHasNullsRepeatingWithAggregationSelection(
              aggregationBufferSets, aggregateIndex,
              vector[0], batchSize, inputVector.isNull);
        }
      } else {
        if (batch.selectedInUse) {
          iterateHasNullsSelectionWithAggregationSelection(
              aggregationBufferSets, aggregateIndex,
              vector, batchSize, batch.selected, inputVector.isNull);
        } else {
          iterateHasNullsWithAggregationSelection(
              aggregationBufferSets, aggregateIndex,
              vector, batchSize, inputVector.isNull);
        }
      }
    }
  }

  // Repeating value, no nulls: add the same value into every row's buffer.
  private void iterateNoNullsRepeatingWithAggregationSelection(
      VectorAggregationBufferRow[] aggregationBufferSets,
      int aggregateIndex,
      long value,
      int batchSize) {

    for (int i=0; i < batchSize; ++i) {
      Aggregation myagg = getCurrentAggregationBuffer(
          aggregationBufferSets,
          aggregateIndex,
          i);
      myagg.value += value;
    }
  }

  // No nulls, selection vector in use: read values through selection[i],
  // but address buffers by batch position i.
  private void iterateNoNullsSelectionWithAggregationSelection(
      VectorAggregationBufferRow[] aggregationBufferSets,
      int aggregateIndex,
      long[] values,
      int[] selection,
      int batchSize) {

    for (int i=0; i < batchSize; ++i) {
      Aggregation myagg = getCurrentAggregationBuffer(
          aggregationBufferSets,
          aggregateIndex,
          i);
      myagg.value += values[selection[i]];
    }
  }

  // No nulls, dense batch: straight pass over values[0..batchSize).
  private void iterateNoNullsWithAggregationSelection(
      VectorAggregationBufferRow[] aggregationBufferSets,
      int aggregateIndex,
      long[] values,
      int batchSize) {
    for (int i=0; i < batchSize; ++i) {
      Aggregation myagg = getCurrentAggregationBuffer(
          aggregationBufferSets,
          aggregateIndex,
          i);
      myagg.value += values[i];
    }
  }

  // Repeating value with nulls, selection in use.  Only isNull[0] matters for
  // a repeating vector; the selection array is deliberately not consulted
  // because buffers are addressed by batch position, same as the
  // non-selection repeating case.
  private void iterateHasNullsRepeatingSelectionWithAggregationSelection(
      VectorAggregationBufferRow[] aggregationBufferSets,
      int aggregateIndex,
      long value,
      int batchSize,
      int[] selection,
      boolean[] isNull) {

    if (isNull[0]) {
      return;
    }

    for (int i=0; i < batchSize; ++i) {
      Aggregation myagg = getCurrentAggregationBuffer(
          aggregationBufferSets,
          aggregateIndex,
          i);
      myagg.value += value;
    }
  }

  // Repeating value with nulls, dense batch: a null repeating value
  // contributes nothing to any buffer.
  private void iterateHasNullsRepeatingWithAggregationSelection(
      VectorAggregationBufferRow[] aggregationBufferSets,
      int aggregateIndex,
      long value,
      int batchSize,
      boolean[] isNull) {

    if (isNull[0]) {
      return;
    }

    for (int i=0; i < batchSize; ++i) {
      Aggregation myagg = getCurrentAggregationBuffer(
          aggregationBufferSets,
          aggregateIndex,
          i);
      myagg.value += value;
    }
  }

  // Nulls present, selection in use: j is the batch position (buffer index),
  // i = selection[j] is the physical row for value/null lookup.
  private void iterateHasNullsSelectionWithAggregationSelection(
      VectorAggregationBufferRow[] aggregationBufferSets,
      int aggregateIndex,
      long[] values,
      int batchSize,
      int[] selection,
      boolean[] isNull) {

    for (int j=0; j < batchSize; ++j) {
      int i = selection[j];
      if (!isNull[i]) {
        Aggregation myagg = getCurrentAggregationBuffer(
            aggregationBufferSets,
            aggregateIndex,
            j);
        myagg.value += values[i];
      }
    }
  }

  // Nulls present, dense batch: skip null rows, accumulate the rest.
  private void iterateHasNullsWithAggregationSelection(
      VectorAggregationBufferRow[] aggregationBufferSets,
      int aggregateIndex,
      long[] values,
      int batchSize,
      boolean[] isNull) {

    for (int i=0; i < batchSize; ++i) {
      if (!isNull[i]) {
        Aggregation myagg = getCurrentAggregationBuffer(
            aggregationBufferSets,
            aggregateIndex,
            i);
        myagg.value += values[i];
      }
    }
  }

  /**
   * Single-group path: the whole batch accumulates into one buffer
   * {@code agg}.  Repeating inputs are folded as value * batchSize instead
   * of looping.
   */
  @Override
  public void aggregateInput(AggregationBuffer agg, VectorizedRowBatch batch)
      throws HiveException {

    inputExpression.evaluate(batch);

    LongColumnVector inputVector = (LongColumnVector)batch.
        cols[this.inputExpression.getOutputColumn()];

    int batchSize = batch.size;

    if (batchSize == 0) {
      return;
    }

    Aggregation myagg = (Aggregation)agg;

    long[] vector = inputVector.vector;

    if (inputVector.isRepeating) {
      if (inputVector.noNulls) {
        // Same value for every row: multiply instead of iterating.
        // (A null repeating value falls through and contributes nothing.)
        myagg.value += vector[0]*batchSize;
      }
      return;
    }

    if (!batch.selectedInUse && inputVector.noNulls) {
      iterateNoSelectionNoNulls(myagg, vector, batchSize);
    }
    else if (!batch.selectedInUse) {
      iterateNoSelectionHasNulls(myagg, vector, batchSize, inputVector.isNull);
    }
    else if (inputVector.noNulls){
      iterateSelectionNoNulls(myagg, vector, batchSize, batch.selected);
    }
    else {
      iterateSelectionHasNulls(myagg, vector, batchSize,
          inputVector.isNull, batch.selected);
    }
  }

  // Selection + nulls: dereference selected[j], skip nulls.
  private void iterateSelectionHasNulls(
      Aggregation myagg,
      long[] vector,
      int batchSize,
      boolean[] isNull,
      int[] selected) {

    for (int j=0; j< batchSize; ++j) {
      int i = selected[j];
      if (!isNull[i]) {
        myagg.value += vector[i];
      }
    }
  }

  // Selection, no nulls: sum the selected rows.
  private void iterateSelectionNoNulls(
      Aggregation myagg,
      long[] vector,
      int batchSize,
      int[] selected) {

    for (int i=0; i< batchSize; ++i) {
      myagg.value += vector[selected[i]];
    }
  }

  // Dense batch + nulls: skip null rows.
  private void iterateNoSelectionHasNulls(
      Aggregation myagg,
      long[] vector,
      int batchSize,
      boolean[] isNull) {

    for(int i=0;i<batchSize;++i) {
      if (!isNull[i]) {
        myagg.value += vector[i];
      }
    }
  }

  // Dense batch, no nulls: tight unconditional sum loop.
  private void iterateNoSelectionNoNulls(
      Aggregation myagg,
      long[] vector,
      int batchSize) {
    for (int i=0;i<batchSize;++i) {
      myagg.value += vector[i];
    }
  }

  @Override
  public AggregationBuffer getNewAggregationBuffer() throws HiveException {
    return new Aggregation();
  }

  @Override
  public void reset(AggregationBuffer agg) throws HiveException {
    Aggregation myAgg = (Aggregation) agg;
    myAgg.reset();
  }

  /**
   * Returns the merged count as a {@link LongWritable}.  The writable is
   * reused across calls, so callers must not hold onto the returned object.
   */
  @Override
  public Object evaluateOutput(AggregationBuffer agg) throws HiveException {
    Aggregation myagg = (Aggregation) agg;
    result.set (myagg.value);
    return result;
  }

  @Override
  public ObjectInspector getOutputObjectInspector() {
    return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
  }

  @Override
  public int getAggregationBufferFixedSize() {
    JavaDataModel model = JavaDataModel.get();
    // Object header plus the long field, rounded up to the alignment unit.
    // NOTE(review): primitive1() has no matching field in Aggregation (it only
    // holds one long) — looks copied from a sibling UDAF with an extra
    // boolean/int field; harmless over-estimate, but worth confirming.
    return JavaDataModel.alignUp(
        model.object() + model.primitive2() + model.primitive1(),
        model.memoryAlign());
  }

  @Override
  public void init(AggregationDesc desc) throws HiveException {
    // No-op
  }

  public VectorExpression getInputExpression() {
    return inputExpression;
  }

  public void setInputExpression(VectorExpression inputExpression) {
    this.inputExpression = inputExpression;
  }
}
/*
 * Copyright 2003-2015 Dave Griffith, Bas Leijdekkers
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.siyeh.ig.migration;

import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.codeInspection.ui.SingleCheckboxOptionsPanel;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.IncorrectOperationException;
import com.siyeh.InspectionGadgetsBundle;
import com.siyeh.ig.BaseInspection;
import com.siyeh.ig.BaseInspectionVisitor;
import com.siyeh.ig.InspectionGadgetsFix;
import com.siyeh.ig.PsiReplacementUtil;
import com.siyeh.ig.psiutils.*;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.util.HashMap;
import java.util.Map;

/**
 * Inspection that reports unnecessary explicit boxing — {@code new Integer(x)}
 * constructor calls and {@code Integer.valueOf(x)}-style calls on a primitive
 * argument — in code where the boxed value could be used directly (Java 5+
 * autoboxing).  Offers a quick-fix that strips the boxing call, inserting a
 * primitive cast when the wrapper type and the argument's type differ.
 */
public class UnnecessaryBoxingInspection extends BaseInspection {

  // When true, only report boxing whose surrounding context expects a
  // primitive anyway (i.e. the boxed value would be immediately unboxed).
  @SuppressWarnings("PublicField")
  public boolean onlyReportSuperfluouslyBoxed = false;

  // Maps each wrapper class FQN to the primitive type name it boxes.
  @NonNls static final Map<String, String> boxedPrimitiveMap = new HashMap<String, String>(8);

  static {
    boxedPrimitiveMap.put(CommonClassNames.JAVA_LANG_INTEGER, "int");
    boxedPrimitiveMap.put(CommonClassNames.JAVA_LANG_SHORT, "short");
    boxedPrimitiveMap.put(CommonClassNames.JAVA_LANG_BOOLEAN, "boolean");
    boxedPrimitiveMap.put(CommonClassNames.JAVA_LANG_LONG, "long");
    boxedPrimitiveMap.put(CommonClassNames.JAVA_LANG_BYTE, "byte");
    boxedPrimitiveMap.put(CommonClassNames.JAVA_LANG_FLOAT, "float");
    boxedPrimitiveMap.put(CommonClassNames.JAVA_LANG_DOUBLE, "double");
    boxedPrimitiveMap.put(CommonClassNames.JAVA_LANG_CHARACTER, "char");
  }

  @Override
  @NotNull
  public String getDisplayName() {
    return InspectionGadgetsBundle.message("unnecessary.boxing.display.name");
  }

  @Override
  public boolean isEnabledByDefault() {
    return true;
  }

  /** Settings UI: a single checkbox bound to {@link #onlyReportSuperfluouslyBoxed}. */
  @Nullable
  @Override
  public JComponent createOptionsPanel() {
    return new SingleCheckboxOptionsPanel(InspectionGadgetsBundle.message("unnecessary.boxing.superfluous.option"),
                                          this, "onlyReportSuperfluouslyBoxed");
  }

  @Override
  @NotNull
  protected String buildErrorString(Object... infos) {
    return InspectionGadgetsBundle.message("unnecessary.boxing.problem.descriptor");
  }

  @Override
  public InspectionGadgetsFix buildFix(Object... infos) {
    return new UnnecessaryBoxingFix();
  }

  /**
   * Quick-fix: replaces the boxing call with its single argument, prefixed by
   * a primitive cast when the argument's type differs from the primitive
   * counterpart of the wrapper type.
   */
  private static class UnnecessaryBoxingFix extends InspectionGadgetsFix {

    @Override
    @NotNull
    public String getFamilyName() {
      return getName();
    }

    @Override
    @NotNull
    public String getName() {
      return InspectionGadgetsBundle.message("unnecessary.boxing.remove.quickfix");
    }

    @Override
    public void doFix(@NotNull Project project, ProblemDescriptor descriptor) throws IncorrectOperationException {
      final PsiCallExpression expression = (PsiCallExpression)descriptor.getPsiElement();
      final PsiType boxedType = expression.getType();
      if (boxedType == null) {
        return;
      }
      final PsiExpressionList argumentList = expression.getArgumentList();
      if (argumentList == null) {
        return;
      }
      final PsiExpression[] arguments = argumentList.getExpressions();
      if (arguments.length != 1) {
        // Only single-argument boxing forms are fixable.
        return;
      }
      final PsiExpression unboxedExpression = arguments[0];
      final PsiType unboxedType = unboxedExpression.getType();
      if (unboxedType == null) {
        return;
      }
      final String cast = getCastString(unboxedType, boxedType);
      if (cast == null) {
        return;
      }
      final int precedence = ParenthesesUtils.getPrecedence(unboxedExpression);
      // If a cast is inserted and the argument binds more loosely than a cast,
      // parenthesize the argument to preserve evaluation order.
      if (!cast.isEmpty() && precedence > ParenthesesUtils.TYPE_CAST_PRECEDENCE) {
        PsiReplacementUtil.replaceExpression(expression, cast + '(' + unboxedExpression.getText() + ')');
      } else {
        PsiReplacementUtil.replaceExpression(expression, cast + unboxedExpression.getText());
      }
    }

    /**
     * Returns the cast text needed when replacing boxing of {@code fromType}
     * into wrapper {@code toType}: "" when the types already match,
     * "(primitive)" when a narrowing/widening cast is needed, or null when
     * {@code toType} is not a known wrapper.
     */
    @Nullable
    private static String getCastString(@NotNull PsiType fromType, @NotNull PsiType toType) {
      final String toTypeText = toType.getCanonicalText();
      final String fromTypeText = fromType.getCanonicalText();
      final String unboxedType = boxedPrimitiveMap.get(toTypeText);
      if (unboxedType == null) {
        return null;
      }
      if (fromTypeText.equals(unboxedType)) {
        return "";
      } else {
        return '(' + unboxedType + ')';
      }
    }
  }

  @Override
  public boolean shouldInspect(PsiFile file) {
    // Autoboxing only exists from language level 5 onward.
    return PsiUtil.isLanguageLevel5OrHigher(file);
  }

  @Override
  public BaseInspectionVisitor buildVisitor() {
    return new UnnecessaryBoxingVisitor();
  }

  private class UnnecessaryBoxingVisitor extends BaseInspectionVisitor {

    /** Flags {@code new Integer(intValue)}-style wrapper constructor calls. */
    @Override
    public void visitNewExpression(@NotNull PsiNewExpression expression) {
      super.visitNewExpression(expression);
      final PsiType constructorType = expression.getType();
      if (constructorType == null) {
        return;
      }
      final String constructorTypeText = constructorType.getCanonicalText();
      if (!boxedPrimitiveMap.containsKey(constructorTypeText)) {
        return;
      }
      final PsiMethod constructor = expression.resolveConstructor();
      if (constructor == null) {
        return;
      }
      final PsiParameterList parameterList = constructor.getParameterList();
      if (parameterList.getParametersCount() != 1) {
        return;
      }
      final PsiParameter[] parameters = parameterList.getParameters();
      final PsiParameter parameter = parameters[0];
      final PsiType parameterType = parameter.getType();
      final String parameterTypeText = parameterType.getCanonicalText();
      final String boxableConstructorType = boxedPrimitiveMap.get(constructorTypeText);
      // Skip constructors that don't take the primitive directly
      // (e.g. new Integer(String)).
      if (!boxableConstructorType.equals(parameterTypeText)) {
        return;
      }
      if (!canBeUnboxed(expression)) {
        return;
      }
      if (onlyReportSuperfluouslyBoxed) {
        final PsiType expectedType = ExpectedTypeUtils.findExpectedType(expression, false, true);
        if (!(expectedType instanceof PsiPrimitiveType)) {
          return;
        }
      }
      registerError(expression);
    }

    /** Flags {@code Integer.valueOf(primitive)}-style boxing calls. */
    @Override
    public void visitMethodCallExpression(PsiMethodCallExpression expression) {
      super.visitMethodCallExpression(expression);
      final PsiExpressionList argumentList = expression.getArgumentList();
      final PsiExpression[] arguments = argumentList.getExpressions();
      if (arguments.length != 1) {
        return;
      }
      // valueOf(String) overloads parse rather than box; only primitive args count.
      if (!(arguments[0].getType() instanceof PsiPrimitiveType)) {
        return;
      }
      final PsiReferenceExpression methodExpression = expression.getMethodExpression();
      @NonNls final String referenceName = methodExpression.getReferenceName();
      if (!"valueOf".equals(referenceName)) {
        return;
      }
      final PsiExpression qualifierExpression = methodExpression.getQualifierExpression();
      if (!(qualifierExpression instanceof PsiReferenceExpression)) {
        return;
      }
      final PsiReferenceExpression referenceExpression = (PsiReferenceExpression)qualifierExpression;
      final String canonicalText = referenceExpression.getCanonicalText();
      if (!boxedPrimitiveMap.containsKey(canonicalText)) {
        return;
      }
      if (!canBeUnboxed(expression)) {
        return;
      }
      registerError(expression);
    }

    /**
     * Checks whether removing the boxing call would leave semantically valid
     * code, by inspecting the expression's parent context: method references
     * and bare expression statements need the object; casts to type parameters,
     * conditional branches with non-primitive counterparts, comparisons against
     * possibly-null operands, and calls that would resolve to a different
     * overload all block the fix.
     */
    private boolean canBeUnboxed(PsiCallExpression expression) {
      PsiElement parent = expression.getParent();
      // Look through any number of wrapping parentheses.
      while (parent instanceof PsiParenthesizedExpression) {
        parent = parent.getParent();
      }
      if (parent instanceof PsiExpressionStatement || parent instanceof PsiReferenceExpression) {
        return false;
      } else if (parent instanceof PsiTypeCastExpression) {
        final PsiTypeCastExpression castExpression = (PsiTypeCastExpression)parent;
        if (TypeUtils.isTypeParameter(castExpression.getType())) {
          // Casting to a type parameter requires an object, not a primitive.
          return false;
        }
      } else if (parent instanceof PsiConditionalExpression) {
        final PsiConditionalExpression conditionalExpression = (PsiConditionalExpression)parent;
        final PsiExpression thenExpression = conditionalExpression.getThenExpression();
        final PsiExpression elseExpression = conditionalExpression.getElseExpression();
        if (elseExpression == null || thenExpression == null) {
          return false;
        }
        // Unboxing one branch is only safe if the opposite branch is already
        // primitive (otherwise the conditional's type would change).
        if (PsiTreeUtil.isAncestor(thenExpression, expression, false)) {
          final PsiType type = elseExpression.getType();
          return type instanceof PsiPrimitiveType;
        } else if (PsiTreeUtil.isAncestor(elseExpression, expression, false)) {
          final PsiType type = thenExpression.getType();
          return type instanceof PsiPrimitiveType;
        } else {
          // The boxing sits in the condition, which is unaffected.
          return true;
        }
      } else if (parent instanceof PsiBinaryExpression) {
        final PsiBinaryExpression binaryExpression = (PsiBinaryExpression)parent;
        final PsiExpression lhs = binaryExpression.getLOperand();
        final PsiExpression rhs = binaryExpression.getROperand();
        if (rhs == null) {
          return false;
        }
        // Check the boxing expression against the opposite operand.
        return PsiTreeUtil.isAncestor(rhs, expression, false)
               ? canBinaryExpressionBeUnboxed(lhs, rhs)
               : canBinaryExpressionBeUnboxed(rhs, lhs);
      }
      // Inside a call's argument list: the fix is safe only if the same
      // method still resolves with the unboxed argument.
      final PsiCallExpression containingMethodCallExpression = getParentMethodCallExpression(expression);
      return containingMethodCallExpression == null ||
             isSameMethodCalledWithoutBoxing(containingMethodCallExpression, expression);
    }

    /**
     * True when operand {@code rhs} (containing the boxing) can be unboxed
     * next to {@code lhs}: the other operand must be primitive (or proven
     * non-null) and assignment-compatible with the unboxed type — otherwise
     * e.g. {@code ==} would change from reference to value comparison, or NPE
     * behavior could change.
     */
    private boolean canBinaryExpressionBeUnboxed(PsiExpression lhs, PsiExpression rhs) {
      final PsiType rhsType = rhs.getType();
      if (rhsType == null) {
        return false;
      }
      final PsiType lhsType = lhs.getType();
      if (lhsType == null) {
        return false;
      }
      if (!(lhsType instanceof PsiPrimitiveType) && !ExpressionUtils.isAnnotatedNotNull(lhs)) {
        return false;
      }
      final PsiPrimitiveType unboxedType = PsiPrimitiveType.getUnboxedType(rhsType);
      return unboxedType != null && unboxedType.isAssignableFrom(lhsType);
    }

    /**
     * Walks up through parentheses and argument lists to the call expression
     * (if any) that receives {@code expression} as an argument.
     */
    @Nullable
    private PsiCallExpression getParentMethodCallExpression(@NotNull PsiElement expression) {
      final PsiElement parent = expression.getParent();
      if (parent instanceof PsiParenthesizedExpression || parent instanceof PsiExpressionList) {
        return getParentMethodCallExpression(parent);
      } else if (parent instanceof PsiCallExpression) {
        return (PsiCallExpression)parent;
      } else {
        return null;
      }
    }

    /**
     * Re-resolves the surrounding call with the boxing expression replaced by
     * its bare argument and checks that overload resolution still picks the
     * same method — guarding against fixes that would silently switch
     * overloads (e.g. {@code remove(int)} vs {@code remove(Object)}).
     */
    private boolean isSameMethodCalledWithoutBoxing(@NotNull PsiCallExpression methodCallExpression,
                                                    @NotNull PsiCallExpression boxingExpression) {
      final PsiExpressionList boxedArgumentList = boxingExpression.getArgumentList();
      if (boxedArgumentList == null) {
        return false;
      }
      final PsiExpression[] arguments = boxedArgumentList.getExpressions();
      if (arguments.length != 1) {
        return false;
      }
      final PsiExpression unboxedExpression = arguments[0];
      final PsiMethod originalMethod = methodCallExpression.resolveMethod();
      final PsiMethod otherMethod =
        MethodCallUtils.findMethodWithReplacedArgument(methodCallExpression, boxingExpression, unboxedExpression);
      return originalMethod == otherMethod;
    }
  }
}
/* Derby - Class org.apache.derby.impl.sql.execute.GenericResultSetFactory Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derby.impl.sql.execute; import org.apache.derby.catalog.TypeDescriptor; import org.apache.derby.iapi.error.StandardException; import org.apache.derby.iapi.services.loader.GeneratedMethod; import org.apache.derby.iapi.services.sanity.SanityManager; import org.apache.derby.iapi.sql.Activation; import org.apache.derby.iapi.sql.ResultSet; import org.apache.derby.iapi.sql.conn.Authorizer; import org.apache.derby.iapi.sql.conn.LanguageConnectionContext; import org.apache.derby.iapi.sql.execute.NoPutResultSet; import org.apache.derby.iapi.sql.execute.ResultSetFactory; import org.apache.derby.iapi.store.access.Qualifier; import org.apache.derby.iapi.store.access.StaticCompiledOpenConglomInfo; import org.apache.derby.iapi.types.DataValueDescriptor; /** * ResultSetFactory provides a wrapper around all of * the result sets used in this execution implementation. * This removes the need of generated classes to do a new * and of the generator to know about all of the result * sets. Both simply know about this interface to getting * them. * <p> * In terms of modularizing, we can create just an interface * to this class and invoke the interface. 
Different implementations
 * would get the same information provided but could potentially
 * massage/ignore it in different ways to satisfy their
 * implementations. The practicality of this is to be seen.
 * <p>
 * The cost of this type of factory is that once you touch it,
 * you touch *all* of the possible result sets, not just
 * the ones you need. So the first time you touch it could
 * be painful ... that might be a problem for execution.
 *
 */
public class GenericResultSetFactory implements ResultSetFactory
{
    //
    // ResultSetFactory interface
    //

    public GenericResultSetFactory()
    {
    }

    /**
        @see ResultSetFactory#getInsertResultSet
        @exception StandardException thrown on error
     */
    public ResultSet getInsertResultSet(NoPutResultSet source, GeneratedMethod checkGM)
        throws StandardException
    {
        Activation activation = source.getActivation();
        // Write statements must pass the SQL-write authorization check before
        // the result set is constructed.
        getAuthorizer(activation).authorize(activation, Authorizer.SQL_WRITE_OP);
        return new InsertResultSet(source, checkGM, activation );
    }

    /**
        @see ResultSetFactory#getInsertVTIResultSet
        @exception StandardException thrown on error
     */
    public ResultSet getInsertVTIResultSet(NoPutResultSet source, NoPutResultSet vtiRS )
        throws StandardException
    {
        Activation activation = source.getActivation();
        getAuthorizer(activation).authorize(activation, Authorizer.SQL_WRITE_OP);
        return new InsertVTIResultSet(source, vtiRS, activation );
    }

    /**
        @see ResultSetFactory#getDeleteVTIResultSet
        @exception StandardException thrown on error
     */
    public ResultSet getDeleteVTIResultSet(NoPutResultSet source)
        throws StandardException
    {
        Activation activation = source.getActivation();
        getAuthorizer(activation).authorize(activation, Authorizer.SQL_WRITE_OP);
        return new DeleteVTIResultSet(source, activation);
    }

    /**
        @see ResultSetFactory#getDeleteResultSet
        @exception StandardException thrown on error
     */
    public ResultSet getDeleteResultSet(NoPutResultSet source)
        throws StandardException
    {
        Activation activation = source.getActivation();
        getAuthorizer(activation).authorize(activation, Authorizer.SQL_WRITE_OP);
        return new DeleteResultSet(source, activation );
    }

    /**
        @see ResultSetFactory#getDeleteCascadeResultSet
        @exception StandardException thrown on error
     */
    public ResultSet getDeleteCascadeResultSet(NoPutResultSet source,
                                               int constantActionItem,
                                               ResultSet[] dependentResultSets,
                                               String resultSetId)
        throws StandardException
    {
        Activation activation = source.getActivation();
        getAuthorizer(activation).authorize(activation, Authorizer.SQL_WRITE_OP);
        return new DeleteCascadeResultSet(source, activation, constantActionItem,
                                          dependentResultSets, resultSetId);
    }

    /**
        @see ResultSetFactory#getUpdateResultSet
        @exception StandardException thrown on error
     */
    public ResultSet getUpdateResultSet(NoPutResultSet source, GeneratedMethod checkGM)
        throws StandardException
    {
        Activation activation = source.getActivation();
        //The stress test failed with null pointer exception in here once and then
        //it didn't happen again. It can be a jit problem because after this null
        //pointer exception, the cleanup code in UpdateResultSet got a null
        //pointer exception too which can't happen since the cleanup code checks
        //for null value before doing anything.
        //In any case, if this ever happens again, hopefully the following
        //assertion code will catch it.
        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(getAuthorizer(activation) != null, "Authorizer is null");
        }
        getAuthorizer(activation).authorize(activation, Authorizer.SQL_WRITE_OP);
        return new UpdateResultSet(source, checkGM, activation);
    }

    /**
        @see ResultSetFactory#getUpdateVTIResultSet
        @exception StandardException thrown on error
     */
    public ResultSet getUpdateVTIResultSet(NoPutResultSet source)
        throws StandardException
    {
        Activation activation = source.getActivation();
        getAuthorizer(activation).authorize(activation, Authorizer.SQL_WRITE_OP);
        return new UpdateVTIResultSet(source, activation);
    }

    /**
        @see ResultSetFactory#getDeleteCascadeUpdateResultSet
        @exception StandardException thrown on error
     */
    public ResultSet getDeleteCascadeUpdateResultSet(NoPutResultSet source,
                                                     GeneratedMethod checkGM,
                                                     int constantActionItem,
                                                     int rsdItem)
        throws StandardException
    {
        Activation activation = source.getActivation();
        getAuthorizer(activation).authorize(activation, Authorizer.SQL_WRITE_OP);
        // Delegates to UpdateResultSet, passing the extra cascade bookkeeping items.
        return new UpdateResultSet(source, checkGM, activation, constantActionItem,
                                   rsdItem);
    }

    /**
        @see ResultSetFactory#getCallStatementResultSet
        @exception StandardException thrown on error
     */
    public ResultSet getCallStatementResultSet(GeneratedMethod methodCall,
                                               Activation activation)
        throws StandardException
    {
        getAuthorizer(activation).authorize(activation, Authorizer.SQL_CALL_OP);
        return new CallStatementResultSet(methodCall, activation);
    }

    /**
        @see ResultSetFactory#getProjectRestrictResultSet
        @exception StandardException thrown on error
     */
    public NoPutResultSet getProjectRestrictResultSet(NoPutResultSet source,
        GeneratedMethod restriction,
        GeneratedMethod projection, int resultSetNumber,
        GeneratedMethod constantRestriction,
        int mapRefItem,
        boolean reuseResult,
        boolean doesProjection,
        double optimizerEstimatedRowCount,
        double optimizerEstimatedCost)
        throws StandardException
    {
        return new ProjectRestrictResultSet(source, source.getActivation(),
            restriction, projection, resultSetNumber,
            constantRestriction, mapRefItem,
            reuseResult,
            doesProjection,
            optimizerEstimatedRowCount,
            optimizerEstimatedCost);
    }

    /**
        @see ResultSetFactory#getHashTableResultSet
        @exception StandardException thrown on error
     */
    public NoPutResultSet getHashTableResultSet(NoPutResultSet source,
        GeneratedMethod singleTableRestriction,
        Qualifier[][] equijoinQualifiers,
        GeneratedMethod projection, int resultSetNumber,
        int mapRefItem,
        boolean reuseResult,
        int keyColItem,
        boolean removeDuplicates,
        long maxInMemoryRowCount,
        int initialCapacity,
        float loadFactor,
        double optimizerEstimatedRowCount,
        double optimizerEstimatedCost)
        throws StandardException
    {
        return new HashTableResultSet(source, source.getActivation(),
            singleTableRestriction,
            equijoinQualifiers,
            projection, resultSetNumber,
            mapRefItem,
            reuseResult,
            keyColItem, removeDuplicates,
            maxInMemoryRowCount,
            initialCapacity,
            loadFactor,
            true,       // Skip rows with 1 or more null key columns
            optimizerEstimatedRowCount,
            optimizerEstimatedCost);
    }

    /**
        @see ResultSetFactory#getSortResultSet
        @exception StandardException thrown on error
     */
    public NoPutResultSet getSortResultSet(NoPutResultSet source,
        boolean distinct,
        boolean isInSortedOrder,
        int orderItem,
        GeneratedMethod rowAllocator,
        int maxRowSize,
        int resultSetNumber,
        double optimizerEstimatedRowCount,
        double optimizerEstimatedCost)
        throws StandardException
    {
        return new SortResultSet(source,
            distinct,
            isInSortedOrder,
            orderItem,
            source.getActivation(),
            rowAllocator,
            maxRowSize,
            resultSetNumber,
            optimizerEstimatedRowCount,
            optimizerEstimatedCost);
    }

    /**
        @see ResultSetFactory#getScalarAggregateResultSet
        @exception StandardException thrown on error
     */
    public NoPutResultSet getScalarAggregateResultSet(NoPutResultSet source,
        boolean isInSortedOrder,
        int aggregateItem,
        int orderItem,
        GeneratedMethod rowAllocator,
        int maxRowSize,
        int resultSetNumber,
        boolean singleInputRow,
        double optimizerEstimatedRowCount,
        double optimizerEstimatedCost)
        throws StandardException
    {
        // NOTE(review): orderItem and maxRowSize are accepted but not forwarded —
        // presumably not needed for the single-row scalar aggregate case; confirm
        // against ScalarAggregateResultSet's constructor.
        return new ScalarAggregateResultSet(
            source, isInSortedOrder, aggregateItem, source.getActivation(),
            rowAllocator, resultSetNumber, singleInputRow,
            optimizerEstimatedRowCount,
            optimizerEstimatedCost);
    }

    /**
        @see ResultSetFactory#getDistinctScalarAggregateResultSet
        @exception StandardException thrown on error
     */
    public NoPutResultSet getDistinctScalarAggregateResultSet(NoPutResultSet source,
        boolean isInSortedOrder,
        int aggregateItem,
        int orderItem,
        GeneratedMethod rowAllocator,
        int maxRowSize,
        int resultSetNumber,
        boolean singleInputRow,
        double optimizerEstimatedRowCount,
        double optimizerEstimatedCost)
        throws StandardException
    {
        return new DistinctScalarAggregateResultSet(
            source, isInSortedOrder, aggregateItem, orderItem,
            source.getActivation(),
            rowAllocator, maxRowSize, resultSetNumber, singleInputRow,
            optimizerEstimatedRowCount,
            optimizerEstimatedCost);
    }

    /**
        @see ResultSetFactory#getGroupedAggregateResultSet
        @exception StandardException thrown on error
     */
    public NoPutResultSet getGroupedAggregateResultSet(NoPutResultSet source,
        boolean isInSortedOrder,
        int aggregateItem,
        int orderItem,
        GeneratedMethod rowAllocator,
        int maxRowSize,
        int resultSetNumber,
        double optimizerEstimatedRowCount,
        double optimizerEstimatedCost)
        throws StandardException
    {
        return new GroupedAggregateResultSet(
            source, isInSortedOrder, aggregateItem, orderItem,
            source.getActivation(),
            rowAllocator, maxRowSize, resultSetNumber,
            optimizerEstimatedRowCount,
            optimizerEstimatedCost);
    }

    /**
        @see ResultSetFactory#getDistinctGroupedAggregateResultSet
        @exception StandardException thrown on error
     */
    public NoPutResultSet getDistinctGroupedAggregateResultSet(NoPutResultSet source,
        boolean isInSortedOrder,
        int aggregateItem,
        int orderItem,
        GeneratedMethod rowAllocator,
        int maxRowSize,
        int resultSetNumber,
        double optimizerEstimatedRowCount,
        double optimizerEstimatedCost)
        throws StandardException
    {
        return new DistinctGroupedAggregateResultSet(
            source, isInSortedOrder, aggregateItem, orderItem,
            source.getActivation(),
            rowAllocator, maxRowSize, resultSetNumber,
            optimizerEstimatedRowCount,
            optimizerEstimatedCost);
    }

    /**
        @see ResultSetFactory#getAnyResultSet
        @exception StandardException thrown on error
     */
    public NoPutResultSet getAnyResultSet(NoPutResultSet source,
        GeneratedMethod emptyRowFun, int resultSetNumber,
        int subqueryNumber, int pointOfAttachment,
        double optimizerEstimatedRowCount,
        double optimizerEstimatedCost)
        throws StandardException
    {
        return new AnyResultSet(source,
            source.getActivation(), emptyRowFun, resultSetNumber,
            subqueryNumber, pointOfAttachment,
            optimizerEstimatedRowCount,
            optimizerEstimatedCost);
    }

    /**
        @see ResultSetFactory#getOnceResultSet
        @exception StandardException thrown on error
     */
    public NoPutResultSet getOnceResultSet(NoPutResultSet source,
        GeneratedMethod emptyRowFun,
        int cardinalityCheck, int resultSetNumber,
        int subqueryNumber, int pointOfAttachment,
        double optimizerEstimatedRowCount,
        double optimizerEstimatedCost)
        throws StandardException
    {
        return new OnceResultSet(source,
            source.getActivation(), emptyRowFun,
            cardinalityCheck, resultSetNumber,
            subqueryNumber, pointOfAttachment,
            optimizerEstimatedRowCount,
            optimizerEstimatedCost);
    }

    /**
        @see ResultSetFactory#getRowResultSet
     */
    public NoPutResultSet getRowResultSet(Activation activation,
        GeneratedMethod row,
        boolean canCacheRow,
        int resultSetNumber,
        double optimizerEstimatedRowCount,
        double optimizerEstimatedCost)
    {
        return new RowResultSet(activation, row, canCacheRow, resultSetNumber,
                                optimizerEstimatedRowCount,
                                optimizerEstimatedCost);
    }

    /**
        @see ResultSetFactory#getVTIResultSet
        @exception StandardException thrown on error
     */
    public NoPutResultSet getVTIResultSet(Activation activation,
        GeneratedMethod row,
        int resultSetNumber,
        GeneratedMethod constructor,
        String javaClassName,
        Qualifier[][] pushedQualifiers,
        int erdNumber,
        boolean version2,
        boolean reuseablePs,
        int ctcNumber,
        boolean isTarget,
        int scanIsolationLevel,
        double optimizerEstimatedRowCount,
        double optimizerEstimatedCost,
        boolean isDerbyStyleTableFunction,
        String returnType
        )
        throws StandardException
    {
        return new VTIResultSet(activation, row, resultSetNumber,
            constructor,
            javaClassName,
            pushedQualifiers,
            erdNumber,
            version2, reuseablePs,
            ctcNumber,
            isTarget,
            scanIsolationLevel,
            optimizerEstimatedRowCount,
            optimizerEstimatedCost,
            isDerbyStyleTableFunction,
            returnType
            );
    }

    /**
        a hash scan generator, for ease of use at present.
        @see ResultSetFactory#getHashScanResultSet
        @exception StandardException thrown on error
     */
    public NoPutResultSet getHashScanResultSet(
        Activation activation,
        long conglomId,
        int scociItem,
        GeneratedMethod resultRowAllocator,
        int resultSetNumber,
        GeneratedMethod startKeyGetter,
        int startSearchOperator,
        GeneratedMethod stopKeyGetter,
        int stopSearchOperator,
        boolean sameStartStopPosition,
        Qualifier[][] scanQualifiers,
        Qualifier[][] nextQualifiers,
        int initialCapacity,
        float loadFactor,
        int maxCapacity,
        int hashKeyColumn,
        String tableName,
        String userSuppliedOptimizerOverrides,
        String indexName,
        boolean isConstraint,
        boolean forUpdate,
        int colRefItem,
        int indexColItem,
        int lockMode,
        boolean tableLocked,
        int isolationLevel,
        double optimizerEstimatedRowCount,
        double optimizerEstimatedCost)
        throws StandardException
    {
        // Resolve the compile-time conglomerate info saved with the plan.
        StaticCompiledOpenConglomInfo scoci = (StaticCompiledOpenConglomInfo)
            (activation.getPreparedStatement().getSavedObject(scociItem));

        // NOTE(review): indexColItem is accepted but not forwarded to
        // HashScanResultSet — verify this is intentional.
        return new HashScanResultSet(
                                conglomId,
                                scoci,
                                activation,
                                resultRowAllocator,
                                resultSetNumber,
                                startKeyGetter,
                                startSearchOperator,
                                stopKeyGetter,
                                stopSearchOperator,
                                sameStartStopPosition,
                                scanQualifiers,
                                nextQualifiers,
                                initialCapacity,
                                loadFactor,
                                maxCapacity,
                                hashKeyColumn,
                                tableName,
                                userSuppliedOptimizerOverrides,
                                indexName,
                                isConstraint,
                                forUpdate,
                                colRefItem,
                                lockMode,
                                tableLocked,
                                isolationLevel,
                                true,   // Skip rows with 1 or more null key columns
                                optimizerEstimatedRowCount,
                                optimizerEstimatedCost);
    }

    /**
        a distinct scan generator, for ease of use at present.
        @see ResultSetFactory#getHashScanResultSet
        @exception StandardException thrown on error
     */
    public NoPutResultSet getDistinctScanResultSet(
        Activation activation,
        long conglomId,
        int scociItem,
        GeneratedMethod resultRowAllocator,
        int resultSetNumber,
        int hashKeyColumn,
        String tableName,
        String userSuppliedOptimizerOverrides,
        String indexName,
        boolean isConstraint,
        int colRefItem,
        int lockMode,
        boolean tableLocked,
        int isolationLevel,
        double optimizerEstimatedRowCount,
        double optimizerEstimatedCost)
        throws StandardException
    {
        StaticCompiledOpenConglomInfo scoci = (StaticCompiledOpenConglomInfo)
            (activation.getPreparedStatement().getSavedObject(scociItem));
        return new DistinctScanResultSet(
                                conglomId,
                                scoci,
                                activation,
                                resultRowAllocator,
                                resultSetNumber,
                                hashKeyColumn,
                                tableName,
                                userSuppliedOptimizerOverrides,
                                indexName,
                                isConstraint,
                                colRefItem,
                                lockMode,
                                tableLocked,
                                isolationLevel,
                                optimizerEstimatedRowCount,
                                optimizerEstimatedCost);
    }

    /**
        a minimal table scan generator, for ease of use at present.
        @see ResultSetFactory#getTableScanResultSet
        @exception StandardException thrown on error
     */
    public NoPutResultSet getTableScanResultSet(
        Activation activation,
        long conglomId,
        int scociItem,
        GeneratedMethod resultRowAllocator,
        int resultSetNumber,
        GeneratedMethod startKeyGetter,
        int startSearchOperator,
        GeneratedMethod stopKeyGetter,
        int stopSearchOperator,
        boolean sameStartStopPosition,
        Qualifier[][] qualifiers,
        String tableName,
        String userSuppliedOptimizerOverrides,
        String indexName,
        boolean isConstraint,
        boolean forUpdate,
        int colRefItem,
        int indexColItem,
        int lockMode,
        boolean tableLocked,
        int isolationLevel,
        boolean oneRowScan,
        double optimizerEstimatedRowCount,
        double optimizerEstimatedCost)
        throws StandardException
    {
        StaticCompiledOpenConglomInfo scoci = (StaticCompiledOpenConglomInfo)
            (activation.getPreparedStatement().getSavedObject(scociItem));
        return new TableScanResultSet(
                                conglomId,
                                scoci,
                                activation,
                                resultRowAllocator,
                                resultSetNumber,
                                startKeyGetter,
                                startSearchOperator,
                                stopKeyGetter,
                                stopSearchOperator,
                                sameStartStopPosition,
                                qualifiers,
                                tableName,
                                userSuppliedOptimizerOverrides,
                                indexName,
                                isConstraint,
                                forUpdate,
                                colRefItem,
                                indexColItem,
                                lockMode,
                                tableLocked,
                                isolationLevel,
                                1,  // rowsPerRead is 1 if not a bulkTableScan
                                oneRowScan,
                                optimizerEstimatedRowCount,
                                optimizerEstimatedCost);
    }

    /**
        Table/Index scan where rows are read in bulk
        @see ResultSetFactory#getBulkTableScanResultSet
        @exception StandardException thrown on error
     */
    public NoPutResultSet getBulkTableScanResultSet(
        Activation activation,
        long conglomId,
        int scociItem,
        GeneratedMethod resultRowAllocator,
        int resultSetNumber,
        GeneratedMethod startKeyGetter,
        int startSearchOperator,
        GeneratedMethod stopKeyGetter,
        int stopSearchOperator,
        boolean sameStartStopPosition,
        Qualifier[][] qualifiers,
        String tableName,
        String userSuppliedOptimizerOverrides,
        String indexName,
        boolean isConstraint,
        boolean forUpdate,
        int colRefItem,
        int indexColItem,
        int lockMode,
        boolean tableLocked,
        int isolationLevel,
        int rowsPerRead,
        boolean oneRowScan,
        double optimizerEstimatedRowCount,
        double optimizerEstimatedCost)
        throws StandardException
    {
        //Prior to Cloudscape 10.0 release, holdability was false by default. Programmers had to explicitly
        //set the holdability to true using JDBC apis. Since holdability was not true by default, we chose to disable the
        //prefetching for RR and Serializable when holdability was explicitly set to true.
        //But starting Cloudscape 10.0 release, in order to be DB2 compatible, holdability is set to true by default.
        //Because of that, we can not continue to disable the prefetching for RR and Serializable, since it causes
        //severe performance degradation - bug 5953.
        StaticCompiledOpenConglomInfo scoci = (StaticCompiledOpenConglomInfo)
            (activation.getPreparedStatement().getSavedObject(scociItem));
        return new BulkTableScanResultSet(
                                conglomId,
                                scoci,
                                activation,
                                resultRowAllocator,
                                resultSetNumber,
                                startKeyGetter,
                                startSearchOperator,
                                stopKeyGetter,
                                stopSearchOperator,
                                sameStartStopPosition,
                                qualifiers,
                                tableName,
                                userSuppliedOptimizerOverrides,
                                indexName,
                                isConstraint,
                                forUpdate,
                                colRefItem,
                                indexColItem,
                                lockMode,
                                tableLocked,
                                isolationLevel,
                                rowsPerRead,
                                oneRowScan,
                                optimizerEstimatedRowCount,
                                optimizerEstimatedCost);
    }

    /**
        Multi-probing scan that probes an index for specific values contained
        in the received probe list.  All index rows for which the first column
        equals probeVals[0] will be returned, followed by all rows for which
        the first column equals probeVals[1], and so on.  Assumption is that
        we only get here if probeVals has at least one value.
        @see ResultSetFactory#getMultiProbeTableScanResultSet
        @exception StandardException thrown on error
     */
    public NoPutResultSet getMultiProbeTableScanResultSet(
        Activation activation,
        long conglomId,
        int scociItem,
        GeneratedMethod resultRowAllocator,
        int resultSetNumber,
        GeneratedMethod startKeyGetter,
        int startSearchOperator,
        GeneratedMethod stopKeyGetter,
        int stopSearchOperator,
        boolean sameStartStopPosition,
        Qualifier[][] qualifiers,
        DataValueDescriptor [] probeVals,
        int sortRequired,
        String tableName,
        String userSuppliedOptimizerOverrides,
        String indexName,
        boolean isConstraint,
        boolean forUpdate,
        int colRefItem,
        int indexColItem,
        int lockMode,
        boolean tableLocked,
        int isolationLevel,
        boolean oneRowScan,
        double optimizerEstimatedRowCount,
        double optimizerEstimatedCost)
        throws StandardException
    {
        StaticCompiledOpenConglomInfo scoci = (StaticCompiledOpenConglomInfo)
            activation.getPreparedStatement().getSavedObject(scociItem);

        return new MultiProbeTableScanResultSet(
                                conglomId,
                                scoci,
                                activation,
                                resultRowAllocator,
                                resultSetNumber,
                                startKeyGetter,
                                startSearchOperator,
                                stopKeyGetter,
                                stopSearchOperator,
                                sameStartStopPosition,
                                qualifiers,
                                probeVals,
                                sortRequired,
                                tableName,
                                userSuppliedOptimizerOverrides,
                                indexName,
                                isConstraint,
                                forUpdate,
                                colRefItem,
                                indexColItem,
                                lockMode,
                                tableLocked,
                                isolationLevel,
                                oneRowScan,
                                optimizerEstimatedRowCount,
                                optimizerEstimatedCost);
    }

    /**
        @see ResultSetFactory#getIndexRowToBaseRowResultSet
        @exception StandardException Thrown on error
     */
    public NoPutResultSet getIndexRowToBaseRowResultSet(
        long conglomId,
        int scociItem,
        NoPutResultSet source,
        GeneratedMethod resultRowAllocator,
        int resultSetNumber,
        String indexName,
        int heapColRefItem,
        int allColRefItem,
        int heapOnlyColRefItem,
        int indexColMapItem,
        GeneratedMethod restriction,
        boolean forUpdate,
        double optimizerEstimatedRowCount,
        double optimizerEstimatedCost)
        throws StandardException
    {
        return new IndexRowToBaseRowResultSet(
                        conglomId,
                        scociItem,
                        source.getActivation(),
                        source,
                        resultRowAllocator,
                        resultSetNumber,
                        indexName,
                        heapColRefItem,
                        allColRefItem,
                        heapOnlyColRefItem,
                        indexColMapItem,
                        restriction,
                        forUpdate,
                        optimizerEstimatedRowCount,
                        optimizerEstimatedCost);
    }

    /**
        @see ResultSetFactory#getWindowResultSet
        @exception StandardException Thrown on error
     */
    public NoPutResultSet getWindowResultSet(
        Activation activation,
        NoPutResultSet source,
        GeneratedMethod rowAllocator,
        int resultSetNumber,
        int level,
        int erdNumber,
        GeneratedMethod restriction,
        double optimizerEstimatedRowCount,
        double optimizerEstimatedCost)
        throws StandardException
    {
        return new WindowResultSet(
                        activation,
                        source,
                        rowAllocator,
                        resultSetNumber,
                        level,
                        erdNumber,
                        restriction,
                        optimizerEstimatedRowCount,
                        optimizerEstimatedCost);
    }

    /**
        @see ResultSetFactory#getNestedLoopJoinResultSet
        @exception StandardException thrown on error
     */
    public NoPutResultSet getNestedLoopJoinResultSet(NoPutResultSet leftResultSet,
                                   int leftNumCols,
                                   NoPutResultSet rightResultSet,
                                   int rightNumCols,
                                   GeneratedMethod joinClause,
                                   int resultSetNumber,
                                   boolean oneRowRightSide,
                                   boolean notExistsRightSide,
                                   double optimizerEstimatedRowCount,
                                   double optimizerEstimatedCost,
                                   String userSuppliedOptimizerOverrides)
        throws StandardException
    {
        return new NestedLoopJoinResultSet(leftResultSet, leftNumCols,
                                           rightResultSet, rightNumCols,
                                           leftResultSet.getActivation(), joinClause,
                                           resultSetNumber,
                                           oneRowRightSide,
                                           notExistsRightSide,
                                           optimizerEstimatedRowCount,
                                           optimizerEstimatedCost,
                                           userSuppliedOptimizerOverrides);
    }

    /**
        @see ResultSetFactory#getHashJoinResultSet
        @exception StandardException thrown on error
     */
    public NoPutResultSet getHashJoinResultSet(NoPutResultSet leftResultSet,
                                   int leftNumCols,
                                   NoPutResultSet rightResultSet,
                                   int rightNumCols,
                                   GeneratedMethod joinClause,
                                   int resultSetNumber,
                                   boolean oneRowRightSide,
                                   boolean notExistsRightSide,
                                   double optimizerEstimatedRowCount,
                                   double optimizerEstimatedCost,
                                   String userSuppliedOptimizerOverrides)
        throws StandardException
    {
        return new HashJoinResultSet(leftResultSet, leftNumCols,
                                     rightResultSet, rightNumCols,
                                     leftResultSet.getActivation(), joinClause,
                                     resultSetNumber,
                                     oneRowRightSide,
                                     notExistsRightSide,
                                     optimizerEstimatedRowCount,
                                     optimizerEstimatedCost,
                                     userSuppliedOptimizerOverrides);
    }

    /**
        @see ResultSetFactory#getNestedLoopLeftOuterJoinResultSet
        @exception StandardException thrown on error
     */
    public NoPutResultSet getNestedLoopLeftOuterJoinResultSet(NoPutResultSet leftResultSet,
                                   int leftNumCols,
                                   NoPutResultSet rightResultSet,
                                   int rightNumCols,
                                   GeneratedMethod joinClause,
                                   int resultSetNumber,
                                   GeneratedMethod emptyRowFun,
                                   boolean wasRightOuterJoin,
                                   boolean oneRowRightSide,
                                   boolean notExistsRightSide,
                                   double optimizerEstimatedRowCount,
                                   double optimizerEstimatedCost,
                                   String userSuppliedOptimizerOverrides)
        throws StandardException
    {
        return new NestedLoopLeftOuterJoinResultSet(leftResultSet, leftNumCols,
                                                    rightResultSet, rightNumCols,
                                                    leftResultSet.getActivation(), joinClause,
                                                    resultSetNumber,
                                                    emptyRowFun,
                                                    wasRightOuterJoin,
                                                    oneRowRightSide,
                                                    notExistsRightSide,
                                                    optimizerEstimatedRowCount,
                                                    optimizerEstimatedCost,
                                                    userSuppliedOptimizerOverrides);
    }

    /**
        @see ResultSetFactory#getHashLeftOuterJoinResultSet
        @exception StandardException thrown on error
     */
    public NoPutResultSet getHashLeftOuterJoinResultSet(NoPutResultSet leftResultSet,
                                   int leftNumCols,
                                   NoPutResultSet rightResultSet,
                                   int rightNumCols,
                                   GeneratedMethod joinClause,
                                   int resultSetNumber,
                                   GeneratedMethod emptyRowFun,
                                   boolean wasRightOuterJoin,
                                   boolean oneRowRightSide,
                                   boolean notExistsRightSide,
                                   double optimizerEstimatedRowCount,
                                   double optimizerEstimatedCost,
                                   String userSuppliedOptimizerOverrides)
        throws StandardException
    {
        return new HashLeftOuterJoinResultSet(leftResultSet, leftNumCols,
                                              rightResultSet, rightNumCols,
                                              leftResultSet.getActivation(), joinClause,
                                              resultSetNumber,
                                              emptyRowFun,
                                              wasRightOuterJoin,
                                              oneRowRightSide,
                                              notExistsRightSide,
                                              optimizerEstimatedRowCount,
                                              optimizerEstimatedCost,
                                              userSuppliedOptimizerOverrides);
    }

    /**
        @see ResultSetFactory#getSetTransactionResultSet
        @exception StandardException thrown when unable to create the
            result set
     */
    public ResultSet getSetTransactionResultSet(Activation activation)
        throws StandardException
    {
        getAuthorizer(activation).authorize(activation, Authorizer.SQL_ARBITARY_OP);
        return new SetTransactionResultSet(activation);
    }

    /**
        @see ResultSetFactory#getMaterializedResultSet
        @exception StandardException thrown on error
     */
    public NoPutResultSet getMaterializedResultSet(NoPutResultSet source,
                                                   int resultSetNumber,
                                                   double optimizerEstimatedRowCount,
                                                   double optimizerEstimatedCost)
        throws StandardException
    {
        return new MaterializedResultSet(source, source.getActivation(),
                                         resultSetNumber,
                                         optimizerEstimatedRowCount,
                                         optimizerEstimatedCost);
    }

    /**
        @see ResultSetFactory#getScrollInsensitiveResultSet
        @exception StandardException thrown on error
     */
    public NoPutResultSet getScrollInsensitiveResultSet(NoPutResultSet source,
                                                        Activation activation,
                                                        int resultSetNumber,
                                                        int sourceRowWidth,
                                                        boolean scrollable,
                                                        double optimizerEstimatedRowCount,
                                                        double optimizerEstimatedCost)
        throws StandardException
    {
        /* ResultSet tree is dependent on whether or not this is
         * for a scroll insensitive cursor.
         */
        if (scrollable)
        {
            return new ScrollInsensitiveResultSet(source, activation,
                                                  resultSetNumber,
                                                  sourceRowWidth,
                                                  optimizerEstimatedRowCount,
                                                  optimizerEstimatedCost);
        }
        else
        {
            // Forward-only cursors need no scroll buffer; pass the source through.
            return source;
        }
    }

    /**
        @see ResultSetFactory#getNormalizeResultSet
        @exception StandardException thrown on error
     */
    public NoPutResultSet getNormalizeResultSet(NoPutResultSet source,
                                                int resultSetNumber,
                                                int erdNumber,
                                                double optimizerEstimatedRowCount,
                                                double optimizerEstimatedCost,
                                                boolean forUpdate)
        throws StandardException
    {
        return new NormalizeResultSet(source, source.getActivation(),
                                      resultSetNumber, erdNumber,
                                      optimizerEstimatedRowCount,
                                      optimizerEstimatedCost, forUpdate);
    }

    /**
        @see ResultSetFactory#getCurrentOfResultSet
     */
    public NoPutResultSet getCurrentOfResultSet(String cursorName,
                                                Activation activation,
                                                int resultSetNumber)
    {
        return new CurrentOfResultSet(cursorName, activation, resultSetNumber);
    }

    /**
        @see ResultSetFactory#getDDLResultSet
        @exception StandardException thrown on error
     */
    public ResultSet getDDLResultSet(Activation activation)
        throws StandardException
    {
        getAuthorizer(activation).authorize(activation, Authorizer.SQL_DDL_OP);
        return getMiscResultSet( activation);
    }

    /**
        @see ResultSetFactory#getMiscResultSet
        @exception StandardException thrown on error
     */
    public ResultSet getMiscResultSet(Activation activation)
        throws StandardException
    {
        getAuthorizer(activation).authorize(activation, Authorizer.SQL_ARBITARY_OP);
        return new MiscResultSet(activation);
    }

    /**
        a minimal union scan generator, for ease of use at present.
        @see ResultSetFactory#getUnionResultSet
        @exception StandardException thrown on error
     */
    public NoPutResultSet getUnionResultSet(NoPutResultSet leftResultSet,
                                            NoPutResultSet rightResultSet,
                                            int resultSetNumber,
                                            double optimizerEstimatedRowCount,
                                            double optimizerEstimatedCost)
        throws StandardException
    {
        return new UnionResultSet(leftResultSet, rightResultSet,
                                  leftResultSet.getActivation(),
                                  resultSetNumber,
                                  optimizerEstimatedRowCount,
                                  optimizerEstimatedCost);
    }

    /**
        @see ResultSetFactory#getSetOpResultSet
        @exception StandardException thrown on error
     */
    // NOTE(review): optimizerEstimatedRowCount is declared long here, unlike the
    // double used by the other factory methods — presumably dictated by the
    // ResultSetFactory interface; confirm before changing.
    public NoPutResultSet getSetOpResultSet( NoPutResultSet leftSource,
                                             NoPutResultSet rightSource,
                                             Activation activation,
                                             int resultSetNumber,
                                             long optimizerEstimatedRowCount,
                                             double optimizerEstimatedCost,
                                             int opType,
                                             boolean all,
                                             int intermediateOrderByColumnsSavedObject,
                                             int intermediateOrderByDirectionSavedObject,
                                             int intermediateOrderByNullsLowSavedObject)
        throws StandardException
    {
        return new SetOpResultSet( leftSource,
                                   rightSource,
                                   activation,
                                   resultSetNumber,
                                   optimizerEstimatedRowCount,
                                   optimizerEstimatedCost,
                                   opType,
                                   all,
                                   intermediateOrderByColumnsSavedObject,
                                   intermediateOrderByDirectionSavedObject,
                                   intermediateOrderByNullsLowSavedObject);
    }

    /**
     * A last index key sresult set returns the last row from
     * the index in question.  It is used as an ajunct to max().
     *
     * @param activation the activation for this result set,
     *      which provides the context for the row allocation operation.
     * @param resultSetNumber The resultSetNumber for the ResultSet
     * @param resultRowAllocator a reference to a method in the activation
     *      that creates a holder for the result row of the scan.  May
     *      be a partial row.  <verbatim>
     *      ExecRow rowAllocator() throws StandardException; </verbatim>
     * @param conglomId the conglomerate of the table to be scanned.
     * @param tableName The full name of the table
     * @param userSuppliedOptimizerOverrides Overrides specified by the user on the sql
     * @param indexName The name of the index, if one used to access table.
     * @param colRefItem An saved item for a bitSet of columns that
     *      are referenced in the underlying table.  -1 if
     *      no item.
     * @param lockMode The lock granularity to use (see
     *      TransactionController in access)
     * @param tableLocked Whether or not the table is marked as using table locking
     *      (in sys.systables)
     * @param isolationLevel Isolation level (specified or not) to use on scans
     * @param optimizerEstimatedRowCount Estimated total # of rows by
     *      optimizer
     * @param optimizerEstimatedCost Estimated total cost by optimizer
     *
     * @return the scan operation as a result set.
     *
     * @exception StandardException thrown when unable to create the
     *      result set
     */
    public NoPutResultSet getLastIndexKeyResultSet
    (
        Activation activation,
        int resultSetNumber,
        GeneratedMethod resultRowAllocator,
        long conglomId,
        String tableName,
        String userSuppliedOptimizerOverrides,
        String indexName,
        int colRefItem,
        int lockMode,
        boolean tableLocked,
        int isolationLevel,
        double optimizerEstimatedRowCount,
        double optimizerEstimatedCost
    ) throws StandardException
    {
        return new LastIndexKeyResultSet(
                    activation,
                    resultSetNumber,
                    resultRowAllocator,
                    conglomId,
                    tableName,
                    userSuppliedOptimizerOverrides,
                    indexName,
                    colRefItem,
                    lockMode,
                    tableLocked,
                    isolationLevel,
                    optimizerEstimatedRowCount,
                    optimizerEstimatedCost);
    }

    /**
     * a referential action dependent table scan generator.
     * @see ResultSetFactory#getTableScanResultSet
     * @exception StandardException thrown on error
     */
    public NoPutResultSet getRaDependentTableScanResultSet(
        Activation activation,
        long conglomId,
        int scociItem,
        GeneratedMethod resultRowAllocator,
        int resultSetNumber,
        GeneratedMethod startKeyGetter,
        int startSearchOperator,
        GeneratedMethod stopKeyGetter,
        int stopSearchOperator,
        boolean sameStartStopPosition,
        Qualifier[][] qualifiers,
        String tableName,
        String userSuppliedOptimizerOverrides,
        String indexName,
        boolean isConstraint,
        boolean forUpdate,
        int colRefItem,
        int indexColItem,
        int lockMode,
        boolean tableLocked,
        int isolationLevel,
        boolean oneRowScan,
        double optimizerEstimatedRowCount,
        double optimizerEstimatedCost,
        String parentResultSetId,
        long fkIndexConglomId,
        int fkColArrayItem,
        int rltItem)
        throws StandardException
    {
        StaticCompiledOpenConglomInfo scoci = (StaticCompiledOpenConglomInfo)
            (activation.getPreparedStatement().getSavedObject(scociItem));
        return new DependentResultSet(
                                conglomId,
                                scoci,
                                activation,
                                resultRowAllocator,
                                resultSetNumber,
                                startKeyGetter,
                                startSearchOperator,
                                stopKeyGetter,
                                stopSearchOperator,
                                sameStartStopPosition,
                                qualifiers,
                                tableName,
                                userSuppliedOptimizerOverrides,
                                indexName,
                                isConstraint,
                                forUpdate,
                                colRefItem,
                                lockMode,
                                tableLocked,
                                isolationLevel,
                                1,
                                oneRowScan,
                                optimizerEstimatedRowCount,
                                optimizerEstimatedCost,
                                parentResultSetId,
                                fkIndexConglomId,
                                fkColArrayItem,
                                rltItem);
    }

    /**
        Looks up the Authorizer for the current connection via the activation's
        LanguageConnectionContext.
     */
    static private Authorizer getAuthorizer(Activation activation)
    {
        LanguageConnectionContext lcc = activation.getLanguageConnectionContext();
        return lcc.getAuthorizer();
    }

    /////////////////////////////////////////////////////////////////
    //
    //  PUBLIC MINIONS
    //
    /////////////////////////////////////////////////////////////////
}
/* * Copyright 2018-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.openstacknode.util; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Strings; import com.google.common.collect.ImmutableMap; import org.onosproject.net.Device; import org.onosproject.net.behaviour.BridgeConfig; import org.onosproject.net.behaviour.BridgeName; import org.onosproject.net.device.DeviceService; import org.onosproject.openstacknode.api.DpdkInterface; import org.onosproject.openstacknode.api.OpenstackAuth; import org.onosproject.openstacknode.api.OpenstackAuth.Perspective; import org.onosproject.openstacknode.api.OpenstackNode; import org.onosproject.ovsdb.controller.OvsdbClientService; import org.onosproject.ovsdb.controller.OvsdbController; import org.onosproject.ovsdb.controller.OvsdbInterface; import org.onosproject.ovsdb.controller.OvsdbNodeId; import org.openstack4j.api.OSClient; import org.openstack4j.api.client.IOSClientBuilder; import org.openstack4j.api.exceptions.AuthenticationException; import org.openstack4j.api.types.Facing; import org.openstack4j.core.transport.Config; import org.openstack4j.model.common.Identifier; import org.openstack4j.openstack.OSFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.net.ssl.HostnameVerifier; import javax.net.ssl.HttpsURLConnection; import javax.net.ssl.SSLContext; import javax.net.ssl.TrustManager; import 
javax.net.ssl.X509TrustManager;
import java.io.IOException;
import java.security.cert.X509Certificate;
import java.util.Dictionary;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;

import static org.onlab.util.Tools.get;

/**
 * A utility used in the openstack node app.
 */
public final class OpenstackNodeUtil {
    private static final Logger log = LoggerFactory.getLogger(OpenstackNodeUtil.class);

    // keystone endpoint related variables
    private static final String DOMAIN_DEFAULT = "default";
    private static final String KEYSTONE_V2 = "v2.0";
    private static final String KEYSTONE_V3 = "v3";
    private static final String SSL_TYPE = "SSL";

    // DPID generation: "of:" prefix + 16 hex digits zero-padded with ZERO
    private static final int HEX_LENGTH = 16;
    private static final String OF_PREFIX = "of:";
    private static final String ZERO = "0";

    // OVSDB interface option key for a DPDK port's PCI address
    private static final String DPDK_DEVARGS = "dpdk-devargs";

    private static final String NOT_AVAILABLE = "N/A";

    /**
     * Prevents object instantiation from external.
     */
    private OpenstackNodeUtil() {
    }

    /**
     * Checks whether the controller has a connection with an OVSDB that resides
     * inside the given openstack node.
     *
     * @param osNode openstack node
     * @param ovsdbPort ovsdb port
     * @param ovsdbController ovsdb controller
     * @param deviceService device service
     * @return true if the controller is connected to the OVSDB, false otherwise
     */
    public static boolean isOvsdbConnected(OpenstackNode osNode,
                                           int ovsdbPort,
                                           OvsdbController ovsdbController,
                                           DeviceService deviceService) {
        OvsdbClientService client = getOvsdbClient(osNode, ovsdbPort, ovsdbController);

        // Both the device-service view and the OVSDB client connection must agree.
        return deviceService.isAvailable(osNode.ovsdb()) &&
                client != null &&
                client.isConnected();
    }

    /**
     * Gets the ovsdb client with supplied openstack node.
     *
     * @param osNode openstack node
     * @param ovsdbPort ovsdb port
     * @param ovsdbController ovsdb controller
     * @return ovsdb client
     */
    public static OvsdbClientService getOvsdbClient(OpenstackNode osNode,
                                                    int ovsdbPort,
                                                    OvsdbController ovsdbController) {
        // The OVSDB node identity is (management IP, port).
        OvsdbNodeId ovsdb = new OvsdbNodeId(osNode.managementIp(), ovsdbPort);
        return ovsdbController.getOvsdbClient(ovsdb);
    }

    /**
     * Obtains a connected openstack client.
     *
     * @param osNode openstack node
     * @return a connected openstack client, or null if the keystone version is
     *         unrecognized or authentication fails
     */
    public static OSClient getConnectedClient(OpenstackNode osNode) {
        OpenstackAuth auth = osNode.keystoneConfig().authentication();
        String endpoint = buildEndpoint(osNode);
        Perspective perspective = auth.perspective();

        Config config = getSslConfig();

        try {
            // Keystone version is inferred from the endpoint URL path.
            if (endpoint.contains(KEYSTONE_V2)) {
                IOSClientBuilder.V2 builder = OSFactory.builderV2()
                        .endpoint(endpoint)
                        .tenantName(auth.project())
                        .credentials(auth.username(), auth.password())
                        .withConfig(config);

                if (perspective != null) {
                    builder.perspective(getFacing(perspective));
                }

                return builder.authenticate();
            } else if (endpoint.contains(KEYSTONE_V3)) {

                // v3 scopes the token to a project within the default domain.
                Identifier project = Identifier.byName(auth.project());
                Identifier domain = Identifier.byName(DOMAIN_DEFAULT);

                IOSClientBuilder.V3 builder = OSFactory.builderV3()
                        .endpoint(endpoint)
                        .credentials(auth.username(), auth.password(), domain)
                        .scopeToProject(project, domain)
                        .withConfig(config);

                if (perspective != null) {
                    builder.perspective(getFacing(perspective));
                }

                return builder.authenticate();
            } else {
                log.warn("Unrecognized keystone version type");
                return null;
            }
        } catch (AuthenticationException e) {
            log.error("Authentication failed due to {}", e.toString());
            return null;
        }
    }

    /**
     * Gets Boolean property from the propertyName
     * Return null if propertyName is not found.
* * @param properties properties to be looked up * @param propertyName the name of the property to look up * @return value when the propertyName is defined or return null */ public static Boolean getBooleanProperty(Dictionary<?, ?> properties, String propertyName) { Boolean value; try { String s = get(properties, propertyName); value = Strings.isNullOrEmpty(s) ? null : Boolean.valueOf(s); } catch (ClassCastException e) { value = null; } return value; } /** * Prints out the JSON string in pretty format. * * @param mapper Object mapper * @param jsonString JSON string * @return pretty formatted JSON string */ public static String prettyJson(ObjectMapper mapper, String jsonString) { try { Object jsonObject = mapper.readValue(jsonString, Object.class); return mapper.writerWithDefaultPrettyPrinter().writeValueAsString(jsonObject); } catch (IOException e) { log.debug("Json string parsing exception caused by {}", e); } return null; } /** * Generates a DPID (of:0000000000000001) from an index value. * * @param index index value * @return generated DPID */ public static String genDpid(long index) { if (index < 0) { return null; } String hexStr = Long.toHexString(index); StringBuilder zeroPadding = new StringBuilder(); for (int i = 0; i < HEX_LENGTH - hexStr.length(); i++) { zeroPadding.append(ZERO); } return OF_PREFIX + zeroPadding.toString() + hexStr; } /** * Adds or removes a network interface (aka port) into a given bridge of openstack node. 
* * @param osNode openstack node * @param bridgeName bridge name * @param intfName interface name * @param deviceService device service * @param addOrRemove add port is true, remove it otherwise */ public static synchronized void addOrRemoveSystemInterface(OpenstackNode osNode, String bridgeName, String intfName, DeviceService deviceService, boolean addOrRemove) { Device device = deviceService.getDevice(osNode.ovsdb()); if (device == null || !device.is(BridgeConfig.class)) { log.info("device is null or this device if not ovsdb device"); return; } BridgeConfig bridgeConfig = device.as(BridgeConfig.class); if (addOrRemove) { bridgeConfig.addPort(BridgeName.bridgeName(bridgeName), intfName); } else { bridgeConfig.deletePort(BridgeName.bridgeName(bridgeName), intfName); } } /** * Adds or removes a dpdk interface into a given openstack node. * * @param osNode openstack node * @param dpdkInterface dpdk interface * @param ovsdbPort ovsdb port * @param ovsdbController ovsdb controller * @param addOrRemove add port is true, remove it otherwise */ public static synchronized void addOrRemoveDpdkInterface(OpenstackNode osNode, DpdkInterface dpdkInterface, int ovsdbPort, OvsdbController ovsdbController, boolean addOrRemove) { OvsdbClientService client = getOvsdbClient(osNode, ovsdbPort, ovsdbController); if (client == null) { log.info("Failed to get ovsdb client"); return; } if (addOrRemove) { Map<String, String> options = ImmutableMap.of(DPDK_DEVARGS, dpdkInterface.pciAddress()); OvsdbInterface.Builder builder = OvsdbInterface.builder() .name(dpdkInterface.intf()) .type(OvsdbInterface.Type.DPDK) .mtu(dpdkInterface.mtu()) .options(options); client.createInterface(dpdkInterface.deviceName(), builder.build()); } else { client.dropInterface(dpdkInterface.intf()); } } /** * Obtains the gateway node by openstack node. Note that the gateway * node is determined by device's device identifier. 
* * @param gws a collection of gateway nodes * @param openstackNode device identifier * @return the hostname of selected gateway node */ public static String getGwByComputeNode(Set<OpenstackNode> gws, OpenstackNode openstackNode) { int numOfGw = gws.size(); if (numOfGw == 0) { return NOT_AVAILABLE; } if (!openstackNode.type().equals(OpenstackNode.NodeType.COMPUTE)) { return NOT_AVAILABLE; } int gwIndex = Math.abs(openstackNode.intgBridge().hashCode()) % numOfGw; return getGwByIndex(gws, gwIndex).hostname(); } /** * Obtains gateway instance by giving index number. * * @param gws a collection of gateway nodes * @param index index number * @return gateway instance */ private static OpenstackNode getGwByIndex(Set<OpenstackNode> gws, int index) { Map<String, OpenstackNode> hashMap = new HashMap<>(); gws.forEach(gw -> hashMap.put(gw.hostname(), gw)); TreeMap<String, OpenstackNode> treeMap = new TreeMap<>(hashMap); Iterator<String> iteratorKey = treeMap.keySet().iterator(); int intIndex = 0; OpenstackNode gw = null; while (iteratorKey.hasNext()) { String key = iteratorKey.next(); if (intIndex == index) { gw = treeMap.get(key); } intIndex++; } return gw; } /** * Builds up and a complete endpoint URL from gateway node. * * @param node gateway node * @return a complete endpoint URL */ private static String buildEndpoint(OpenstackNode node) { OpenstackAuth auth = node.keystoneConfig().authentication(); StringBuilder endpointSb = new StringBuilder(); endpointSb.append(auth.protocol().name().toLowerCase()); endpointSb.append("://"); endpointSb.append(node.keystoneConfig().endpoint()); return endpointSb.toString(); } /** * Obtains the SSL config without verifying the certification. 
* * @return SSL config */ private static Config getSslConfig() { // we bypass the SSL certification verification for now // TODO: verify server side SSL using a given certification Config config = Config.newConfig().withSSLVerificationDisabled(); TrustManager[] trustAllCerts = new TrustManager[]{ new X509TrustManager() { public X509Certificate[] getAcceptedIssuers() { return null; } public void checkClientTrusted(X509Certificate[] certs, String authType) { } public void checkServerTrusted(X509Certificate[] certs, String authType) { } } }; HostnameVerifier allHostsValid = (hostname, session) -> true; try { SSLContext sc = SSLContext.getInstance(SSL_TYPE); sc.init(null, trustAllCerts, new java.security.SecureRandom()); HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory()); HttpsURLConnection.setDefaultHostnameVerifier(allHostsValid); config.withSSLContext(sc); } catch (Exception e) { log.error("Failed to access OpenStack service due to {}", e.toString()); return null; } return config; } /** * Obtains the facing object with given openstack perspective. * * @param perspective keystone perspective * @return facing object */ private static Facing getFacing(Perspective perspective) { switch (perspective) { case PUBLIC: return Facing.PUBLIC; case ADMIN: return Facing.ADMIN; case INTERNAL: return Facing.INTERNAL; default: return null; } } }
// Copyright 2006 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.syntax.util; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.fail; import com.google.common.base.Joiner; import com.google.common.truth.Ordered; import com.google.devtools.build.lib.events.Event; import com.google.devtools.build.lib.events.EventCollector; import com.google.devtools.build.lib.events.EventKind; import com.google.devtools.build.lib.events.ExtendedEventHandler; import com.google.devtools.build.lib.events.util.EventCollectionApparatus; import com.google.devtools.build.lib.syntax.BazelLibrary; import com.google.devtools.build.lib.syntax.BuildFileAST; import com.google.devtools.build.lib.syntax.Environment; import com.google.devtools.build.lib.syntax.Environment.FailFastException; import com.google.devtools.build.lib.syntax.Environment.Phase; import com.google.devtools.build.lib.syntax.EvalException; import com.google.devtools.build.lib.syntax.Expression; import com.google.devtools.build.lib.syntax.Mutability; import com.google.devtools.build.lib.syntax.Parser; import com.google.devtools.build.lib.syntax.ParserInputSource; import com.google.devtools.build.lib.syntax.SkylarkUtils; import com.google.devtools.build.lib.syntax.Statement; import com.google.devtools.build.lib.testutil.TestConstants; import com.google.devtools.build.lib.testutil.TestMode; import java.util.LinkedList; 
import java.util.List;
import org.junit.Before;

/**
 * Base class for test cases that use parsing and evaluation services.
 */
public class EvaluationTestCase {
  // Collects and asserts on events (errors/warnings) emitted during evaluation.
  private EventCollectionApparatus eventCollectionApparatus =
      new EventCollectionApparatus(EventKind.ALL_EVENTS);
  // Determines which kind of Environment eval() runs in (BUILD vs. SKYLARK).
  private TestMode testMode = TestMode.SKYLARK;
  protected Environment env;
  protected Mutability mutability = Mutability.create("test");

  @Before
  public final void initialize() throws Exception {
    beforeInitialization();
    env = newEnvironment();
  }

  protected void beforeInitialization() throws Exception {
    // This method exists so that it can be overridden in MakeEnvironmentTest.
    // The problem is that MakeEnvironmentTest's initialization code (setting up makeEnvBuilder)
    // needs to run before initialize(), otherwise some tests fail with an NPE.
    // Consequently, we need this hack to ensure the right order of methods.
  }

  /**
   * Creates a standard Environment for tests in the BUILD language.
   * No PythonPreprocessing, mostly empty mutable Environment.
   */
  public Environment newBuildEnvironment() {
    Environment env =
        Environment.builder(mutability)
            .useDefaultSemantics()
            .setGlobals(BazelLibrary.GLOBALS)
            .setEventHandler(getEventHandler())
            .setPhase(Phase.LOADING)
            .build();
    SkylarkUtils.setToolsRepository(env, TestConstants.TOOLS_REPOSITORY);
    return env;
  }

  /**
   * Creates an Environment for Skylark with a mostly empty initial environment.
   * For internal initialization or tests.
   */
  public Environment newSkylarkEnvironment() {
    return Environment.builder(mutability)
        .useDefaultSemantics()
        .setGlobals(BazelLibrary.GLOBALS)
        .setEventHandler(getEventHandler())
        .build();
  }

  /**
   * Creates a new Environment suitable for the test case. Subclasses may override it to fit their
   * purpose and e.g. call newBuildEnvironment or newSkylarkEnvironment; or they may play with the
   * testMode to run tests in either or both kinds of Environment. Note that all Environment-s may
   * share the same Mutability, so don't close it.
   *
   * @return a fresh Environment.
   */
  public Environment newEnvironment() throws Exception {
    return newEnvironmentWithSkylarkOptions();
  }

  protected Environment newEnvironmentWithSkylarkOptions(String... skylarkOptions)
      throws Exception {
    if (testMode == null) {
      throw new IllegalArgumentException(
          "TestMode is null. Please set a Testmode via setMode() or set the "
              + "Environment manually by overriding newEnvironment()");
    }
    return testMode.createEnvironment(getEventHandler(), skylarkOptions);
  }

  /**
   * Sets the specified {@code TestMode} and tries to create the appropriate {@code Environment}
   *
   * @param testMode the mode (BUILD or SKYLARK) in which subsequent evaluations run
   * @throws Exception if the Environment cannot be created
   */
  protected void setMode(TestMode testMode, String... skylarkOptions) throws Exception {
    this.testMode = testMode;
    env = newEnvironmentWithSkylarkOptions(skylarkOptions);
  }

  protected void enableSkylarkMode(String... skylarkOptions) throws Exception {
    setMode(TestMode.SKYLARK, skylarkOptions);
  }

  protected void enableBuildMode(String... skylarkOptions) throws Exception {
    setMode(TestMode.BUILD, skylarkOptions);
  }

  public ExtendedEventHandler getEventHandler() {
    return eventCollectionApparatus.reporter();
  }

  public Environment getEnvironment() {
    return env;
  }

  protected BuildFileAST parseBuildFileASTWithoutValidation(String... input) {
    return BuildFileAST.parseString(getEventHandler(), input);
  }

  protected BuildFileAST parseBuildFileAST(String... input) {
    BuildFileAST ast = parseBuildFileASTWithoutValidation(input);
    return ast.validate(env, getEventHandler());
  }

  protected List<Statement> parseFile(String... input) {
    return parseBuildFileAST(input).getStatements();
  }

  /** Construct a ParserInputSource by concatenating multiple strings with newlines. */
  private ParserInputSource makeParserInputSource(String... input) {
    return ParserInputSource.create(Joiner.on("\n").join(input), null);
  }

  /** Parses a statement, possibly followed by newlines. */
  protected Statement parseStatement(Parser.ParsingLevel parsingLevel, String... input) {
    return Parser.parseStatement(makeParserInputSource(input), getEventHandler(), parsingLevel);
  }

  /** Parses an expression, possibly followed by newlines. */
  protected Expression parseExpression(String... input) {
    return Parser.parseExpression(makeParserInputSource(input), getEventHandler());
  }

  public EvaluationTestCase update(String varname, Object value) throws Exception {
    env.update(varname, value);
    return this;
  }

  public Object lookup(String varname) throws Exception {
    return env.lookup(varname);
  }

  /** Evaluates the input in the current test mode and returns the resulting value. */
  public Object eval(String... input) throws Exception {
    if (testMode == TestMode.SKYLARK) {
      return BuildFileAST.eval(env, input);
    }
    return BuildFileAST.parseBuildString(env.getEventHandler(), input).eval(env);
  }

  /** Asserts that evaluating the input fails with exactly the given message. */
  public void checkEvalError(String msg, String... input) throws Exception {
    try {
      eval(input);
      fail("Expected error '" + msg + "' but got no error");
    } catch (EvalException | FailFastException e) {
      assertThat(e).hasMessageThat().isEqualTo(msg);
    }
  }

  /** Asserts that evaluating the input fails with a message containing the given text. */
  public void checkEvalErrorContains(String msg, String... input) throws Exception {
    try {
      eval(input);
      fail("Expected error containing '" + msg + "' but got no error");
    } catch (EvalException | FailFastException e) {
      assertThat(e).hasMessageThat().contains(msg);
    }
  }

  /** Asserts that if evaluating the input fails, the message does not contain the given text. */
  public void checkEvalErrorDoesNotContain(String msg, String... input) throws Exception {
    try {
      eval(input);
    } catch (EvalException | FailFastException e) {
      assertThat(e).hasMessageThat().doesNotContain(msg);
    }
  }

  // Forward relevant methods to the EventCollectionApparatus

  public EvaluationTestCase setFailFast(boolean failFast) {
    eventCollectionApparatus.setFailFast(failFast);
    return this;
  }

  public EvaluationTestCase assertNoWarningsOrErrors() {
    eventCollectionApparatus.assertNoWarningsOrErrors();
    return this;
  }

  public EventCollector getEventCollector() {
    return eventCollectionApparatus.collector();
  }

  public Event assertContainsError(String expectedMessage) {
    return eventCollectionApparatus.assertContainsError(expectedMessage);
  }

  public Event assertContainsWarning(String expectedMessage) {
    return eventCollectionApparatus.assertContainsWarning(expectedMessage);
  }

  public Event assertContainsDebug(String expectedMessage) {
    return eventCollectionApparatus.assertContainsDebug(expectedMessage);
  }

  public EvaluationTestCase clearEvents() {
    eventCollectionApparatus.clear();
    return this;
  }

  /**
   * Encapsulates a separate test which can be executed by a {@code TestMode}
   */
  protected interface Testable {
    public void run() throws Exception;
  }

  /**
   * Base class for test cases that run in specific modes (e.g. Build and/or Skylark)
   */
  protected abstract class ModalTestCase {
    private final SetupActions setup;

    protected ModalTestCase() {
      setup = new SetupActions();
    }

    /**
     * Allows the execution of several statements before each following test
     * @param statements The statement(s) to be executed
     * @return This {@code ModalTestCase}
     */
    public ModalTestCase setUp(String... statements) {
      setup.registerEval(statements);
      return this;
    }

    /**
     * Allows the update of the specified variable before each following test
     * @param name The name of the variable that should be updated
     * @param value The new value of the variable
     * @return This {@code ModalTestCase}
     */
    public ModalTestCase update(String name, Object value) {
      setup.registerUpdate(name, value);
      return this;
    }

    /**
     * Evaluates two parameters and compares their results.
     * @param statement The statement to be evaluated
     * @param expectedEvalString The expression of the expected result
     * @return This {@code ModalTestCase}
     * @throws Exception if evaluation fails
     */
    public ModalTestCase testEval(String statement, String expectedEvalString) throws Exception {
      runTest(createComparisonTestable(statement, expectedEvalString, true));
      return this;
    }

    /**
     * Evaluates the given statement and compares its result to the expected object
     * @param statement The statement to be evaluated
     * @param expected The expected result object
     * @return This {@code ModalTestCase}
     * @throws Exception if evaluation fails
     */
    public ModalTestCase testStatement(String statement, Object expected) throws Exception {
      runTest(createComparisonTestable(statement, expected, false));
      return this;
    }

    /**
     * Evaluates the given statement and compares its result to the collection of expected objects
     * without considering their order
     * @param statement The statement to be evaluated
     * @param items The expected items
     * @return This {@code ModalTestCase}
     * @throws Exception if evaluation fails
     */
    public ModalTestCase testCollection(String statement, Object... items) throws Exception {
      runTest(collectionTestable(statement, false, items));
      return this;
    }

    /**
     * Evaluates the given statement and compares its result to the collection of expected objects
     * while considering their order
     * @param statement The statement to be evaluated
     * @param items The expected items, in order
     * @return This {@code ModalTestCase}
     * @throws Exception if evaluation fails
     */
    public ModalTestCase testExactOrder(String statement, Object... items) throws Exception {
      runTest(collectionTestable(statement, true, items));
      return this;
    }

    /**
     * Evaluates the given statement and checks whether the given error message appears
     * @param expectedError The expected error message
     * @param statements The statement(s) to be evaluated
     * @return This ModalTestCase
     * @throws Exception if evaluation fails
     */
    public ModalTestCase testIfExactError(String expectedError, String... statements)
        throws Exception {
      runTest(errorTestable(true, expectedError, statements));
      return this;
    }

    /**
     * Evaluates the given statement and checks whether an error that contains the expected message
     * occurs
     * @param expectedError The text the error message must contain
     * @param statements The statement(s) to be evaluated
     * @return This ModalTestCase
     * @throws Exception if evaluation fails
     */
    public ModalTestCase testIfErrorContains(String expectedError, String... statements)
        throws Exception {
      runTest(errorTestable(false, expectedError, statements));
      return this;
    }

    /**
     * Looks up the value of the specified variable and compares it to the expected value
     * @param name The name of the variable to look up
     * @param expected The expected value
     * @return This ModalTestCase
     * @throws Exception if the lookup fails
     */
    public ModalTestCase testLookup(String name, Object expected) throws Exception {
      runTest(createLookUpTestable(name, expected));
      return this;
    }

    /**
     * Creates a Testable that checks whether the evaluation of the given statement leads to the
     * expected error
     * @param statements The statement(s) to be evaluated
     * @param error The expected error message
     * @param exactMatch If true, the error message has to be identical to the expected error
     * @return An instance of Testable that runs the error check
     */
    protected Testable errorTestable(
        final boolean exactMatch, final String error, final String... statements) {
      return new Testable() {
        @Override
        public void run() throws Exception {
          if (exactMatch) {
            checkEvalError(error, statements);
          } else {
            checkEvalErrorContains(error, statements);
          }
        }
      };
    }

    /**
     * Creates a testable that checks whether the evaluation of the given statement leads to a list
     * that contains exactly the expected objects
     * @param statement The statement to be evaluated
     * @param ordered Determines whether the order of the elements is checked as well
     * @param expected Expected objects
     * @return An instance of Testable that runs the check
     */
    protected Testable collectionTestable(
        final String statement, final boolean ordered, final Object... expected) {
      return new Testable() {
        @Override
        public void run() throws Exception {
          Ordered tmp = assertThat((Iterable<?>) eval(statement)).containsExactly(expected);
          if (ordered) {
            tmp.inOrder();
          }
        }
      };
    }

    /**
     * Creates a testable that compares the evaluation of the given statement to a specified result
     *
     * @param statement The statement to be evaluated
     * @param expected Either the expected object or an expression whose evaluation leads to the
     *     expected object
     * @param expectedIsExpression Signals whether {@code expected} is an object or an expression
     * @return An instance of Testable that runs the comparison
     */
    protected Testable createComparisonTestable(
        final String statement, final Object expected, final boolean expectedIsExpression) {
      return new Testable() {
        @Override
        public void run() throws Exception {
          Object actual = eval(statement);
          Object realExpected = expected;

          // We could also print the actual object and compare the string to the expected
          // expression, but then the order of elements would matter.
          if (expectedIsExpression) {
            realExpected = eval((String) expected);
          }

          assertThat(actual).isEqualTo(realExpected);
        }
      };
    }

    /**
     * Creates a Testable that looks up the given variable and compares its value to the expected
     * value
     * @param name The name of the variable to look up
     * @param expected The expected value
     * @return An instance of Testable that does both lookup and comparison
     */
    protected Testable createLookUpTestable(final String name, final Object expected) {
      return new Testable() {
        @Override
        public void run() throws Exception {
          assertThat(lookup(name)).isEqualTo(expected);
        }
      };
    }

    /**
     * Executes the given Testable
     * @param testable The test to run (wrapped so that setup actions run first)
     * @throws Exception if the test fails
     */
    protected void runTest(Testable testable) throws Exception {
      run(new TestableDecorator(setup, testable));
    }

    protected abstract void run(Testable testable) throws Exception;
  }

  /**
   * A simple decorator that allows the execution of setup actions before running a {@code Testable}
   */
  static class TestableDecorator implements Testable {
    private final SetupActions setup;
    private final Testable decorated;

    public TestableDecorator(SetupActions setup, Testable decorated) {
      this.setup = setup;
      this.decorated = decorated;
    }

    /**
     * Executes all stored actions and updates plus the actual {@code Testable}
     */
    @Override
    public void run() throws Exception {
      setup.executeAll();
      decorated.run();
    }
  }

  /**
   * A container for collection actions that should be executed before a test
   */
  class SetupActions {
    private List<Testable> setup;

    public SetupActions() {
      setup = new LinkedList<>();
    }

    /**
     * Registers a variable that has to be updated before a test
     *
     * @param name The name of the variable
     * @param value The value to assign before each test
     */
    public void registerUpdate(final String name, final Object value) {
      setup.add(
          new Testable() {
            @Override
            public void run() throws Exception {
              EvaluationTestCase.this.update(name, value);
            }
          });
    }

    /**
     * Registers a statement for evaluation prior to a test
     *
     * @param statements The statement(s) to evaluate before each test
     */
    public void registerEval(final String... statements) {
      setup.add(
          new Testable() {
            @Override
            public void run() throws Exception {
              EvaluationTestCase.this.eval(statements);
            }
          });
    }

    /**
     * Executes all stored actions and updates
     * @throws Exception if any setup action fails
     */
    public void executeAll() throws Exception {
      for (Testable testable : setup) {
        testable.run();
      }
    }
  }

  /**
   * A class that executes each separate test in both modes (Build and Skylark)
   */
  protected class BothModesTest extends ModalTestCase {
    private final String[] skylarkOptions;

    public BothModesTest(String... skylarkOptions) {
      this.skylarkOptions = skylarkOptions;
    }

    /**
     * Executes the given Testable in both Build and Skylark mode
     */
    @Override
    protected void run(Testable testable) throws Exception {
      enableSkylarkMode(skylarkOptions);
      try {
        testable.run();
      } catch (Exception e) {
        throw new Exception("While in Skylark mode", e);
      }

      enableBuildMode(skylarkOptions);
      try {
        testable.run();
      } catch (Exception e) {
        throw new Exception("While in Build mode", e);
      }
    }
  }

  /**
   * A class that runs all tests in Build mode
   */
  protected class BuildTest extends ModalTestCase {
    private final String[] skylarkOptions;

    public BuildTest(String... skylarkOptions) {
      this.skylarkOptions = skylarkOptions;
    }

    @Override
    protected void run(Testable testable) throws Exception {
      enableBuildMode(skylarkOptions);
      testable.run();
    }
  }

  /**
   * A class that runs all tests in Skylark mode
   */
  protected class SkylarkTest extends ModalTestCase {
    private final String[] skylarkOptions;

    public SkylarkTest(String... skylarkOptions) {
      this.skylarkOptions = skylarkOptions;
    }

    @Override
    protected void run(Testable testable) throws Exception {
      enableSkylarkMode(skylarkOptions);
      testable.run();
    }
  }
}
package org.cs4j.core.algorithms; import org.cs4j.core.SearchAlgorithm; import org.cs4j.core.SearchDomain; import org.cs4j.core.SearchDomain.Operator; import org.cs4j.core.SearchDomain.State; import org.cs4j.core.SearchResult; import org.cs4j.core.algorithms.SearchResultImpl.SolutionImpl; import org.cs4j.core.collections.BinHeap; import org.cs4j.core.collections.BucketHeap; import org.cs4j.core.collections.PackedElement; import org.cs4j.core.collections.SearchQueue; import sun.reflect.generics.reflectiveObjects.NotImplementedException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; /** * Created by sepetnit on 11/12/2015. * * This class implements Pure Heuristic Search (PHS) which is sometimes called Greedy Search as a kind of * Bounded-Suboptimal-Search algorithm * * The algorithm supports pruning which means that a maximum cost can be defined and all states whose h value * is greater than the maximum value can be pruned */ public class PHS implements SearchAlgorithm { private static final Map<String, Class> PHSPossibleParameters; // Declare the parameters that can be tunes before running the search static { PHSPossibleParameters = new HashMap<>(); PHSPossibleParameters.put("max-cost", Double.class); PHSPossibleParameters.put("reopen", Boolean.class); PHSPossibleParameters.put("rerun-if-not-found-and-nr", Boolean.class); } private static final int QID = 0; public enum HeapType {BIN, BUCKET} // TODO ... 
// Type of heap used to implement the OPEN list (bucket-based or binary)
private HeapType heapType;
// The domain for the search
private SearchDomain domain;
// Open list (the search frontier)
private SearchQueue<Node> open;
// Closed list (all seen states), keyed by their packed representation
private Map<PackedElement, Node> closed;
// Maximum allowed solution cost; generated nodes with f above this bound are pruned
protected double maxCost;
// Whether to perform reopening of states reached again via a cheaper path (AR vs NR)
private boolean reopen;
// Whether to re-run the algorithm with AR if a solution was not found and we are currently NR
private boolean rerun;

/**
 * The default Constructor for PHS (AR and maximum cost of Double.MAX_VALUE).
 */
public PHS() {
    this.heapType = HeapType.BIN;
    // Default values for parameters
    this.maxCost = Double.MAX_VALUE;
    this.reopen = true;
    // Initially, no rerun is allowed if NR failed.
    // BUG FIX: this line previously assigned `this.reopen = false`, which silently
    // undid the AR default set just above; the comment shows `rerun` was intended.
    this.rerun = false;
}

@Override
public String getName() {
    return "phs";
}

/**
 * Creates a heap according to the required type (Builder design pattern).
 *
 * @param heapType Type of the required heap (choose from the available types)
 * @param size Initial size of the heap
 *
 * NOTE: In case of unknown type, null is returned (no exception is thrown)
 * @return The created heap
 */
private SearchQueue<Node> buildHeap(HeapType heapType, int size) {
    SearchQueue<Node> heap = null;
    switch (heapType) {
        case BUCKET:
            heap = new BucketHeap<>(size, QID);
            break;
        case BIN:
            heap = new BinHeap<>(new NodeComparator(), 0);
            break;
    }
    return heap;
}

/** (Re)creates the OPEN and CLOSED lists so each search run starts fresh. */
private void _initDataStructures() {
    // (Initial size is 100)
    this.open = buildHeap(heapType, 100);
    this.closed = new HashMap<>();
}

@Override
public Map<String, Class> getPossibleParameters() {
    return PHS.PHSPossibleParameters;
}

@Override
public void setAdditionalParameter(String parameterName, String value) {
    switch (parameterName) {
        case "reopen": {
            this.reopen = Boolean.parseBoolean(value);
            break;
        }
        case "max-cost": {
            this.maxCost = Double.parseDouble(value);
            break;
        }
        case "rerun-if-not-found-and-nr": {
            this.rerun = Boolean.parseBoolean(value);
            break;
        }
        default: {
            System.err.println("No such parameter: " + parameterName + " (value: " + value + ")");
            throw new NotImplementedException();
        }
    }
}

/**
 * Runs a single PHS search over the given domain, honoring the current
 * {@code reopen} and {@code maxCost} settings.
 *
 * @param domain The domain to search over
 * @return The search result (with a solution added only if a goal was reached)
 */
public SearchResult _search(SearchDomain domain) {
    this.domain = domain;
    Node goal = null;
    // Initialize all the data structures required for the search
    this._initDataStructures();
    SearchResultImpl result = new SearchResultImpl();
    result.startTimer();

    // Let's instantiate the initial state
    State currentState = domain.initialState();
    // Create a graph node from this state
    Node initNode = new Node(currentState);

    // And add it to the frontier
    this.open.add(initNode);
    // The nodes are ordered in the closed list by their packed values
    this.closed.put(initNode.packed, initNode);

    // A trivial case: the initial state is already a goal
    if (domain.isGoal(currentState)) {
        goal = initNode;
        System.err.println("[WARNING] Trivial case occurred - something wrong?!");
        assert false;
    }

    // Loop over the frontier
    while ((goal == null) && !this.open.isEmpty()) {
        // Take the best node off the frontier
        Node currentNode = this.open.poll();
        // Extract the state from the packed value of the node
        currentState = domain.unpack(currentNode.packed);
        // Expand the current node
        ++result.expanded;
        // Go over all the possible operators and apply them
        for (int i = 0; i < domain.getNumOperators(currentState); ++i) {
            Operator op = domain.getOperator(currentState, i);
            // Try to avoid loops: skip the operator that undoes the parent move
            if (op.equals(currentNode.pop)) {
                continue;
            }
            // Here we actually generate a new state
            ++result.generated;
            State childState = domain.applyOperator(currentState, op);
            Node childNode = new Node(childState, currentNode, currentState, op, op.reverse(currentState));
            // Ignore the node if its f value is too big
            if (childNode.f > this.maxCost) {
                continue;
            }
            // If the generated node satisfies the goal condition - let's mark the goal and break
            if (domain.isGoal(childState)) {
                goal = childNode;
                break;
            }
            // Treat duplicates
            if (this.closed.containsKey(childNode.packed)) {
                // Count the duplicates
                ++result.duplicates;
                // Get the previous copy of this node
                Node dupChildNode = this.closed.get(childNode.packed);
                // All this is relevant only if we reached the node via a cheaper path
                if (dupChildNode.f > childNode.f) {
                    // If false - let's check it!
                    //assert dupChildNode.g > childNode.g;
                    if (dupChildNode.g > childNode.g) {
                        // In any case update the duplicate with the new values - we reached it via a shorter path
                        dupChildNode.f = childNode.f;
                        dupChildNode.g = childNode.g;
                        dupChildNode.op = childNode.op;
                        dupChildNode.pop = childNode.pop;
                        dupChildNode.parent = childNode.parent;
                        // In case the duplicate is also in the open list - let's just update it there
                        // (since we updated g and f)
                        if (dupChildNode.getIndex(this.open.getKey()) != -1) {
                            ++result.opupdated;
                            this.open.update(dupChildNode);
                        } else {
                            // Return to OPEN list only if reopening is allowed
                            if (this.reopen) {
                                ++result.reopened;
                                this.open.add(dupChildNode);
                            }
                        }
                    }
                }
            // Otherwise, the node is new (hasn't been reached yet)
            } else {
                this.open.add(childNode);
                this.closed.put(childNode.packed, childNode);
            }
        }
    }

    result.stopTimer();

    // If a goal was found: update the solution
    if (goal != null) {
        SolutionImpl solution = new SolutionImpl(this.domain);
        List<Operator> path = new ArrayList<>();
        List<State> statesPath = new ArrayList<>();
        System.out.println("[INFO] Solved - Generating output path.");
        double cost = 0;
        // Walk the parent chain from the goal back to the root, accumulating cost
        State currentPacked = domain.unpack(goal.packed);
        State currentParentPacked = null;
        for (Node currentNode = goal;
             currentNode != null;
             currentNode = currentNode.parent, currentPacked = currentParentPacked) {
            // If op of current node is not null that means the node has a parent
            if (currentNode.op != null) {
                path.add(currentNode.op);
                currentParentPacked = domain.unpack(currentNode.parent.packed);
                cost += currentNode.op.getCost(currentPacked, currentParentPacked);
            }
            statesPath.add(domain.unpack(currentNode.packed));
        }
        // The actual cost of the found path can only be lower than the G value of the found goal
        assert cost <= goal.g;
        if (cost - goal.g < 0) {
            // (typo fixed: "that" -> "than")
            System.out.println("[INFO] Goal G is higher than the actual cost " +
                    "(G: " + goal.g + ", Actual: " + cost + ")");
        }
        // Assert path is at most of maxCost length
        assert path.size() <= this.maxCost;
        Collections.reverse(path);
        solution.addOperators(path);
        Collections.reverse(statesPath);
        solution.addStates(statesPath);
        solution.setCost(cost);
        result.addSolution(solution);
    }
    return result;
}

/**
 * Public entry point: runs the search once, and if it fails while running NR
 * and {@code rerun} is enabled, retries once with AR (reopening allowed).
 *
 * @param domain The domain to search over
 * @return The (possibly combined) search result
 */
public SearchResult search(SearchDomain domain) {
    SearchResult toReturn = this._search(domain);
    if (!toReturn.hasSolution() && (!this.reopen && this.rerun)) {
        System.out.println("[INFO] PHS Failed with NR, tries again with AR");
        this.reopen = true;
        SearchResult toReturnAR = this._search(domain);
        // Accumulate the statistics of the failed NR run into the AR result
        toReturnAR.increase(toReturn);
        // Revert to base state
        this.reopen = false;
        if (toReturnAR.hasSolution()) {
            System.out.println("[INFO] PHS with NR failed but PHS with AR succeeded.");
        }
        return toReturnAR;
    }
    return toReturn;
}

/**
 * The node class: a search-graph node wrapping a packed state plus the
 * bookkeeping (f/g/h, generating operator, parent) needed by the heaps.
 */
protected final class Node extends SearchQueueElementImpl implements BucketHeap.BucketHeapElement {
    private double f;
    private double g;
    private double h;
    private Operator op;
    private Operator pop;
    private Node parent;
    private PackedElement packed;
    private int[] secondaryIndex;

    private Node(State state, Node parent, State parentState, Operator op, Operator pop) {
        // Size of key
        super(1);
        // TODO: Why? (bucket heaps need two secondary indices, binary heaps one)
        this.secondaryIndex = new int[(heapType == HeapType.BUCKET) ? 2 : 1];
        double cost = (op != null) ? op.getCost(state, parentState) : 0;
        this.h = state.getH();
        // If each operation costs something, we should add the cost to the g value of the parent
        this.g = (parent != null) ? parent.g + cost : cost;

        // Start of PathMax: inherit a parent's stronger heuristic estimate
        if (parent != null) {
            double costsDiff = this.g - parent.g;
            this.h = Math.max(this.h, (parent.h - costsDiff));
        }
        // End of PathMax

        this.f = this.g + this.h;
        // Parent node
        this.parent = parent;
        this.packed = PHS.this.domain.pack(state);
        this.pop = pop;
        this.op = op;
    }

    /**
     * A constructor of the class that instantiates only the state (a root node).
     *
     * @param state The state which this node represents
     */
    private Node(State state) {
        this(state, null, null, null, null);
    }

    @Override
    public void setSecondaryIndex(int key, int index) {
        this.secondaryIndex[key] = index;
    }

    @Override
    public int getSecondaryIndex(int key) {
        return this.secondaryIndex[key];
    }

    @Override
    public double getRank(int level) {
        return (level == 0) ? this.h : this.g;
    }

    @Override
    public double getF() {
        return this.f;
    }
}

/**
 * The nodes comparator class: orders by h ascending, ties broken by g descending.
 */
protected final class NodeComparator implements Comparator<Node> {
    @Override
    public int compare(final Node a, final Node b) {
        // First compare by h (smaller is preferred), then by g (bigger is preferred)
        if (a.h < b.h) return -1;
        if (a.h > b.h) return 1;
        if (a.g > b.g) return -1;
        if (a.g < b.g) return 1;
        return 0;
    }
}
}
/* * * * Copyright (C) 2015 yelo.red * * * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. * */ package com.vinaysshenoy.okulus; import android.graphics.Bitmap; import android.graphics.BitmapShader; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.ColorFilter; import android.graphics.Paint; import android.graphics.PixelFormat; import android.graphics.Rect; import android.graphics.RectF; import android.graphics.Shader; import android.graphics.drawable.Drawable; /** Custom drawable class that takes care of the actual drawing */ class OkulusDrawable extends Drawable { private final RectF mRect = new RectF(); /** Rect used for drawing the border */ private RectF mBorderRect; /** Rect used for drawing the actual image */ private RectF mImageRect; private BitmapShader mBitmapShader; private final Paint mPaint; private float mBorderWidth; private int mBorderColor; private boolean mFullCircle; private float mCornerRadius; private float mShadowWidth; private int mShadowColor; private float mShadowRadius; private int mBitmapWidth; private int mBitmapHeight; private int mTouchSelectorColor; public OkulusDrawable(Bitmap bitmap, float cornerRadius, boolean fullCircle, float borderWidth, int borderColor, float shadowWidth, int shadowColor, float shadowRadius, int touchSelectorColor) { mCornerRadius = cornerRadius; updateBitmap(bitmap); mBorderWidth = borderWidth; mBorderColor = borderColor; mFullCircle = fullCircle; mShadowColor = shadowColor; mShadowRadius = 
shadowRadius; mShadowWidth = shadowWidth; mTouchSelectorColor = touchSelectorColor; mBorderRect = new RectF(); mImageRect = new RectF(); mPaint = new Paint(); mPaint.setAntiAlias(true); } /** * Updates the touch selector color * * @param touchSelectorColor The color to use as the touch selector */ public void setTouchSelectorColor(final int touchSelectorColor) { mTouchSelectorColor = touchSelectorColor; } /** * Creates a bitmap shader with a bitmap */ private BitmapShader getShaderForBitmap(Bitmap bitmap) { return new BitmapShader(bitmap, Shader.TileMode.CLAMP, Shader.TileMode.CLAMP); } /** * Updates the drawable with a Bitmap. {@link OkulusImageView#invalidate()} must be called by * the caller after this method returns * * @param bitmap The Bitmap to set, or <code>null</code> to clear the bitmap being drawn */ public void updateBitmap(Bitmap bitmap) { if (bitmap == null) { mBitmapShader = null; mBitmapWidth = 0; mBitmapHeight = 0; } else { mBitmapWidth = bitmap.getWidth(); mBitmapHeight = bitmap.getHeight(); mBitmapShader = getShaderForBitmap(bitmap); } } @Override protected void onBoundsChange(Rect bounds) { super.onBoundsChange(bounds); mRect.set(0, 0, bounds.width(), bounds .height()); if (mFullCircle) { mCornerRadius = Math.abs(mRect.left - mRect.right) / 2; } if (mBorderWidth > 0) { initRectsWithBorders(); } else { initRectsWithoutBorders(); } } /** * Initializes the rects without borders, taking shadows into account */ private void initRectsWithoutBorders() { mImageRect.set(mRect); if (mShadowWidth > 0) { /* Shadows will be drawn to the right & bottom, * so adjust the image rect on the right & bottom */ mImageRect.right -= mShadowWidth; mImageRect.bottom -= mShadowWidth; } } /** * Initialize the rects with borders, taking shadows into account */ private void initRectsWithBorders() { mBorderRect.set(mRect); mBorderRect.inset(mBorderWidth / 1.3f, mBorderWidth / 1.3f); if (mShadowWidth > 0) { /* Shadows will be drawn to the right & bottom, * so adjust the 
border rect on the right & bottom. * * Since the image rect is calculated from the * border rect, the dimens will be accounted for. */ mBorderRect.right -= mShadowWidth; mBorderRect.bottom -= mShadowWidth; } mImageRect.set(mBorderRect); mImageRect.inset(mBorderWidth / 1.3f, mBorderWidth / 1.3f); } @Override public void draw(Canvas canvas) { mPaint.setShader(null); drawBordersAndShadow(canvas); if (mBitmapShader != null) { drawImage(canvas); } else { //TODO: Draw some custom background color here } if (mTouchSelectorColor != Color.TRANSPARENT) { drawTouchSelector(canvas); } } /** * Draws the touch selector on the canvas based on the View attributes * * @param canvas The canvas to draw the touch selector on */ private void drawTouchSelector(final Canvas canvas) { final int prevColor = mPaint.getColor(); mPaint.setShader(null); mPaint.setColor(mTouchSelectorColor); mPaint.setStyle(Paint.Style.FILL_AND_STROKE); if (mBorderWidth > 0) { canvas.drawRoundRect(mBorderRect, mCornerRadius, mCornerRadius, mPaint); } else { canvas.drawRoundRect(mImageRect, mCornerRadius, mCornerRadius, mPaint); } mPaint.setColor(prevColor); } /** * Draw the image on the canvas based on the View attributes * * @param canvas The canvas to draw the image on */ private void drawImage(final Canvas canvas) { mPaint.setShader(mBitmapShader); mPaint.setStyle(Paint.Style.FILL_AND_STROKE); canvas.drawRoundRect(mImageRect, mCornerRadius, mCornerRadius, mPaint); } /** * Draw the borders & shadows on the canvas based on the view attributes * * @param canvas The canvas to draw the borders on */ private void drawBordersAndShadow(final Canvas canvas) { if (mBorderWidth > 0) { mPaint.setShader(null); mPaint.setColor(mBorderColor); mPaint.setStrokeWidth(mBorderWidth); mPaint.setStyle(Paint.Style.STROKE); if (mShadowWidth > 0) { mPaint.setShadowLayer(mShadowRadius, mShadowWidth, mShadowWidth, mShadowColor); } canvas.drawRoundRect(mBorderRect, mCornerRadius, mCornerRadius, mPaint); mPaint.setShadowLayer(0f, 0f, 
0f, mShadowColor); } } @Override public int getOpacity() { return PixelFormat.TRANSLUCENT; } @Override public void setAlpha(int alpha) { mPaint.setAlpha(alpha); } @Override public void setColorFilter(ColorFilter cf) { mPaint.setColorFilter(cf); } }
/* * Copyright 2005-2014 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.krms.test; import org.apache.commons.lang.StringUtils; import org.junit.Before; import org.junit.Test; import org.kuali.rice.core.api.criteria.QueryByCriteria; import org.kuali.rice.core.api.exception.RiceIllegalArgumentException; import org.kuali.rice.krms.api.repository.action.ActionDefinition; import org.kuali.rice.krms.api.repository.rule.RuleDefinition; import org.kuali.rice.krms.api.repository.type.KrmsTypeDefinition; import java.util.Arrays; import java.util.List; import static org.junit.Assert.*; import static org.kuali.rice.core.api.criteria.PredicateFactory.equal; /** * RuleManagementActionDefinitionTest is to test the methods of ruleManagementServiceImpl relating to krms Actions * * Each test focuses on one of the methods. 
*/ public class RuleManagementActionDefinitionTest extends RuleManagementBaseTest { @Override @Before public void setClassDiscriminator() { // set a unique discriminator for test objects of this class CLASS_DISCRIMINATOR = "RMADT"; } /** * Test testCreateAction() * * This test focuses specifically on the RuleManagementServiceImpl .createAction(ActionDefinition) method */ @Test public void testCreateAction() { // get a set of unique object names for use by this test (discriminator passed can be any unique value within this class) RuleManagementBaseTestObjectNames t0 = new RuleManagementBaseTestObjectNames( CLASS_DISCRIMINATOR, "t0"); KrmsTypeDefinition krmsTypeDefinition = createKrmsActionTypeDefinition(t0.namespaceName); RuleDefinition ruleDefintion = buildTestRuleDefinition(t0.namespaceName, t0.discriminator); ActionDefinition actionDefinition = ActionDefinition.Builder.create(t0.action0_Id, t0.action0_Name, t0.namespaceName,krmsTypeDefinition.getId(),ruleDefintion.getId(),1).build(); assertNull("action should not be in database", ruleManagementService.getAction(t0.action0_Id)); // primary statement for test actionDefinition = ruleManagementService.createAction(actionDefinition); ActionDefinition returnActionDefinition = ruleManagementService.getAction(actionDefinition.getId()); assertNotNull("created action not found", (Object) returnActionDefinition); assertEquals("create action error:", t0.action0_Id, returnActionDefinition.getId()); } /** * Test testUpdateAction() * * This test focuses specifically on the RuleManagementServiceImpl .testUpdateAction(ActionDefinition) method */ @Test public void testUpdateAction() { // get a set of unique object names for use by this test (discriminator passed can be any unique value within this class) RuleManagementBaseTestObjectNames t1 = new RuleManagementBaseTestObjectNames( CLASS_DISCRIMINATOR, "t1"); KrmsTypeDefinition krmsTypeDefinition = createKrmsActionTypeDefinition(t1.namespaceName); RuleDefinition ruleDefinition = 
buildTestRuleDefinition(t1.namespaceName, t1.object0); ActionDefinition actionDefinition = ActionDefinition.Builder.create(t1.action0_Id,t1.action0_Name, t1.namespaceName,krmsTypeDefinition.getId(),ruleDefinition.getId(),1).build(); assertNull("action should not be in database", ruleManagementService.getAction(t1.action0_Id)); actionDefinition = ruleManagementService.createAction(actionDefinition); ActionDefinition returnActionDefinition = ruleManagementService.getAction(actionDefinition.getId()); ActionDefinition.Builder builder = ActionDefinition.Builder.create(returnActionDefinition); builder.setDescription("ChangedDescr"); // primary statement for test ruleManagementService.updateAction(builder.build()); returnActionDefinition = ruleManagementService.getAction(actionDefinition.getId()); assertNotNull("action not found", returnActionDefinition); assertEquals("update action error:","ChangedDescr", returnActionDefinition.getDescription()); } /** * Test testDeleteAction() * * This test focuses specifically on the RuleManagementServiceImpl .testDeleteAction(ActionDefinition) method */ @Test public void testDeleteAction() { // get a set of unique object names for use by this test (discriminator passed can be any unique value within this class) RuleManagementBaseTestObjectNames t2 = new RuleManagementBaseTestObjectNames( CLASS_DISCRIMINATOR, "t2"); KrmsTypeDefinition krmsTypeDefinition = createKrmsActionTypeDefinition(t2.namespaceName); RuleDefinition ruleDefintion = buildTestRuleDefinition(t2.namespaceName, t2.object0); ActionDefinition actionDefinition = ActionDefinition.Builder.create(t2.action0_Id,t2.action0_Name, t2.namespaceName,krmsTypeDefinition.getId(),ruleDefintion.getId(),1).build(); assertNull("action should not be in database", ruleManagementService.getAction(t2.action0_Id)); actionDefinition = ruleManagementService.createAction(actionDefinition); actionDefinition = ruleManagementService.getAction(actionDefinition.getId()); assertNotNull("action not 
found", ruleManagementService.getAction(actionDefinition.getId())); try { // primary statement for test ruleManagementService.deleteAction(t2.action0_Id); fail("should fail deleteAction not implemented"); } catch (RiceIllegalArgumentException e) { // RiceIllegalArgumentException ("not implemented yet because not supported by the bo service"); } actionDefinition = ruleManagementService.getAction(actionDefinition.getId()); assertNotNull("action not found", (Object) actionDefinition); } /** * Test testGetAction() * * This test focuses specifically on the RuleManagementServiceImpl .testGetAction(Action_Id) method */ @Test public void testGetAction() { // get a set of unique object names for use by this test (discriminator passed can be any unique value within this class) RuleManagementBaseTestObjectNames t3 = new RuleManagementBaseTestObjectNames( CLASS_DISCRIMINATOR, "t3"); KrmsTypeDefinition krmsTypeDefinition = createKrmsActionTypeDefinition(t3.namespaceName); RuleDefinition ruleDefintion = buildTestRuleDefinition(t3.namespaceName, t3.object0); ActionDefinition actionDefinition = ActionDefinition.Builder.create(t3.action0_Id,t3.action0_Name, t3.namespaceName,krmsTypeDefinition.getId(),ruleDefintion.getId(),1).build(); assertNull("action should not be in database", ruleManagementService.getAction(t3.action0_Id)); actionDefinition = ruleManagementService.createAction(actionDefinition); // primary statement being tested ActionDefinition returnActionDefinition = ruleManagementService.getAction(actionDefinition.getId()); assertNotNull("action not found", (Object) returnActionDefinition); assertEquals("getAction error:", t3.action0_Id, returnActionDefinition.getId()); } /** * Test testGetActions() * * This test focuses specifically on the RuleManagementServiceImpl .testGetActions(List<Action_Id>) method */ @Test public void testGetActions() { // get a set of unique object names for use by this test (discriminator passed can be any unique value within this class) 
RuleManagementBaseTestObjectNames t4 = new RuleManagementBaseTestObjectNames( CLASS_DISCRIMINATOR, "t4"); RuleDefinition ruleDefinition0 = buildTestRuleDefinition(t4.namespaceName, t4.object0); RuleDefinition ruleDefinition1 = buildTestRuleDefinition(t4.namespaceName, t4.object1); RuleDefinition ruleDefinition2 = buildTestRuleDefinition(t4.namespaceName, t4.object2); RuleDefinition ruleDefinition3 = buildTestRuleDefinition(t4.namespaceName, t4.object3); buildTestActionDefinition(t4.action0_Id, t4.action0_Name, t4.action0_Descr, 1, ruleDefinition0.getId(), t4.namespaceName); buildTestActionDefinition(t4.action1_Id, t4.action1_Name, t4.action1_Descr, 1, ruleDefinition1.getId(), t4.namespaceName); buildTestActionDefinition(t4.action2_Id, t4.action2_Name, t4.action2_Descr, 1, ruleDefinition2.getId(), t4.namespaceName); buildTestActionDefinition(t4.action3_Id, t4.action3_Name, t4.action3_Descr, 1, ruleDefinition3.getId(), t4.namespaceName); List<String> actionIds = Arrays.asList(t4.action0_Id, t4.action1_Id, t4.action2_Id, t4.action3_Id); // primary statement being tested List<ActionDefinition> returnActionDefinitions = ruleManagementService.getActions(actionIds); assertEquals("incorrect number of actions returned",4,returnActionDefinitions.size()); // count the returned actions, returnActionDefinitions.size() may reflect nulls for not found int actionsFound = 0; for( ActionDefinition actionDefinition : returnActionDefinitions ) { if(actionIds.contains(actionDefinition.getId())) { actionsFound++; } } assertEquals("incorrect number of actions returned",4,actionsFound); assertEquals("action not found",t4.action0_Descr, ruleManagementService.getAction(t4.action0_Id).getDescription()); assertEquals("action not found",t4.action1_Descr, ruleManagementService.getAction(t4.action1_Id).getDescription()); assertEquals("action not found",t4.action2_Descr, ruleManagementService.getAction(t4.action2_Id).getDescription()); assertEquals("action not found",t4.action3_Descr, 
ruleManagementService.getAction(t4.action3_Id).getDescription()); } /** * Test testFindActionIds() * * This test focuses specifically on the RuleManagementServiceImpl .testFindActionIds(QueryByCriteria) method */ @Test public void testFindActionIds() { // get a set of unique object names for use by this test (discriminator passed can be any unique value within this class) RuleManagementBaseTestObjectNames t5 = new RuleManagementBaseTestObjectNames( CLASS_DISCRIMINATOR, "t5"); RuleDefinition ruleDefinition = buildTestRuleDefinition(t5.namespaceName, t5.object0); buildTestActionDefinition(t5.action0_Id, t5.action0_Name, t5.action0_Descr, 1, ruleDefinition.getId(), t5.namespaceName); QueryByCriteria.Builder builder = QueryByCriteria.Builder.create(); builder.setPredicates(equal("name", t5.action0_Name)); List<String> actionIds = ruleManagementService.findActionIds(builder.build()); if(!actionIds.contains(t5.action0_Id)){ fail("actionId not found"); } } /** * Tests whether the {@code ActionDefinition} cache is being evicted properly by checking the status the * dependent objects before and after creating an {@code ActionDefinition} (and consequently emptying the cache). 
* * <p> * The following object caches are affected: * {@code ActionDefinition}, {@code RuleDefinition} * </p> */ @Test public void testActionCacheEvict() { // get a set of unique object names for use by this test (discriminator passed can be any unique value within this class) RuleManagementBaseTestObjectNames t6 = new RuleManagementBaseTestObjectNames( CLASS_DISCRIMINATOR, "t6"); verifyEmptyAction(t6); RuleDefinition ruleDefinition = buildTestRuleDefinition(t6.namespaceName, t6.object0); buildTestActionDefinition(t6.action_Id, t6.action_Name, t6.action_Descr, 1, ruleDefinition.getId(), t6.namespaceName); verifyFullAction(t6); } private void verifyEmptyAction(RuleManagementBaseTestObjectNames t) { ActionDefinition action = ruleManagementService.getAction(t.action_Id); assertNull("Action is not null", action); RuleDefinition rule = ruleManagementService.getRule(t.rule_Id); assertFalse("Action in Rule found", rule != null); } private void verifyFullAction(RuleManagementBaseTestObjectNames t) { ActionDefinition action = ruleManagementService.getAction(t.action_Id); assertNotNull("Action is null", action); boolean foundRule = false; RuleDefinition rule = ruleManagementService.getRule(t.rule_Id); if (rule != null) { for (ActionDefinition ruleAction : rule.getActions()) { if (StringUtils.equals(t.rule_Id, ruleAction.getRuleId())) { foundRule = true; break; } } } assertTrue("Action in Rule not found", foundRule); } }
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package org.jetbrains.jps.incremental.java; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.io.FileFilters; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.util.ExceptionUtil; import com.intellij.util.concurrency.Semaphore; import com.intellij.util.concurrency.SequentialTaskExecutor; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.JBIterable; import com.intellij.util.containers.SmartHashSet; import com.intellij.util.execution.ParametersListUtil; import com.intellij.util.io.PersistentEnumeratorBase; import com.intellij.util.lang.JavaVersion; import gnu.trove.THashMap; import gnu.trove.THashSet; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.jps.ModuleChunk; import org.jetbrains.jps.PathUtils; import org.jetbrains.jps.ProjectPaths; import org.jetbrains.jps.api.GlobalOptions; import org.jetbrains.jps.builders.BuildRootIndex; import org.jetbrains.jps.builders.DirtyFilesHolder; import org.jetbrains.jps.builders.FileProcessor; import org.jetbrains.jps.builders.impl.DirtyFilesHolderBase; import org.jetbrains.jps.builders.java.JavaBuilderExtension; import org.jetbrains.jps.builders.java.JavaBuilderUtil; import org.jetbrains.jps.builders.java.JavaCompilingTool; import org.jetbrains.jps.builders.java.JavaSourceRootDescriptor; import org.jetbrains.jps.builders.logging.ProjectBuilderLogger; import org.jetbrains.jps.builders.storage.BuildDataCorruptedException; import org.jetbrains.jps.cmdline.ProjectDescriptor; import org.jetbrains.jps.incremental.*; import org.jetbrains.jps.incremental.messages.BuildMessage; import 
org.jetbrains.jps.incremental.messages.CompilerMessage; import org.jetbrains.jps.incremental.messages.ProgressMessage; import org.jetbrains.jps.javac.*; import org.jetbrains.jps.model.JpsDummyElement; import org.jetbrains.jps.model.JpsProject; import org.jetbrains.jps.model.java.JpsJavaExtensionService; import org.jetbrains.jps.model.java.JpsJavaSdkType; import org.jetbrains.jps.model.java.LanguageLevel; import org.jetbrains.jps.model.java.compiler.*; import org.jetbrains.jps.model.library.sdk.JpsSdk; import org.jetbrains.jps.model.module.JpsModule; import org.jetbrains.jps.model.module.JpsModuleType; import org.jetbrains.jps.model.serialization.JpsModelSerializationDataService; import org.jetbrains.jps.model.serialization.PathMacroUtil; import org.jetbrains.jps.service.JpsServiceManager; import org.jetbrains.jps.service.SharedThreadPool; import javax.tools.*; import java.io.File; import java.io.FileFilter; import java.io.IOException; import java.net.ServerSocket; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.Executor; import java.util.concurrent.Future; import java.util.function.BiConsumer; import java.util.stream.Collectors; import static com.intellij.openapi.util.Pair.pair; /** * @author Eugene Zhuravlev * @since 21.09.2011 */ public class JavaBuilder extends ModuleLevelBuilder { private static final Logger LOG = Logger.getInstance("#org.jetbrains.jps.incremental.java.JavaBuilder"); private static final String JAVA_EXTENSION = "java"; public static final String BUILDER_NAME = "java"; public static final Key<Boolean> IS_ENABLED = Key.create("_java_compiler_enabled_"); public static final FileFilter JAVA_SOURCES_FILTER = FileFilters.withExtension(JAVA_EXTENSION); private static final Key<Boolean> PREFER_TARGET_JDK_COMPILER = GlobalContextKey.create("_prefer_target_jdk_javac_"); private static final Key<JavaCompilingTool> COMPILING_TOOL = Key.create("_java_compiling_tool_"); 
// Caches, per build session, which compiler implementations were used and for which modules (reported in buildFinished()).
private static final Key<ConcurrentMap<String, Collection<String>>> COMPILER_USAGE_STATISTICS = Key.create("_java_compiler_usage_stats_");
// The only source file extension this builder compiles.
private static final List<String> COMPILABLE_EXTENSIONS = Collections.singletonList(JAVA_EXTENSION);
// User options that take a following value and are handled by the builder itself rather than passed to javac verbatim.
private static final Set<String> FILTERED_OPTIONS = ContainerUtil.newHashSet(
  "-target"
);
// Valueless user options managed by the builder itself and therefore filtered out of the custom option string.
private static final Set<String> FILTERED_SINGLE_OPTIONS = ContainerUtil.newHashSet(
  "-g", "-deprecation", "-nowarn", "-verbose", "-proc:none", "-proc:only", "-proceedOnError"
);
private static final List<ClassPostProcessor> ourClassProcessors = new ArrayList<>();
private static final Set<JpsModuleType<?>> ourCompilableModuleTypes = new HashSet<>();
// rt.jar of the JDK this build process runs on, or null when it cannot be found
// (e.g. on JDK 9+, where "sun.boot.class.path" is absent and there is no rt.jar).
@Nullable
private static final File ourDefaultRtJar;
static {
  // Collect module types subject to java compilation from all registered JavaBuilderExtension plugins.
  for (JavaBuilderExtension extension : JpsServiceManager.getInstance().getExtensions(JavaBuilderExtension.class)) {
    ourCompilableModuleTypes.addAll(extension.getCompilableModuleTypes());
  }
  // Locate rt.jar on the boot classpath of the running JVM, leaving ourDefaultRtJar null when not found.
  File rtJar = null;
  StringTokenizer tokenizer = new StringTokenizer(System.getProperty("sun.boot.class.path", ""), File.pathSeparator, false);
  while (tokenizer.hasMoreTokens()) {
    File file = new File(tokenizer.nextToken());
    if ("rt.jar".equals(file.getName())) {
      rtJar = file;
      break;
    }
  }
  ourDefaultRtJar = rtJar;
}

// Registers a processor to be invoked for every class file produced by this builder.
public static void registerClassPostProcessor(ClassPostProcessor processor) {
  ourClassProcessors.add(processor);
}

// Executor for asynchronous post-processing of compiled classes; tasks run strictly sequentially.
private final Executor myTaskRunner;

public JavaBuilder(Executor tasksExecutor) {
  super(BuilderCategory.TRANSLATOR);
  myTaskRunner = SequentialTaskExecutor.createSequentialApplicationPoolExecutor("JavaBuilder Pool", tasksExecutor);
  //add here class processors in the sequence they should be executed
}

@Override
@NotNull
public String getPresentableName() {
  return BUILDER_NAME;
}

@Override
public void buildStarted(CompileContext context) {
  // Resolve the configured compiler once per build and stash it (plus an empty usage-stats map) in the context.
  final String compilerId = getUsedCompilerId(context);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Java compiler ID: " + compilerId);
  }
  // NOTE: this statement continues on the next source line.
  JavaCompilingTool compilingTool = 
JavaBuilderUtil.findCompilingTool(compilerId); COMPILING_TOOL.set(context, compilingTool); COMPILER_USAGE_STATISTICS.set(context, new ConcurrentHashMap<>()); } @Override public void chunkBuildStarted(final CompileContext context, final ModuleChunk chunk) { // before the first compilation round starts: find and mark dirty all classes that depend on removed or moved classes so // that all such files are compiled in the first round. try { JavaBuilderUtil.markDirtyDependenciesForInitialRound(context, new DirtyFilesHolderBase<JavaSourceRootDescriptor, ModuleBuildTarget>(context) { @Override public void processDirtyFiles(@NotNull FileProcessor<JavaSourceRootDescriptor, ModuleBuildTarget> processor) throws IOException { FSOperations.processFilesToRecompile(context, chunk, processor); } }, chunk); } catch (IOException e) { throw new RuntimeException(e); } } @Override public void buildFinished(CompileContext context) { final ConcurrentMap<String, Collection<String>> stats = COMPILER_USAGE_STATISTICS.get(context); if (stats.size() == 1) { final Map.Entry<String, Collection<String>> entry = stats.entrySet().iterator().next(); final String compilerName = entry.getKey(); context.processMessage(new CompilerMessage("", BuildMessage.Kind.JPS_INFO, compilerName + " was used to compile java sources")); LOG.info(compilerName + " was used to compile " + entry.getValue()); } else { for (Map.Entry<String, Collection<String>> entry : stats.entrySet()) { final String compilerName = entry.getKey(); final Collection<String> moduleNames = entry.getValue(); context.processMessage(new CompilerMessage("", BuildMessage.Kind.JPS_INFO, moduleNames.size() == 1 ? 
compilerName + " was used to compile [" + moduleNames.iterator().next() + "]" : compilerName + " was used to compile " + moduleNames.size() + " modules" )); LOG.info(compilerName + " was used to compile " + moduleNames); } } } @Override public List<String> getCompilableFileExtensions() { return COMPILABLE_EXTENSIONS; } @Override public ExitCode build(@NotNull CompileContext context, @NotNull ModuleChunk chunk, @NotNull DirtyFilesHolder<JavaSourceRootDescriptor, ModuleBuildTarget> dirtyFilesHolder, @NotNull OutputConsumer outputConsumer) throws ProjectBuildException, IOException { JavaCompilingTool compilingTool = COMPILING_TOOL.get(context); if (!IS_ENABLED.get(context, Boolean.TRUE) || compilingTool == null) { return ExitCode.NOTHING_DONE; } return doBuild(context, chunk, dirtyFilesHolder, outputConsumer, compilingTool); } public ExitCode doBuild(@NotNull CompileContext context, @NotNull ModuleChunk chunk, @NotNull DirtyFilesHolder<JavaSourceRootDescriptor, ModuleBuildTarget> dirtyFilesHolder, @NotNull OutputConsumer outputConsumer, @NotNull JavaCompilingTool compilingTool) throws ProjectBuildException, IOException { try { final Set<File> filesToCompile = new THashSet<>(FileUtil.FILE_HASHING_STRATEGY); dirtyFilesHolder.processDirtyFiles((target, file, descriptor) -> { if (JAVA_SOURCES_FILTER.accept(file) && ourCompilableModuleTypes.contains(target.getModule().getModuleType())) { filesToCompile.add(file); } return true; }); int javaModulesCount = 0; if ((!filesToCompile.isEmpty() || dirtyFilesHolder.hasRemovedFiles()) && getTargetPlatformLanguageVersion(chunk.representativeTarget().getModule()) >= 9) { for (ModuleBuildTarget target : chunk.getTargets()) { if (JavaBuilderUtil.findModuleInfoFile(context, target) != null) { javaModulesCount++; } } } if (JavaBuilderUtil.isCompileJavaIncrementally(context)) { ProjectBuilderLogger logger = context.getLoggingManager().getProjectBuilderLogger(); if (logger.isEnabled() && !filesToCompile.isEmpty()) { 
logger.logCompiledFiles(filesToCompile, BUILDER_NAME, "Compiling files:"); } } if (javaModulesCount > 1) { String prefix = "Cannot compile a module cycle with multiple module-info.java files: "; String message = chunk.getModules().stream().map(JpsModule::getName).collect(Collectors.joining(", ", prefix, "")); context.processMessage(new CompilerMessage(BUILDER_NAME, BuildMessage.Kind.ERROR, message)); return ExitCode.ABORT; } return compile(context, chunk, dirtyFilesHolder, filesToCompile, outputConsumer, compilingTool, javaModulesCount > 0); } catch (BuildDataCorruptedException | PersistentEnumeratorBase.CorruptedException | ProjectBuildException e) { throw e; } catch (Exception e) { LOG.info(e); String message = e.getMessage(); if (message == null || message.trim().isEmpty()) { message = "Internal error: \n" + ExceptionUtil.getThrowableText(e); } context.processMessage(new CompilerMessage(BUILDER_NAME, BuildMessage.Kind.ERROR, message)); throw new StopBuildException(); } } private ExitCode compile(CompileContext context, ModuleChunk chunk, DirtyFilesHolder<JavaSourceRootDescriptor, ModuleBuildTarget> dirtyFilesHolder, Collection<File> files, OutputConsumer outputConsumer, JavaCompilingTool compilingTool, boolean hasModules) throws Exception { ExitCode exitCode = ExitCode.NOTHING_DONE; final boolean hasSourcesToCompile = !files.isEmpty(); if (!hasSourcesToCompile && !dirtyFilesHolder.hasRemovedFiles()) { return exitCode; } final ProjectDescriptor pd = context.getProjectDescriptor(); JavaBuilderUtil.ensureModuleHasJdk(chunk.representativeTarget().getModule(), context, BUILDER_NAME); final Collection<File> classpath = ProjectPaths.getCompilationClasspath(chunk, false); final Collection<File> platformCp = ProjectPaths.getPlatformCompilationClasspath(chunk, false); // begin compilation round final OutputFilesSink outputSink = new OutputFilesSink(context, outputConsumer, JavaBuilderUtil.getDependenciesRegistrar(context), chunk.getPresentableShortName()); 
Collection<File> filesWithErrors = null; try { if (hasSourcesToCompile) { exitCode = ExitCode.OK; final Set<File> srcPath = new HashSet<>(); final BuildRootIndex index = pd.getBuildRootIndex(); for (ModuleBuildTarget target : chunk.getTargets()) { for (JavaSourceRootDescriptor rd : index.getTempTargetRoots(target, context)) { srcPath.add(rd.root); } } final DiagnosticSink diagnosticSink = new DiagnosticSink(context); final String chunkName = chunk.getName(); context.processMessage(new ProgressMessage("Parsing java... [" + chunk.getPresentableShortName() + "]")); final int filesCount = files.size(); boolean compiledOk = true; if (filesCount > 0) { LOG.info("Compiling " + filesCount + " java files; module: " + chunkName + (chunk.containsTests() ? " (tests)" : "")); if (LOG.isDebugEnabled()) { for (File file : files) { LOG.debug("Compiling " + file.getPath()); } LOG.debug(" classpath for " + chunkName + ":"); for (File file : classpath) { LOG.debug(" " + file.getAbsolutePath()); } LOG.debug(" platform classpath for " + chunkName + ":"); for (File file : platformCp) { LOG.debug(" " + file.getAbsolutePath()); } } try { compiledOk = compileJava(context, chunk, files, classpath, platformCp, srcPath, diagnosticSink, outputSink, compilingTool, hasModules); } finally { filesWithErrors = diagnosticSink.getFilesWithErrors(); } } context.checkCanceled(); if (!compiledOk && diagnosticSink.getErrorCount() == 0) { // unexpected exception occurred or compiler did not output any errors for some reason diagnosticSink.report(new PlainMessageDiagnostic(Diagnostic.Kind.ERROR, "Compilation failed: internal java compiler error")); } if (diagnosticSink.getErrorCount() > 0) { diagnosticSink.report(new JpsInfoDiagnostic("Errors occurred while compiling module '" + chunkName + "'")); } if (!Utils.PROCEED_ON_ERROR_KEY.get(context, Boolean.FALSE) && diagnosticSink.getErrorCount() > 0) { throw new StopBuildException( "Compilation failed: errors: " + diagnosticSink.getErrorCount() + "; warnings: 
" + diagnosticSink.getWarningCount() ); } } } finally { JavaBuilderUtil.registerFilesToCompile(context, files); if (filesWithErrors != null) { JavaBuilderUtil.registerFilesWithErrors(context, filesWithErrors); } JavaBuilderUtil.registerSuccessfullyCompiled(context, outputSink.getSuccessfullyCompiled()); } return exitCode; } private boolean compileJava(CompileContext context, ModuleChunk chunk, Collection<File> files, Collection<File> originalClassPath, Collection<File> originalPlatformCp, Collection<File> sourcePath, DiagnosticOutputConsumer diagnosticSink, OutputFileConsumer outputSink, JavaCompilingTool compilingTool, boolean hasModules) { final Semaphore counter = new Semaphore(); COUNTER_KEY.set(context, counter); final Set<JpsModule> modules = chunk.getModules(); ProcessorConfigProfile profile = null; if (modules.size() == 1) { final JpsJavaCompilerConfiguration compilerConfig = JpsJavaExtensionService.getInstance().getCompilerConfiguration(context.getProjectDescriptor().getProject()); assert compilerConfig != null; profile = compilerConfig.getAnnotationProcessingProfile(modules.iterator().next()); } else { final String message = validateCycle(context, chunk); if (message != null) { diagnosticSink.report(new PlainMessageDiagnostic(Diagnostic.Kind.ERROR, message)); return false; } } final Map<File, Set<File>> outs = buildOutputDirectoriesMap(context, chunk); try { final int targetLanguageLevel = getTargetPlatformLanguageVersion(chunk.representativeTarget().getModule()); final boolean shouldForkJavac = shouldForkCompilerProcess(context, chunk, targetLanguageLevel); // when forking external javac, compilers from SDK 1.6 and higher are supported Pair<String, Integer> forkSdk = null; if (shouldForkJavac) { forkSdk = getForkedJavacSdk(chunk, targetLanguageLevel); if (forkSdk == null) { String text = "Cannot start javac process for " + chunk.getName() + ": unknown JDK home path.\nPlease check project configuration."; diagnosticSink.report(new 
PlainMessageDiagnostic(Diagnostic.Kind.ERROR, text)); return false; } } final int compilerSdkVersion = forkSdk == null ? JavaVersion.current().feature : forkSdk.getSecond(); final Pair<List<String>, List<String>> vm_compilerOptions = getCompilationOptions( compilerSdkVersion, context, chunk, profile, compilingTool ); final List<String> vmOptions = vm_compilerOptions.first; final List<String> options = vm_compilerOptions.second; if (LOG.isDebugEnabled()) { String mode = shouldForkJavac ? "fork" : "in-process"; LOG.debug("Compiling chunk [" + chunk.getName() + "] with options: \"" + StringUtil.join(options, " ") + "\", mode=" + mode); } Collection<File> platformCp = calcEffectivePlatformCp(originalPlatformCp, options, compilingTool); if (platformCp == null) { String text = "Compact compilation profile was requested, but target platform for module \"" + chunk.getName() + "\"" + " differs from javac's platform (" + System.getProperty("java.version") + ")\n" + "Compilation profiles are not supported for such configuration"; context.processMessage(new CompilerMessage(BUILDER_NAME, BuildMessage.Kind.ERROR, text)); return false; } Collection<File> classPath = originalClassPath; Collection<File> modulePath = Collections.emptyList(); if (hasModules) { // in Java 9, named modules are not allowed to read classes from the classpath // moreover, the compiler requires all transitive dependencies to be on the module path modulePath = ProjectPaths.getCompilationModulePath(chunk, false); classPath = Collections.emptyList(); } if (!platformCp.isEmpty()) { if (hasModules) { modulePath = JBIterable.from(platformCp).append(modulePath).toList(); platformCp = Collections.emptyList(); } else if ((getChunkSdkVersion(chunk)) >= 9) { // if chunk's SDK is 9 or higher, there is no way to specify full platform classpath // because platform classes are stored in jimage binary files with unknown format. 
// Because of this we are clearing platform classpath so that javac will resolve against its own boot classpath // and prepending additional jars from the JDK configuration to compilation classpath classPath = JBIterable.from(platformCp).append(classPath).toList(); platformCp = Collections.emptyList(); } } final ClassProcessingConsumer classesConsumer = new ClassProcessingConsumer(context, outputSink); final boolean rc; if (!shouldForkJavac) { updateCompilerUsageStatistics(context, compilingTool.getDescription(), chunk); rc = JavacMain.compile( options, files, classPath, platformCp, modulePath, sourcePath, outs, diagnosticSink, classesConsumer, context.getCancelStatus(), compilingTool ); } else { updateCompilerUsageStatistics(context, "javac " + forkSdk.getSecond(), chunk); final ExternalJavacManager server = ensureJavacServerStarted(context); rc = server.forkJavac( forkSdk.getFirst(), Utils.suggestForkedCompilerHeapSize(), vmOptions, options, platformCp, classPath, modulePath, sourcePath, files, outs, diagnosticSink, classesConsumer, compilingTool, context.getCancelStatus() ); } return rc; } finally { counter.waitFor(); } } private static void updateCompilerUsageStatistics(CompileContext context, String compilerName, ModuleChunk chunk) { final ConcurrentMap<String, Collection<String>> map = COMPILER_USAGE_STATISTICS.get(context); Collection<String> names = map.get(compilerName); if (names == null) { names = Collections.synchronizedSet(new HashSet<>()); final Collection<String> prev = map.putIfAbsent(compilerName, names); if (prev != null) { names = prev; } } for (JpsModule module : chunk.getModules()) { names.add(module.getName()); } } @Nullable public static String validateCycle(CompileContext context, ModuleChunk chunk) { final JpsJavaExtensionService javaExt = JpsJavaExtensionService.getInstance(); final JpsJavaCompilerConfiguration compilerConfig = javaExt.getCompilerConfiguration(context.getProjectDescriptor().getProject()); assert compilerConfig != null; 
final Set<JpsModule> modules = chunk.getModules(); Pair<String, LanguageLevel> pair = null; for (JpsModule module : modules) { final LanguageLevel moduleLevel = javaExt.getLanguageLevel(module); if (pair == null) { pair = pair(module.getName(), moduleLevel); // first value } else if (!Comparing.equal(pair.getSecond(), moduleLevel)) { return "Modules " + pair.getFirst() + " and " + module.getName() + " must have the same language level because of cyclic dependencies between them"; } } final JpsJavaCompilerOptions compilerOptions = compilerConfig.getCurrentCompilerOptions(); final Map<String, String> overrideMap = compilerOptions.ADDITIONAL_OPTIONS_OVERRIDE; if (!overrideMap.isEmpty()) { // check that options are consistently overridden for all modules in the cycle Pair<String, Set<String>> overridden = null; for (JpsModule module : modules) { final String opts = overrideMap.get(module.getName()); if (!StringUtil.isEmptyOrSpaces(opts)) { final Set<String> parsed = parseOptions(opts); if (overridden == null) { overridden = pair(module.getName(), parsed); } else { if (!overridden.second.equals(parsed)) { return "Modules " + overridden.first + " and " + module.getName() + " must have the same 'additional command line parameters' specified because of cyclic dependencies between them"; } } } else { context.processMessage(new CompilerMessage( BUILDER_NAME, BuildMessage.Kind.WARNING, "Some modules with cyclic dependencies [" + chunk.getName() + "] have 'additional command line parameters' overridden in project settings.\nThese compilation options were applied to all modules in the cycle." )); } } } // check that all chunk modules are excluded from annotation processing for (JpsModule module : modules) { final ProcessorConfigProfile prof = compilerConfig.getAnnotationProcessingProfile(module); if (prof.isEnabled()) { return "Annotation processing is not supported for module cycles. 
Please ensure that all modules from cycle [" + chunk.getName() + "] are excluded from annotation processing";
    }
  }
  return null;
}

// Splits a whitespace-separated options string into the set of individual option tokens.
private static Set<String> parseOptions(String str) {
  final Set<String> result = new SmartHashSet<>();
  StringTokenizer t = new StringTokenizer(str, " \n\t", false);
  while (t.hasMoreTokens()) {
    result.add(t.nextToken());
  }
  return result;
}

// Decides whether cross-compilation should be expressed via the single '--release' option
// instead of the older '-source'/'-target'/'-bootclasspath' triple.
private static boolean shouldUseReleaseOption(JpsJavaCompilerConfiguration config, int compilerVersion, int chunkSdkVersion, int targetPlatformVersion) {
  if (!config.useReleaseOption()) {
    return false;
  }
  // --release option is supported in java9+ and higher
  if (compilerVersion >= 9 && chunkSdkVersion > 0 && targetPlatformVersion > 0) {
    if (chunkSdkVersion < 9) {
      // target sdk is set explicitly and differs from compiler SDK, so for consistency we should link against it
      return false;
    }
    // chunkSdkVersion >= 9, so we have no rt.jar anymore and '-release' is the only cross-compilation option available
    // Only specify '--release' when cross-compilation is indeed really required.
    // Otherwise '--release' may not be compatible with other compilation options, e.g. exporting a package from system module
    return compilerVersion != targetPlatformVersion;
  }
  return false;
}

// Decides whether javac must run as a forked external process instead of in-process,
// e.g. when the chunk's language level is out of reach of the compiler of the JVM running this build.
private static boolean shouldForkCompilerProcess(CompileContext context, ModuleChunk chunk, int chunkLanguageLevel) {
  if (!isJavac(COMPILING_TOOL.get(context))) {
    return false; // applicable to javac only
  }
  final int compilerSdkVersion = JavaVersion.current().feature;
  if (preferTargetJdkCompiler(context)) {
    final Pair<JpsSdk<JpsDummyElement>, Integer> sdkVersionPair = getAssociatedSdk(chunk);
    if (sdkVersionPair != null) {
      final Integer chunkSdkVersion = sdkVersionPair.second;
      if (chunkSdkVersion != compilerSdkVersion && chunkSdkVersion >= 6 /*min. 
supported compiler version*/) { // there is a special case because of difference in type inference behavior between javac8 and javac6-javac7 // so if corresponding JDK is associated with the module chunk, prefer compiler from this JDK over the newer compiler version return true; } } } if (compilerSdkVersion < 9 || chunkLanguageLevel <= 0) { // javac up to version 9 supports all previous releases // or // was not able to determine jdk version, so assuming in-process compiler return false; } // compilerSdkVersion is 9+ here, so applying JEP 182 "Retiring javac 'one plus three back'" policy return Math.abs(compilerSdkVersion - chunkLanguageLevel) > 3; } private static boolean isJavac(final JavaCompilingTool compilingTool) { return compilingTool != null && (compilingTool.getId() == JavaCompilers.JAVAC_ID || compilingTool.getId() == JavaCompilers.JAVAC_API_ID); } private static boolean preferTargetJdkCompiler(CompileContext context) { Boolean val = PREFER_TARGET_JDK_COMPILER.get(context); if (val == null) { final JpsProject project = context.getProjectDescriptor().getProject(); final JpsJavaCompilerConfiguration config = JpsJavaExtensionService.getInstance().getCompilerConfiguration(project); // default val = config != null? 
config.getCompilerOptions(JavaCompilers.JAVAC_ID).PREFER_TARGET_JDK_COMPILER : Boolean.TRUE; PREFER_TARGET_JDK_COMPILER.set(context, val); } return val; } // If platformCp of the build process is the same as the target platform, do not specify platformCp explicitly // this will allow javac to resolve against ct.sym file, which is required for the "compilation profiles" feature @Nullable private static Collection<File> calcEffectivePlatformCp(Collection<File> platformCp, List<String> options, JavaCompilingTool compilingTool) { if (ourDefaultRtJar == null || !isJavac(compilingTool)) { return platformCp; } boolean profileFeatureRequested = false; for (String option : options) { if ("-profile".equalsIgnoreCase(option)) { profileFeatureRequested = true; break; } } if (!profileFeatureRequested) { return platformCp; } boolean isTargetPlatformSameAsBuildRuntime = false; for (File file : platformCp) { if (FileUtil.filesEqual(file, ourDefaultRtJar)) { isTargetPlatformSameAsBuildRuntime = true; break; } } if (!isTargetPlatformSameAsBuildRuntime) { // compact profile was requested, but we have to use alternative platform classpath to meet project settings // consider this a compile error and let user re-configure the project return null; } // returning empty list will force default behaviour for platform classpath calculation // javac will resolve against its own bootclasspath and use ct.sym file when available return Collections.emptyList(); } private void submitAsyncTask(final CompileContext context, final Runnable taskRunnable) { Semaphore counter = COUNTER_KEY.get(context); assert counter != null; counter.down(); myTaskRunner.execute(() -> { try { taskRunnable.run(); } catch (Throwable e) { context.processMessage(new CompilerMessage(BUILDER_NAME, e)); } finally { counter.up(); } }); } @NotNull private static synchronized ExternalJavacManager ensureJavacServerStarted(@NotNull CompileContext context) { ExternalJavacManager server = ExternalJavacManager.KEY.get(context); if 
(server != null) {
  return server;
}
final int listenPort = findFreePort();
// Start a fresh external javac manager; its process handlers delegate background work to the shared build thread pool.
server = new ExternalJavacManager(Utils.getSystemRoot()) {
  @Override
  protected ExternalJavacProcessHandler createProcessHandler(@NotNull Process process, @NotNull String commandLine) {
    return new ExternalJavacProcessHandler(process, commandLine) {
      @Override
      @NotNull
      protected Future<?> executeOnPooledThread(@NotNull Runnable task) {
        return SharedThreadPool.getInstance().executeOnPooledThread(task);
      }
    };
  }
};
server.start(listenPort);
// Cache the started server in the build context so subsequent chunks reuse it.
ExternalJavacManager.KEY.set(context, server);
return server;
}

// Picks a free TCP port by briefly binding a server socket to port 0 and reading back the assigned local port.
// Falls back to the manager's default port if binding fails. Note: the port is released again before use,
// so in principle another process could grab it in the meantime (accepted race).
private static int findFreePort() {
  try {
    final ServerSocket serverSocket = new ServerSocket(0);
    try {
      return serverSocket.getLocalPort();
    }
    finally {
      //workaround for linux : calling close() immediately after opening socket
      //may result that socket is not closed
      synchronized (serverSocket) {
        try {
          serverSocket.wait(1);
        }
        catch (Throwable ignored) {
        }
      }
      serverSocket.close();
    }
  }
  catch (IOException e) {
    e.printStackTrace(System.err);
    return ExternalJavacManager.DEFAULT_SERVER_PORT;
  }
}

// Holds a "-target" value the user passed via 'additional compiler options' (captured in getCompilationOptions(),
// consumed as a last resort in getModuleBytecodeTarget()).
private static final Key<String> USER_DEFINED_BYTECODE_TARGET = Key.create("_user_defined_bytecode_target_");

// Builds the pair of (VM options for a forked javac process, javac command-line options) for the given chunk,
// merging project-level compiler settings, per-module overrides and extension-provided options.
private static Pair<List<String>, List<String>> getCompilationOptions(int compilerSdkVersion,
                                                                      CompileContext context,
                                                                      ModuleChunk chunk,
                                                                      @Nullable ProcessorConfigProfile profile,
                                                                      @NotNull JavaCompilingTool compilingTool) {
  final List<String> compilationOptions = new ArrayList<>();
  final List<String> vmOptions = new ArrayList<>();
  final JpsProject project = context.getProjectDescriptor().getProject();
  final JpsJavaCompilerOptions compilerOptions = JpsJavaExtensionService.getInstance().getOrCreateCompilerConfiguration(project).getCurrentCompilerOptions();
  if (compilerOptions.DEBUGGING_INFO) {
    compilationOptions.add("-g");
  }
  if (compilerOptions.DEPRECATION) {
    compilationOptions.add("-deprecation");
  }
  if (compilerOptions.GENERATE_NO_WARNINGS) {
    compilationOptions.add("-nowarn");
  }
  // NOTE: this statement continues on the next source line.
  if (compilerOptions instanceof 
EclipseCompilerOptions) { final EclipseCompilerOptions eclipseOptions = (EclipseCompilerOptions)compilerOptions; if (eclipseOptions.PROCEED_ON_ERROR) { Utils.PROCEED_ON_ERROR_KEY.set(context, Boolean.TRUE); compilationOptions.add("-proceedOnError"); } } String customArgs = compilerOptions.ADDITIONAL_OPTIONS_STRING; final Map<String, String> overrideMap = compilerOptions.ADDITIONAL_OPTIONS_OVERRIDE; if (!overrideMap.isEmpty()) { for (JpsModule m : chunk.getModules()) { final String overridden = overrideMap.get(m.getName()); if (overridden != null) { customArgs = overridden; break; } } } if (customArgs != null) { BiConsumer<List<String>, String> appender = List::add; final JpsModule module = chunk.representativeTarget().getModule(); final File baseDirectory = JpsModelSerializationDataService.getBaseDirectory(module); if (baseDirectory != null) { //this is a temporary workaround to allow passing per-module compiler options for Eclipse compiler in form // -properties $MODULE_DIR$/.settings/org.eclipse.jdt.core.prefs final String moduleDirPath = FileUtil.toCanonicalPath(baseDirectory.getAbsolutePath()); appender = (strings, option) -> strings.add(StringUtil.replace(option, PathMacroUtil.DEPRECATED_MODULE_DIR, moduleDirPath)); } boolean skip = false; boolean targetOptionFound = false; for (final String userOption : ParametersListUtil.parse(customArgs)) { if (FILTERED_OPTIONS.contains(userOption)) { skip = true; targetOptionFound = "-target".equals(userOption); continue; } if (skip) { skip = false; if (targetOptionFound) { targetOptionFound = false; USER_DEFINED_BYTECODE_TARGET.set(context, userOption); } } else { if (!FILTERED_SINGLE_OPTIONS.contains(userOption)) { if (userOption.startsWith("-J-")) { vmOptions.add(userOption.substring("-J".length())); } else { appender.accept(compilationOptions, userOption); } } } } } for (ExternalJavacOptionsProvider extension : JpsServiceManager.getInstance().getExtensions(ExternalJavacOptionsProvider.class)) { 
vmOptions.addAll(extension.getOptions(compilingTool)); } addCompilationOptions(compilerSdkVersion, compilationOptions, context, chunk, profile); return pair(vmOptions, compilationOptions); } public static void addCompilationOptions(List<String> options, CompileContext context, ModuleChunk chunk, @Nullable ProcessorConfigProfile profile) { addCompilationOptions(JavaVersion.current().feature, options, context, chunk, profile); } private static void addCompilationOptions(int compilerSdkVersion, List<String> options, CompileContext context, ModuleChunk chunk, @Nullable ProcessorConfigProfile profile) { if (!options.contains("-encoding")) { final CompilerEncodingConfiguration config = context.getProjectDescriptor().getEncodingConfiguration(); final String encoding = config.getPreferredModuleChunkEncoding(chunk); if (config.getAllModuleChunkEncodings(chunk).size() > 1) { final StringBuilder msgBuilder = new StringBuilder(); msgBuilder.append("Multiple encodings set for module chunk ").append(chunk.getName()); if (encoding != null) { msgBuilder.append("\n\"").append(encoding).append("\" will be used by compiler"); } context.processMessage(new CompilerMessage(BUILDER_NAME, BuildMessage.Kind.INFO, msgBuilder.toString())); } if (!StringUtil.isEmpty(encoding)) { options.add("-encoding"); options.add(encoding); } } addCrossCompilationOptions(compilerSdkVersion, options, context, chunk); if (!options.contains("--enable-preview")) { LanguageLevel level = JpsJavaExtensionService.getInstance().getLanguageLevel(chunk.representativeTarget().getModule()); if (level != null && level.isPreview()) { options.add("--enable-preview"); } } if (addAnnotationProcessingOptions(options, profile)) { final File srcOutput = ProjectPaths.getAnnotationProcessorGeneratedSourcesOutputDir( chunk.getModules().iterator().next(), chunk.containsTests(), profile ); if (srcOutput != null) { FileUtil.createDirectory(srcOutput); options.add("-s"); options.add(srcOutput.getPath()); } } } /** * @return true if 
annotation processing is enabled and corresponding options were added, false if profile is null or disabled */ public static boolean addAnnotationProcessingOptions(List<String> options, @Nullable AnnotationProcessingConfiguration profile) { if (profile == null || !profile.isEnabled()) { options.add("-proc:none"); return false; } // configuring annotation processing if (!profile.isObtainProcessorsFromClasspath()) { final String processorsPath = profile.getProcessorPath(); options.add("-processorpath"); options.add(FileUtil.toSystemDependentName(processorsPath.trim())); } final Set<String> processors = profile.getProcessors(); if (!processors.isEmpty()) { options.add("-processor"); options.add(StringUtil.join(processors, ",")); } for (Map.Entry<String, String> optionEntry : profile.getProcessorOptions().entrySet()) { options.add("-A" + optionEntry.getKey() + "=" + optionEntry.getValue()); } return true; } @NotNull public static String getUsedCompilerId(CompileContext context) { final JpsProject project = context.getProjectDescriptor().getProject(); final JpsJavaCompilerConfiguration config = JpsJavaExtensionService.getInstance().getCompilerConfiguration(project); return config == null ? 
JavaCompilers.JAVAC_ID : config.getJavaCompilerId(); } private static void addCrossCompilationOptions(int compilerSdkVersion, List<String> options, CompileContext context, ModuleChunk chunk) { final JpsJavaCompilerConfiguration compilerConfiguration = JpsJavaExtensionService.getInstance().getOrCreateCompilerConfiguration( context.getProjectDescriptor().getProject() ); final int languageLevel = getLanguageLevel(chunk.representativeTarget().getModule()); final int chunkSdkVersion = getChunkSdkVersion(chunk); int bytecodeTarget = getModuleBytecodeTarget(context, chunk, compilerConfiguration, languageLevel); if (shouldUseReleaseOption(compilerConfiguration, compilerSdkVersion, chunkSdkVersion, bytecodeTarget)) { options.add("--release"); options.add(complianceOption(bytecodeTarget)); return; } // using older -source, -target and -bootclasspath options if (languageLevel > 0) { options.add("-source"); options.add(complianceOption(languageLevel)); } if (bytecodeTarget > 0) { if (chunkSdkVersion > 0 && compilerSdkVersion > chunkSdkVersion) { // if compiler is newer than module JDK if (compilerSdkVersion >= bytecodeTarget) { // if user-specified bytecode version can be determined and is supported by compiler if (bytecodeTarget > chunkSdkVersion) { // and user-specified bytecode target level is higher than the highest one supported by the target JDK, // force compiler to use highest-available bytecode target version that is supported by the chunk JDK. 
// NOTE(review): this chunk starts inside JavaBuilder's compiler-option assembly
// method (its header is above this view). The fragment below finishes selecting
// the "-target" bytecode level and closes that method.
      bytecodeTarget = chunkSdkVersion;
        }
      }
      // otherwise let compiler display compilation error about incorrectly set bytecode target version
    }
  }
  else {
    if (chunkSdkVersion > 0 && compilerSdkVersion > chunkSdkVersion) {
      // force lower bytecode target level to match the version of the chunk JDK
      bytecodeTarget = chunkSdkVersion;
    }
  }
  if (bytecodeTarget > 0) {
    options.add("-target");
    options.add(complianceOption(bytecodeTarget));
  }
}

/**
 * Resolves the bytecode target (major version number) for the chunk, defaulting the
 * language level to the one of the chunk's representative module.
 *
 * @return the resolved bytecode target, or 0 if none could be determined
 */
public static int getModuleBytecodeTarget(CompileContext context, ModuleChunk chunk, JpsJavaCompilerConfiguration compilerConfiguration) {
  return getModuleBytecodeTarget(context, chunk, compilerConfiguration, getLanguageLevel(chunk.representativeTarget().getModule()));
}

/**
 * Resolves the bytecode target for the chunk: the minimum explicitly configured
 * per-module target wins; otherwise falls back to the given language level; as a
 * last resort reads a user-defined target from additional compiler options.
 *
 * @return the resolved bytecode target, or 0 if none could be determined
 */
private static int getModuleBytecodeTarget(CompileContext context, ModuleChunk chunk, JpsJavaCompilerConfiguration compilerConfiguration, int languageLevel) {
  int bytecodeTarget = 0;
  for (JpsModule module : chunk.getModules()) {
    // use the lower possible target among modules that form the chunk
    final int moduleTarget = JpsJavaSdkType.parseVersion(compilerConfiguration.getByteCodeTargetLevel(module.getName()));
    if (moduleTarget > 0 && (bytecodeTarget == 0 || moduleTarget < bytecodeTarget)) {
      bytecodeTarget = moduleTarget;
    }
  }
  if (bytecodeTarget == 0) {
    if (languageLevel > 0) {
      // according to IDEA rule: if not specified explicitly, set target to be the same as source language level
      bytecodeTarget = languageLevel;
    }
    else {
      // last resort and backward compatibility:
      // check if user explicitly defined bytecode target in additional compiler options
      String value = USER_DEFINED_BYTECODE_TARGET.get(context);
      if (value != null) {
        bytecodeTarget = JpsJavaSdkType.parseVersion(value);
      }
    }
  }
  return bytecodeTarget;
}

// Formats a major version as a javac compliance option value (delegates to JpsJavaSdkType).
private static String complianceOption(int major) {
  return JpsJavaSdkType.complianceOption(JavaVersion.compose(major));
}

// Returns the module's configured language level as a feature number, or 0 when no level is set.
private static int getLanguageLevel(@NotNull JpsModule module) {
  final LanguageLevel level = JpsJavaExtensionService.getInstance().getLanguageLevel(module);
  return level != null ? level.toJavaVersion().feature : 0;
}

/**
 * The assumed module's source code language version.
 * Returns the version number, corresponding to the language level, associated with the given module.
 * If no language level set (neither on module- nor on project-level), the version of JDK associated with the module is returned.
 * If no JDK is associated, returns 0.
 */
private static int getTargetPlatformLanguageVersion(@NotNull JpsModule module) {
  final int level = getLanguageLevel(module);
  if (level > 0) {
    return level;
  }
  // when compiling, if language level is not explicitly set, it is assumed to be equal to
  // the highest possible language level supported by target JDK
  final JpsSdk<JpsDummyElement> sdk = module.getSdk(JpsJavaSdkType.INSTANCE);
  if (sdk != null) {
    return JpsJavaSdkType.getJavaVersion(sdk);
  }
  return 0;
}

/**
 * Returns the lowest determinable JDK version among the modules of the chunk,
 * or -1 when no module's SDK version could be determined.
 */
private static int getChunkSdkVersion(ModuleChunk chunk) {
  int chunkSdkVersion = -1;
  for (JpsModule module : chunk.getModules()) {
    final JpsSdk<JpsDummyElement> sdk = module.getSdk(JpsJavaSdkType.INSTANCE);
    if (sdk != null) {
      final int moduleSdkVersion = JpsJavaSdkType.getJavaVersion(sdk);
      if (moduleSdkVersion != 0 /*could determine the version*/ && (chunkSdkVersion < 0 || chunkSdkVersion > moduleSdkVersion)) {
        chunkSdkVersion = moduleSdkVersion;
      }
    }
  }
  return chunkSdkVersion;
}

/**
 * Picks the (home path, version) of the JDK to run a forked javac with: the chunk's
 * associated SDK when it can compile the requested language level, otherwise the
 * fallback JDK configured via system properties.
 *
 * @return the pair, or null when neither the associated SDK nor a valid fallback is usable
 */
@Nullable
private static Pair<String, Integer> getForkedJavacSdk(ModuleChunk chunk, int targetLanguageLevel) {
  final Pair<JpsSdk<JpsDummyElement>, Integer> sdkVersionPair = getAssociatedSdk(chunk);
  if (sdkVersionPair != null) {
    final int sdkVersion = sdkVersionPair.second;
    if (sdkVersion >= 6 && (sdkVersion < 9 || Math.abs(sdkVersion - targetLanguageLevel) <= 3)) {
      // current javac compiler does support required language level
      return pair(sdkVersionPair.first.getHomePath(), sdkVersion);
    }
  }
  final String fallbackJdkHome = System.getProperty(GlobalOptions.FALLBACK_JDK_HOME, null);
  if (fallbackJdkHome == null) {
    LOG.info("Fallback JDK is not specified. (See " + GlobalOptions.FALLBACK_JDK_HOME + " option)");
    return null;
  }
  final String fallbackJdkVersion = System.getProperty(GlobalOptions.FALLBACK_JDK_VERSION, null);
  if (fallbackJdkVersion == null) {
    LOG.info("Fallback JDK version is not specified. (See " + GlobalOptions.FALLBACK_JDK_VERSION + " option)");
    return null;
  }
  final int fallbackVersion = JpsJavaSdkType.parseVersion(fallbackJdkVersion);
  if (fallbackVersion < 6) {
    LOG.info("Version string for fallback JDK is '" + fallbackJdkVersion + "' (recognized as version '" + fallbackVersion + "')." +
             " At least version 6 is required.");
    return null;
  }
  return pair(fallbackJdkHome, fallbackVersion);
}

// Returns the representative module's SDK paired with its parsed version, or null when no SDK is set.
@Nullable
private static Pair<JpsSdk<JpsDummyElement>, Integer> getAssociatedSdk(ModuleChunk chunk) {
  // assuming all modules in the chunk have the same associated JDK;
  // this constraint should be validated on build start
  final JpsSdk<JpsDummyElement> sdk = chunk.representativeTarget().getModule().getSdk(JpsJavaSdkType.INSTANCE);
  return sdk != null ? pair(sdk, JpsJavaSdkType.getJavaVersion(sdk)) : null;
}

@Override
public void chunkBuildFinished(CompileContext context, ModuleChunk chunk) {
  JavaBuilderUtil.cleanupChunkResources(context);
}

// Maps each target's output directory to the set of its source-root directories.
private static Map<File, Set<File>> buildOutputDirectoriesMap(CompileContext context, ModuleChunk chunk) {
  final Map<File, Set<File>> map = new THashMap<>(FileUtil.FILE_HASHING_STRATEGY);
  for (ModuleBuildTarget target : chunk.getTargets()) {
    final File outputDir = target.getOutputDir();
    if (outputDir == null) {
      continue;
    }
    final Set<File> roots = new THashSet<>(FileUtil.FILE_HASHING_STRATEGY);
    for (JavaSourceRootDescriptor descriptor : context.getProjectDescriptor().getBuildRootIndex().getTargetRoots(target, context)) {
      roots.add(descriptor.root);
    }
    map.put(outputDir, roots);
  }
  return map;
}

/**
 * Receives compiler diagnostics/output, converts them to build messages on the
 * CompileContext, and tallies error/warning counts plus the files that had errors.
 */
private static class DiagnosticSink implements DiagnosticOutputConsumer {
  private final CompileContext myContext;
  // NOTE(review): volatile++ is not atomic; counts may drift if report() is invoked
  // concurrently — confirm the compiler driver serializes diagnostic callbacks.
  private volatile int myErrorCount;
  private volatile int myWarningCount;
  private final Set<File> myFilesWithErrors = new THashSet<>(FileUtil.FILE_HASHING_STRATEGY);

  private DiagnosticSink(CompileContext context) {
    myContext = context;
  }

  @Override
  public void javaFileLoaded(File file) {
    // no-op: file loads are not tracked by this sink
  }

  @Override
  public void registerImports(final String className, final Collection<String> imports, final Collection<String> staticImports) {
    // intentionally disabled; kept for reference
    //submitAsyncTask(myContext, new Runnable() {
    //  public void run() {
    //    final Callbacks.Backend callback = DELTA_MAPPINGS_CALLBACK_KEY.get(myContext);
    //    if (callback != null) {
    //      callback.registerImports(className, imports, staticImports);
    //    }
    //  }
    //});
  }

  // Dispatches plugin-specific output data to the first registered listener with a matching id.
  @Override
  public void customOutputData(String pluginId, String dataName, byte[] data) {
    for (CustomOutputDataListener listener : JpsServiceManager.getInstance().getExtensions(CustomOutputDataListener.class)) {
      if (pluginId.equals(listener.getId())) {
        listener.processData(dataName, data);
        return;
      }
    }
  }

  // Routes a raw compiler output line: stdout/stderr pass-through, OOM detection,
  // otherwise classified by message text and forwarded as a CompilerMessage.
  @Override
  public void outputLineAvailable(String line) {
    if (!StringUtil.isEmpty(line)) {
      if (line.startsWith(ExternalJavacManager.STDOUT_LINE_PREFIX)) {
        //noinspection UseOfSystemOutOrSystemErr
        System.out.println(line);
      }
      else if (line.startsWith(ExternalJavacManager.STDERR_LINE_PREFIX)) {
        //noinspection UseOfSystemOutOrSystemErr
        System.err.println(line);
      }
      else if (line.contains("java.lang.OutOfMemoryError")) {
        myContext.processMessage(new CompilerMessage(BUILDER_NAME, BuildMessage.Kind.ERROR, "OutOfMemoryError: insufficient memory"));
        myErrorCount++;
      }
      else {
        final BuildMessage.Kind kind = getKindByMessageText(line);
        if (kind == BuildMessage.Kind.ERROR) {
          myErrorCount++;
        }
        else if (kind == BuildMessage.Kind.WARNING) {
          // NOTE(review): getKindByMessageText never returns WARNING, so this branch
          // is currently unreachable — confirm whether warning text should be detected.
          myWarningCount++;
        }
        myContext.processMessage(new CompilerMessage(BUILDER_NAME, kind, line));
      }
    }
  }

  // Heuristic text classification: "error"/"requires target release" => ERROR, else INFO.
  private static BuildMessage.Kind getKindByMessageText(String line) {
    final String lowercasedLine = line.toLowerCase(Locale.US);
    if (lowercasedLine.contains("error") || lowercasedLine.contains("requires target release")) {
      return BuildMessage.Kind.ERROR;
    }
    return BuildMessage.Kind.INFO;
  }

  // Converts a javax.tools.Diagnostic to a CompilerMessage with source position data.
  @Override
  public void report(Diagnostic<? extends JavaFileObject> diagnostic) {
    final CompilerMessage.Kind kind;
    switch (diagnostic.getKind()) {
      case ERROR:
        kind = BuildMessage.Kind.ERROR;
        myErrorCount++;
        break;
      case MANDATORY_WARNING:
      case WARNING:
        kind = BuildMessage.Kind.WARNING;
        myWarningCount++;
        break;
      case NOTE:
        kind = BuildMessage.Kind.INFO;
        break;
      case OTHER:
        kind = diagnostic instanceof JpsInfoDiagnostic? BuildMessage.Kind.JPS_INFO : BuildMessage.Kind.OTHER;
        break;
      default:
        kind = BuildMessage.Kind.OTHER;
    }
    File sourceFile = null;
    try {
      // for eclipse compiler just an attempt to call getSource() may lead to an NPE,
      // so calling this method under try/catch to avoid induced compiler errors
      final JavaFileObject source = diagnostic.getSource();
      sourceFile = source != null ? PathUtils.convertToFile(source.toUri()) : null;
    }
    catch (Exception e) {
      LOG.info(e);
    }
    final String srcPath;
    if (sourceFile != null) {
      if (kind == BuildMessage.Kind.ERROR) {
        myFilesWithErrors.add(sourceFile);
      }
      srcPath = FileUtil.toSystemIndependentName(sourceFile.getPath());
    }
    else {
      srcPath = null;
    }
    String message = diagnostic.getMessage(Locale.US);
    if (Utils.IS_TEST_MODE) {
      LOG.info(message);
    }
    final CompilerMessage compilerMsg = new CompilerMessage(
      BUILDER_NAME, kind, message, srcPath,
      diagnostic.getStartPosition(), diagnostic.getEndPosition(), diagnostic.getPosition(),
      diagnostic.getLineNumber(), diagnostic.getColumnNumber()
    );
    if (LOG.isDebugEnabled()) {
      LOG.debug(compilerMsg.toString());
    }
    myContext.processMessage(compilerMsg);
  }

  int getErrorCount() {
    return myErrorCount;
  }

  int getWarningCount() {
    return myWarningCount;
  }

  @NotNull
  Collection<File> getFilesWithErrors() {
    return myFilesWithErrors;
  }
}

/**
 * Persists generated class files synchronously, then asynchronously runs the
 * registered class post-processors before handing the file to the delegate sink.
 */
private class ClassProcessingConsumer implements OutputFileConsumer {
  private final CompileContext myContext;
  private final OutputFileConsumer myDelegateOutputFileSink;

  private ClassProcessingConsumer(CompileContext context, OutputFileConsumer sink) {
    myContext = context;
    // fail loudly if no sink was configured rather than silently dropping output
    myDelegateOutputFileSink = sink != null ? sink : fileObject -> {
      throw new RuntimeException("Output sink for compiler was not specified");
    };
  }

  @Override
  public void save(@NotNull final OutputFileObject fileObject) {
    // generated files must be saved synchronously, because some compilers (e.g. eclipse)
    // may want to read them for further compilation
    try {
      final BinaryContent content = fileObject.getContent();
      final File file = fileObject.getFile();
      if (content != null) {
        content.saveToFile(file);
      }
      else {
        myContext.processMessage(new CompilerMessage(BUILDER_NAME, BuildMessage.Kind.WARNING, "Missing content for file " + file.getPath()));
      }
    }
    catch (IOException e) {
      myContext.processMessage(new CompilerMessage(BUILDER_NAME, BuildMessage.Kind.ERROR, e.getMessage()));
    }
    // post-processing may run off-thread; the delegate sink is always invoked, even on failure
    submitAsyncTask(myContext, () -> {
      try {
        for (ClassPostProcessor processor : ourClassProcessors) {
          processor.process(myContext, fileObject);
        }
      }
      finally {
        myDelegateOutputFileSink.save(fileObject);
      }
    });
  }
}

// Per-context counter used to track outstanding async tasks submitted via submitAsyncTask.
private static final Key<Semaphore> COUNTER_KEY = Key.create("_async_task_counter_");
}
// Template Source: BaseMethodParameterSet.java.tt
// ------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
// ------------------------------------------------------------------------------

package com.microsoft.graph.models;

import com.microsoft.graph.models.WorkbookFunctionResult;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import com.google.gson.JsonObject;
import java.util.EnumSet;
import java.util.ArrayList;

// **NOTE** This file was generated by a tool and any changes will be overwritten.

/**
 * The class for the Workbook Functions Odd LPrice Parameter Set.
 */
public class WorkbookFunctionsOddLPriceParameterSet {
    /**
     * The settlement.
     */
    @SerializedName(value = "settlement", alternate = {"Settlement"})
    @Expose
    @Nullable
    public com.google.gson.JsonElement settlement;

    /**
     * The maturity.
     */
    @SerializedName(value = "maturity", alternate = {"Maturity"})
    @Expose
    @Nullable
    public com.google.gson.JsonElement maturity;

    /**
     * The last Interest.
     */
    @SerializedName(value = "lastInterest", alternate = {"LastInterest"})
    @Expose
    @Nullable
    public com.google.gson.JsonElement lastInterest;

    /**
     * The rate.
     */
    @SerializedName(value = "rate", alternate = {"Rate"})
    @Expose
    @Nullable
    public com.google.gson.JsonElement rate;

    /**
     * The yld.
     */
    @SerializedName(value = "yld", alternate = {"Yld"})
    @Expose
    @Nullable
    public com.google.gson.JsonElement yld;

    /**
     * The redemption.
     */
    @SerializedName(value = "redemption", alternate = {"Redemption"})
    @Expose
    @Nullable
    public com.google.gson.JsonElement redemption;

    /**
     * The frequency.
     */
    @SerializedName(value = "frequency", alternate = {"Frequency"})
    @Expose
    @Nullable
    public com.google.gson.JsonElement frequency;

    /**
     * The basis.
     */
    @SerializedName(value = "basis", alternate = {"Basis"})
    @Expose
    @Nullable
    public com.google.gson.JsonElement basis;

    /**
     * Instantiates a new WorkbookFunctionsOddLPriceParameterSet
     */
    public WorkbookFunctionsOddLPriceParameterSet() {}

    /**
     * Instantiates a new WorkbookFunctionsOddLPriceParameterSet
     * @param builder builder bearing the parameters to initialize from
     */
    protected WorkbookFunctionsOddLPriceParameterSet(@Nonnull final WorkbookFunctionsOddLPriceParameterSetBuilder builder) {
        this.settlement = builder.settlement;
        this.maturity = builder.maturity;
        this.lastInterest = builder.lastInterest;
        this.rate = builder.rate;
        this.yld = builder.yld;
        this.redemption = builder.redemption;
        this.frequency = builder.frequency;
        this.basis = builder.basis;
    }

    /**
     * Gets a new builder for the body
     * @return a new builder
     */
    @Nonnull
    public static WorkbookFunctionsOddLPriceParameterSetBuilder newBuilder() {
        return new WorkbookFunctionsOddLPriceParameterSetBuilder();
    }

    /**
     * Fluent builder for the WorkbookFunctionsOddLPriceParameterSet
     */
    public static final class WorkbookFunctionsOddLPriceParameterSetBuilder {
        /**
         * The settlement parameter value
         */
        @Nullable
        protected com.google.gson.JsonElement settlement;

        /**
         * Sets the Settlement
         * @param val the value to set it to
         * @return the current builder object
         */
        @Nonnull
        public WorkbookFunctionsOddLPriceParameterSetBuilder withSettlement(@Nullable final com.google.gson.JsonElement val) {
            this.settlement = val;
            return this;
        }

        /**
         * The maturity parameter value
         */
        @Nullable
        protected com.google.gson.JsonElement maturity;

        /**
         * Sets the Maturity
         * @param val the value to set it to
         * @return the current builder object
         */
        @Nonnull
        public WorkbookFunctionsOddLPriceParameterSetBuilder withMaturity(@Nullable final com.google.gson.JsonElement val) {
            this.maturity = val;
            return this;
        }

        /**
         * The lastInterest parameter value
         */
        @Nullable
        protected com.google.gson.JsonElement lastInterest;

        /**
         * Sets the LastInterest
         * @param val the value to set it to
         * @return the current builder object
         */
        @Nonnull
        public WorkbookFunctionsOddLPriceParameterSetBuilder withLastInterest(@Nullable final com.google.gson.JsonElement val) {
            this.lastInterest = val;
            return this;
        }

        /**
         * The rate parameter value
         */
        @Nullable
        protected com.google.gson.JsonElement rate;

        /**
         * Sets the Rate
         * @param val the value to set it to
         * @return the current builder object
         */
        @Nonnull
        public WorkbookFunctionsOddLPriceParameterSetBuilder withRate(@Nullable final com.google.gson.JsonElement val) {
            this.rate = val;
            return this;
        }

        /**
         * The yld parameter value
         */
        @Nullable
        protected com.google.gson.JsonElement yld;

        /**
         * Sets the Yld
         * @param val the value to set it to
         * @return the current builder object
         */
        @Nonnull
        public WorkbookFunctionsOddLPriceParameterSetBuilder withYld(@Nullable final com.google.gson.JsonElement val) {
            this.yld = val;
            return this;
        }

        /**
         * The redemption parameter value
         */
        @Nullable
        protected com.google.gson.JsonElement redemption;

        /**
         * Sets the Redemption
         * @param val the value to set it to
         * @return the current builder object
         */
        @Nonnull
        public WorkbookFunctionsOddLPriceParameterSetBuilder withRedemption(@Nullable final com.google.gson.JsonElement val) {
            this.redemption = val;
            return this;
        }

        /**
         * The frequency parameter value
         */
        @Nullable
        protected com.google.gson.JsonElement frequency;

        /**
         * Sets the Frequency
         * @param val the value to set it to
         * @return the current builder object
         */
        @Nonnull
        public WorkbookFunctionsOddLPriceParameterSetBuilder withFrequency(@Nullable final com.google.gson.JsonElement val) {
            this.frequency = val;
            return this;
        }

        /**
         * The basis parameter value
         */
        @Nullable
        protected com.google.gson.JsonElement basis;

        /**
         * Sets the Basis
         * @param val the value to set it to
         * @return the current builder object
         */
        @Nonnull
        public WorkbookFunctionsOddLPriceParameterSetBuilder withBasis(@Nullable final com.google.gson.JsonElement val) {
            this.basis = val;
            return this;
        }

        /**
         * Instantiates a new WorkbookFunctionsOddLPriceParameterSetBuilder
         */
        @Nullable
        protected WorkbookFunctionsOddLPriceParameterSetBuilder(){}

        /**
         * Builds the resulting body object to be passed to the request
         * @return the body object to pass to the request
         */
        @Nonnull
        public WorkbookFunctionsOddLPriceParameterSet build() {
            return new WorkbookFunctionsOddLPriceParameterSet(this);
        }
    }

    /**
     * Gets the functions options from the properties that have been set
     * @return a list of function options for the request
     */
    @Nonnull
    public java.util.List<com.microsoft.graph.options.FunctionOption> getFunctionOptions() {
        final ArrayList<com.microsoft.graph.options.FunctionOption> result = new ArrayList<>();
        if(this.settlement != null) {
            result.add(new com.microsoft.graph.options.FunctionOption("settlement", settlement));
        }
        if(this.maturity != null) {
            result.add(new com.microsoft.graph.options.FunctionOption("maturity", maturity));
        }
        if(this.lastInterest != null) {
            result.add(new com.microsoft.graph.options.FunctionOption("lastInterest", lastInterest));
        }
        if(this.rate != null) {
            result.add(new com.microsoft.graph.options.FunctionOption("rate", rate));
        }
        if(this.yld != null) {
            result.add(new com.microsoft.graph.options.FunctionOption("yld", yld));
        }
        if(this.redemption != null) {
            result.add(new com.microsoft.graph.options.FunctionOption("redemption", redemption));
        }
        if(this.frequency != null) {
            result.add(new com.microsoft.graph.options.FunctionOption("frequency", frequency));
        }
        if(this.basis != null) {
            result.add(new com.microsoft.graph.options.FunctionOption("basis", basis));
        }
        return result;
    }
}
package bart.model.errorgenerator.operator.deltadb.mainmemory;

import bart.BartConstants;
import bart.model.EGTask;
import speedy.model.algebra.operators.ITupleIterator;
import speedy.model.database.Attribute;
import speedy.model.database.AttributeRef;
import speedy.model.database.Cell;
import speedy.model.database.CellRef;
import speedy.model.database.IDatabase;
import speedy.model.database.ITable;
import speedy.model.database.IValue;
import speedy.model.database.NullValue;
import speedy.model.database.Tuple;
import speedy.model.database.TupleOID;
import speedy.model.database.mainmemory.MainMemoryDB;
import speedy.model.database.mainmemory.datasource.DataSource;
import speedy.model.database.mainmemory.datasource.INode;
import speedy.model.database.mainmemory.datasource.IntegerOIDGenerator;
import speedy.model.database.mainmemory.datasource.OID;
import speedy.model.database.mainmemory.datasource.nodes.AttributeNode;
import speedy.model.database.mainmemory.datasource.nodes.LeafNode;
import speedy.model.database.mainmemory.datasource.nodes.SetNode;
import speedy.model.database.mainmemory.datasource.nodes.TupleNode;
import bart.model.dependency.Dependency;
import bart.model.errorgenerator.operator.deltadb.IBuildDeltaDB;
import bart.persistence.PersistenceConstants;
import bart.persistence.Types;
import bart.utility.BartUtility;
import bart.utility.DependencyUtility;
import bart.utility.ErrorGeneratorStats;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Builds an in-memory "delta" database for an error-generation task: one delta
 * relation per affected (table, attribute) pair, a single relation per table for
 * the non-affected attributes, and auxiliary occurrence/provenance tables.
 */
public class BuildMainMemoryDeltaDB implements IBuildDeltaDB {

    private static Logger logger = LoggerFactory.getLogger(BuildMainMemoryDeltaDB.class);

    /**
     * Builds schema and instance of the delta DB from the source database, and
     * records the elapsed time in ErrorGeneratorStats.
     *
     * @param database the source database (cast to MainMemoryDB internally)
     * @param task     the error-generation task providing DCs and random-error config
     * @param rootName value stored in the STEP attribute of every initial delta tuple
     * @return the populated delta database
     */
    @Override
    public MainMemoryDB generate(IDatabase database, EGTask task, String rootName) {
        long start = new Date().getTime();
        List<AttributeRef> affectedAttributes = findAllAffectedAttributes(task);
//        List<AttributeRef> nonAffectedAttributes = findNonAffectedAttributes(scenario, affectedAttributes);
        INode schemaNode = new TupleNode(PersistenceConstants.DATASOURCE_ROOT_LABEL, IntegerOIDGenerator.getNextOID());
        schemaNode.setRoot(true);
        generateSchema(schemaNode, (MainMemoryDB) database, affectedAttributes);
        DataSource deltaDataSource = new DataSource(PersistenceConstants.TYPE_META_INSTANCE, schemaNode);
        MainMemoryDB deltaDB = new MainMemoryDB(deltaDataSource);
        generateInstance(deltaDB, (MainMemoryDB) database, rootName, affectedAttributes);
        if (logger.isDebugEnabled()) logger.debug("Delta DB:\n" + deltaDB.toString());
        long end = new Date().getTime();
        ErrorGeneratorStats.getInstance().addStat(ErrorGeneratorStats.DELTA_DB_BUILDER, end - start);
        return deltaDB;
    }

    // Collects every attribute that may be dirtied: attributes relevant to any DC premise
    // (un-aliased, de-duplicated) plus attributes configured for random errors (added as-is,
    // possibly duplicating DC-derived entries).
    private List<AttributeRef> findAllAffectedAttributes(EGTask task) {
        List<AttributeRef> result = new ArrayList<AttributeRef>();
        for (Dependency dc : task.getDCs()) {
            Set<AttributeRef> attributes = DependencyUtility.findRelevantAttributes(dc.getPremise());
            for (AttributeRef attribute : attributes) {
                BartUtility.addIfNotContained(result, DependencyUtility.unAlias(attribute));
            }
        }
        if (task.getConfiguration().isRandomErrors()) {
            for (String table : task.getConfiguration().getTablesForRandomErrors()) {
                Set<String> attributes = task.getConfiguration().getAttributesForRandomErrors(table);
                for (String attribute : attributes) {
                    AttributeRef attributeRef = new AttributeRef(table, attribute);
                    result.add(attributeRef);
                }
            }
        }
        return result;
    }

    // Builds the delta schema: for each affected attribute a dedicated delta relation
    // with (STEP, TID, attribute, GROUP_ID); remaining attributes of each table are
    // grouped into a single "non-affected" relation.
    private void generateSchema(INode schemaNode, MainMemoryDB database, List<AttributeRef> affectedAttributes) {
        for (String tableName : database.getTableNames()) {
            ITable table = database.getTable(tableName);
            List<Attribute> tableNonAffectedAttributes = new ArrayList<Attribute>();
            for (Attribute attribute : table.getAttributes()) {
                if (affectedAttributes.contains(new AttributeRef(table.getName(), attribute.getName()))) {
                    String deltaRelationName = BartUtility.getDeltaRelationName(table.getName(), attribute.getName());
                    INode setNodeSchema = new SetNode(deltaRelationName);
                    schemaNode.addChild(setNodeSchema);
                    TupleNode tupleNodeSchema = new TupleNode(deltaRelationName + "Tuple");
                    setNodeSchema.addChild(tupleNodeSchema);
                    tupleNodeSchema.addChild(createAttributeSchema(BartConstants.STEP));
                    tupleNodeSchema.addChild(createAttributeSchema(BartConstants.TID));
                    tupleNodeSchema.addChild(createAttributeSchema(attribute.getName()));
                    tupleNodeSchema.addChild(createAttributeSchema(BartConstants.GROUP_ID));
                } else {
                    tableNonAffectedAttributes.add(attribute);
                }
            }
            if (!tableNonAffectedAttributes.isEmpty()) {
                createTableForNonAffected(schemaNode, table.getName(), tableNonAffectedAttributes);
            }
        }
        createOccurrenceTables(schemaNode);
    }

    // Schema for a table's non-affected attributes: (TID, attr1, attr2, ...) under "<table><NA_SUFFIX>".
    private void createTableForNonAffected(INode schemaNode, String tableName, List<Attribute> tableNonAffectedAttributes) {
        String deltaRelationName = tableName + BartConstants.NA_TABLE_SUFFIX;
        INode setNodeSchema = new SetNode(deltaRelationName);
        schemaNode.addChild(setNodeSchema);
        TupleNode tupleNodeSchema = new TupleNode(deltaRelationName + "Tuple");
        setNodeSchema.addChild(tupleNodeSchema);
        tupleNodeSchema.addChild(createAttributeSchema(BartConstants.TID));
        for (Attribute attribute : tableNonAffectedAttributes) {
            tupleNodeSchema.addChild(createAttributeSchema(attribute.getName()));
        }
    }

    // Schema for the occurrence table (STEP, GROUP_ID, CELL_OID, CELL_TABLE, CELL_ATTRIBUTE)
    // and the provenance table (same plus PROVENANCE_CELL_VALUE).
    private void createOccurrenceTables(INode schemaNode) {
        INode occurrenceSet = new SetNode(BartConstants.OCCURRENCE_TABLE);
        TupleNode occurrenceTuple = new TupleNode(BartConstants.OCCURRENCE_TABLE + "Tuple");
        occurrenceSet.addChild(occurrenceTuple);
        occurrenceTuple.addChild(createAttributeSchema(BartConstants.STEP));
        occurrenceTuple.addChild(createAttributeSchema(BartConstants.GROUP_ID));
        occurrenceTuple.addChild(createAttributeSchema(BartConstants.CELL_OID));
        occurrenceTuple.addChild(createAttributeSchema(BartConstants.CELL_TABLE));
        occurrenceTuple.addChild(createAttributeSchema(BartConstants.CELL_ATTRIBUTE));
        schemaNode.addChild(occurrenceSet);
        INode provenanceSet = new SetNode(BartConstants.PROVENANCE_TABLE);
        TupleNode provenanceTuple = new TupleNode(BartConstants.PROVENANCE_TABLE + "Tuple");
        provenanceSet.addChild(provenanceTuple);
        provenanceTuple.addChild(createAttributeSchema(BartConstants.STEP));
        provenanceTuple.addChild(createAttributeSchema(BartConstants.GROUP_ID));
        provenanceTuple.addChild(createAttributeSchema(BartConstants.CELL_OID));
        provenanceTuple.addChild(createAttributeSchema(BartConstants.CELL_TABLE));
        provenanceTuple.addChild(createAttributeSchema(BartConstants.CELL_ATTRIBUTE));
        provenanceTuple.addChild(createAttributeSchema(BartConstants.PROVENANCE_CELL_VALUE));
        schemaNode.addChild(provenanceSet);
    }

    // Schema node for a single attribute; all delta attributes are typed as STRING.
    private AttributeNode createAttributeSchema(String attributeName) {
        AttributeNode attributeNodeInstance = new AttributeNode(attributeName);
        LeafNode leafNodeInstance = new LeafNode(Types.STRING);
        attributeNodeInstance.addChild(leafNodeInstance);
        return attributeNodeInstance;
    }

    // Builds the instance tree: empty occurrence/provenance sets, then one tuple per
    // source cell, and registers the instance on the delta DB's data source.
    private void generateInstance(MainMemoryDB deltaDB, MainMemoryDB database, String rootName, List<AttributeRef> affectedAttributes) {
        DataSource dataSource = deltaDB.getDataSource();
        INode instanceNode = new TupleNode(PersistenceConstants.DATASOURCE_ROOT_LABEL, IntegerOIDGenerator.getNextOID());
        instanceNode.setRoot(true);
        initOccurrenceTables(instanceNode);
        insertTargetTablesIntoDeltaDB(database, instanceNode, affectedAttributes, rootName);
        dataSource.addInstanceWithCheck(instanceNode);
    }

    // Copies every source tuple into the delta instance: affected cells become one
    // tuple each in their attribute's delta relation; non-affected cells of a tuple
    // are bundled into one tuple of the table's "non-affected" relation.
    private void insertTargetTablesIntoDeltaDB(MainMemoryDB database, INode instanceNode, List<AttributeRef> affectedAttributes, String rootName) {
        for (String tableName : database.getTableNames()) {
            ITable table = database.getTable(tableName);
            initInstanceNode(table, instanceNode, affectedAttributes);
            ITupleIterator it = table.getTupleIterator();
            while (it.hasNext()) {
                Tuple tuple = it.next();
                TupleOID tupleOID = tuple.getOid();
                List<Cell> nonAffectedCells = new ArrayList<Cell>();
                for (Cell cell : tuple.getCells()) {
                    if (cell.getAttribute().equals(BartConstants.OID)) {
                        continue; // synthetic OID column is never copied
                    }
                    if (affectedAttributes.contains(cell.getAttributeRef())) {
                        String deltaRelationName = BartUtility.getDeltaRelationName(table.getName(), cell.getAttribute());
                        INode setNodeInstance = getSetNodeInstance(deltaRelationName, instanceNode);
//                        if (setNodeInstance == null) {
//                            setNodeInstance = new SetNode(deltaRelationName, IntegerOIDGenerator.getNextOID());
//                            instanceNode.addChild(setNodeInstance);
//                        }
                        OID oid = IntegerOIDGenerator.getNextOID();
                        TupleNode tupleNodeInstance = new TupleNode(deltaRelationName + "Tuple", oid);
                        // NOTE(review): children are added TID-then-STEP here, while the schema
                        // declares STEP before TID — confirm attribute lookup is by name, not position.
                        tupleNodeInstance.addChild(createAttributeInstance(BartConstants.TID, tupleOID));
                        tupleNodeInstance.addChild(createAttributeInstance(BartConstants.STEP, rootName));
                        IValue value = cell.getValue();
                        tupleNodeInstance.addChild(createAttributeInstance(cell.getAttribute(), value));
                        if (value instanceof NullValue && ((NullValue) value).isLabeledNull()) {
                            // labeled nulls additionally get an occurrence-table entry
                            CellRef cellRef = new CellRef(tupleOID, new AttributeRef(table.getName(), cell.getAttribute()));
                            addTupleForNullOccurrence(value, cellRef, instanceNode);
                        }
                        setNodeInstance.addChild(tupleNodeInstance);
                    } else {
                        nonAffectedCells.add(cell);
                    }
                }
                if (!nonAffectedCells.isEmpty()) {
                    createTupleForNonAffectedCells(instanceNode, table.getName(), tupleOID, nonAffectedCells);
                }
            }
            it.close();
        }
    }

    // Pre-creates an empty instance SetNode for each affected attribute of the table,
    // so getSetNodeInstance can find it while cells are copied.
    private void initInstanceNode(ITable table, INode instanceNode, List<AttributeRef> affectedAttributes) {
        for (Attribute attribute : table.getAttributes()) {
            if (attribute.getName().equals(BartConstants.OID)) {
                continue;
            }
            if (affectedAttributes.contains(new AttributeRef(attribute.getTableName(), attribute.getName()))) {
                String deltaRelationName = BartUtility.getDeltaRelationName(table.getName(), attribute.getName());
                INode setNodeInstance = new SetNode(deltaRelationName, IntegerOIDGenerator.getNextOID());
                instanceNode.addChild(setNodeInstance);
            }
        }
    }

    // Creates the (initially empty) occurrence and provenance instance sets.
    private void initOccurrenceTables(INode instanceNode) {
        instanceNode.addChild(new SetNode(BartConstants.OCCURRENCE_TABLE, IntegerOIDGenerator.getNextOID()));
        instanceNode.addChild(new SetNode(BartConstants.PROVENANCE_TABLE, IntegerOIDGenerator.getNextOID()));
    }

    // Instance node for a single attribute value; the leaf is typed STRING but carries
    // the raw value object.
    private AttributeNode createAttributeInstance(String attributeName, Object value) {
        AttributeNode attributeNodeInstance = new AttributeNode(attributeName, IntegerOIDGenerator.getNextOID());
        LeafNode leafNodeInstance = new LeafNode(Types.STRING, value);
        attributeNodeInstance.addChild(leafNodeInstance);
        return attributeNodeInstance;
    }

    // Linear scan for the instance SetNode with the given relation name; null if absent.
    private INode getSetNodeInstance(String deltaRelationName, INode instanceNode) {
        for (INode node : instanceNode.getChildren()) {
            if (node.getLabel().equals(deltaRelationName)) {
                return node;
            }
        }
        return null;
    }

    // Adds one tuple (TID plus all non-affected values) to the table's "non-affected"
    // relation, lazily creating that relation on first use.
    private void createTupleForNonAffectedCells(INode instanceNode, String tableName, TupleOID tupleOID, List<Cell> nonAffectedCells) {
        String deltaRelationName = tableName + BartConstants.NA_TABLE_SUFFIX;
        INode setNodeInstance = getSetNodeInstance(deltaRelationName, instanceNode);
        if (setNodeInstance == null) {
            setNodeInstance = new SetNode(deltaRelationName, IntegerOIDGenerator.getNextOID());
            instanceNode.addChild(setNodeInstance);
        }
        OID oid = IntegerOIDGenerator.getNextOID();
        TupleNode tupleNodeInstance = new TupleNode(deltaRelationName + "Tuple", oid);
        tupleNodeInstance.addChild(createAttributeInstance(BartConstants.TID, tupleOID));
        for (Cell cell : nonAffectedCells) {
            tupleNodeInstance.addChild(createAttributeInstance(cell.getAttribute(), cell.getValue()));
        }
        setNodeInstance.addChild(tupleNodeInstance);
    }

    // Records a labeled-null occurrence: GROUP_ID is the null value itself, STEP is
    // the chase root, plus the cell's OID/table/attribute coordinates.
    // NOTE(review): children are added GROUP_ID-first, while the schema declares STEP
    // first — confirm attribute lookup is by name, not position.
    private void addTupleForNullOccurrence(IValue value, CellRef cellRef, INode instanceNode) {
        INode nullInsertSet = getSetNodeInstance(BartConstants.OCCURRENCE_TABLE, instanceNode);
        TupleNode nullInsertTuple = new TupleNode(BartConstants.OCCURRENCE_TABLE + "Tuple", IntegerOIDGenerator.getNextOID());
        nullInsertSet.addChild(nullInsertTuple);
        nullInsertTuple.addChild(createAttributeInstance(BartConstants.GROUP_ID, value));
        nullInsertTuple.addChild(createAttributeInstance(BartConstants.STEP, BartConstants.CHASE_STEP_ROOT));
        nullInsertTuple.addChild(createAttributeInstance(BartConstants.CELL_OID, cellRef.getTupleOID()));
        nullInsertTuple.addChild(createAttributeInstance(BartConstants.CELL_TABLE, cellRef.getAttributeRef().getTableName()));
        nullInsertTuple.addChild(createAttributeInstance(BartConstants.CELL_ATTRIBUTE, cellRef.getAttributeRef().getName()));
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. 
*/ package org.apache.geode.management; import static java.lang.management.ManagementFactory.getPlatformMBeanServer; import static java.util.concurrent.TimeUnit.MINUTES; import static org.apache.geode.cache.Region.*; import static org.apache.geode.management.internal.MBeanJMXAdapter.*; import static org.apache.geode.test.dunit.Host.*; import static org.apache.geode.test.dunit.Invoke.invokeInEveryVM; import static org.assertj.core.api.Assertions.*; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; import javax.management.Notification; import javax.management.NotificationListener; import javax.management.ObjectName; import org.apache.geode.test.junit.categories.FlakyTest; import org.awaitility.Awaitility; import org.awaitility.core.ConditionFactory; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; import org.apache.geode.cache.AttributesFactory; import org.apache.geode.cache.Cache; import org.apache.geode.cache.CacheListener; import org.apache.geode.cache.EvictionAction; import org.apache.geode.cache.EvictionAttributes; import org.apache.geode.cache.FixedPartitionAttributes; import org.apache.geode.cache.PartitionAttributes; import org.apache.geode.cache.PartitionAttributesFactory; import org.apache.geode.cache.Region; import org.apache.geode.cache.RegionAttributes; import org.apache.geode.cache.RegionFactory; import org.apache.geode.cache.RegionShortcut; import org.apache.geode.cache.Scope; import org.apache.geode.cache.query.data.Portfolio; import org.apache.geode.distributed.DistributedMember; import org.apache.geode.distributed.internal.DM; import org.apache.geode.internal.cache.AbstractRegion; import org.apache.geode.internal.cache.GemFireCacheImpl; import org.apache.geode.internal.cache.TestObjectSizerImpl; import org.apache.geode.internal.cache.lru.LRUStatistics; 
import org.apache.geode.internal.cache.partitioned.fixed.SingleHopQuarterPartitionResolver;
import org.apache.geode.management.internal.MBeanJMXAdapter;
import org.apache.geode.management.internal.SystemManagementService;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.junit.categories.DistributedTest;

/**
 * This class checks and verifies various data and operations exposed through RegionMXBean
 * interface.
 * <p>
 * Goal of the Test : RegionMBean gets created once region is created. Data like Region Attributes
 * data and stats are of proper value
 * <p>
 * TODO: complete refactoring this test to use ManagementTestRule
 */
@Category(DistributedTest.class)
@SuppressWarnings({"serial", "unused"})
public class RegionManagementDUnitTest extends ManagementTestBase {

  private static final String REGION_NAME = "MANAGEMENT_TEST_REGION";
  private static final String PARTITIONED_REGION_NAME = "MANAGEMENT_PAR_REGION";
  private static final String FIXED_PR_NAME = "MANAGEMENT_FIXED_PR";
  private static final String LOCAL_REGION_NAME = "TEST_LOCAL_REGION";
  private static final String LOCAL_SUB_REGION_NAME = "TEST_LOCAL_SUB_REGION";

  // full region paths derived from the names above (SEPARATOR comes from Region)
  private static final String REGION_PATH = SEPARATOR + REGION_NAME;
  private static final String PARTITIONED_REGION_PATH = SEPARATOR + PARTITIONED_REGION_NAME;
  private static final String FIXED_PR_PATH = SEPARATOR + FIXED_PR_NAME;
  private static final String LOCAL_SUB_REGION_PATH =
      SEPARATOR + LOCAL_REGION_NAME + SEPARATOR + LOCAL_SUB_REGION_NAME;

  // field used in manager VM
  private static Region fixedPartitionedRegion;

  // notifications captured by the listeners registered from the manager VM; reset in after()
  private static final AtomicReference<List<Notification>> MEMBER_NOTIFICATIONS_REF =
      new AtomicReference<>();
  private static final AtomicReference<List<Notification>> SYSTEM_NOTIFICATIONS_REF =
      new AtomicReference<>();

  @Manager
  private VM managerVM;

  @Member
  private VM[] memberVMs;

  @Before
  public void before() throws Exception {
    // VM 0 is the managing node; VMs 1-3 are the managed members
    this.managerVM = getHost(0).getVM(0);

    this.memberVMs = new VM[3];
    this.memberVMs[0] = getHost(0).getVM(1);
    this.memberVMs[1] = getHost(0).getVM(2);
    this.memberVMs[2] = getHost(0).getVM(3);
  }

  @After
  public void after() throws Exception {
    // clear captured notifications in every VM, then disconnect from the distributed system
    invokeInEveryVM(() -> MEMBER_NOTIFICATIONS_REF.set(null));
    invokeInEveryVM(() -> SYSTEM_NOTIFICATIONS_REF.set(null));

    disconnectAllFromDS_tmp();
  }

  /**
   * Tests all Region MBean related Management APIs
   * <p>
   * a) Notification propagated to member MBean while a region is created
   * <p>
   * b) Creates and checks a Distributed Region
   */
  @Test
  public void testDistributedRegion() throws Exception {
    createMembersAndThenManagers_tmp();

    // Adding notification listener for remote cache memberVMs
    addMemberNotificationListener(this.managerVM, 3); // TODO: why?

    for (VM memberVM : this.memberVMs) {
      createDistributedRegion_tmp(memberVM, REGION_NAME);
      verifyReplicateRegionAfterCreate(memberVM);
    }

    verifyRemoteDistributedRegion(this.managerVM, 3);

    for (VM memberVM : this.memberVMs) {
      closeRegion(memberVM, REGION_PATH);
      verifyReplicatedRegionAfterClose(memberVM);
    }

    verifyProxyCleanup(this.managerVM);

    verifyMemberNotifications(this.managerVM, REGION_NAME, 3);
  }

  /**
   * Tests all Region MBean related Management APIs
   * <p>
   * a) Notification propagated to member MBean while a region is created
   * <p>
   * b) Creates and checks a Partitioned Region
   */
  @Test
  public void testPartitionedRegion() throws Exception {
    createMembersAndThenManagers_tmp();

    // Adding notification listener for remote cache memberVMs
    addMemberNotificationListener(this.managerVM, 3); // TODO: why?
    for (VM memberVM : this.memberVMs) {
      createPartitionRegion_tmp(memberVM, PARTITIONED_REGION_NAME);
      verifyPartitionRegionAfterCreate(memberVM);
    }

    verifyRemotePartitionRegion(this.managerVM);

    for (VM memberVM : this.memberVMs) {
      closeRegion(memberVM, PARTITIONED_REGION_PATH);
      verifyPartitionRegionAfterClose(memberVM);
    }

    verifyMemberNotifications(this.managerVM, PARTITIONED_REGION_NAME, 3);
  }

  /**
   * Tests all Region MBean related Management APIs
   * <p>
   * a) Notification propagated to member MBean while a region is created
   * <p>
   * b) Creates and checks a Fixed Partitioned Region
   */
  @Test
  public void testFixedPRRegionMBean() throws Exception {
    createMembersAndThenManagers_tmp();

    // Adding notification listener for remote cache memberVMs
    addMemberNotificationListener(this.managerVM, 3); // TODO: why?

    // each member gets a partition list with a different primary (Q1, Q2 or Q3)
    int primaryIndex = 0;
    for (VM memberVM : this.memberVMs) {
      List<FixedPartitionAttributes> fixedPartitionAttributesList =
          createFixedPartitionList(primaryIndex + 1);
      memberVM.invoke(() -> createFixedPartitionRegion(fixedPartitionAttributesList));
      primaryIndex++;
    }

    verifyRemoteFixedPartitionRegion(this.managerVM);

    for (VM memberVM : this.memberVMs) {
      closeRegion(memberVM, FIXED_PR_PATH);
    }

    verifyMemberNotifications(this.managerVM, FIXED_PR_PATH, 3);
  }

  /**
   * Tests a Distributed Region at Managing Node side while region is created in a member node
   * asynchronously.
   */
  @Category(FlakyTest.class) // GEODE-1930
  @Test
  public void testRegionAggregate() throws Exception {
    createManagersAndThenMembers_tmp();

    // Adding notification listener for remote cache memberVMs
    addSystemNotificationListener(this.managerVM); // TODO: why?
    for (VM memberVM : this.memberVMs) {
      createDistributedRegion_tmp(memberVM, REGION_NAME);
    }

    // the 3 members host the region
    verifyDistributedMBean(this.managerVM, 3);

    // creating the region on the manager too raises the hosting-member count to 4
    createDistributedRegion_tmp(this.managerVM, REGION_NAME);
    verifyDistributedMBean(this.managerVM, 4);

    for (VM memberVM : this.memberVMs) {
      closeRegion(memberVM, REGION_PATH);
    }

    verifyProxyCleanup(this.managerVM);

    // only the manager's copy remains, then none after it is closed
    verifyDistributedMBean(this.managerVM, 1);
    closeRegion(this.managerVM, REGION_PATH);
    verifyDistributedMBean(this.managerVM, 0);

    // TODO: GEODE-1930: next line is too flaky and needs to be fixed
    // verifySystemNotifications(this.managerVM, REGION_NAME, 3);
  }

  @Test
  public void testNavigationAPIS() throws Exception {
    createManagersAndThenMembers_tmp();

    for (VM memberVM : this.memberVMs) {
      createDistributedRegion_tmp(memberVM, REGION_NAME);
      createPartitionRegion_tmp(memberVM, PARTITIONED_REGION_NAME);
    }

    createDistributedRegion_tmp(this.managerVM, REGION_NAME);
    createPartitionRegion_tmp(this.managerVM, PARTITIONED_REGION_NAME);

    List<String> memberIds = new ArrayList<>();
    for (VM memberVM : this.memberVMs) {
      memberIds.add(getDistributedMemberId_tmp(memberVM));
    }

    verifyNavigationApis(this.managerVM, memberIds);

    for (VM memberVM : this.memberVMs) {
      closeRegion(memberVM, REGION_PATH);
    }
    closeRegion(this.managerVM, REGION_PATH);
  }

  @Test
  public void testSubRegions() throws Exception {
    createMembersAndThenManagers_tmp();

    for (VM memberVM : this.memberVMs) {
      createLocalRegion_tmp(memberVM, LOCAL_REGION_NAME);
      createSubRegion_tmp(memberVM, LOCAL_REGION_NAME, LOCAL_SUB_REGION_NAME);
    }

    for (VM memberVM : this.memberVMs) {
      verifySubRegions(memberVM, LOCAL_SUB_REGION_PATH);
    }

    // closing the parent region must remove the sub-region MBean as well
    for (VM memberVM : this.memberVMs) {
      closeRegion(memberVM, LOCAL_REGION_NAME);
      verifyNullRegions(memberVM, LOCAL_SUB_REGION_NAME);
    }
  }

  @Test
  public void testSpecialRegions() throws Exception {
    createMembersAndThenManagers_tmp();
    createSpecialRegion(this.memberVMs[0]);
    verifySpecialRegion(this.managerVM);
  }

  @Category(FlakyTest.class) // GEODE-1930
  @Test
  public void
      testLruStats() throws Exception {
    createMembersAndThenManagers_tmp();

    for (VM memberVM : this.memberVMs) {
      createDiskRegion(memberVM);
    }

    verifyEntrySize(this.managerVM, 3);
  }

  // initManagement(false): start members first, then the manager
  private void createMembersAndThenManagers_tmp() throws Exception {
    initManagement(false);
  }

  // initManagement(true): start the manager first, then the members
  private void createManagersAndThenMembers_tmp() throws Exception {
    initManagement(true);
  }

  private void disconnectAllFromDS_tmp() {
    disconnectAllFromDS();
  }

  private ManagementService getManagementService_tmp() {
    return getManagementService();
  }

  private Cache getCache_tmp() {
    return getCache();
  }

  // Closes the region identified by regionPath in the given VM
  private void closeRegion(final VM anyVM, final String regionPath) {
    anyVM.invoke("closeRegion", () -> getCache_tmp().getRegion(regionPath).close());
  }

  // Creates two regions whose names contain special characters ("p-os", "p_os")
  private void createSpecialRegion(final VM memberVM) {
    memberVM.invoke("createSpecialRegion", () -> {
      AttributesFactory attributesFactory = new AttributesFactory();
      attributesFactory.setValueConstraint(Portfolio.class);
      RegionAttributes regionAttributes = attributesFactory.create();

      Cache cache = getCache_tmp();
      cache.createRegion("p-os", regionAttributes);
      cache.createRegion("p_os", regionAttributes);
    });
  }

  // Waits until DistributedRegionMXBeans for both specially-named regions appear on the manager
  private void verifySpecialRegion(final VM managerVM) {
    managerVM.invoke("verifySpecialRegion", () -> {
      awaitDistributedRegionMXBean("/p-os", 1); // TODO: why?
      awaitDistributedRegionMXBean("/p_os", 1);
    });
  }

  // Creates a Scope.LOCAL region with LRU memory eviction (LOCAL_DESTROY) and populates it
  // so that the RegionMXBean reports a positive entry size
  private void createDiskRegion(final VM memberVM) {
    memberVM.invoke("createDiskRegion", () -> {
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(Scope.LOCAL);
      factory.setEvictionAttributes(EvictionAttributes.createLRUMemoryAttributes(20,
          new TestObjectSizerImpl(), EvictionAction.LOCAL_DESTROY));

      Region region = getCache_tmp().createRegion(REGION_NAME, factory.create());

      LRUStatistics lruStats =
          ((AbstractRegion) region).getEvictionController().getLRUHelper().getStats();
      assertThat(lruStats).isNotNull();

      RegionMXBean regionMXBean = getManagementService_tmp().getLocalRegionMBean(REGION_PATH);
      assertThat(regionMXBean).isNotNull();

      int total;
      for (total = 0; total < 100; total++) { // TODO: why so many?
        int[] array = new int[250];
        array[0] = total;
        // NOTE(review): new Integer(...) is deprecated boxing; Integer.valueOf would do
        region.put(new Integer(total), array);
      }

      assertThat(regionMXBean.getEntrySize()).isGreaterThan(0);
    });
  }

  // Asserts the aggregated entry size reported by the DistributedRegionMXBean is positive
  private void verifyEntrySize(final VM managerVM, final int expectedMembers) {
    managerVM.invoke("verifyEntrySize", () -> {
      DistributedRegionMXBean distributedRegionMXBean =
          awaitDistributedRegionMXBean(REGION_PATH, expectedMembers);
      assertThat(distributedRegionMXBean).isNotNull();
      assertThat(distributedRegionMXBean.getEntrySize()).isGreaterThan(0);
    });
  }

  // Asserts a local RegionMXBean exists for the given sub-region path
  private void verifySubRegions(final VM memberVM, final String subRegionPath) {
    memberVM.invoke("verifySubRegions", () -> {
      RegionMXBean regionMXBean = getManagementService_tmp().getLocalRegionMBean(subRegionPath);
      assertThat(regionMXBean).isNotNull();
    });
  }

  // Asserts no local RegionMXBean exists for the given sub-region path (after parent close)
  private void verifyNullRegions(final VM memberVM, final String subRegionPath) {
    memberVM.invoke("verifyNullRegions", () -> {
      RegionMXBean regionMXBean = getManagementService_tmp().getLocalRegionMBean(subRegionPath);
      assertThat(regionMXBean).isNull();
    });
  }

  // Exercises DistributedSystemMXBean navigation APIs (fetch/list ObjectNames) on the manager
  private void verifyNavigationApis(final VM managerVM, final List<String> memberIds) {
    managerVM.invoke("verifyNavigationApis", () -> {
      ManagementService service = getManagementService_tmp();
      assertThat(service.getDistributedSystemMXBean()).isNotNull();
      awaitMemberCount(4); // manager + 3 members

      DistributedSystemMXBean distributedSystemMXBean = service.getDistributedSystemMXBean();
      // two regions exist: the replicated region and the partitioned region
      assertThat(distributedSystemMXBean.listDistributedRegionObjectNames()).hasSize(2);

      assertThat(distributedSystemMXBean.fetchDistributedRegionObjectName(PARTITIONED_REGION_PATH))
          .isNotNull();
      assertThat(distributedSystemMXBean.fetchDistributedRegionObjectName(REGION_PATH)).isNotNull();

      // fetched ObjectNames must match the canonical MBean names
      ObjectName actualName =
          distributedSystemMXBean.fetchDistributedRegionObjectName(PARTITIONED_REGION_PATH);
      ObjectName expectedName = getDistributedRegionMbeanName(PARTITIONED_REGION_PATH);
      assertThat(actualName).isEqualTo(expectedName);

      actualName = distributedSystemMXBean.fetchDistributedRegionObjectName(REGION_PATH);
      expectedName = getDistributedRegionMbeanName(REGION_PATH);
      assertThat(actualName).isEqualTo(expectedName);

      for (String memberId : memberIds) {
        ObjectName objectName = getMemberMBeanName(memberId);
        awaitMemberMXBeanProxy(objectName);

        // every member hosts both regions
        ObjectName[] objectNames = distributedSystemMXBean.fetchRegionObjectNames(objectName);
        assertThat(objectNames).isNotNull();
        assertThat(objectNames).hasSize(2);

        List<ObjectName> listOfNames = Arrays.asList(objectNames);

        expectedName = getRegionMBeanName(memberId, PARTITIONED_REGION_PATH);
        assertThat(listOfNames).contains(expectedName);
        expectedName = getRegionMBeanName(memberId, REGION_PATH);
        assertThat(listOfNames).contains(expectedName);
      }

      for (String memberId : memberIds) {
        ObjectName objectName = getMemberMBeanName(memberId);
        awaitMemberMXBeanProxy(objectName);

        expectedName = getRegionMBeanName(memberId, PARTITIONED_REGION_PATH);
        awaitRegionMXBeanProxy(expectedName);
        actualName =
            distributedSystemMXBean.fetchRegionObjectName(memberId, PARTITIONED_REGION_PATH);
        assertThat(actualName).isEqualTo(expectedName);

        expectedName = getRegionMBeanName(memberId, REGION_PATH);
        awaitRegionMXBeanProxy(expectedName);
        actualName = distributedSystemMXBean.fetchRegionObjectName(memberId, REGION_PATH);
        assertThat(actualName).isEqualTo(expectedName);
      }
    });
  }

  /**
   * Invoked in controller VM. Builds the fixed partition list (Q1, Q2, Q3) with the partition
   * selected by primaryIndex marked as primary.
   */
  private List<FixedPartitionAttributes> createFixedPartitionList(final int primaryIndex) {
    List<FixedPartitionAttributes> fixedPartitionAttributesList = new ArrayList<>();
    if (primaryIndex == 1) {
      fixedPartitionAttributesList
          .add(FixedPartitionAttributes.createFixedPartition("Q1", true, 3));
      fixedPartitionAttributesList.add(FixedPartitionAttributes.createFixedPartition("Q2", 3));
      fixedPartitionAttributesList.add(FixedPartitionAttributes.createFixedPartition("Q3", 3));
    }
    if (primaryIndex == 2) {
      fixedPartitionAttributesList.add(FixedPartitionAttributes.createFixedPartition("Q1", 3));
      fixedPartitionAttributesList
          .add(FixedPartitionAttributes.createFixedPartition("Q2", true, 3));
      fixedPartitionAttributesList.add(FixedPartitionAttributes.createFixedPartition("Q3", 3));
    }
    if (primaryIndex == 3) {
      fixedPartitionAttributesList.add(FixedPartitionAttributes.createFixedPartition("Q1", 3));
      fixedPartitionAttributesList.add(FixedPartitionAttributes.createFixedPartition("Q2", 3));
      fixedPartitionAttributesList
          .add(FixedPartitionAttributes.createFixedPartition("Q3", true, 3));
    }
    return fixedPartitionAttributesList;
  }

  /**
   * Invoked in member VMs. Creates the fixed partitioned region and verifies its partition
   * attributes through the local RegionMXBean.
   */
  private void createFixedPartitionRegion(
      final List<FixedPartitionAttributes> fixedPartitionAttributesList) {
    SystemManagementService service = getSystemManagementService_tmp();

    PartitionAttributesFactory partitionAttributesFactory = new PartitionAttributesFactory();
    partitionAttributesFactory.setRedundantCopies(2).setTotalNumBuckets(12);

    for (FixedPartitionAttributes fixedPartitionAttributes : fixedPartitionAttributesList) {
      partitionAttributesFactory.addFixedPartitionAttributes(fixedPartitionAttributes);
    }
    partitionAttributesFactory.setPartitionResolver(new SingleHopQuarterPartitionResolver());

    AttributesFactory attributesFactory = new AttributesFactory();
    attributesFactory.setPartitionAttributes(partitionAttributesFactory.create());

    fixedPartitionedRegion =
        getCache_tmp().createRegion(FIXED_PR_NAME, attributesFactory.create());
    assertThat(fixedPartitionedRegion).isNotNull();

    RegionMXBean regionMXBean = service.getLocalRegionMBean(FIXED_PR_PATH);
    RegionAttributes regionAttributes = fixedPartitionedRegion.getAttributes();

    PartitionAttributesData partitionAttributesData = regionMXBean.listPartitionAttributes();
    verifyPartitionData(regionAttributes, partitionAttributesData);

    // one entry per fixed partition (Q1, Q2, Q3)
    FixedPartitionAttributesData[] fixedPartitionAttributesData =
        regionMXBean.listFixedPartitionAttributes();
    assertThat(fixedPartitionAttributesData).isNotNull();
    assertThat(fixedPartitionAttributesData).hasSize(3);

    for (int i = 0; i < fixedPartitionAttributesData.length; i++) {
      // TODO: add real assertions
      // LogWriterUtils.getLogWriter().info("<ExpectedString> Fixed PR Data is " +
      // fixedPartitionAttributesData[i] + "</ExpectedString> ");
    }
  }

  // Registers a MemberNotificationListener on each remote member's Member MBean; the
  // notifications are accumulated into MEMBER_NOTIFICATIONS_REF for later verification
  private void addMemberNotificationListener(final VM managerVM, final int expectedMembers) {
    managerVM.invoke("addMemberNotificationListener", () -> {
      Set<DistributedMember> otherMemberSet = getOtherNormalMembers_tmp();
      assertThat(otherMemberSet).hasSize(expectedMembers);

      SystemManagementService service = getSystemManagementService_tmp();

      List<Notification> notifications = new ArrayList<>();
      MEMBER_NOTIFICATIONS_REF.set(notifications);

      for (DistributedMember member : otherMemberSet) {
        MemberNotificationListener listener = new MemberNotificationListener(notifications);
        ObjectName objectName = service.getMemberMBeanName(member);
        awaitMemberMXBeanProxy(objectName);
        getPlatformMBeanServer().addNotificationListener(objectName, listener, null, null);
      }
    });
  }

  /**
   * Add a Notification listener to DistributedSystemMBean which should gather all the notifications
   * which are propagated through all individual MemberMBeans Hence Region created/destroyed should
   * be visible to this listener
   */
  private void
      addSystemNotificationListener(final VM managerVM) {
    managerVM.invoke("addSystemNotificationListener", () -> {
      awaitDistributedSystemMXBean();

      List<Notification> notifications = new ArrayList<>();
      SYSTEM_NOTIFICATIONS_REF.set(notifications);

      DistributedSystemNotificationListener listener =
          new DistributedSystemNotificationListener(notifications);
      ObjectName objectName = MBeanJMXAdapter.getDistributedSystemName();
      getPlatformMBeanServer().addNotificationListener(objectName, listener, null, null);
    });
  }

  // Expects exactly one REGION_CREATED and one REGION_CLOSED notification per member,
  // each mentioning the given region name in its message
  private void verifyMemberNotifications(final VM managerVM, final String regionName,
      final int expectedMembers) {
    managerVM.invoke("verifyMemberNotifications", () -> {
      await().until(() -> assertThat(MEMBER_NOTIFICATIONS_REF.get()).hasSize(expectedMembers * 2));

      int regionCreatedCount = 0;
      int regionDestroyedCount = 0;
      for (Notification notification : MEMBER_NOTIFICATIONS_REF.get()) {
        if (JMXNotificationType.REGION_CREATED.equals(notification.getType())) {
          regionCreatedCount++;
          assertThat(notification.getMessage()).contains(regionName);
        } else if (JMXNotificationType.REGION_CLOSED.equals(notification.getType())) {
          regionDestroyedCount++;
          assertThat(notification.getMessage()).contains(regionName);
        } else {
          fail("Unexpected notification type: " + notification.getType());
        }
      }

      assertThat(regionCreatedCount).isEqualTo(expectedMembers);
      assertThat(regionDestroyedCount).isEqualTo(expectedMembers);
    });
  }

  // Sample of the member notifications this verifies:
  // <[javax.management.Notification[source=10.118.33.232(17632)<v1>-32770][type=gemfire.distributedsystem.cache.region.created][message=Region
  // Created With Name /MANAGEMENT_TEST_REGION],
  // javax.management.Notification[source=10.118.33.232(17633)<v2>-32771][type=gemfire.distributedsystem.cache.region.created][message=Region
  // Created With Name /MANAGEMENT_TEST_REGION],
  // javax.management.Notification[source=10.118.33.232(17634)<v3>-32772][type=gemfire.distributedsystem.cache.region.created][message=Region
  // Created With Name /MANAGEMENT_TEST_REGION],
  // javax.management.Notification[source=10.118.33.232(17632)<v1>-32770][type=gemfire.distributedsystem.cache.region.closed][message=Region
  // Destroyed/Closed With Name /MANAGEMENT_TEST_REGION],
  // javax.management.Notification[source=10.118.33.232(17633)<v2>-32771][type=gemfire.distributedsystem.cache.region.closed][message=Region
  // Destroyed/Closed With Name /MANAGEMENT_TEST_REGION],
  // javax.management.Notification[source=10.118.33.232(17634)<v3>-32772][type=gemfire.distributedsystem.cache.region.closed][message=Region
  // Destroyed/Closed With Name /MANAGEMENT_TEST_REGION]]>
  private void verifySystemNotifications(final VM managerVM, final String regionName,
      final int expectedMembers) {
    managerVM.invoke("verifySystemNotifications", () -> {
      assertThat(SYSTEM_NOTIFICATIONS_REF.get()).isNotNull();
      assertThat(SYSTEM_NOTIFICATIONS_REF.get()).hasSize(expectedMembers + 2); // 2 for the manager

      int regionCreatedCount = 0;
      int regionDestroyedCount = 0;
      for (Notification notification : SYSTEM_NOTIFICATIONS_REF.get()) {
        if (JMXNotificationType.REGION_CREATED.equals(notification.getType())) {
          regionCreatedCount++;
          assertThat(notification.getMessage()).contains(regionName);
        } else if (JMXNotificationType.REGION_CLOSED.equals(notification.getType())) {
          regionDestroyedCount++;
          assertThat(notification.getMessage()).contains(regionName);
        } else {
          fail("Unexpected notification type: " + notification.getType());
        }
      }

      assertThat(regionCreatedCount).isEqualTo(1); // just the manager
      assertThat(regionDestroyedCount).isEqualTo(expectedMembers + 1); // all 3 members + manager
    });
  }

  // Sample of the system notifications this verifies:
  // <[javax.management.Notification[source=192.168.1.72(18496)<v27>-32770][type=gemfire.distributedsystem.cache.region.created][message=Region
  // Created With Name /MANAGEMENT_TEST_REGION],
  // javax.management.Notification[source=192.168.1.72(18497)<v28>-32771][type=gemfire.distributedsystem.cache.region.closed][message=Region
  // Destroyed/Closed With Name /MANAGEMENT_TEST_REGION],
  // javax.management.Notification[source=192.168.1.72(18498)<v29>-32772][type=gemfire.distributedsystem.cache.region.closed][message=Region
  // Destroyed/Closed With Name /MANAGEMENT_TEST_REGION],
  // javax.management.Notification[source=192.168.1.72(18499)<v30>-32773][type=gemfire.distributedsystem.cache.region.closed][message=Region
  // Destroyed/Closed With Name /MANAGEMENT_TEST_REGION],
  // javax.management.Notification[source=192.168.1.72(18496)<v27>-32770][type=gemfire.distributedsystem.cache.region.closed][message=Region
  // Destroyed/Closed With Name /MANAGEMENT_TEST_REGION]]>

  // Waits until the manager no longer holds a RegionMXBean proxy for any remote member
  private void verifyProxyCleanup(final VM managerVM) {
    managerVM.invoke("verifyProxyCleanup", () -> {
      SystemManagementService service = getSystemManagementService_tmp();

      Set<DistributedMember> otherMemberSet = getOtherNormalMembers_tmp();
      for (final DistributedMember member : otherMemberSet) {
        String alias = "Waiting for the proxy to get deleted at managing node";
        await(alias).until(
            () -> assertThat(service.getMBeanProxy(service.getRegionMBeanName(member, REGION_PATH),
                RegionMXBean.class)).isNull());
      }
    });
  }

  // Verifies per-member RegionMXBean proxies and the aggregated DistributedRegionMXBean
  // for the replicated region on the manager
  private void verifyRemoteDistributedRegion(final VM managerVM, final int expectedMembers) {
    managerVM.invoke("verifyRemoteDistributedRegion", () -> {
      Set<DistributedMember> otherMemberSet = getOtherNormalMembers_tmp();
      assertThat(otherMemberSet).hasSize(expectedMembers);

      for (DistributedMember member : otherMemberSet) {
        RegionMXBean regionMXBean = awaitRegionMXBeanProxy(member, REGION_PATH);

        RegionAttributesData regionAttributesData = regionMXBean.listRegionAttributes();
        assertThat(regionAttributesData).isNotNull();

        MembershipAttributesData membershipAttributesData =
            regionMXBean.listMembershipAttributes();
        assertThat(membershipAttributesData).isNotNull();

        EvictionAttributesData evictionAttributesData = regionMXBean.listEvictionAttributes();
        assertThat(evictionAttributesData).isNotNull();
      }

      DistributedRegionMXBean distributedRegionMXBean = awaitDistributedRegionMXBean(REGION_PATH,
          expectedMembers);
      assertThat(distributedRegionMXBean).isNotNull();
      assertThat(distributedRegionMXBean.getFullPath()).isEqualTo(REGION_PATH);
    });
  }

  // Verifies the aggregated DistributedRegionMXBean reflects the expected hosting-member count;
  // expectedMembers == 0 means the MBean itself must disappear
  private void verifyDistributedMBean(final VM managerVM, final int expectedMembers) {
    managerVM.invoke("verifyDistributedMBean", () -> {
      if (expectedMembers == 0) {
        ManagementService service = getManagementService_tmp();
        String alias = "Waiting for the proxy to get deleted at managing node";
        await(alias)
            .until(() -> assertThat(service.getDistributedRegionMXBean(REGION_PATH)).isNull());
        return;
      }

      DistributedRegionMXBean distributedRegionMXBean =
          awaitDistributedRegionMXBean(REGION_PATH, expectedMembers);

      assertThat(distributedRegionMXBean.getFullPath()).isEqualTo(REGION_PATH);
      assertThat(distributedRegionMXBean.getMemberCount()).isEqualTo(expectedMembers);
      assertThat(distributedRegionMXBean.getMembers()).hasSize(expectedMembers);

      // Check Stats related Data
      // LogWriterUtils.getLogWriter().info("<ExpectedString> CacheListenerCallsAvgLatency is " +
      // distributedRegionMXBean.getCacheListenerCallsAvgLatency() + "</ExpectedString> ");
      // LogWriterUtils.getLogWriter().info("<ExpectedString> CacheWriterCallsAvgLatency is " +
      // distributedRegionMXBean.getCacheWriterCallsAvgLatency() + "</ExpectedString> ");
      // LogWriterUtils.getLogWriter().info("<ExpectedString> CreatesRate is " +
      // distributedRegionMXBean.getCreatesRate() + "</ExpectedString> ");
    });
  }

  // Verifies partition attributes of the partitioned region through each member's proxy
  // and the aggregated DistributedRegionMXBean on the manager
  private void verifyRemotePartitionRegion(final VM managerVM) {
    managerVM.invoke("verifyRemotePartitionRegion", () -> {
      Set<DistributedMember> otherMemberSet = getOtherNormalMembers_tmp();
      for (DistributedMember member : otherMemberSet) {
        RegionMXBean regionMXBean = awaitRegionMXBeanProxy(member, PARTITIONED_REGION_PATH);
        PartitionAttributesData partitionAttributesData = regionMXBean.listPartitionAttributes();
        assertThat(partitionAttributesData).isNotNull();
      }

      ManagementService service = getManagementService_tmp();
      DistributedRegionMXBean distributedRegionMXBean =
          service.getDistributedRegionMXBean(PARTITIONED_REGION_PATH);
      assertThat(distributedRegionMXBean.getMembers()).hasSize(3);
    });
  }

  // Verifies the local RegionMXBean of the replicated region right after creation:
  // attributes reported by the MBean must match the Region's actual attributes
  private void verifyReplicateRegionAfterCreate(final VM memberVM) {
    memberVM.invoke("verifyReplicateRegionAfterCreate", () -> {
      Cache cache = getCache_tmp();

      String memberId =
          MBeanJMXAdapter.getMemberNameOrId(cache.getDistributedSystem().getDistributedMember());
      ObjectName objectName = ObjectName.getInstance("GemFire:type=Member,member=" + memberId);

      // List<Notification> notifications = new ArrayList<>();
      // MEMBER_NOTIFICATIONS_REF.set(notifications);
      //
      // MemberNotificationListener listener = new MemberNotificationListener(notifications);
      // ManagementFactory.getPlatformMBeanServer().addNotificationListener(objectName, listener,
      // null, null);

      SystemManagementService service = getSystemManagementService_tmp();
      RegionMXBean regionMXBean = service.getLocalRegionMBean(REGION_PATH);
      assertThat(regionMXBean).isNotNull();

      Region region = cache.getRegion(REGION_PATH);
      RegionAttributes regionAttributes = region.getAttributes();

      RegionAttributesData regionAttributesData = regionMXBean.listRegionAttributes();
      verifyRegionAttributes(regionAttributes, regionAttributesData);

      MembershipAttributesData membershipData = regionMXBean.listMembershipAttributes();
      assertThat(membershipData).isNotNull();

      EvictionAttributesData evictionData = regionMXBean.listEvictionAttributes();
      assertThat(evictionData).isNotNull();
    });
  }

  // Verifies the local RegionMXBean of the partitioned region right after creation
  private void verifyPartitionRegionAfterCreate(final VM memberVM) {
    memberVM.invoke("verifyPartitionRegionAfterCreate", () -> {
      Region region = getCache_tmp().getRegion(PARTITIONED_REGION_PATH);
      SystemManagementService service = getSystemManagementService_tmp();
      RegionMXBean regionMXBean = service.getLocalRegionMBean(PARTITIONED_REGION_PATH);
      verifyPartitionData(region.getAttributes(), regionMXBean.listPartitionAttributes());
    });
  }

  private void verifyReplicatedRegionAfterClose(final VM memberVM) {
    memberVM.invoke("verifyReplicatedRegionAfterClose", () -> {
      SystemManagementService service = getSystemManagementService_tmp();
      // after close the local MBean must be gone...
      RegionMXBean regionMXBean = service.getLocalRegionMBean(REGION_PATH);
      assertThat(regionMXBean).isNull();

      // ...and no entry must remain in the local monitoring region
      ObjectName objectName = service.getRegionMBeanName(
          getCache_tmp().getDistributedSystem().getDistributedMember(), REGION_PATH);
      assertThat(service.getLocalManager().getManagementResourceRepo()
          .getEntryFromLocalMonitoringRegion(objectName)).isNull();
    });
  }

  // After close the local RegionMXBean of the partitioned region must be gone
  private void verifyPartitionRegionAfterClose(final VM memberVM) {
    memberVM.invoke("verifyPartitionRegionAfterClose", () -> {
      ManagementService service = getManagementService_tmp();
      RegionMXBean regionMXBean = service.getLocalRegionMBean(PARTITIONED_REGION_PATH);
      assertThat(regionMXBean).isNull();
    });
  }

  /**
   * Invoked in member VMs. Compares PartitionAttributesData from the MBean against the Region's
   * actual PartitionAttributes, field by field.
   */
  private void verifyPartitionData(final RegionAttributes expectedRegionAttributes,
      final PartitionAttributesData partitionAttributesData) {
    PartitionAttributes expectedPartitionAttributes =
        expectedRegionAttributes.getPartitionAttributes();

    assertThat(partitionAttributesData.getRedundantCopies())
        .isEqualTo(expectedPartitionAttributes.getRedundantCopies());

    assertThat(partitionAttributesData.getTotalMaxMemory())
        .isEqualTo(expectedPartitionAttributes.getTotalMaxMemory());

    // Total number of buckets for whole region
    assertThat(partitionAttributesData.getTotalNumBuckets())
        .isEqualTo(expectedPartitionAttributes.getTotalNumBuckets());

    assertThat(partitionAttributesData.getLocalMaxMemory())
        .isEqualTo(expectedPartitionAttributes.getLocalMaxMemory());

    assertThat(partitionAttributesData.getColocatedWith())
        .isEqualTo(expectedPartitionAttributes.getColocatedWith());

    String partitionResolver = null;
    // TODO: these conditionals should be deterministic
    if (expectedPartitionAttributes.getPartitionResolver() != null) {
      partitionResolver = expectedPartitionAttributes.getPartitionResolver().getName();
    }
    assertThat(partitionAttributesData.getPartitionResolver()).isEqualTo(partitionResolver);

    assertThat(partitionAttributesData.getRecoveryDelay())
        .isEqualTo(expectedPartitionAttributes.getRecoveryDelay());

    assertThat(partitionAttributesData.getStartupRecoveryDelay())
        .isEqualTo(expectedPartitionAttributes.getStartupRecoveryDelay());

    if (expectedPartitionAttributes.getPartitionListeners() != null) {
      for (int i = 0; i < expectedPartitionAttributes.getPartitionListeners().length; i++) {
        // assertEquals((expectedPartitionAttributes.getPartitionListeners())[i].getClass().getCanonicalName(),
        // partitionAttributesData.getPartitionListeners()[i]);
        assertThat(partitionAttributesData.getPartitionListeners()[i]).isEqualTo(
            expectedPartitionAttributes.getPartitionListeners()[i].getClass().getCanonicalName());
      }
    }
  }

  /**
   * Invoked in member VMs. Compares RegionAttributesData from the MBean against the Region's
   * actual RegionAttributes, field by field.
   */
  private void verifyRegionAttributes(final RegionAttributes regionAttributes,
      final RegionAttributesData regionAttributesData) {
    String compressorClassName = null;
    // TODO: these conditionals should be deterministic
    if (regionAttributes.getCompressor() != null) {
      compressorClassName = regionAttributes.getCompressor().getClass().getCanonicalName();
    }
    assertThat(regionAttributesData.getCompressorClassName()).isEqualTo(compressorClassName);

    String cacheLoaderClassName = null;
    if (regionAttributes.getCacheLoader() != null) {
      cacheLoaderClassName = regionAttributes.getCacheLoader().getClass().getCanonicalName();
    }
    assertThat(regionAttributesData.getCacheLoaderClassName()).isEqualTo(cacheLoaderClassName);

    String cacheWriteClassName = null;
    if (regionAttributes.getCacheWriter() != null) {
      cacheWriteClassName = regionAttributes.getCacheWriter().getClass().getCanonicalName();
    }
    assertThat(regionAttributesData.getCacheWriterClassName()).isEqualTo(cacheWriteClassName);

    String keyConstraintClassName = null;
    if (regionAttributes.getKeyConstraint() != null) {
      keyConstraintClassName = regionAttributes.getKeyConstraint().getName();
    }
    assertThat(regionAttributesData.getKeyConstraintClassName()).isEqualTo(keyConstraintClassName);

    String valueContstaintClassName = null;
    if (regionAttributes.getValueConstraint() != null) {
      valueContstaintClassName = regionAttributes.getValueConstraint().getName();
    }
    assertThat(regionAttributesData.getValueConstraintClassName())
        .isEqualTo(valueContstaintClassName);

    CacheListener[] listeners = regionAttributes.getCacheListeners();
    if (listeners != null) {
      String[] value = regionAttributesData.getCacheListeners();
      for (int i = 0; i < listeners.length; i++) {
        assertThat(listeners[i].getClass().getName()).isEqualTo(value[i]);
      }
    }

    assertThat(regionAttributesData.getRegionTimeToLive())
        .isEqualTo(regionAttributes.getRegionTimeToLive().getTimeout());

    assertThat(regionAttributesData.getRegionIdleTimeout())
        .isEqualTo(regionAttributes.getRegionIdleTimeout().getTimeout());

    assertThat(regionAttributesData.getEntryTimeToLive())
        .isEqualTo(regionAttributes.getEntryTimeToLive().getTimeout());

    assertThat(regionAttributesData.getEntryIdleTimeout())
        .isEqualTo(regionAttributes.getEntryIdleTimeout().getTimeout());

    String customEntryTimeToLive = null;
    Object o1 = regionAttributes.getCustomEntryTimeToLive();
    if (o1 != null) {
      customEntryTimeToLive = o1.toString();
    }
    assertThat(regionAttributesData.getCustomEntryTimeToLive()).isEqualTo(customEntryTimeToLive);

    String customEntryIdleTimeout = null;
    Object o2 = regionAttributes.getCustomEntryIdleTimeout();
    if (o2 != null) {
      customEntryIdleTimeout = o2.toString();
    }
    assertThat(regionAttributesData.getCustomEntryIdleTimeout()).isEqualTo(customEntryIdleTimeout);

    assertThat(regionAttributesData.isIgnoreJTA()).isEqualTo(regionAttributes.getIgnoreJTA());

    assertThat(regionAttributesData.getDataPolicy())
        .isEqualTo(regionAttributes.getDataPolicy().toString());

    assertThat(regionAttributesData.getScope()).isEqualTo(regionAttributes.getScope().toString());

    assertThat(regionAttributesData.getInitialCapacity())
        .isEqualTo(regionAttributes.getInitialCapacity());

    assertThat(regionAttributesData.getLoadFactor()).isEqualTo(regionAttributes.getLoadFactor());

    assertThat(regionAttributesData.isLockGrantor()).isEqualTo(regionAttributes.isLockGrantor());

    assertThat(regionAttributesData.isMulticastEnabled())
        .isEqualTo(regionAttributes.getMulticastEnabled());

    assertThat(regionAttributesData.getConcurrencyLevel())
        .isEqualTo(regionAttributes.getConcurrencyLevel());

    assertThat(regionAttributesData.isIndexMaintenanceSynchronous())
        .isEqualTo(regionAttributes.getIndexMaintenanceSynchronous());

    assertThat(regionAttributesData.isStatisticsEnabled())
        .isEqualTo(regionAttributes.getStatisticsEnabled());

    assertThat(regionAttributesData.isSubscriptionConflationEnabled())
        .isEqualTo(regionAttributes.getEnableSubscriptionConflation());

    assertThat(regionAttributesData.isAsyncConflationEnabled())
        .isEqualTo(regionAttributes.getEnableAsyncConflation());

    assertThat(regionAttributesData.getPoolName()).isEqualTo(regionAttributes.getPoolName());

    assertThat(regionAttributesData.isCloningEnabled())
        .isEqualTo(regionAttributes.getCloningEnabled());

    assertThat(regionAttributesData.getDiskStoreName())
        .isEqualTo(regionAttributes.getDiskStoreName());

    String interestPolicy = null;
    if (regionAttributes.getSubscriptionAttributes() != null) {
      interestPolicy =
          regionAttributes.getSubscriptionAttributes().getInterestPolicy().toString();
    }
    assertThat(regionAttributesData.getInterestPolicy()).isEqualTo(interestPolicy);

    assertThat(regionAttributesData.isDiskSynchronous())
        .isEqualTo(regionAttributes.isDiskSynchronous());
  }

  // Verifies partition attributes of the fixed partitioned region via each member's proxy
  private void verifyRemoteFixedPartitionRegion(final VM managerVM) {
    managerVM.invoke("Verify Partition region", () -> {
      Set<DistributedMember> otherMemberSet = getOtherNormalMembers_tmp();

      for (DistributedMember member : otherMemberSet) {
        RegionMXBean bean = awaitRegionMXBeanProxy(member, FIXED_PR_PATH);
        PartitionAttributesData data = bean.listPartitionAttributes();
        assertThat(data).isNotNull();
FixedPartitionAttributesData[] fixedPrData = bean.listFixedPartitionAttributes(); assertThat(fixedPrData).isNotNull(); assertThat(fixedPrData).hasSize(3); for (int i = 0; i < fixedPrData.length; i++) { // TODO: add real assertions // LogWriterUtils.getLogWriter().info("<ExpectedString> Remote PR Data is " + // fixedPrData[i] + "</ExpectedString> "); } } }); } private void createDistributedRegion_tmp(final VM vm, final String regionName) { vm.invoke(() -> createDistributedRegion_tmp(regionName)); } private void createDistributedRegion_tmp(final String regionName) { getCache_tmp().createRegionFactory(RegionShortcut.REPLICATE).create(regionName); } private void createPartitionRegion_tmp(final VM vm, final String partitionRegionName) { vm.invoke("Create Partitioned region", () -> { SystemManagementService service = getSystemManagementService_tmp(); RegionFactory regionFactory = getCache_tmp().createRegionFactory(RegionShortcut.PARTITION_REDUNDANT); regionFactory.create(partitionRegionName); }); } private void createLocalRegion_tmp(final VM vm, final String localRegionName) { vm.invoke("Create Local region", () -> { SystemManagementService service = getSystemManagementService_tmp(); RegionFactory regionFactory = getCache_tmp().createRegionFactory(RegionShortcut.LOCAL); regionFactory.create(localRegionName); }); } private void createSubRegion_tmp(final VM vm, final String parentRegionPath, final String subregionName) { vm.invoke("Create Sub region", () -> { SystemManagementService service = getSystemManagementService_tmp(); Region region = getCache_tmp().getRegion(parentRegionPath); region.createSubregion(subregionName, region.getAttributes()); }); } private String getDistributedMemberId_tmp(final VM vm) { return vm.invoke("getMemberId", () -> getCache_tmp().getDistributedSystem().getDistributedMember().getId()); } private DistributedMember getDistributedMember_tmp(final VM anyVM) { return anyVM.invoke("getDistributedMember_tmp", () -> 
getCache_tmp().getDistributedSystem().getDistributedMember()); } private SystemManagementService getSystemManagementService_tmp() { return (SystemManagementService) getManagementService_tmp(); } private DM getDistributionManager_tmp() { return ((GemFireCacheImpl) getCache_tmp()).getDistributionManager(); } private DistributedMember getDistributedMember_tmp() { return getCache_tmp().getDistributedSystem().getDistributedMember(); } private Set<DistributedMember> getOtherNormalMembers_tmp() { Set<DistributedMember> allMembers = new HashSet<>(getDistributionManager_tmp().getNormalDistributionManagerIds()); allMembers.remove(getDistributedMember_tmp()); return allMembers; } private void awaitMemberCount(final int expectedCount) { DistributedSystemMXBean distributedSystemMXBean = awaitDistributedSystemMXBean(); await() .until(() -> assertThat(distributedSystemMXBean.getMemberCount()).isEqualTo(expectedCount)); } private DistributedRegionMXBean awaitDistributedRegionMXBean(final String name) { SystemManagementService service = getSystemManagementService_tmp(); await().until(() -> assertThat(service.getDistributedRegionMXBean(name)).isNotNull()); return service.getDistributedRegionMXBean(name); } private DistributedRegionMXBean awaitDistributedRegionMXBean(final String name, final int memberCount) { SystemManagementService service = getSystemManagementService_tmp(); await().until(() -> assertThat(service.getDistributedRegionMXBean(name)).isNotNull()); await().until(() -> assertThat(service.getDistributedRegionMXBean(name).getMemberCount()) .isEqualTo(memberCount)); return service.getDistributedRegionMXBean(name); } private RegionMXBean awaitRegionMXBeanProxy(final DistributedMember member, final String name) { SystemManagementService service = getSystemManagementService_tmp(); ObjectName objectName = service.getRegionMBeanName(member, name); String alias = "awaiting RegionMXBean proxy for " + member; await(alias) .until(() -> assertThat(service.getMBeanProxy(objectName, 
RegionMXBean.class)).isNotNull()); return service.getMBeanProxy(objectName, RegionMXBean.class); } private RegionMXBean awaitRegionMXBeanProxy(final ObjectName objectName) { SystemManagementService service = getSystemManagementService_tmp(); await() .until(() -> assertThat(service.getMBeanProxy(objectName, RegionMXBean.class)).isNotNull()); return service.getMBeanProxy(objectName, RegionMXBean.class); } private MemberMXBean awaitMemberMXBeanProxy(final DistributedMember member) { SystemManagementService service = getSystemManagementService_tmp(); ObjectName objectName = service.getMemberMBeanName(member); String alias = "awaiting MemberMXBean proxy for " + member; await(alias) .until(() -> assertThat(service.getMBeanProxy(objectName, MemberMXBean.class)).isNotNull()); return service.getMBeanProxy(objectName, MemberMXBean.class); } private MemberMXBean awaitMemberMXBeanProxy(final ObjectName objectName) { SystemManagementService service = getSystemManagementService_tmp(); await() .until(() -> assertThat(service.getMBeanProxy(objectName, MemberMXBean.class)).isNotNull()); return service.getMBeanProxy(objectName, MemberMXBean.class); } private DistributedSystemMXBean awaitDistributedSystemMXBean() { ManagementService service = getSystemManagementService_tmp(); await().until(() -> assertThat(service.getDistributedSystemMXBean()).isNotNull()); return service.getDistributedSystemMXBean(); } private ConditionFactory await() { return Awaitility.await().atMost(2, MINUTES); } private ConditionFactory await(final String alias) { return Awaitility.await(alias).atMost(2, MINUTES); } /** * Registered in manager VM * * User defined notification handler for Region creation handling */ private static class MemberNotificationListener implements NotificationListener { private final List<Notification> notifications; private MemberNotificationListener(List<Notification> notifications) { this.notifications = notifications; } @Override public void handleNotification(final Notification 
notification, final Object handback) { assertThat(notification).isNotNull(); assertThat(JMXNotificationType.REGION_CREATED.equals(notification.getType()) || JMXNotificationType.REGION_CLOSED.equals(notification.getType())).isTrue(); notifications.add(notification); // TODO: add better validation // LogWriterUtils.getLogWriter().info("<ExpectedString> Member Level Notifications" + // notification + "</ExpectedString> "); } } /** * Registered in manager VM * * User defined notification handler for Region creation handling */ private static class DistributedSystemNotificationListener implements NotificationListener { private final List<Notification> notifications; private DistributedSystemNotificationListener(List<Notification> notifications) { this.notifications = notifications; } @Override public void handleNotification(final Notification notification, final Object handback) { assertThat(notification).isNotNull(); notifications.add(notification); // TODO: add something that will be validated // LogWriterUtils.getLogWriter().info("<ExpectedString> Distributed System Notifications" + // notification + "</ExpectedString> "); } } }
package com.youzi.pojo;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

/**
 * MyBatis-Generator style "Example" class used to build dynamic WHERE clauses for the
 * tb_item_param_item table (columns: id, item_id, created, updated).
 *
 * Usage: create an instance, call {@link #createCriteria()} (or {@link #or()} for additional
 * OR-joined groups) and chain {@code andXxx...} methods; the mapper XML renders the collected
 * {@link Criterion} objects into SQL. This file is machine-generated — avoid hand edits beyond
 * comments.
 */
public class TbItemParamItemExample {
    // Raw ORDER BY fragment appended to the generated query (not escaped — trusted input only).
    protected String orderByClause;

    // When true the mapper emits SELECT DISTINCT.
    protected boolean distinct;

    // OR-joined groups of AND-joined criteria: (c1 AND c2) OR (c3 AND c4) ...
    protected List<Criteria> oredCriteria;

    public TbItemParamItemExample() {
        oredCriteria = new ArrayList<Criteria>();
    }

    public void setOrderByClause(String orderByClause) {
        this.orderByClause = orderByClause;
    }

    public String getOrderByClause() {
        return orderByClause;
    }

    public void setDistinct(boolean distinct) {
        this.distinct = distinct;
    }

    public boolean isDistinct() {
        return distinct;
    }

    public List<Criteria> getOredCriteria() {
        return oredCriteria;
    }

    // Adds a pre-built criteria group, OR-joined with the existing groups.
    public void or(Criteria criteria) {
        oredCriteria.add(criteria);
    }

    // Creates, registers, and returns a new OR-joined criteria group.
    public Criteria or() {
        Criteria criteria = createCriteriaInternal();
        oredCriteria.add(criteria);
        return criteria;
    }

    // Returns the first criteria group, registering it only if none exists yet.
    public Criteria createCriteria() {
        Criteria criteria = createCriteriaInternal();
        if (oredCriteria.size() == 0) {
            oredCriteria.add(criteria);
        }
        return criteria;
    }

    protected Criteria createCriteriaInternal() {
        Criteria criteria = new Criteria();
        return criteria;
    }

    // Resets the example so it can be reused for another query.
    public void clear() {
        oredCriteria.clear();
        orderByClause = null;
        distinct = false;
    }

    /**
     * Base class holding one AND-joined group of {@link Criterion} conditions, plus the
     * generated per-column builder methods.
     */
    protected abstract static class GeneratedCriteria {
        protected List<Criterion> criteria;

        protected GeneratedCriteria() {
            super();
            criteria = new ArrayList<Criterion>();
        }

        // A group takes part in the WHERE clause only if it contains at least one condition.
        public boolean isValid() {
            return criteria.size() > 0;
        }

        public List<Criterion> getAllCriteria() {
            return criteria;
        }

        public List<Criterion> getCriteria() {
            return criteria;
        }

        // No-value condition, e.g. "id is null".
        protected void addCriterion(String condition) {
            if (condition == null) {
                throw new RuntimeException("Value for condition cannot be null");
            }
            criteria.add(new Criterion(condition));
        }

        // Single-value (or list-value) condition, e.g. "id =" + value.
        protected void addCriterion(String condition, Object value, String property) {
            if (value == null) {
                throw new RuntimeException("Value for " + property + " cannot be null");
            }
            criteria.add(new Criterion(condition, value));
        }

        // Two-value condition, e.g. "id between" value1 and value2.
        protected void addCriterion(String condition, Object value1, Object value2, String property) {
            if (value1 == null || value2 == null) {
                throw new RuntimeException("Between values for " + property + " cannot be null");
            }
            criteria.add(new Criterion(condition, value1, value2));
        }

        // ----- conditions on column `id` (Long) -----

        public Criteria andIdIsNull() {
            addCriterion("id is null");
            return (Criteria) this;
        }

        public Criteria andIdIsNotNull() {
            addCriterion("id is not null");
            return (Criteria) this;
        }

        public Criteria andIdEqualTo(Long value) {
            addCriterion("id =", value, "id");
            return (Criteria) this;
        }

        public Criteria andIdNotEqualTo(Long value) {
            addCriterion("id <>", value, "id");
            return (Criteria) this;
        }

        public Criteria andIdGreaterThan(Long value) {
            addCriterion("id >", value, "id");
            return (Criteria) this;
        }

        public Criteria andIdGreaterThanOrEqualTo(Long value) {
            addCriterion("id >=", value, "id");
            return (Criteria) this;
        }

        public Criteria andIdLessThan(Long value) {
            addCriterion("id <", value, "id");
            return (Criteria) this;
        }

        public Criteria andIdLessThanOrEqualTo(Long value) {
            addCriterion("id <=", value, "id");
            return (Criteria) this;
        }

        public Criteria andIdIn(List<Long> values) {
            addCriterion("id in", values, "id");
            return (Criteria) this;
        }

        public Criteria andIdNotIn(List<Long> values) {
            addCriterion("id not in", values, "id");
            return (Criteria) this;
        }

        public Criteria andIdBetween(Long value1, Long value2) {
            addCriterion("id between", value1, value2, "id");
            return (Criteria) this;
        }

        public Criteria andIdNotBetween(Long value1, Long value2) {
            addCriterion("id not between", value1, value2, "id");
            return (Criteria) this;
        }

        // ----- conditions on column `item_id` (Long) -----

        public Criteria andItemIdIsNull() {
            addCriterion("item_id is null");
            return (Criteria) this;
        }

        public Criteria andItemIdIsNotNull() {
            addCriterion("item_id is not null");
            return (Criteria) this;
        }

        public Criteria andItemIdEqualTo(Long value) {
            addCriterion("item_id =", value, "itemId");
            return (Criteria) this;
        }

        public Criteria andItemIdNotEqualTo(Long value) {
            addCriterion("item_id <>", value, "itemId");
            return (Criteria) this;
        }

        public Criteria andItemIdGreaterThan(Long value) {
            addCriterion("item_id >", value, "itemId");
            return (Criteria) this;
        }

        public Criteria andItemIdGreaterThanOrEqualTo(Long value) {
            addCriterion("item_id >=", value, "itemId");
            return (Criteria) this;
        }

        public Criteria andItemIdLessThan(Long value) {
            addCriterion("item_id <", value, "itemId");
            return (Criteria) this;
        }

        public Criteria andItemIdLessThanOrEqualTo(Long value) {
            addCriterion("item_id <=", value, "itemId");
            return (Criteria) this;
        }

        public Criteria andItemIdIn(List<Long> values) {
            addCriterion("item_id in", values, "itemId");
            return (Criteria) this;
        }

        public Criteria andItemIdNotIn(List<Long> values) {
            addCriterion("item_id not in", values, "itemId");
            return (Criteria) this;
        }

        public Criteria andItemIdBetween(Long value1, Long value2) {
            addCriterion("item_id between", value1, value2, "itemId");
            return (Criteria) this;
        }

        public Criteria andItemIdNotBetween(Long value1, Long value2) {
            addCriterion("item_id not between", value1, value2, "itemId");
            return (Criteria) this;
        }

        // ----- conditions on column `created` (Date) -----

        public Criteria andCreatedIsNull() {
            addCriterion("created is null");
            return (Criteria) this;
        }

        public Criteria andCreatedIsNotNull() {
            addCriterion("created is not null");
            return (Criteria) this;
        }

        public Criteria andCreatedEqualTo(Date value) {
            addCriterion("created =", value, "created");
            return (Criteria) this;
        }

        public Criteria andCreatedNotEqualTo(Date value) {
            addCriterion("created <>", value, "created");
            return (Criteria) this;
        }

        public Criteria andCreatedGreaterThan(Date value) {
            addCriterion("created >", value, "created");
            return (Criteria) this;
        }

        public Criteria andCreatedGreaterThanOrEqualTo(Date value) {
            addCriterion("created >=", value, "created");
            return (Criteria) this;
        }

        public Criteria andCreatedLessThan(Date value) {
            addCriterion("created <", value, "created");
            return (Criteria) this;
        }

        public Criteria andCreatedLessThanOrEqualTo(Date value) {
            addCriterion("created <=", value, "created");
            return (Criteria) this;
        }

        public Criteria andCreatedIn(List<Date> values) {
            addCriterion("created in", values, "created");
            return (Criteria) this;
        }

        public Criteria andCreatedNotIn(List<Date> values) {
            addCriterion("created not in", values, "created");
            return (Criteria) this;
        }

        public Criteria andCreatedBetween(Date value1, Date value2) {
            addCriterion("created between", value1, value2, "created");
            return (Criteria) this;
        }

        public Criteria andCreatedNotBetween(Date value1, Date value2) {
            addCriterion("created not between", value1, value2, "created");
            return (Criteria) this;
        }

        // ----- conditions on column `updated` (Date) -----

        public Criteria andUpdatedIsNull() {
            addCriterion("updated is null");
            return (Criteria) this;
        }

        public Criteria andUpdatedIsNotNull() {
            addCriterion("updated is not null");
            return (Criteria) this;
        }

        public Criteria andUpdatedEqualTo(Date value) {
            addCriterion("updated =", value, "updated");
            return (Criteria) this;
        }

        public Criteria andUpdatedNotEqualTo(Date value) {
            addCriterion("updated <>", value, "updated");
            return (Criteria) this;
        }

        public Criteria andUpdatedGreaterThan(Date value) {
            addCriterion("updated >", value, "updated");
            return (Criteria) this;
        }

        public Criteria andUpdatedGreaterThanOrEqualTo(Date value) {
            addCriterion("updated >=", value, "updated");
            return (Criteria) this;
        }

        public Criteria andUpdatedLessThan(Date value) {
            addCriterion("updated <", value, "updated");
            return (Criteria) this;
        }

        public Criteria andUpdatedLessThanOrEqualTo(Date value) {
            addCriterion("updated <=", value, "updated");
            return (Criteria) this;
        }

        public Criteria andUpdatedIn(List<Date> values) {
            addCriterion("updated in", values, "updated");
            return (Criteria) this;
        }

        public Criteria andUpdatedNotIn(List<Date> values) {
            addCriterion("updated not in", values, "updated");
            return (Criteria) this;
        }

        public Criteria andUpdatedBetween(Date value1, Date value2) {
            addCriterion("updated between", value1, value2, "updated");
            return (Criteria) this;
        }

        public Criteria andUpdatedNotBetween(Date value1, Date value2) {
            addCriterion("updated not between", value1, value2, "updated");
            return (Criteria) this;
        }
    }

    /**
     * Concrete criteria group; kept as a separate subclass so user additions survive
     * regeneration of GeneratedCriteria.
     */
    public static class Criteria extends GeneratedCriteria {

        protected Criteria() {
            super();
        }
    }

    /**
     * One rendered condition: the SQL fragment plus zero, one, or two bound values.
     * Exactly one of noValue / singleValue / betweenValue / listValue is true.
     */
    public static class Criterion {
        private String condition;

        private Object value;

        private Object secondValue;

        private boolean noValue;

        private boolean singleValue;

        private boolean betweenValue;

        private boolean listValue;

        private String typeHandler;

        public String getCondition() {
            return condition;
        }

        public Object getValue() {
            return value;
        }

        public Object getSecondValue() {
            return secondValue;
        }

        public boolean isNoValue() {
            return noValue;
        }

        public boolean isSingleValue() {
            return singleValue;
        }

        public boolean isBetweenValue() {
            return betweenValue;
        }

        public boolean isListValue() {
            return listValue;
        }

        public String getTypeHandler() {
            return typeHandler;
        }

        // Condition carrying no bound value (e.g. "id is null").
        protected Criterion(String condition) {
            super();
            this.condition = condition;
            this.typeHandler = null;
            this.noValue = true;
        }

        // Single-value condition; a List value switches the criterion to list mode ("in"/"not in").
        protected Criterion(String condition, Object value, String typeHandler) {
            super();
            this.condition = condition;
            this.value = value;
            this.typeHandler = typeHandler;
            if (value instanceof List<?>) {
                this.listValue = true;
            } else {
                this.singleValue = true;
            }
        }

        protected Criterion(String condition, Object value) {
            this(condition, value, null);
        }

        // Two-value (between) condition.
        protected Criterion(String condition, Object value, Object secondValue, String typeHandler) {
            super();
            this.condition = condition;
            this.value = value;
            this.secondValue = secondValue;
            this.typeHandler = typeHandler;
            this.betweenValue = true;
        }

        protected Criterion(String condition, Object value, Object secondValue) {
            this(condition, value, secondValue, null);
        }
    }
}
// Copyright (c) 2006 Damien Miller <djm@mindrot.org> // // Permission to use, copy, modify, and distribute this software for any // purpose with or without fee is hereby granted, provided that the above // copyright notice and this permission notice appear in all copies. // // THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES // WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF // MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR // ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES // WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN // ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF // OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. package org.apollo.game.crypto; import java.io.UnsupportedEncodingException; import java.security.SecureRandom; /** * BCrypt implements OpenBSD-style Blowfish password hashing using the scheme * described in "A Future-Adaptable Password Scheme" by Niels Provos and David * Mazieres. * <p> * This password hashing system tries to thwart off-line password cracking using * a computationally-intensive hashing algorithm, based on Bruce Schneier's * Blowfish cipher. The work factor of the algorithm is parameterised, so it can * be increased as computers get faster. * <p> * Usage is really simple. 
To hash a password for the first time, call the * hashpw method with a random salt, like this: * <p> * <code> * String pw_hash = BCrypt.hashpw(plain_password, BCrypt.gensalt()); <br /> * </code> * <p> * To check whether a plaintext password matches one that has been hashed * previously, use the checkpw method: * <p> * <code> * if (BCrypt.checkpw(candidate_password, stored_hash))<br /> * &nbsp;&nbsp;&nbsp;&nbsp;System.out.println("It matches");<br /> * else<br /> * &nbsp;&nbsp;&nbsp;&nbsp;System.out.println("It does not match");<br /> * </code> * <p> * The gensalt() method takes an optional parameter (log_rounds) that determines * the computational complexity of the hashing: * <p> * <code> * String strong_salt = BCrypt.gensalt(10)<br /> * String stronger_salt = BCrypt.gensalt(12)<br /> * </code> * <p> * The amount of work increases exponentially (2**log_rounds), so each increment * is twice as much work. The default log_rounds is 10, and the valid range is 4 * to 31. * * @author Damien Miller * @version 0.2 */ public class BCrypt { // BCrypt parameters private static final int GENSALT_DEFAULT_LOG2_ROUNDS = 10; private static final int BCRYPT_SALT_LEN = 16; // Blowfish parameters private static final int BLOWFISH_NUM_ROUNDS = 16; // Initial contents of key schedule private static final int P_orig[] = { 0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344, 0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89, 0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c, 0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917, 0x9216d5d9, 0x8979fb1b }; private static final int S_orig[] = { 0xd1310ba6, 0x98dfb5ac, 0x2ffd72db, 0xd01adfb7, 0xb8e1afed, 0x6a267e96, 0xba7c9045, 0xf12c7f99, 0x24a19947, 0xb3916cf7, 0x0801f2e2, 0x858efc16, 0x636920d8, 0x71574e69, 0xa458fea3, 0xf4933d7e, 0x0d95748f, 0x728eb658, 0x718bcd58, 0x82154aee, 0x7b54a41d, 0xc25a59b5, 0x9c30d539, 0x2af26013, 0xc5d1b023, 0x286085f0, 0xca417918, 0xb8db38ef, 0x8e79dcb0, 0x603a180e, 0x6c9e0e8b, 0xb01e8a3e, 0xd71577c1, 0xbd314b27, 0x78af2fda, 
0x55605c60, 0xe65525f3, 0xaa55ab94, 0x57489862, 0x63e81440, 0x55ca396a, 0x2aab10b6, 0xb4cc5c34, 0x1141e8ce, 0xa15486af, 0x7c72e993, 0xb3ee1411, 0x636fbc2a, 0x2ba9c55d, 0x741831f6, 0xce5c3e16, 0x9b87931e, 0xafd6ba33, 0x6c24cf5c, 0x7a325381, 0x28958677, 0x3b8f4898, 0x6b4bb9af, 0xc4bfe81b, 0x66282193, 0x61d809cc, 0xfb21a991, 0x487cac60, 0x5dec8032, 0xef845d5d, 0xe98575b1, 0xdc262302, 0xeb651b88, 0x23893e81, 0xd396acc5, 0x0f6d6ff3, 0x83f44239, 0x2e0b4482, 0xa4842004, 0x69c8f04a, 0x9e1f9b5e, 0x21c66842, 0xf6e96c9a, 0x670c9c61, 0xabd388f0, 0x6a51a0d2, 0xd8542f68, 0x960fa728, 0xab5133a3, 0x6eef0b6c, 0x137a3be4, 0xba3bf050, 0x7efb2a98, 0xa1f1651d, 0x39af0176, 0x66ca593e, 0x82430e88, 0x8cee8619, 0x456f9fb4, 0x7d84a5c3, 0x3b8b5ebe, 0xe06f75d8, 0x85c12073, 0x401a449f, 0x56c16aa6, 0x4ed3aa62, 0x363f7706, 0x1bfedf72, 0x429b023d, 0x37d0d724, 0xd00a1248, 0xdb0fead3, 0x49f1c09b, 0x075372c9, 0x80991b7b, 0x25d479d8, 0xf6e8def7, 0xe3fe501a, 0xb6794c3b, 0x976ce0bd, 0x04c006ba, 0xc1a94fb6, 0x409f60c4, 0x5e5c9ec2, 0x196a2463, 0x68fb6faf, 0x3e6c53b5, 0x1339b2eb, 0x3b52ec6f, 0x6dfc511f, 0x9b30952c, 0xcc814544, 0xaf5ebd09, 0xbee3d004, 0xde334afd, 0x660f2807, 0x192e4bb3, 0xc0cba857, 0x45c8740f, 0xd20b5f39, 0xb9d3fbdb, 0x5579c0bd, 0x1a60320a, 0xd6a100c6, 0x402c7279, 0x679f25fe, 0xfb1fa3cc, 0x8ea5e9f8, 0xdb3222f8, 0x3c7516df, 0xfd616b15, 0x2f501ec8, 0xad0552ab, 0x323db5fa, 0xfd238760, 0x53317b48, 0x3e00df82, 0x9e5c57bb, 0xca6f8ca0, 0x1a87562e, 0xdf1769db, 0xd542a8f6, 0x287effc3, 0xac6732c6, 0x8c4f5573, 0x695b27b0, 0xbbca58c8, 0xe1ffa35d, 0xb8f011a0, 0x10fa3d98, 0xfd2183b8, 0x4afcb56c, 0x2dd1d35b, 0x9a53e479, 0xb6f84565, 0xd28e49bc, 0x4bfb9790, 0xe1ddf2da, 0xa4cb7e33, 0x62fb1341, 0xcee4c6e8, 0xef20cada, 0x36774c01, 0xd07e9efe, 0x2bf11fb4, 0x95dbda4d, 0xae909198, 0xeaad8e71, 0x6b93d5a0, 0xd08ed1d0, 0xafc725e0, 0x8e3c5b2f, 0x8e7594b7, 0x8ff6e2fb, 0xf2122b64, 0x8888b812, 0x900df01c, 0x4fad5ea0, 0x688fc31c, 0xd1cff191, 0xb3a8c1ad, 0x2f2f2218, 0xbe0e1777, 0xea752dfe, 0x8b021fa1, 0xe5a0cc0f, 
0xb56f74e8, 0x18acf3d6, 0xce89e299, 0xb4a84fe0, 0xfd13e0b7, 0x7cc43b81, 0xd2ada8d9, 0x165fa266, 0x80957705, 0x93cc7314, 0x211a1477, 0xe6ad2065, 0x77b5fa86, 0xc75442f5, 0xfb9d35cf, 0xebcdaf0c, 0x7b3e89a0, 0xd6411bd3, 0xae1e7e49, 0x00250e2d, 0x2071b35e, 0x226800bb, 0x57b8e0af, 0x2464369b, 0xf009b91e, 0x5563911d, 0x59dfa6aa, 0x78c14389, 0xd95a537f, 0x207d5ba2, 0x02e5b9c5, 0x83260376, 0x6295cfa9, 0x11c81968, 0x4e734a41, 0xb3472dca, 0x7b14a94a, 0x1b510052, 0x9a532915, 0xd60f573f, 0xbc9bc6e4, 0x2b60a476, 0x81e67400, 0x08ba6fb5, 0x571be91f, 0xf296ec6b, 0x2a0dd915, 0xb6636521, 0xe7b9f9b6, 0xff34052e, 0xc5855664, 0x53b02d5d, 0xa99f8fa1, 0x08ba4799, 0x6e85076a, 0x4b7a70e9, 0xb5b32944, 0xdb75092e, 0xc4192623, 0xad6ea6b0, 0x49a7df7d, 0x9cee60b8, 0x8fedb266, 0xecaa8c71, 0x699a17ff, 0x5664526c, 0xc2b19ee1, 0x193602a5, 0x75094c29, 0xa0591340, 0xe4183a3e, 0x3f54989a, 0x5b429d65, 0x6b8fe4d6, 0x99f73fd6, 0xa1d29c07, 0xefe830f5, 0x4d2d38e6, 0xf0255dc1, 0x4cdd2086, 0x8470eb26, 0x6382e9c6, 0x021ecc5e, 0x09686b3f, 0x3ebaefc9, 0x3c971814, 0x6b6a70a1, 0x687f3584, 0x52a0e286, 0xb79c5305, 0xaa500737, 0x3e07841c, 0x7fdeae5c, 0x8e7d44ec, 0x5716f2b8, 0xb03ada37, 0xf0500c0d, 0xf01c1f04, 0x0200b3ff, 0xae0cf51a, 0x3cb574b2, 0x25837a58, 0xdc0921bd, 0xd19113f9, 0x7ca92ff6, 0x94324773, 0x22f54701, 0x3ae5e581, 0x37c2dadc, 0xc8b57634, 0x9af3dda7, 0xa9446146, 0x0fd0030e, 0xecc8c73e, 0xa4751e41, 0xe238cd99, 0x3bea0e2f, 0x3280bba1, 0x183eb331, 0x4e548b38, 0x4f6db908, 0x6f420d03, 0xf60a04bf, 0x2cb81290, 0x24977c79, 0x5679b072, 0xbcaf89af, 0xde9a771f, 0xd9930810, 0xb38bae12, 0xdccf3f2e, 0x5512721f, 0x2e6b7124, 0x501adde6, 0x9f84cd87, 0x7a584718, 0x7408da17, 0xbc9f9abc, 0xe94b7d8c, 0xec7aec3a, 0xdb851dfa, 0x63094366, 0xc464c3d2, 0xef1c1847, 0x3215d908, 0xdd433b37, 0x24c2ba16, 0x12a14d43, 0x2a65c451, 0x50940002, 0x133ae4dd, 0x71dff89e, 0x10314e55, 0x81ac77d6, 0x5f11199b, 0x043556f1, 0xd7a3c76b, 0x3c11183b, 0x5924a509, 0xf28fe6ed, 0x97f1fbfa, 0x9ebabf2c, 0x1e153c6e, 0x86e34570, 0xeae96fb1, 0x860e5e0a, 
0x5a3e2ab3, 0x771fe71c, 0x4e3d06fa, 0x2965dcb9, 0x99e71d0f, 0x803e89d6, 0x5266c825, 0x2e4cc978, 0x9c10b36a, 0xc6150eba, 0x94e2ea78, 0xa5fc3c53, 0x1e0a2df4, 0xf2f74ea7, 0x361d2b3d, 0x1939260f, 0x19c27960, 0x5223a708, 0xf71312b6, 0xebadfe6e, 0xeac31f66, 0xe3bc4595, 0xa67bc883, 0xb17f37d1, 0x018cff28, 0xc332ddef, 0xbe6c5aa5, 0x65582185, 0x68ab9802, 0xeecea50f, 0xdb2f953b, 0x2aef7dad, 0x5b6e2f84, 0x1521b628, 0x29076170, 0xecdd4775, 0x619f1510, 0x13cca830, 0xeb61bd96, 0x0334fe1e, 0xaa0363cf, 0xb5735c90, 0x4c70a239, 0xd59e9e0b, 0xcbaade14, 0xeecc86bc, 0x60622ca7, 0x9cab5cab, 0xb2f3846e, 0x648b1eaf, 0x19bdf0ca, 0xa02369b9, 0x655abb50, 0x40685a32, 0x3c2ab4b3, 0x319ee9d5, 0xc021b8f7, 0x9b540b19, 0x875fa099, 0x95f7997e, 0x623d7da8, 0xf837889a, 0x97e32d77, 0x11ed935f, 0x16681281, 0x0e358829, 0xc7e61fd6, 0x96dedfa1, 0x7858ba99, 0x57f584a5, 0x1b227263, 0x9b83c3ff, 0x1ac24696, 0xcdb30aeb, 0x532e3054, 0x8fd948e4, 0x6dbc3128, 0x58ebf2ef, 0x34c6ffea, 0xfe28ed61, 0xee7c3c73, 0x5d4a14d9, 0xe864b7e3, 0x42105d14, 0x203e13e0, 0x45eee2b6, 0xa3aaabea, 0xdb6c4f15, 0xfacb4fd0, 0xc742f442, 0xef6abbb5, 0x654f3b1d, 0x41cd2105, 0xd81e799e, 0x86854dc7, 0xe44b476a, 0x3d816250, 0xcf62a1f2, 0x5b8d2646, 0xfc8883a0, 0xc1c7b6a3, 0x7f1524c3, 0x69cb7492, 0x47848a0b, 0x5692b285, 0x095bbf00, 0xad19489d, 0x1462b174, 0x23820e00, 0x58428d2a, 0x0c55f5ea, 0x1dadf43e, 0x233f7061, 0x3372f092, 0x8d937e41, 0xd65fecf1, 0x6c223bdb, 0x7cde3759, 0xcbee7460, 0x4085f2a7, 0xce77326e, 0xa6078084, 0x19f8509e, 0xe8efd855, 0x61d99735, 0xa969a7aa, 0xc50c06c2, 0x5a04abfc, 0x800bcadc, 0x9e447a2e, 0xc3453484, 0xfdd56705, 0x0e1e9ec9, 0xdb73dbd3, 0x105588cd, 0x675fda79, 0xe3674340, 0xc5c43465, 0x713e38d8, 0x3d28f89e, 0xf16dff20, 0x153e21e7, 0x8fb03d4a, 0xe6e39f2b, 0xdb83adf7, 0xe93d5a68, 0x948140f7, 0xf64c261c, 0x94692934, 0x411520f7, 0x7602d4f7, 0xbcf46b2e, 0xd4a20068, 0xd4082471, 0x3320f46a, 0x43b7d4b7, 0x500061af, 0x1e39f62e, 0x97244546, 0x14214f74, 0xbf8b8840, 0x4d95fc1d, 0x96b591af, 0x70f4ddd3, 0x66a02f45, 0xbfbc09ec, 
0x03bd9785, 0x7fac6dd0, 0x31cb8504, 0x96eb27b3, 0x55fd3941, 0xda2547e6, 0xabca0a9a, 0x28507825, 0x530429f4, 0x0a2c86da, 0xe9b66dfb, 0x68dc1462, 0xd7486900, 0x680ec0a4, 0x27a18dee, 0x4f3ffea2, 0xe887ad8c, 0xb58ce006, 0x7af4d6b6, 0xaace1e7c, 0xd3375fec, 0xce78a399, 0x406b2a42, 0x20fe9e35, 0xd9f385b9, 0xee39d7ab, 0x3b124e8b, 0x1dc9faf7, 0x4b6d1856, 0x26a36631, 0xeae397b2, 0x3a6efa74, 0xdd5b4332, 0x6841e7f7, 0xca7820fb, 0xfb0af54e, 0xd8feb397, 0x454056ac, 0xba489527, 0x55533a3a, 0x20838d87, 0xfe6ba9b7, 0xd096954b, 0x55a867bc, 0xa1159a58, 0xcca92963, 0x99e1db33, 0xa62a4a56, 0x3f3125f9, 0x5ef47e1c, 0x9029317c, 0xfdf8e802, 0x04272f70, 0x80bb155c, 0x05282ce3, 0x95c11548, 0xe4c66d22, 0x48c1133f, 0xc70f86dc, 0x07f9c9ee, 0x41041f0f, 0x404779a4, 0x5d886e17, 0x325f51eb, 0xd59bc0d1, 0xf2bcc18f, 0x41113564, 0x257b7834, 0x602a9c60, 0xdff8e8a3, 0x1f636c1b, 0x0e12b4c2, 0x02e1329e, 0xaf664fd1, 0xcad18115, 0x6b2395e0, 0x333e92e1, 0x3b240b62, 0xeebeb922, 0x85b2a20e, 0xe6ba0d99, 0xde720c8c, 0x2da2f728, 0xd0127845, 0x95b794fd, 0x647d0862, 0xe7ccf5f0, 0x5449a36f, 0x877d48fa, 0xc39dfd27, 0xf33e8d1e, 0x0a476341, 0x992eff74, 0x3a6f6eab, 0xf4f8fd37, 0xa812dc60, 0xa1ebddf8, 0x991be14c, 0xdb6e6b0d, 0xc67b5510, 0x6d672c37, 0x2765d43b, 0xdcd0e804, 0xf1290dc7, 0xcc00ffa3, 0xb5390f92, 0x690fed0b, 0x667b9ffb, 0xcedb7d9c, 0xa091cf0b, 0xd9155ea3, 0xbb132f88, 0x515bad24, 0x7b9479bf, 0x763bd6eb, 0x37392eb3, 0xcc115979, 0x8026e297, 0xf42e312d, 0x6842ada7, 0xc66a2b3b, 0x12754ccc, 0x782ef11c, 0x6a124237, 0xb79251e7, 0x06a1bbe6, 0x4bfb6350, 0x1a6b1018, 0x11caedfa, 0x3d25bdd8, 0xe2e1c3c9, 0x44421659, 0x0a121386, 0xd90cec6e, 0xd5abea2a, 0x64af674e, 0xda86a85f, 0xbebfe988, 0x64e4c3fe, 0x9dbc8057, 0xf0f7c086, 0x60787bf8, 0x6003604d, 0xd1fd8346, 0xf6381fb0, 0x7745ae04, 0xd736fccc, 0x83426b33, 0xf01eab71, 0xb0804187, 0x3c005e5f, 0x77a057be, 0xbde8ae24, 0x55464299, 0xbf582e61, 0x4e58f48f, 0xf2ddfda2, 0xf474ef38, 0x8789bdc2, 0x5366f9c3, 0xc8b38e74, 0xb475f255, 0x46fcd9b9, 0x7aeb2661, 0x8b1ddf84, 0x846a0e79, 
0x915f95e2, 0x466e598e, 0x20b45770, 0x8cd55591, 0xc902de4c, 0xb90bace1, 0xbb8205d0, 0x11a86248, 0x7574a99e, 0xb77f19b6, 0xe0a9dc09, 0x662d09a1, 0xc4324633, 0xe85a1f02, 0x09f0be8c, 0x4a99a025, 0x1d6efe10, 0x1ab93d1d, 0x0ba5a4df, 0xa186f20f, 0x2868f169, 0xdcb7da83, 0x573906fe, 0xa1e2ce9b, 0x4fcd7f52, 0x50115e01, 0xa70683fa, 0xa002b5c4, 0x0de6d027, 0x9af88c27, 0x773f8641, 0xc3604c06, 0x61a806b5, 0xf0177a28, 0xc0f586e0, 0x006058aa, 0x30dc7d62, 0x11e69ed7, 0x2338ea63, 0x53c2dd94, 0xc2c21634, 0xbbcbee56, 0x90bcb6de, 0xebfc7da1, 0xce591d76, 0x6f05e409, 0x4b7c0188, 0x39720a3d, 0x7c927c24, 0x86e3725f, 0x724d9db9, 0x1ac15bb4, 0xd39eb8fc, 0xed545578, 0x08fca5b5, 0xd83d7cd3, 0x4dad0fc4, 0x1e50ef5e, 0xb161e6f8, 0xa28514d9, 0x6c51133c, 0x6fd5c7e7, 0x56e14ec4, 0x362abfce, 0xddc6c837, 0xd79a3234, 0x92638212, 0x670efa8e, 0x406000e0, 0x3a39ce37, 0xd3faf5cf, 0xabc27737, 0x5ac52d1b, 0x5cb0679e, 0x4fa33742, 0xd3822740, 0x99bc9bbe, 0xd5118e9d, 0xbf0f7315, 0xd62d1c7e, 0xc700c47b, 0xb78c1b6b, 0x21a19045, 0xb26eb1be, 0x6a366eb4, 0x5748ab2f, 0xbc946e79, 0xc6a376d2, 0x6549c2c8, 0x530ff8ee, 0x468dde7d, 0xd5730a1d, 0x4cd04dc6, 0x2939bbdb, 0xa9ba4650, 0xac9526e8, 0xbe5ee304, 0xa1fad5f0, 0x6a2d519a, 0x63ef8ce2, 0x9a86ee22, 0xc089c2b8, 0x43242ef6, 0xa51e03aa, 0x9cf2d0a4, 0x83c061ba, 0x9be96a4d, 0x8fe51550, 0xba645bd6, 0x2826a2f9, 0xa73a3ae1, 0x4ba99586, 0xef5562e9, 0xc72fefd3, 0xf752f7da, 0x3f046f69, 0x77fa0a59, 0x80e4a915, 0x87b08601, 0x9b09e6ad, 0x3b3ee593, 0xe990fd5a, 0x9e34d797, 0x2cf0b7d9, 0x022b8b51, 0x96d5ac3a, 0x017da67d, 0xd1cf3ed6, 0x7c7d2d28, 0x1f9f25cf, 0xadf2b89b, 0x5ad6b472, 0x5a88f54c, 0xe029ac71, 0xe019a5e6, 0x47b0acfd, 0xed93fa9b, 0xe8d3c48d, 0x283b57cc, 0xf8d56629, 0x79132e28, 0x785f0191, 0xed756055, 0xf7960e44, 0xe3d35e8c, 0x15056dd4, 0x88f46dba, 0x03a16125, 0x0564f0bd, 0xc3eb9e15, 0x3c9057a2, 0x97271aec, 0xa93a072a, 0x1b3f6d9b, 0x1e6321f5, 0xf59c66fb, 0x26dcf319, 0x7533d928, 0xb155fdf5, 0x03563482, 0x8aba3cbb, 0x28517711, 0xc20ad9f8, 0xabcc5167, 0xccad925f, 0x4de81751, 
0x3830dc8e, 0x379d5862, 0x9320f991, 0xea7a90c2, 0xfb3e7bce, 0x5121ce64, 0x774fbe32, 0xa8b6e37e, 0xc3293d46, 0x48de5369, 0x6413e680, 0xa2ae0810, 0xdd6db224, 0x69852dfd, 0x09072166, 0xb39a460a, 0x6445c0dd, 0x586cdecf, 0x1c20c8ae, 0x5bbef7dd, 0x1b588d40, 0xccd2017f, 0x6bb4e3bb, 0xdda26a7e, 0x3a59ff45, 0x3e350a44, 0xbcb4cdd5, 0x72eacea8, 0xfa6484bb, 0x8d6612ae, 0xbf3c6f47, 0xd29be463, 0x542f5d9e, 0xaec2771b, 0xf64e6370, 0x740e0d8d, 0xe75b1357, 0xf8721671, 0xaf537d5d, 0x4040cb08, 0x4eb4e2cc, 0x34d2466a, 0x0115af84, 0xe1b00428, 0x95983a1d, 0x06b89fb4, 0xce6ea048, 0x6f3f3b82, 0x3520ab82, 0x011a1d4b, 0x277227f8, 0x611560b1, 0xe7933fdc, 0xbb3a792b, 0x344525bd, 0xa08839e1, 0x51ce794b, 0x2f32c9b7, 0xa01fbac9, 0xe01cc87e, 0xbcc7d1f6, 0xcf0111c3, 0xa1e8aac7, 0x1a908749, 0xd44fbd9a, 0xd0dadecb, 0xd50ada38, 0x0339c32a, 0xc6913667, 0x8df9317c, 0xe0b12b4f, 0xf79e59b7, 0x43f5bb3a, 0xf2d519ff, 0x27d9459c, 0xbf97222c, 0x15e6fc2a, 0x0f91fc71, 0x9b941525, 0xfae59361, 0xceb69ceb, 0xc2a86459, 0x12baa8d1, 0xb6c1075e, 0xe3056a0c, 0x10d25065, 0xcb03a442, 0xe0ec6e0e, 0x1698db3b, 0x4c98a0be, 0x3278e964, 0x9f1f9532, 0xe0d392df, 0xd3a0342b, 0x8971f21e, 0x1b0a7441, 0x4ba3348c, 0xc5be7120, 0xc37632d8, 0xdf359f8d, 0x9b992f2e, 0xe60b6f47, 0x0fe3f11d, 0xe54cda54, 0x1edad891, 0xce6279cf, 0xcd3e7e6f, 0x1618b166, 0xfd2c1d05, 0x848fd2c5, 0xf6fb2299, 0xf523f357, 0xa6327623, 0x93a83531, 0x56cccd02, 0xacf08162, 0x5a75ebb5, 0x6e163697, 0x88d273cc, 0xde966292, 0x81b949d0, 0x4c50901b, 0x71c65614, 0xe6c6c7bd, 0x327a140a, 0x45e1d006, 0xc3f27b9a, 0xc9aa53fd, 0x62a80f00, 0xbb25bfe2, 0x35bdd2f6, 0x71126905, 0xb2040222, 0xb6cbcf7c, 0xcd769c2b, 0x53113ec0, 0x1640e3d3, 0x38abbd60, 0x2547adf0, 0xba38209c, 0xf746ce76, 0x77afa1c5, 0x20756060, 0x85cbfe4e, 0x8ae88dd8, 0x7aaaf9b0, 0x4cf9aa7e, 0x1948c25c, 0x02fb8a8c, 0x01c36ae4, 0xd6ebe1f9, 0x90d4f869, 0xa65cdea0, 0x3f09252d, 0xc208e69f, 0xb74e6132, 0xce77e25b, 0x578fdfe3, 0x3ac372e6 }; // bcrypt IV: "OrpheanBeholderScryDoubt" static private final int bf_crypt_ciphertext[] = 
{ 0x4f727068, 0x65616e42, 0x65686f6c, 0x64657253,
        0x63727944, 0x6f756274 };

    // Table for Base64 encoding. This is bcrypt's own alphabet
    // ("./A-Za-z0-9", no '=' padding) and is NOT the MIME base64 alphabet.
    static private final char base64_code[] = { '.', '/', 'A', 'B', 'C', 'D',
        'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R',
        'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'a', 'b', 'c', 'd', 'e', 'f',
        'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't',
        'u', 'v', 'w', 'x', 'y', 'z', '0', '1', '2', '3', '4', '5', '6', '7',
        '8', '9' };

    // Table for Base64 decoding: indexed by ASCII code point, yields the
    // 6-bit value of that character in base64_code, or -1 when the character
    // is not part of the alphabet.
    static private final byte index_64[] = { -1, -1, -1, -1, -1, -1, -1, -1,
        -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
        -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
        -1, -1, -1, -1, 0, 1, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, -1,
        -1, -1, -1, -1, -1, -1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
        15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, -1, -1, -1, -1,
        -1, -1, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42,
        43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, -1, -1, -1, -1, -1 };

    // Expanded Blowfish key schedule: cloned from P_orig/S_orig by
    // init_key() and then mutated per password/salt, hence instance state.
    private int P[];
    private int S[];

    /**
     * Encode a byte array using bcrypt's slightly-modified base64 encoding
     * scheme. Note that this is *not* compatible with the standard MIME-base64
     * encoding.
* * @param d the byte array to encode * @param len the number of bytes to encode * @return base64-encoded string * @exception IllegalArgumentException if the length is invalid */ private static String encode_base64(byte d[], int len) throws IllegalArgumentException { int off = 0; StringBuffer rs = new StringBuffer(); int c1, c2; if (len <= 0 || len > d.length) { throw new IllegalArgumentException("Invalid len"); } while (off < len) { c1 = d[off++] & 0xff; rs.append(base64_code[c1 >> 2 & 0x3f]); c1 = (c1 & 0x03) << 4; if (off >= len) { rs.append(base64_code[c1 & 0x3f]); break; } c2 = d[off++] & 0xff; c1 |= c2 >> 4 & 0x0f; rs.append(base64_code[c1 & 0x3f]); c1 = (c2 & 0x0f) << 2; if (off >= len) { rs.append(base64_code[c1 & 0x3f]); break; } c2 = d[off++] & 0xff; c1 |= c2 >> 6 & 0x03; rs.append(base64_code[c1 & 0x3f]); rs.append(base64_code[c2 & 0x3f]); } return rs.toString(); } /** * Look up the 3 bits base64-encoded by the specified character, * range-checking againt conversion table * * @param x the base64-encoded value * @return the decoded value of x */ private static byte char64(char x) { if (x < 0 || x > index_64.length) { return -1; } return index_64[x]; } /** * Decode a string encoded using bcrypt's base64 scheme to a byte array. * Note that this is *not* compatible with the standard MIME-base64 * encoding. 
* * @param s the string to decode * @param maxolen the maximum number of bytes to decode * @return an array containing the decoded bytes * @throws IllegalArgumentException if maxolen is invalid */ private static byte[] decode_base64(String s, int maxolen) throws IllegalArgumentException { StringBuffer rs = new StringBuffer(); int off = 0, slen = s.length(), olen = 0; byte ret[]; byte c1, c2, c3, c4, o; if (maxolen <= 0) { throw new IllegalArgumentException("Invalid maxolen"); } while (off < slen - 1 && olen < maxolen) { c1 = char64(s.charAt(off++)); c2 = char64(s.charAt(off++)); if (c1 == -1 || c2 == -1) { break; } o = (byte) (c1 << 2); o |= (c2 & 0x30) >> 4; rs.append((char) o); if (++olen >= maxolen || off >= slen) { break; } c3 = char64(s.charAt(off++)); if (c3 == -1) { break; } o = (byte) ((c2 & 0x0f) << 4); o |= (c3 & 0x3c) >> 2; rs.append((char) o); if (++olen >= maxolen || off >= slen) { break; } c4 = char64(s.charAt(off++)); o = (byte) ((c3 & 0x03) << 6); o |= c4; rs.append((char) o); ++olen; } ret = new byte[olen]; for (off = 0; off < olen; off++) { ret[off] = (byte) rs.charAt(off); } return ret; } /** * Blowfish encipher a single 64-bit block encoded as two 32-bit halves * * @param lr an array containing the two 32-bit half blocks * @param off the position in the array of the blocks */ private void encipher(int lr[], int off) { int i, n, l = lr[off], r = lr[off + 1]; l ^= P[0]; for (i = 0; i <= BLOWFISH_NUM_ROUNDS - 2;) { // Feistel substitution on left word n = S[l >> 24 & 0xff]; n += S[0x100 | l >> 16 & 0xff]; n ^= S[0x200 | l >> 8 & 0xff]; n += S[0x300 | l & 0xff]; r ^= n ^ P[++i]; // Feistel substitution on right word n = S[r >> 24 & 0xff]; n += S[0x100 | r >> 16 & 0xff]; n ^= S[0x200 | r >> 8 & 0xff]; n += S[0x300 | r & 0xff]; l ^= n ^ P[++i]; } lr[off] = r ^ P[BLOWFISH_NUM_ROUNDS + 1]; lr[off + 1] = l; } /** * Cycically extract a word of key material * * @param data the string to extract the data from * @param offp a "pointer" (as a one-entry array) 
to the current offset into * data * @return the next word of material from data */ private static int streamtoword(byte data[], int offp[]) { int i; int word = 0; int off = offp[0]; for (i = 0; i < 4; i++) { word = word << 8 | data[off] & 0xff; off = (off + 1) % data.length; } offp[0] = off; return word; } /** * Initialise the Blowfish key schedule */ private void init_key() { P = P_orig.clone(); S = S_orig.clone(); } /** * Key the Blowfish cipher * * @param key an array containing the key */ private void key(byte key[]) { int i; int koffp[] = { 0 }; int lr[] = { 0, 0 }; int plen = P.length, slen = S.length; for (i = 0; i < plen; i++) { P[i] = P[i] ^ streamtoword(key, koffp); } for (i = 0; i < plen; i += 2) { encipher(lr, 0); P[i] = lr[0]; P[i + 1] = lr[1]; } for (i = 0; i < slen; i += 2) { encipher(lr, 0); S[i] = lr[0]; S[i + 1] = lr[1]; } } /** * Perform the "enhanced key schedule" step described by Provos and Mazieres * in "A Future-Adaptable Password Scheme" * http://www.openbsd.org/papers/bcrypt-paper.ps * * @param data salt information * @param key password information */ private void ekskey(byte data[], byte key[]) { int i; int koffp[] = { 0 }, doffp[] = { 0 }; int lr[] = { 0, 0 }; int plen = P.length, slen = S.length; for (i = 0; i < plen; i++) { P[i] = P[i] ^ streamtoword(key, koffp); } for (i = 0; i < plen; i += 2) { lr[0] ^= streamtoword(data, doffp); lr[1] ^= streamtoword(data, doffp); encipher(lr, 0); P[i] = lr[0]; P[i + 1] = lr[1]; } for (i = 0; i < slen; i += 2) { lr[0] ^= streamtoword(data, doffp); lr[1] ^= streamtoword(data, doffp); encipher(lr, 0); S[i] = lr[0]; S[i + 1] = lr[1]; } } /** * Perform the central password hashing step in the bcrypt scheme * * @param password the password to hash * @param salt the binary salt to hash with the password * @param log_rounds the binary logarithm of the number of rounds of hashing * to apply * @return an array containing the binary hashed password */ private byte[] crypt_raw(byte password[], byte salt[], 
int log_rounds) { int rounds, i, j; int cdata[] = bf_crypt_ciphertext.clone(); int clen = cdata.length; byte ret[]; if (log_rounds < 4 || log_rounds > 31) { throw new IllegalArgumentException("Bad number of rounds"); } rounds = 1 << log_rounds; if (salt.length != BCRYPT_SALT_LEN) { throw new IllegalArgumentException("Bad salt length"); } init_key(); ekskey(salt, password); for (i = 0; i < rounds; i++) { key(password); key(salt); } for (i = 0; i < 64; i++) { for (j = 0; j < clen >> 1; j++) { encipher(cdata, j << 1); } } ret = new byte[clen * 4]; for (i = 0, j = 0; i < clen; i++) { ret[j++] = (byte) (cdata[i] >> 24 & 0xff); ret[j++] = (byte) (cdata[i] >> 16 & 0xff); ret[j++] = (byte) (cdata[i] >> 8 & 0xff); ret[j++] = (byte) (cdata[i] & 0xff); } return ret; } /** * Hash a password using the OpenBSD bcrypt scheme * * @param password the password to hash * @param salt the salt to hash with (perhaps generated using * BCrypt.gensalt) * @return the hashed password */ public static String hashpw(String password, String salt) { BCrypt B; String real_salt; byte passwordb[], saltb[], hashed[]; char minor = (char) 0; int rounds, off; StringBuffer rs = new StringBuffer(); if (salt.charAt(0) != '$' || salt.charAt(1) != '2') { throw new IllegalArgumentException("Invalid salt version"); } if (salt.charAt(2) == '$') { off = 3; } else { minor = salt.charAt(2); if (minor != 'a' || salt.charAt(3) != '$') { throw new IllegalArgumentException("Invalid salt revision"); } off = 4; } // Extract number of rounds if (salt.charAt(off + 2) > '$') { throw new IllegalArgumentException("Missing salt rounds"); } rounds = Integer.parseInt(salt.substring(off, off + 2)); real_salt = salt.substring(off + 3, off + 25); try { passwordb = (password + (minor >= 'a' ? 
"\000" : "")).getBytes("UTF-8"); } catch (UnsupportedEncodingException uee) { throw new AssertionError("UTF-8 is not supported"); } saltb = decode_base64(real_salt, BCRYPT_SALT_LEN); B = new BCrypt(); hashed = B.crypt_raw(passwordb, saltb, rounds); rs.append("$2"); if (minor >= 'a') { rs.append(minor); } rs.append("$"); if (rounds < 10) { rs.append("0"); } rs.append(Integer.toString(rounds)); rs.append("$"); rs.append(encode_base64(saltb, saltb.length)); rs.append(encode_base64(hashed, bf_crypt_ciphertext.length * 4 - 1)); return rs.toString(); } /** * Generate a salt for use with the BCrypt.hashpw() method * * @param log_rounds the log2 of the number of rounds of hashing to apply - * the work factor therefore increases as 2**log_rounds. * @param random an instance of SecureRandom to use * @return an encoded salt value */ private static String gensalt(int log_rounds, SecureRandom random) { StringBuffer rs = new StringBuffer(); byte rnd[] = new byte[BCRYPT_SALT_LEN]; random.nextBytes(rnd); rs.append("$2a$"); if (log_rounds < 10) { rs.append("0"); } rs.append(Integer.toString(log_rounds)); rs.append("$"); rs.append(encode_base64(rnd, rnd.length)); return rs.toString(); } /** * Generate a salt for use with the BCrypt.hashpw() method * * @param log_rounds the log2 of the number of rounds of hashing to apply - * the work factor therefore increases as 2**log_rounds. 
* @return an encoded salt value */ public static String gensalt(int log_rounds) { return gensalt(log_rounds, new SecureRandom()); } /** * Generate a salt for use with the BCrypt.hashpw() method, selecting a * reasonable default for the number of hashing rounds to apply * * @return an encoded salt value */ public static String gensalt() { return gensalt(GENSALT_DEFAULT_LOG2_ROUNDS); } /** * Check that a plaintext password matches a previously hashed one * * @param plaintext the plaintext password to verify * @param hashed the previously-hashed password * @return true if the passwords match, false otherwise */ public static boolean checkpw(String plaintext, String hashed) { return hashed.compareTo(hashpw(plaintext, hashed)) == 0; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.datastructures; import java.io.Externalizable; import java.io.IOException; import java.io.InvalidObjectException; import java.io.ObjectInput; import java.io.ObjectOutput; import java.io.ObjectStreamException; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentSkipListSet; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.AbstractQueuedSynchronizer; import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteCondition; import org.apache.ignite.IgniteException; import org.apache.ignite.IgniteInterruptedException; import org.apache.ignite.IgniteLock; import org.apache.ignite.internal.IgnitionEx; import org.apache.ignite.internal.processors.cache.IgniteInternalCache; import 
org.apache.ignite.internal.processors.cache.distributed.near.GridNearTxLocal; import org.apache.ignite.internal.processors.cluster.IgniteChangeGlobalStateSupport; import org.apache.ignite.internal.util.typedef.X; import org.apache.ignite.internal.util.typedef.internal.CU; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.transactions.TransactionRollbackException; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import static org.apache.ignite.internal.processors.cache.GridCacheUtils.retryTopologySafe; import static org.apache.ignite.transactions.TransactionConcurrency.PESSIMISTIC; import static org.apache.ignite.transactions.TransactionIsolation.REPEATABLE_READ; /** * Cache reentrant lock implementation based on AbstractQueuedSynchronizer. */ public final class GridCacheLockImpl extends AtomicDataStructureProxy<GridCacheLockState> implements GridCacheLockEx, IgniteChangeGlobalStateSupport, Externalizable { /** */ private static final long serialVersionUID = 0L; /** Deserialization stash. */ private static final ThreadLocal<String> stash = new ThreadLocal<>(); /** Initialization guard. */ private final AtomicBoolean initGuard = new AtomicBoolean(); /** Initialization latch. */ private final CountDownLatch initLatch = new CountDownLatch(1); /** Lock that provides non-overlapping processing of updates. */ private Lock updateLock = new ReentrantLock(); /** Internal synchronization object. */ private Sync sync; /** Flag indicating that every operation on this lock should be interrupted. */ private volatile boolean interruptAll; /** * Empty constructor required by {@link Externalizable}. */ public GridCacheLockImpl() { // This instance should never be used directly. ctx = null; } /** * Synchronization implementation for reentrant lock using AbstractQueuedSynchronizer. 
 */
    @SuppressWarnings({"CallToThreadYield", "CallToSignalInsteadOfSignalAll"})
    private class Sync extends AbstractQueuedSynchronizer {
        /** Serial version UID. */
        private static final long serialVersionUID = 1192457210091910933L;

        /** Owner-thread-id value meaning no thread owns the lock. */
        private static final long LOCK_FREE = 0;

        /** Map containing condition objects. */
        private Map<String, ConditionObject> conditionMap;

        /** List of condition signal calls on this node. */
        private Map<String, Integer> outgoingSignals;

        /** Last condition waited on. */
        @Nullable
        private volatile String lastCondition;

        /** True if any node owning the lock had failed. */
        private volatile boolean isBroken;

        /** UUID of the node that currently owns the lock. */
        private volatile UUID currentOwnerNode;

        /** ID of the thread that currently owns the lock. */
        private volatile long currentOwnerThreadId;

        /** UUID of this node. */
        private final UUID thisNode;

        /** FailoverSafe flag. */
        private final boolean failoverSafe;

        /** Fairness flag. */
        private final boolean fair;

        /** Threads that are waiting on this lock. */
        private Set<Long> waitingThreads;

        /**
         * Seeds local AQS state from the replicated lock state.
         *
         * @param state State.
         */
        protected Sync(GridCacheLockState state) {
            setState(state.get());

            thisNode = ctx.localNodeId();

            currentOwnerNode = state.getId();

            currentOwnerThreadId = state.getThreadId();

            conditionMap = new HashMap<>();

            outgoingSignals = new HashMap<>();

            failoverSafe = state.isFailoverSafe();

            fair = state.isFair();

            waitingThreads = new ConcurrentSkipListSet<>();
        }

        /**
         * Records one signal() call for the given condition. A stored count
         * of 0 means signalAll() was already recorded, which subsumes any
         * further single signals.
         */
        protected void addOutgoingSignal(String condition) {
            int cnt = 0;

            if (outgoingSignals.containsKey(condition)) {
                cnt = outgoingSignals.get(condition);

                // SignalAll has already been called.
                if (cnt == 0)
                    return;
            }

            outgoingSignals.put(condition, cnt + 1);
        }

        /**
         * Records a signalAll() call (encoded as count 0).
         *
         * @param condition Condition.
         */
        protected void addOutgoingSignalAll(String condition) {
            outgoingSignals.put(condition, 0);
        }

        /**
         * Process any condition await calls on this node.
 */
        private String processAwait() {
            if (lastCondition == null)
                return null;

            // Consume lastCondition: it is returned once and then reset.
            String ret = lastCondition;

            lastCondition = null;

            return ret;
        }

        /** Drains and returns the signals recorded on this node so far. */
        private Map<String, Integer> processSignal() {
            Map<String, Integer> ret = new HashMap<>(outgoingSignals);

            outgoingSignals.clear();

            return ret;
        }

        /** Interrupt every thread on this node waiting on this lock. */
        private synchronized void interruptAll() {
            // First release all threads waiting on associated condition queues.
            if (!conditionMap.isEmpty()) {
                // Temporarily obtain ownership of the lock,
                // in order to signal all conditions.
                UUID tempUUID = getOwnerNode();

                long tempThreadID = currentOwnerThreadId;

                setCurrentOwnerNode(thisNode);

                currentOwnerThreadId = Thread.currentThread().getId();

                for (Condition c : conditionMap.values())
                    c.signalAll();

                // Restore owner node and owner thread.
                setCurrentOwnerNode(tempUUID);

                currentOwnerThreadId = tempThreadID;
            }

            // Interrupt any future call to acquire/release on this sync object.
            interruptAll = true;

            // Interrupt any ongoing transactions.
            for (Thread t: getQueuedThreads())
                t.interrupt();
        }

        /**
         * Check if lock is in correct state (i.e. not broken in non-failoversafe mode),
         * if not throw {@linkplain IgniteInterruptedException}
         */
        private void validate(final boolean throwInterrupt) {
            // Interrupted flag shouldn't be always cleared
            // (e.g. lock() method doesn't throw exception and doesn't clear interrupted)
            // but should be cleared if this method is called after lock breakage or node stop.
            // If interruptAll is set, exception is thrown anyway.
            boolean interrupted = Thread.currentThread().isInterrupted();

            // Clear interrupt flag.
            if (throwInterrupt || interruptAll)
                Thread.interrupted();

            if (interruptAll)
                throw new IgniteException("Lock broken (possible reason: node stopped" +
                    " or node owning lock failed while in non-failoversafe mode).");

            // Global queue should be synchronized only if interrupted exception should be thrown.
if (fair && (throwInterrupt && interrupted) && !interruptAll) {
                synchronizeQueue(true, Thread.currentThread());

                throw new IgniteInterruptedException("Lock is interrupted.");
            }
        }

        /**
         * Sets the number of permits currently acquired on this lock. This method should only be used in {@linkplain
         * GridCacheLockImpl#onUpdate(GridCacheLockState)}.
         *
         * @param permits Number of permits acquired at this reentrant lock.
         */
        final synchronized void setPermits(int permits) {
            setState(permits);
        }

        /**
         * Gets the number of permissions currently acquired at this lock.
         *
         * @return Number of permits acquired at this reentrant lock.
         */
        final int getPermits() {
            return getState();
        }

        /**
         * Sets the UUID of the node that currently owns this lock. This method should only be used in {@linkplain
         * GridCacheLockImpl#onUpdate(GridCacheLockState)}.
         *
         * @param ownerNode UUID of the node owning this lock.
         */
        final synchronized void setCurrentOwnerNode(UUID ownerNode) {
            currentOwnerNode = ownerNode;
        }

        /**
         * Gets the UUID of the node that currently owns the lock.
         *
         * @return UUID of the node that currently owns the lock.
         */
        final UUID getOwnerNode() {
            return currentOwnerNode;
        }

        /**
         * Checks if latest call to acquire/release was called on this node.
         * Should only be called from update method.
         *
         * @param newOwnerID ID of the node that is about to acquire this lock (or null).
         * @return true if acquire/release that triggered last update came from this node.
         */
        protected boolean isLockedLocally(UUID newOwnerID) {
            return thisNode.equals(getOwnerNode()) || thisNode.equals(newOwnerID);
        }

        /**
         * @param newOwnerThreadId New owner thread id.
         */
        protected void setCurrentOwnerThread(long newOwnerThreadId) {
            currentOwnerThreadId = newOwnerThreadId;
        }

        /**
         * Returns true if node that owned the locked failed before call to unlock.
         *
         * @return true if any node failed while owning the lock.
 */
        protected boolean isBroken() {
            return isBroken;
        }

        /** Marks (or clears) the lock as broken by an owner-node failure. */
        protected void setBroken(boolean isBroken) {
            this.isBroken = isBroken;
        }

        /**
         * Returns true (fair mode only) if some live node precedes this one in
         * the global wait queue; dead nodes are pruned from the queue here.
         */
        protected synchronized boolean hasPredecessor(LinkedList<UUID> nodes) {
            if (!fair)
                return false;

            for (Iterator<UUID> it = nodes.iterator(); it.hasNext(); ) {
                UUID node = it.next();

                if (ctx.discovery().node(node) == null) {
                    it.remove();

                    continue;
                }

                return !node.equals(thisNode);
            }

            return false;
        }

        /**
         * Performs tryLock.
         *
         * @param acquires Number of permits to acquire.
         * @param fair Fairness parameter.
         * @return {@code True} if succeeded, false otherwise.
         */
        final boolean tryAcquire(final int acquires, final boolean fair) {
            // If broken in non-failoversafe mode, exit immediately.
            if (interruptAll)
                return true;

            final Thread current = Thread.currentThread();

            boolean failed = false;

            int c = getState();

            // Wait for lock to reach stable state.
            while (c != 0) {
                UUID currentOwner = currentOwnerNode;

                if (currentOwner != null) {
                    failed = ctx.discovery().node(currentOwner) == null;

                    break;
                }

                c = getState();
            }

            // Check if lock is released or current owner failed.
            if (c == 0 || failed) {
                if (compareAndSetGlobalState(0, acquires, current, fair)) {
                    // Not used for synchronization (we use ThreadID), but updated anyway.
                    setExclusiveOwnerThread(current);

                    // Busy-wait until the global-state update is reflected
                    // locally (presumably applied by the update callback —
                    // TODO confirm against onUpdate()).
                    while (!isHeldExclusively() && !interruptAll)
                        Thread.yield();

                    return true;
                }
            }
            else if (isHeldExclusively()) {
                // Reentrant acquire by the current owner thread.
                int nextc = c + acquires;

                if (nextc < 0) // overflow
                    throw new Error("Maximum lock count exceeded.");

                setState(nextc);

                return true;
            }

            if (fair && !isQueued(current))
                synchronizeQueue(false, current);

            return false;
        }

        /**
         * Performs lock.
 */
        final void lock() {
            acquire(1);
        }

        /** {@inheritDoc} */
        @Override protected final boolean tryAcquire(int acquires) {
            return tryAcquire(acquires, fair);
        }

        /** {@inheritDoc} */
        @Override protected final boolean tryRelease(int releases) {
            // This method is called with release==0 only when trying to wake through update,
            // to check if some other node released the lock.
            if (releases == 0)
                return true;

            // If broken in non-failoversafe mode, exit immediately.
            if (interruptAll)
                return true;

            int c = getState() - releases;

            if (!isHeldExclusively()) {
                log.error("Lock.unlock() is called in illegal state [callerNodeId=" + thisNode +
                    ", ownerNodeId=" + currentOwnerNode + ", callerThreadId=" +
                    Thread.currentThread().getId() + ", ownerThreadId=" + currentOwnerThreadId +
                    ", lockState=" + getState() + "]");

                throw new IllegalMonitorStateException();
            }

            boolean free = false;

            if (c == 0) {
                free = true;

                setGlobalState(0, processAwait(), processSignal());

                // Busy-wait until the release is reflected locally.
                while (isHeldExclusively() && !interruptAll)
                    Thread.yield();
            }
            else
                setState(c);

            return free;
        }

        /** {@inheritDoc} */
        @Override protected final boolean isHeldExclusively() {
            // While we must in general read state before owner,
            // we don't need to do so to check if current thread is owner
            return currentOwnerThreadId == Thread.currentThread().getId() &&
                thisNode.equals(currentOwnerNode);
        }

        /**
         * Returns (creating on first use) the named condition object.
         *
         * @param name Condition name.
         * @return Condition object.
         */
        final synchronized IgniteCondition newCondition(String name) {
            if (conditionMap.containsKey(name))
                return new IgniteConditionObject(name, conditionMap.get(name));

            ConditionObject cond = new ConditionObject();

            conditionMap.put(name, cond);

            return new IgniteConditionObject(name, cond);
        }

        // Methods relayed from outer class

        final int getHoldCount() {
            return isHeldExclusively() ?
getState() : 0; } final boolean isLocked() throws IgniteCheckedException { return getState() != 0 || cacheView.get(key).get() != 0; } /** * This method is used for synchronizing the reentrant lock state across all nodes. */ boolean compareAndSetGlobalState(final int expVal, final int newVal, final Thread newThread, final boolean bargingProhibited) { try { return retryTopologySafe(new Callable<Boolean>() { @Override public Boolean call() throws Exception { try (GridNearTxLocal tx = CU.txStartInternal(ctx, cacheView, PESSIMISTIC, REPEATABLE_READ)) { GridCacheLockState val = cacheView.get(key); if (val == null) throw new IgniteCheckedException("Failed to find reentrant lock with given name: " + name); final long newThreadID = newThread.getId(); LinkedList<UUID> nodes = val.getNodes(); // Barging is prohibited in fair mode unless tryLock() is called. if (!(bargingProhibited && hasPredecessor(nodes))) { if (val.get() == expVal || ctx.discovery().node(val.getId()) == null) { val.set(newVal); val.setId(thisNode); val.setThreadId(newThreadID); val.setSignals(null); // This node is already in queue, except in cases where this is the only node // or this is a call to tryLock(), in which case barging is ok. // Queue is only updated if this is fair lock. if (val.isFair() && (nodes.isEmpty() || !bargingProhibited)) nodes.addFirst(thisNode); val.setNodes(nodes); val.setChanged(true); cacheView.put(key, val); tx.commit(); return true; } } return false; } catch (Exception e) { if (interruptAll) { if (log.isInfoEnabled()) log.info("Node is stopped (or lock is broken in non-failover safe mode)," + " aborting transaction."); // Return immediately, exception will be thrown later. return true; } else { if (Thread.currentThread().isInterrupted()) { if (log.isInfoEnabled()) log.info("Thread is interrupted while attempting to acquire lock."); // Delegate the decision to throw InterruptedException to the AQS. 
sync.release(0); return false; } U.error(log, "Failed to compare and set: " + this, e); } throw e; } } }); } catch (IgniteCheckedException e) { throw U.convertException(e); } } /** * This method is used for synchronizing the number of acquire attempts on this lock across all nodes. * * @param cancelled true if acquire attempt is cancelled, false if acquire attempt should be registered. */ boolean synchronizeQueue(final boolean cancelled, final Thread thread) { final AtomicBoolean interrupted = new AtomicBoolean(false); try { return retryTopologySafe(new Callable<Boolean>() { @Override public Boolean call() throws Exception { try (GridNearTxLocal tx = CU.txStartInternal(ctx, cacheView, PESSIMISTIC, REPEATABLE_READ)) { GridCacheLockState val = cacheView.get(key); if (val == null) throw new IgniteCheckedException("Failed to find reentrant lock with given name: " + name); LinkedList<UUID> nodes = val.getNodes(); if (!cancelled) { nodes.add(thisNode); val.setChanged(false); cacheView.put(key, val); tx.commit(); // Keep track of all threads that are queued in global queue. // We deliberately don't use #sync.isQueued(), because AQS // cancel threads immediately after throwing interrupted exception. sync.waitingThreads.add(thread.getId()); return true; } else { if (sync.waitingThreads.contains(thread.getId())) { // Update other nodes if this is the first node in queue. val.setChanged(nodes.lastIndexOf(thisNode) == 0); nodes.removeLastOccurrence(thisNode); cacheView.put(key, val); tx.commit(); sync.waitingThreads.remove(thread.getId()); return true; } } return false; } catch (Exception e) { if (interruptAll) { if (log.isInfoEnabled()) log.info("Node is stopped (or lock is broken in non-failover safe mode)," + " aborting transaction."); // Abort this attempt to synchronize queue and start another one, // that will return immediately. 
sync.release(0); return false; } else { // If thread got interrupted, abort this attempt to synchronize queue, // clear interrupt flag and try again, and let the AQS decide // whether to throw an exception or ignore it. if (Thread.interrupted() || X.hasCause(e, InterruptedException.class)) { interrupted.set(true); throw new TransactionRollbackException("Thread got interrupted " + "while synchronizing the global queue, retrying. "); } U.error(log, "Failed to synchronize global lock queue: " + this, e); } throw e; } } }); } catch (IgniteCheckedException e) { throw U.convertException(e); } finally { // Restore interrupt flag and let AQS decide what to do with it. if (interrupted.get()) Thread.currentThread().interrupt(); } } /** * Sets the global state across all nodes after releasing the reentrant lock. * * @param newVal New state. * @param lastCond Id of the condition await is called. * @param outgoingSignals Map containing signal calls on this node since the last acquisition of the lock. */ protected boolean setGlobalState(final int newVal, @Nullable final String lastCond, final Map<String, Integer> outgoingSignals) { try { return retryTopologySafe(new Callable<Boolean>() { @Override public Boolean call() throws Exception { try (GridNearTxLocal tx = CU.txStartInternal(ctx, cacheView, PESSIMISTIC, REPEATABLE_READ)) { GridCacheLockState val = cacheView.get(key); if (val == null) throw new IgniteCheckedException("Failed to find reentrant lock with given name: " + name); val.set(newVal); if (newVal == 0) { val.setId(null); val.setThreadId(LOCK_FREE); } val.setChanged(true); // If this lock is fair, remove this node from queue. if (val.isFair() && newVal == 0) { UUID rmvdNode = val.getNodes().removeFirst(); assert (thisNode.equals(rmvdNode)); } // Get global condition queue. Map<String, LinkedList<UUID>> condMap = val.getConditionMap(); // Create map containing signals from this node. 
Map<UUID, LinkedList<String>> signalMap = new HashMap<>(); // Put any signal calls on this node to global state. if (!outgoingSignals.isEmpty()) { for (String condition : outgoingSignals.keySet()) { int cnt = outgoingSignals.get(condition); // Get queue for this condition. List<UUID> list = condMap.get(condition); if (list != null && !list.isEmpty()) { // Check if signalAll was called. if (cnt == 0) cnt = list.size(); // Remove from global condition queue. for (int i = 0; i < cnt; i++) { if (list.isEmpty()) break; UUID uuid = list.remove(0); // Skip if node to be released is not alive anymore. if (ctx.discovery().node(uuid) == null) { cnt++; continue; } LinkedList<String> queue = signalMap.get(uuid); if (queue == null) { queue = new LinkedList<>(); signalMap.put(uuid, queue); } queue.add(condition); } } } } val.setSignals(signalMap); // Check if this release is called after condition.await() call; // If true, add this node to the global waiting queue. if (lastCond != null) { LinkedList<UUID> queue; //noinspection IfMayBeConditional if (!condMap.containsKey(lastCond)) // New condition object. queue = new LinkedList<>(); else // Existing condition object. 
queue = condMap.get(lastCond); queue.add(thisNode); condMap.put(lastCond, queue); } val.setConditionMap(condMap); cacheView.put(key, val); tx.commit(); return true; } catch (Exception e) { if (interruptAll) { if (log.isInfoEnabled()) log.info("Node is stopped (or lock is broken in non-failover safe mode)," + " aborting transaction."); return true; } else U.error(log, "Failed to release: " + this, e); throw e; } } }); } catch (IgniteCheckedException e) { throw U.convertException(e); } } synchronized boolean checkIncomingSignals(GridCacheLockState state) { if (state.getSignals() == null) return false; LinkedList<String> signals = state.getSignals().get(thisNode); if (signals == null || signals.isEmpty()) return false; UUID tempUUID = getOwnerNode(); Thread tempThread = getExclusiveOwnerThread(); long tempThreadID = currentOwnerThreadId; // Temporarily allow current thread to signal condition object. // This is safe to do because: // 1. if release was called on this node, // it was called from currently active thread; // 2. if release came from a thread on any other node, // all threads on this node are already blocked. setCurrentOwnerNode(thisNode); setExclusiveOwnerThread(Thread.currentThread()); currentOwnerThreadId = Thread.currentThread().getId(); for (String signal: signals) conditionMap.get(signal).signal(); // Restore owner node and owner thread. setCurrentOwnerNode(tempUUID); setExclusiveOwnerThread(tempThread); currentOwnerThreadId = tempThreadID; return true; } /** * Condition implementation for {@linkplain IgniteLock}. * **/ private class IgniteConditionObject implements IgniteCondition { /** */ private final String name; /** */ private final AbstractQueuedSynchronizer.ConditionObject obj; /** * @param name Condition name. * @param obj Condition object. */ protected IgniteConditionObject(String name, ConditionObject obj) { this.name = name; this.obj = obj; } /** * Name of this condition. * * @return name Name of this condition object. 
*/
@Override public String name() {
    return name;
}

/**
 * {@inheritDoc}
 *
 * <p>Stores this condition's name in {@code lastCondition} before blocking, so
 * that the subsequent lock release can add this node to the condition's
 * global wait queue (see {@code setGlobalState}).
 *
 * @throws IgniteInterruptedException If the current thread is interrupted while
 *      waiting; wraps the original {@link InterruptedException}.
 */
@Override public void await() throws IgniteInterruptedException {
    ctx.kernalContext().gateway().readLock();

    try {
        // A condition may only be awaited while holding the lock exclusively.
        if (!isHeldExclusively())
            throw new IllegalMonitorStateException();

        lastCondition = name;

        obj.await();

        // Re-check lock state after wake-up.
        // NOTE(review): the boolean passed to validate() appears to control
        // interrupt sensitivity (true in interruptible waits, false otherwise) --
        // confirm against Sync.validate.
        sync.validate(true);
    }
    catch (InterruptedException e) {
        throw new IgniteInterruptedException(e);
    }
    finally {
        ctx.kernalContext().gateway().readUnlock();
    }
}

/**
 * {@inheritDoc}
 *
 * <p>Same as {@link #await()} but does not abort on thread interrupt; hence
 * no {@link InterruptedException} handling here.
 */
@Override public void awaitUninterruptibly() {
    ctx.kernalContext().gateway().readLock();

    try {
        if (!isHeldExclusively())
            throw new IllegalMonitorStateException();

        lastCondition = name;

        obj.awaitUninterruptibly();

        // false: uninterruptible variant of the post-wake-up check.
        sync.validate(false);
    }
    finally {
        ctx.kernalContext().gateway().readUnlock();
    }
}

/**
 * {@inheritDoc}
 *
 * @param nanosTimeout Maximum time to wait, in nanoseconds.
 * @return An estimate of the nanoseconds remaining, as reported by the
 *      underlying AQS condition object.
 * @throws IgniteInterruptedException If interrupted while waiting.
 */
@Override public long awaitNanos(long nanosTimeout) throws IgniteInterruptedException {
    ctx.kernalContext().gateway().readLock();

    try {
        if (!isHeldExclusively())
            throw new IllegalMonitorStateException();

        lastCondition = name;

        long result = obj.awaitNanos(nanosTimeout);

        sync.validate(true);

        return result;
    }
    catch (InterruptedException e) {
        throw new IgniteInterruptedException(e);
    }
    finally {
        ctx.kernalContext().gateway().readUnlock();
    }
}

/**
 * {@inheritDoc}
 *
 * @param time Maximum time to wait.
 * @param unit Time unit of {@code time}.
 * @return Whatever the underlying AQS condition's timed await returns
 *      ({@code false} if the wait timed out).
 * @throws IgniteInterruptedException If interrupted while waiting.
 */
@Override public boolean await(long time, TimeUnit unit) throws IgniteInterruptedException {
    ctx.kernalContext().gateway().readLock();

    try {
        if (!isHeldExclusively())
            throw new IllegalMonitorStateException();

        lastCondition = name;

        boolean result = obj.await(time, unit);

        sync.validate(true);

        return result;
    }
    catch (InterruptedException e) {
        throw new IgniteInterruptedException(e);
    }
    finally {
        ctx.kernalContext().gateway().readUnlock();
    }
}

/**
 * {@inheritDoc}
 *
 * @param deadline Absolute deadline to wait until.
 */
@Override public boolean awaitUntil(Date deadline) throws IgniteInterruptedException {
    ctx.kernalContext().gateway().readLock();

    try {
        if (!isHeldExclusively())
            throw new IllegalMonitorStateException();

        lastCondition = name;

        boolean result = obj.awaitUntil(deadline);

        sync.validate(true);
return result; } catch (InterruptedException e) { throw new IgniteInterruptedException(e); } finally { ctx.kernalContext().gateway().readUnlock(); } } /** {@inheritDoc} */ @Override public void signal() { ctx.kernalContext().gateway().readLock(); try { if (!isHeldExclusively()) throw new IllegalMonitorStateException(); validate(false); addOutgoingSignal(name); } finally { ctx.kernalContext().gateway().readUnlock(); } } /** {@inheritDoc} */ @Override public void signalAll() { ctx.kernalContext().gateway().readLock(); try { if (!isHeldExclusively()) throw new IllegalMonitorStateException(); sync.validate(false); addOutgoingSignalAll(name); } finally { ctx.kernalContext().gateway().readUnlock(); } } } } /** * Constructor. * * @param name Reentrant lock name. * @param key Reentrant lock key. * @param lockView Reentrant lock projection. */ public GridCacheLockImpl(String name, GridCacheInternalKey key, IgniteInternalCache<GridCacheInternalKey, GridCacheLockState> lockView) { super(name, key, lockView); } /** * @throws IgniteCheckedException If operation failed. 
* Lazily initializes the local {@code sync} structure from the distributed
* lock state stored in the cache. Exactly one caller performs the
* initialization (guarded by {@code initGuard}); every other caller blocks on
* {@code initLatch} until it completes.
*/
private void initializeReentrantLock() throws IgniteCheckedException {
    if (initGuard.compareAndSet(false, true)) {
        try {
            sync = retryTopologySafe(new Callable<Sync>() {
                @Override public Sync call() throws Exception {
                    try (GridNearTxLocal tx = CU.txStartInternal(ctx, cacheView, PESSIMISTIC, REPEATABLE_READ)) {
                        GridCacheLockState val = cacheView.get(key);

                        if (val == null) {
                            if (log.isDebugEnabled())
                                log.debug("Failed to find reentrant lock with given name: " + name);

                            return null;
                        }

                        // Pure read: roll back instead of committing the tx.
                        tx.rollback();

                        return new Sync(val);
                    }
                }
            });

            if (log.isDebugEnabled())
                log.debug("Initialized internal sync structure: " + sync);
        }
        finally {
            // Release waiters even if initialization failed; they will observe
            // sync == null below and fail with a checked exception.
            initLatch.countDown();
        }
    }
    else {
        // Another thread is (or was) initializing; wait for it to finish.
        U.await(initLatch);

        if (sync == null)
            throw new IgniteCheckedException("Internal reentrant lock has not been properly initialized.");
    }
}

/** {@inheritDoc} */
@Override public void onUpdate(GridCacheLockState val) {
    // Called only on initialization, so it's safe to ignore update.
    if (sync == null)
        return;

    updateLock.lock();

    try {
        // If this update is a result of unsuccessful acquire in fair mode, no local update should be done.
        if (!val.isChanged())
            return;

        // Check if update came from this node.
        boolean loc = sync.isLockedLocally(val.getId());

        // Process any incoming signals.
        boolean incomingSignals = sync.checkIncomingSignals(val);

        // Update permission count.
        sync.setPermits(val.get());

        // Update owner's node id.
        sync.setCurrentOwnerNode(val.getId());

        // Update owner's thread id.
        sync.setCurrentOwnerThread(val.getThreadId());

        // Check if any threads waiting on this node need to be notified.
        if ((incomingSignals || sync.getPermits() == 0) && !loc) {
            // Try to notify any waiting threads.
            sync.release(0);
        }
    }
    finally {
        updateLock.unlock();
    }
}

/** {@inheritDoc} */
@Override public void onNodeRemoved(UUID nodeId) {
    updateLock.lock();

    try {
        // If the departed node owned the lock and the lock is not
        // failover-safe, mark it broken and interrupt local waiters.
        if (nodeId.equals(sync.getOwnerNode())) {
            if (!sync.failoverSafe) {
                sync.setBroken(true);

                sync.interruptAll();
            }
        }

        // Try to notify any waiting threads.
sync.release(0); } finally { updateLock.unlock(); } } /** {@inheritDoc} */ @Override public void onStop() { if (sync == null) { interruptAll = true; return; } if (!sync.failoverSafe) { sync.setBroken(true); } sync.interruptAll(); // Try to notify any waiting threads. sync.release(0); } /** {@inheritDoc} */ @Override public void lock() { ctx.kernalContext().gateway().readLock(); try { initializeReentrantLock(); if (sync == null) throw new IgniteCheckedException("Failed to find reentrant lock with given name: " + name); sync.lock(); sync.validate(false); } catch (IgniteCheckedException e) { throw U.convertException(e); } finally { ctx.kernalContext().gateway().readUnlock(); } } /** {@inheritDoc} */ @Override public void lockInterruptibly() throws IgniteInterruptedException { ctx.kernalContext().gateway().readLock(); try { initializeReentrantLock(); sync.acquireInterruptibly(1); sync.validate(true); } catch (IgniteCheckedException e) { throw U.convertException(e); } catch (InterruptedException e) { if (sync.fair) sync.synchronizeQueue(true, Thread.currentThread()); throw new IgniteInterruptedException(e); } finally { ctx.kernalContext().gateway().readUnlock(); } } /** {@inheritDoc} */ @Override public boolean tryLock() { ctx.kernalContext().gateway().readLock(); try { initializeReentrantLock(); boolean result = sync.tryAcquire(1, false); sync.validate(false); return result; } catch (IgniteCheckedException e) { throw U.convertException(e); } finally { ctx.kernalContext().gateway().readUnlock(); } } /** {@inheritDoc} */ @Override public boolean tryLock(long timeout, TimeUnit unit) throws IgniteInterruptedException { ctx.kernalContext().gateway().readLock(); try { initializeReentrantLock(); boolean result = sync.tryAcquireNanos(1, unit.toNanos(timeout)); sync.validate(true); return result; } catch (IgniteCheckedException e) { throw U.convertException(e); } catch (InterruptedException e) { if (sync.fair) sync.synchronizeQueue(true, Thread.currentThread()); throw new 
IgniteInterruptedException(e); } finally { ctx.kernalContext().gateway().readUnlock(); } } /** {@inheritDoc} */ @Override public void unlock() { ctx.kernalContext().gateway().readLock(); try { initializeReentrantLock(); // Validate before release. sync.validate(false); sync.release(1); } catch (IgniteCheckedException e) { throw U.convertException(e); } finally { ctx.kernalContext().gateway().readUnlock(); } } @NotNull @Override public Condition newCondition() { throw new UnsupportedOperationException("IgniteLock does not allow creation of nameless conditions. "); } /** {@inheritDoc} */ @Override public IgniteCondition getOrCreateCondition(String name) { ctx.kernalContext().gateway().readLock(); try { initializeReentrantLock(); IgniteCondition result = sync.newCondition(name); sync.validate(false); return result; } catch (IgniteCheckedException e) { throw U.convertException(e); } finally { ctx.kernalContext().gateway().readUnlock(); } } /** {@inheritDoc} */ @Override public int getHoldCount() { try { initializeReentrantLock(); return sync.getHoldCount(); } catch (IgniteCheckedException e) { throw U.convertException(e); } } /** {@inheritDoc} */ @Override public boolean isHeldByCurrentThread() { try { initializeReentrantLock(); return sync.isHeldExclusively(); } catch (IgniteCheckedException e) { throw U.convertException(e); } } /** {@inheritDoc} */ @Override public boolean isLocked() { try { initializeReentrantLock(); return sync.isLocked(); } catch (IgniteCheckedException e) { throw U.convertException(e); } } /** {@inheritDoc} */ @Override public boolean hasQueuedThreads() { try { initializeReentrantLock(); return sync.hasQueuedThreads(); } catch (IgniteCheckedException e) { throw U.convertException(e); } } /** {@inheritDoc} */ @Override public boolean hasQueuedThread(Thread thread) { try { initializeReentrantLock(); return sync.isQueued(thread); } catch (IgniteCheckedException e) { throw U.convertException(e); } } /** {@inheritDoc} */ @Override public boolean 
hasWaiters(IgniteCondition condition) { try { initializeReentrantLock(); AbstractQueuedSynchronizer.ConditionObject c = sync.conditionMap.get(condition.name()); if (c == null) throw new IllegalArgumentException(); return sync.hasWaiters(c); } catch (IgniteCheckedException e) { throw U.convertException(e); } } /** {@inheritDoc} */ @Override public int getWaitQueueLength(IgniteCondition condition) { try { initializeReentrantLock(); AbstractQueuedSynchronizer.ConditionObject c = sync.conditionMap.get(condition.name()); if (c == null) throw new IllegalArgumentException(); return sync.getWaitQueueLength(c); } catch (IgniteCheckedException e) { throw U.convertException(e); } } @Override public boolean isFailoverSafe() { try { initializeReentrantLock(); return sync.failoverSafe; } catch (IgniteCheckedException e) { throw U.convertException(e); } } @Override public boolean isFair() { try { initializeReentrantLock(); return sync.fair; } catch (IgniteCheckedException e) { throw U.convertException(e); } } /** {@inheritDoc} */ @Override public boolean isBroken() { try { initializeReentrantLock(); return sync.isBroken(); } catch (IgniteCheckedException e) { throw U.convertException(e); } } /** {@inheritDoc} */ @Override public void needCheckNotRemoved() { // no-op } /** {@inheritDoc} */ @Override public void writeExternal(ObjectOutput out) throws IOException { out.writeUTF(name); } /** {@inheritDoc} */ @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { stash.set(in.readUTF()); } /** * Reconstructs object on unmarshalling. * * @return Reconstructed object. * @throws ObjectStreamException Thrown in case of unmarshalling error. 
*/ private Object readResolve() throws ObjectStreamException { String name = stash.get(); assert name != null; try { IgniteLock lock = IgnitionEx.localIgnite().context().dataStructures().reentrantLock( name, null, false, false, false); if (lock == null) throw new IllegalStateException("Lock was not found on deserialization: " + name); return lock; } catch (IgniteCheckedException e) { throw U.withCause(new InvalidObjectException(e.getMessage()), e); } finally { stash.remove(); } } /** {@inheritDoc} */ @Override public void close() { if (!rmvd) { try { boolean force = sync != null && (sync.isBroken() && !sync.failoverSafe); ctx.kernalContext().dataStructures().removeReentrantLock(name, ctx.group().name(), force); } catch (IgniteCheckedException e) { throw U.convertException(e); } } } /** {@inheritDoc} */ @Override public String toString() { return S.toString(GridCacheLockImpl.class, this); } }
/* * Copyright (C) 2008 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.mediaframeworktest.functional; //import android.content.Resources; import com.android.mediaframeworktest.MediaFrameworkTest; import com.android.mediaframeworktest.MediaNames; import android.content.res.AssetFileDescriptor; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.media.MediaMetadataRetriever; import android.media.MediaPlayer; import android.media.MediaRecorder; import android.os.Looper; import android.os.SystemClock; import android.util.Log; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.Writer; import java.io.FileOutputStream; import java.util.Random; /** * Junit / Instrumentation test case for the media player api */ public class CodecTest { private static String TAG = "CodecTest"; private static MediaPlayer mMediaPlayer; private MediaPlayer.OnPreparedListener mOnPreparedListener; private static int WAIT_FOR_COMMAND_TO_COMPLETE = 60000; //1 min max. 
// --- Shared state for the looper-based tests. Monitor objects below guard
// --- cross-thread hand-off between the test thread and listener callbacks.
private static boolean mInitialized = false;
private static boolean mPrepareReset = false;
private static Looper mLooper = null;
private static final Object lock = new Object();
private static final Object prepareDone = new Object();
private static final Object videoSizeChanged = new Object();
private static final Object onCompletion = new Object();
private static boolean onPrepareSuccess = false;
public static boolean onCompleteSuccess = false;
public static boolean mPlaybackError = false;
public static int mMediaInfoUnknownCount = 0;
public static int mMediaInfoVideoTrackLaggingCount = 0;
public static int mMediaInfoBadInterleavingCount = 0;
public static int mMediaInfoNotSeekableCount = 0;
public static int mMediaInfoMetdataUpdateCount = 0;

/**
 * Captures CPU usage by running "dumpsys cpuinfo" and reading its output.
 *
 * @return The raw dumpsys output, or {@code null} if the command failed.
 */
public static String printCpuInfo(){
    String cm = "dumpsys cpuinfo";
    String cpuinfo = null;
    int ch;
    InputStream in = null;
    try {
        Process p = Runtime.getRuntime().exec(cm);
        in = p.getInputStream();
        // StringBuilder: stream is read by this thread only, no sync needed.
        StringBuilder sb = new StringBuilder(512);
        while ((ch = in.read()) != -1) {
            sb.append((char) ch);
        }
        cpuinfo = sb.toString();
    } catch (IOException e) {
        Log.v(TAG, e.toString());
    } finally {
        // Fix: the process output stream was previously leaked on every call.
        if (in != null) {
            try {
                in.close();
            } catch (IOException ignored) {
                // Best-effort close; nothing useful to do on failure.
            }
        }
    }
    return cpuinfo;
}

/**
 * Returns the duration of the media file as reported by MediaPlayer, in ms.
 *
 * @param filePath Path of the media file to inspect.
 * @return Reported duration in milliseconds (undefined if prepare failed).
 */
public static int getDuration(String filePath) {
    Log.v(TAG, "getDuration - " + filePath);
    MediaPlayer mp = new MediaPlayer();
    try {
        mp.setDataSource(filePath);
        mp.prepare();
    } catch (Exception e) {
        Log.v(TAG, e.toString());
    }
    int duration = mp.getDuration();
    Log.v(TAG, "Duration " + duration);
    mp.release();
    Log.v(TAG, "release");
    return duration;
}

/**
 * Plays the file for ~10s, pauses, and checks that the reported playback
 * position is positive and within 20% of the elapsed wall-clock time.
 *
 * @param filePath Path of the media file to play.
 * @return {@code true} if the position check passed.
 */
public static boolean getCurrentPosition(String filePath){
    Log.v(TAG, "GetCurrentPosition - " + filePath);
    int currentPosition = 0;
    long t1 = 0;
    long t2 = 0;
    MediaPlayer mp = new MediaPlayer();
    try {
        mp.setDataSource(filePath);
        Log.v(TAG, "start playback");
        mp.prepare();
        mp.start();
        t1 = SystemClock.uptimeMillis();
        Thread.sleep(10000);
        mp.pause();
        Thread.sleep(MediaNames.PAUSE_WAIT_TIME);
        t2 = SystemClock.uptimeMillis();
    } catch (Exception e) {
        Log.v(TAG, e.toString());
    }
    currentPosition = mp.getCurrentPosition();
    mp.stop();
    mp.release();
    Log.v(TAG, "mp currentPositon = " + currentPosition + " play duration = " + (t2-t1));
    // The currentposition should be within 10% of the sleep time.
    // For the very short mp3, it should return the length instead of 10 seconds.
    if (filePath.equals(MediaNames.SHORTMP3)){
        if (currentPosition < 1000 )
            return true;
    }
    if ((currentPosition < ((t2-t1) *1.2)) && (currentPosition > 0))
        return true;
    else
        return false;
}

/**
 * Seeks to {@code MediaNames.SEEK_TIME} during playback and verifies the
 * position afterwards is at least 80% of the seek target.
 *
 * @param filePath Path of the media file to play.
 * @return {@code true} if the post-seek position check passed.
 */
public static boolean seekTo(String filePath){
    Log.v(TAG, "seekTo " + filePath);
    int currentPosition = 0;
    MediaPlayer mp = new MediaPlayer();
    try {
        mp.setDataSource(filePath);
        mp.prepare();
        mp.start();
        mp.seekTo(MediaNames.SEEK_TIME);
        Thread.sleep(MediaNames.WAIT_TIME);
        currentPosition = mp.getCurrentPosition();
    } catch (Exception e) {
        // NOTE(review): getMessage() can be null for some exceptions; consider
        // e.toString() if null messages show up in the logs.
        Log.v(TAG, e.getMessage());
    }
    mp.stop();
    mp.release();
    Log.v(TAG, "CurrentPosition = " + currentPosition);
    // The currentposition should be at least greater than the 80% of seek time.
    if ((currentPosition > MediaNames.SEEK_TIME *0.8))
        return true;
    else
        return false;
}

/**
 * Enables looping, seeks near the end, lets playback wrap around, and checks
 * the resulting position (tail of the check follows below).
 *
 * @param filePath Path of the media file to play.
 */
public static boolean setLooping(String filePath){
    int currentPosition = 0;
    int duration = 0;
    long t1 = 0;
    long t2 = 0;
    Log.v (TAG, "SetLooping - " + filePath);
    MediaPlayer mp = new MediaPlayer();
    try {
        mp.setDataSource(filePath);
        mp.prepare();
        duration = mp.getDuration();
        Log.v(TAG, "setLooping duration " + duration);
        mp.setLooping(true);
        mp.start();
        Thread.sleep(5000);
        mp.seekTo(duration - 5000);
        t1 = SystemClock.uptimeMillis();
        Thread.sleep(20000);
        t2 = SystemClock.uptimeMillis();
        Log.v(TAG, "pause");
        // Bug# 1106852 - IllegalStateException will be thrown if pause is called
        // in here
        //mp.pause();
        currentPosition = mp.getCurrentPosition();
        Log.v(TAG, "looping position " + currentPosition + "duration = " + (t2-t1));
    } catch (Exception e) {
        Log.v(TAG, "Exception : " + e.toString());
    }
    mp.stop();
    mp.release();
    // The current position should be within 20% of the sleep time
    // and should be greater than zero.
if ((currentPosition < ((t2-t1-5000)*1.2)) && currentPosition > 0) return true; else return false; } public static boolean pause(String filePath) throws Exception { Log.v(TAG, "pause - " + filePath); boolean misPlaying = true; boolean pauseResult = false; long t1=0; long t2=0; MediaPlayer mp = new MediaPlayer(); mp.setDataSource(filePath); mp.prepare(); int duration = mp.getDuration(); mp.start(); t1=SystemClock.uptimeMillis(); Thread.sleep(5000); mp.pause(); Thread.sleep(MediaNames.PAUSE_WAIT_TIME); t2=SystemClock.uptimeMillis(); misPlaying = mp.isPlaying(); int curPosition = mp.getCurrentPosition(); Log.v(TAG, filePath + " pause currentPositon " + curPosition); Log.v(TAG, "isPlaying "+ misPlaying + " wait time " + (t2 - t1) ); String cpuinfo = printCpuInfo(); Log.v(TAG, cpuinfo); if ((curPosition>0) && (curPosition < ((t2-t1) * 1.3)) && (misPlaying == false)) pauseResult = true; mp.stop(); mp.release(); return pauseResult; } public static void prepareStopRelease(String filePath) throws Exception { Log.v(TAG, "prepareStopRelease" + filePath); MediaPlayer mp = new MediaPlayer(); mp.setDataSource(filePath); mp.prepare(); mp.stop(); mp.release(); } public static void preparePauseRelease(String filePath) throws Exception { Log.v(TAG, "preparePauseRelease" + filePath); MediaPlayer mp = new MediaPlayer(); mp.setDataSource(filePath); mp.prepare(); mp.pause(); mp.release(); } static MediaPlayer.OnVideoSizeChangedListener mOnVideoSizeChangedListener = new MediaPlayer.OnVideoSizeChangedListener() { public void onVideoSizeChanged(MediaPlayer mp, int width, int height) { synchronized (videoSizeChanged) { Log.v(TAG, "sizechanged notification received ..."); videoSizeChanged.notify(); } } }; //Register the videoSizeChanged listener public static int videoHeight(String filePath) throws Exception { Log.v(TAG, "videoHeight - " + filePath); int videoHeight = 0; synchronized (lock) { initializeMessageLooper(); try { lock.wait(WAIT_FOR_COMMAND_TO_COMPLETE); } catch(Exception e) { 
Log.v(TAG, "looper was interrupted."); return 0; } } try { mMediaPlayer.setDataSource(filePath); mMediaPlayer.setDisplay(MediaFrameworkTest.mSurfaceView.getHolder()); mMediaPlayer.setOnVideoSizeChangedListener(mOnVideoSizeChangedListener); synchronized (videoSizeChanged) { try { mMediaPlayer.prepare(); mMediaPlayer.start(); videoSizeChanged.wait(WAIT_FOR_COMMAND_TO_COMPLETE); } catch (Exception e) { Log.v(TAG, "wait was interrupted"); } } videoHeight = mMediaPlayer.getVideoHeight(); terminateMessageLooper(); } catch (Exception e) { Log.e(TAG, e.getMessage()); } return videoHeight; } //Register the videoSizeChanged listener public static int videoWidth(String filePath) throws Exception { Log.v(TAG, "videoWidth - " + filePath); int videoWidth = 0; synchronized (lock) { initializeMessageLooper(); try { lock.wait(WAIT_FOR_COMMAND_TO_COMPLETE); } catch(Exception e) { Log.v(TAG, "looper was interrupted."); return 0; } } try { mMediaPlayer.setDataSource(filePath); mMediaPlayer.setDisplay(MediaFrameworkTest.mSurfaceView.getHolder()); mMediaPlayer.setOnVideoSizeChangedListener(mOnVideoSizeChangedListener); synchronized (videoSizeChanged) { try { mMediaPlayer.prepare(); mMediaPlayer.start(); videoSizeChanged.wait(WAIT_FOR_COMMAND_TO_COMPLETE); } catch (Exception e) { Log.v(TAG, "wait was interrupted"); } } videoWidth = mMediaPlayer.getVideoWidth(); terminateMessageLooper(); } catch (Exception e) { Log.e(TAG, e.getMessage()); } return videoWidth; } //This also test the streaming video which may take a long //time to start the playback. 
public static boolean videoSeekTo(String filePath) throws Exception { Log.v(TAG, "videoSeekTo - " + filePath); int currentPosition = 0; int duration = 0; boolean videoResult = false; MediaPlayer mp = new MediaPlayer(); mp.setDataSource(filePath); mp.setDisplay(MediaFrameworkTest.mSurfaceView.getHolder()); mp.prepare(); mp.start(); if (filePath.equals(MediaNames.VIDEO_SHORT_3GP)){ mp.pause(); Thread.sleep(MediaNames.PAUSE_WAIT_TIME); mp.seekTo(0); mp.start(); Thread.sleep(1000); currentPosition = mp.getCurrentPosition(); Log.v(TAG,"short position " + currentPosition); if (currentPosition > 100 ) return true; else return false; } Thread.sleep(5000); duration = mp.getDuration(); Log.v(TAG, "video duration " + duration); mp.pause(); Thread.sleep(MediaNames.PAUSE_WAIT_TIME); mp.seekTo(duration - 20000 ); mp.start(); Thread.sleep(1000); mp.pause(); Thread.sleep(MediaNames.PAUSE_WAIT_TIME); mp.seekTo(duration/2); mp.start(); Thread.sleep(10000); currentPosition = mp.getCurrentPosition(); Log.v(TAG, "video currentPosition " + currentPosition); mp.release(); if (currentPosition > (duration /2 )*0.9) return true; else return false; } public static boolean seekToEnd(String filePath){ Log.v(TAG, "seekToEnd - " + filePath); int duration = 0; int currentPosition = 0; boolean isPlaying = false; MediaPlayer mp = new MediaPlayer(); try{ mp.setDataSource(filePath); Log.v(TAG, "start playback"); mp.prepare(); duration = mp.getDuration(); mp.seekTo(duration - 3000); mp.start(); Thread.sleep(6000); }catch (Exception e){} isPlaying = mp.isPlaying(); currentPosition = mp.getCurrentPosition(); Log.v(TAG, "seekToEnd currentPosition= " + currentPosition + " isPlaying = " + isPlaying); mp.stop(); mp.release(); Log.v(TAG, "duration = " + duration); if (currentPosition < 0.9 * duration || isPlaying) return false; else return true; } public static boolean shortMediaStop(String filePath){ Log.v(TAG, "shortMediaStop - " + filePath); //This test is only for the short media file int duration = 0; 
int currentPosition = 0; boolean isPlaying = false; MediaPlayer mp = new MediaPlayer(); try{ mp.setDataSource(filePath); Log.v(TAG, "start playback"); mp.prepare(); duration = mp.getDuration(); mp.start(); Thread.sleep(10000); }catch (Exception e){} isPlaying = mp.isPlaying(); currentPosition = mp.getCurrentPosition(); Log.v(TAG, "seekToEnd currentPosition= " + currentPosition + " isPlaying = " + isPlaying); mp.stop(); mp.release(); Log.v(TAG, "duration = " + duration); if (currentPosition > duration || isPlaying) return false; else return true; } public static boolean playToEnd(String filePath){ Log.v(TAG, "shortMediaStop - " + filePath); //This test is only for the short media file int duration = 200000; int updateDuration = 0; int currentPosition = 0; boolean isPlaying = false; MediaPlayer mp = new MediaPlayer(); try{ Thread.sleep(5000); mp.setDataSource(filePath); Log.v(TAG, "start playback"); mp.prepare(); //duration = mp.getDuration(); mp.start(); Thread.sleep(50000); }catch (Exception e){} isPlaying = mp.isPlaying(); currentPosition = mp.getCurrentPosition(); //updateDuration = mp.getDuration(); Log.v(TAG, "seekToEnd currentPosition= " + currentPosition + " isPlaying = " + isPlaying); mp.stop(); mp.release(); //Log.v(TAG, "duration = " + duration); //Log.v(TAG, "Update duration = " + updateDuration); if (currentPosition > duration || isPlaying) return false; else return true; } public static boolean seektoBeforeStart(String filePath){ Log.v(TAG, "seektoBeforeStart - " + filePath); //This test is only for the short media file int duration = 0; int currentPosition = 0; MediaPlayer mp = new MediaPlayer(); try{ mp.setDataSource(filePath); mp.prepare(); duration = mp.getDuration(); mp.seekTo(duration - 10000); mp.start(); currentPosition=mp.getCurrentPosition(); mp.stop(); mp.release(); }catch (Exception e){} if (currentPosition < duration/2) return false; else return true; } public static boolean mediaRecorderRecord(String filePath){ Log.v(TAG, "SoundRecording - 
" + filePath); //This test is only for the short media file int duration = 0; try{ MediaRecorder mRecorder = new MediaRecorder(); mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC); mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP); mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB); mRecorder.setOutputFile(filePath); mRecorder.prepare(); mRecorder.start(); Thread.sleep(500); mRecorder.stop(); Log.v(TAG, "sound recorded"); mRecorder.release(); }catch (Exception e){ Log.v(TAG, e.toString()); } //Verify the recorded file MediaPlayer mp = new MediaPlayer(); try{ mp.setDataSource(filePath); mp.prepare(); duration = mp.getDuration(); Log.v(TAG,"Duration " + duration); mp.release(); }catch (Exception e){} //Check the record media file length is greate than zero if (duration > 0) return true; else return false; } //Test for mediaMeta Data Thumbnail public static boolean getThumbnail(String filePath, String goldenPath){ Log.v(TAG, "getThumbnail - " + filePath); int goldenHeight = 0; int goldenWidth = 0; int outputWidth = 0; int outputHeight = 0; //This test is only for the short media file try{ BitmapFactory mBitmapFactory = new BitmapFactory(); MediaMetadataRetriever mMediaMetadataRetriever = new MediaMetadataRetriever(); try { mMediaMetadataRetriever.setDataSource(filePath); } catch(Exception e) { e.printStackTrace(); return false; } Bitmap outThumbnail = mMediaMetadataRetriever.getFrameAtTime(-1); //Verify the thumbnail Bitmap goldenBitmap = mBitmapFactory.decodeFile(goldenPath); outputWidth = outThumbnail.getWidth(); outputHeight = outThumbnail.getHeight(); goldenHeight = goldenBitmap.getHeight(); goldenWidth = goldenBitmap.getWidth(); //check the image dimension if ((outputWidth != goldenWidth) || (outputHeight != goldenHeight)) return false; // Check half line of pixel int x = goldenHeight / 2; for (int j = 1; j < goldenWidth / 2; j++) { if (goldenBitmap.getPixel(x, j) != outThumbnail.getPixel(x, j)) { Log.v(TAG, "pixel = " + 
goldenBitmap.getPixel(x, j)); return false; } } }catch (Exception e){ Log.v(TAG, e.toString()); return false; } return true; } //Load midi file from resources public static boolean resourcesPlayback(AssetFileDescriptor afd, int expectedDuration){ int duration = 0; try{ MediaPlayer mp = new MediaPlayer(); mp.setDataSource(afd.getFileDescriptor(),afd.getStartOffset(), afd.getLength()); mp.prepare(); mp.start(); duration = mp.getDuration(); Thread.sleep(5000); mp.release(); }catch (Exception e){ Log.v(TAG,e.getMessage()); } if (duration > expectedDuration) return true; else return false; } public static boolean prepareAsyncReset(String filePath){ //preparesAsync try{ MediaPlayer mp = new MediaPlayer(); mp.setDataSource(filePath); mp.prepareAsync(); mp.reset(); mp.release(); }catch (Exception e){ Log.v(TAG,e.getMessage()); return false; } return true; } public static boolean isLooping(String filePath) { MediaPlayer mp = null; try { mp = new MediaPlayer(); if (mp.isLooping()) { Log.v(TAG, "MediaPlayer.isLooping() returned true after ctor"); return false; } mp.setDataSource(filePath); mp.prepare(); mp.setLooping(true); if (!mp.isLooping()) { Log.v(TAG, "MediaPlayer.isLooping() returned false after setLooping(true)"); return false; } mp.setLooping(false); if (mp.isLooping()) { Log.v(TAG, "MediaPlayer.isLooping() returned true after setLooping(false)"); return false; } }catch (Exception e){ Log.v(TAG, "Exception : " + e.toString()); return false; } finally { if (mp != null) mp.release(); } return true; } public static boolean isLoopingAfterReset(String filePath) { MediaPlayer mp = null; try { mp = new MediaPlayer(); mp.setDataSource(filePath); mp.prepare(); mp.setLooping(true); mp.reset(); if (mp.isLooping()) { Log.v(TAG, "MediaPlayer.isLooping() returned true after reset()"); return false; } }catch (Exception e){ Log.v(TAG, "Exception : " + e.toString()); return false; } finally { if (mp != null) mp.release(); } return true; } /* * Initializes the message looper so that 
the mediaPlayer object can * receive the callback messages. */ private static void initializeMessageLooper() { Log.v(TAG, "start looper"); new Thread() { @Override public void run() { // Set up a looper to be used by camera. Looper.prepare(); Log.v(TAG, "start loopRun"); // Save the looper so that we can terminate this thread // after we are done with it. mLooper = Looper.myLooper(); mMediaPlayer = new MediaPlayer(); synchronized (lock) { mInitialized = true; lock.notify(); } Looper.loop(); // Blocks forever until Looper.quit() is called. Log.v(TAG, "initializeMessageLooper: quit."); } }.start(); } /* * Terminates the message looper thread. */ private static void terminateMessageLooper() { mLooper.quit(); mMediaPlayer.release(); } static MediaPlayer.OnPreparedListener mPreparedListener = new MediaPlayer.OnPreparedListener() { public void onPrepared(MediaPlayer mp) { synchronized (prepareDone) { if(mPrepareReset){ Log.v(TAG, "call Reset"); mMediaPlayer.reset(); } Log.v(TAG, "notify the prepare callback"); prepareDone.notify(); onPrepareSuccess = true; } } }; public static boolean prepareAsyncCallback(String filePath, boolean reset) throws Exception { //Added the PrepareReset flag which allow us to switch to different //test case. 
if (reset){ mPrepareReset = true; } synchronized (lock) { initializeMessageLooper(); try { lock.wait(WAIT_FOR_COMMAND_TO_COMPLETE); } catch(Exception e) { Log.v(TAG, "looper was interrupted."); return false; } } try{ mMediaPlayer.setOnPreparedListener(mPreparedListener); mMediaPlayer.setDataSource(filePath); mMediaPlayer.setDisplay(MediaFrameworkTest.mSurfaceView.getHolder()); mMediaPlayer.prepareAsync(); synchronized (prepareDone) { try { prepareDone.wait(WAIT_FOR_COMMAND_TO_COMPLETE); } catch (Exception e) { Log.v(TAG, "wait was interrupted."); } } terminateMessageLooper(); }catch (Exception e){ Log.v(TAG,e.getMessage()); } return onPrepareSuccess; } static MediaPlayer.OnCompletionListener mCompletionListener = new MediaPlayer.OnCompletionListener() { public void onCompletion(MediaPlayer mp) { synchronized (onCompletion) { Log.v(TAG, "notify the completion callback"); onCompletion.notify(); onCompleteSuccess = true; } } }; static MediaPlayer.OnErrorListener mOnErrorListener = new MediaPlayer.OnErrorListener() { public boolean onError(MediaPlayer mp, int framework_err, int impl_err) { mPlaybackError = true; mp.reset(); return true; } }; static MediaPlayer.OnInfoListener mInfoListener = new MediaPlayer.OnInfoListener() { public boolean onInfo(MediaPlayer mp, int what, int extra) { switch (what){ case MediaPlayer.MEDIA_INFO_UNKNOWN: mMediaInfoUnknownCount++; break; case MediaPlayer.MEDIA_INFO_VIDEO_TRACK_LAGGING: mMediaInfoVideoTrackLaggingCount++; break; case MediaPlayer.MEDIA_INFO_BAD_INTERLEAVING: mMediaInfoBadInterleavingCount++; break; case MediaPlayer.MEDIA_INFO_NOT_SEEKABLE: mMediaInfoNotSeekableCount++; break; case MediaPlayer.MEDIA_INFO_METADATA_UPDATE: mMediaInfoMetdataUpdateCount++; break; } return true; } }; // For each media file, forward twice and backward once, then play to the end public static boolean playMediaSamples(String filePath) throws Exception { int duration = 0; int curPosition = 0; int nextPosition = 0; int waittime = 0; onCompleteSuccess 
= false; mMediaInfoUnknownCount = 0; mMediaInfoVideoTrackLaggingCount = 0; mMediaInfoBadInterleavingCount = 0; mMediaInfoNotSeekableCount = 0; mMediaInfoMetdataUpdateCount = 0; mPlaybackError = false; String testResult; initializeMessageLooper(); synchronized (lock) { try { lock.wait(WAIT_FOR_COMMAND_TO_COMPLETE); } catch(Exception e) { Log.v(TAG, "looper was interrupted."); return false; } } try { mMediaPlayer.setOnCompletionListener(mCompletionListener); mMediaPlayer.setOnErrorListener(mOnErrorListener); mMediaPlayer.setOnInfoListener(mInfoListener); Log.v(TAG, "playMediaSamples: sample file name " + filePath); mMediaPlayer.setDataSource(filePath); mMediaPlayer.setDisplay(MediaFrameworkTest.mSurfaceView.getHolder()); mMediaPlayer.prepare(); duration = mMediaPlayer.getDuration(); // start to play mMediaPlayer.start(); waittime = duration - mMediaPlayer.getCurrentPosition(); synchronized(onCompletion){ try { onCompletion.wait(waittime + 2000); }catch (Exception e) { Log.v(TAG, "playMediaSamples are interrupted"); return false; } } terminateMessageLooper(); }catch (Exception e) { Log.v(TAG, "playMediaSamples:" + e.getMessage()); } return onCompleteSuccess; } }
package Java.org.network.mana.mana_components;

import Java.org.network.mana.base_components.neurons.Neuron;
import Java.org.network.mana.base_components.sparse.InterleavedSparseAddOn;
import Java.org.network.mana.base_components.sparse.InterleavedSparseMatrix;
import Java.org.network.mana.base_components.sparse.SrcTarDataPack;
import Java.org.network.mana.base_components.sparse.SrcTarPair;
import Java.org.network.mana.base_components.synapses.ConnectSpecs;
import Java.org.network.mana.base_components.synapses.STDP;
import Java.org.network.mana.base_components.synapses.ShortTermPlasticity;
import Java.org.network.mana.enums.ConnectRule;
import Java.org.network.mana.enums.Ordering;
import Java.org.network.mana.enums.SynapseType;
import Java.org.network.mana.globals.Default_Parameters;
import Java.org.network.mana.utils.BufferedDoubleArray;
import Java.org.network.mana.utils.Utils;

import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.PriorityBlockingQueue;
import java.util.concurrent.ThreadLocalRandom;

/**
 * A synapse matrix for a MANA network: stores per-synapse weight data in
 * target-major order and outbound spike/plasticity data in source-major order,
 * plus a lookup table translating between the two orderings.
 */
public class MANAMatrix {

    /**
     * Interleaved in the following way { weights, dw } contains weight information.
     */
    protected InterleavedSparseMatrix weightsTOrd;

    /**
     * Interleaved in the following way { delay, lastArr, U, D, F, u, R }, and arranged in
     * source-major order (data pertaining to target neurons are contiguous and ordered). Contains
     * all the necessary information to calculate UDF and spike arrivals.
     */
    protected InterleavedSparseMatrix outDataSOrd;

    /** An addon set of values containing more target ordered data (last spike-arrival times). */
    protected InterleavedSparseAddOn tOrdLastArrivals;

    /**
     * A map from source to target: looking up a value at a source-ordered
     * coordinate yields the location of the same synapse's value when target
     * ordered. Traverse this lookup map to rearrange a source-ordered set into
     * a target-ordered one. It has #synapses elements; accesses must be
     * multiplied by the interleaving factor to get the absolute index.
     */
    protected int[] srcToTargLookup;

    // Synapse type (EE/EI/IE/II), derived from src/tar excitatory flags.
    public final SynapseType type;
    // Number of stored synapses (nonzeros).
    private int nnz;
    public final int noSrc;
    public final int noTar;
    public final Neuron src;
    public final MANANeurons tar;

    /**
     * Makes a MANAMatrix from the COO ordered tuples of all synapse values.
     * Note: re-sorts {@code cooMat.data} in place (target order first, then
     * source order) while building the two interleaved matrices.
     * @param cooMat
     * @param src
     * @param tar
     */
    public MANAMatrix(COOManaMat cooMat, Neuron src, MANANeurons tar) {
        //Collections.sort(cooMat.data, Ordering.orderTypeTupleComp(Ordering.SOURCE));
        noSrc = src.getSize();
        noTar = tar.getSize();
        this.src = src;
        this.tar = tar;
        type = SynapseType.getSynType(src.isExcitatory(), tar.isExcitatory());
        int [] targRange = {0, cooMat.tarILF};
        weightsTOrd = new InterleavedSparseMatrix(cooMat.data, targRange, noTar, noSrc, //offsetTar, offsetSrc,
                Ordering.TARGET);
        tOrdLastArrivals = new InterleavedSparseAddOn(weightsTOrd, 1);
        // pfrBuffers = new InterleavedSparseAddOn(weightsTOrd, 1);
        int cnt=0;
        for(SrcTarDataPack tup : cooMat.data) {
            // Second-to-last slot of each tuple carries the last arrival time.
            tOrdLastArrivals.values[cnt] = tup.values[tup.values.length-2];
            // Assign the now TARGET ORDERED cooMat.data linear (target ordered) indices
            tup.values[tup.values.length-1] = cnt++;
        }
        int [] srcRange = {cooMat.tarILF, cooMat.tarILF+cooMat.srcILF};
        // This will source order sort cooMat.data!
        // So now the target ordered linear indices will be in source order...
        outDataSOrd = new InterleavedSparseMatrix(cooMat.data, srcRange, noSrc, noTar, //offsetSrc, offsetTar,
                Ordering.SOURCE);
        srcToTargLookup = new int[cooMat.data.size()];
        cnt = 0;
        // Copy the target linear indices that are source ordered to our lookup table
        for(SrcTarDataPack tup : cooMat.data) {
            srcToTargLookup[cnt++] = (int) tup.values[tup.values.length-1];
        }
        nnz = weightsTOrd.getNnz();
    }

    /**
     * Outgoing data in {@link #outDataSOrd} is interleaved in the following way
     * { delay, lastArr, U, D, F, u, R }, while {@link #weightsTOrd} is interleaved as
     * { weight, dw }.
     * Builds a fresh matrix from a connection specification, using a lot of defaults.
     * Connectivity is decided per (source, target) pair by {@code cSpecs.rule};
     * delays are scaled by inter-neuron distance relative to {@code maxDist}.
     * @param src
     * @param tar
     * @param maxDist maximum possible distance between neurons (delay normalizer)
     * @param maxDly maximum conduction delay
     * @param cSpecs the connection rule and its parameters
     */
    public MANAMatrix(Neuron src, MANANeurons tar, double maxDist, double maxDly,
                      ConnectSpecs cSpecs) {
        this.src = src;
        this.tar = tar;
        List<SrcTarDataPack> targCOOTup = new LinkedList<>();
        List<SrcTarDataPack> srcCOOTup = new LinkedList<>();
        type = SynapseType.getSynType(src.isExcitatory(), tar.isExcitatory());
        this.noSrc = src.getSize();
        this.noTar = tar.N;
        System.out.println();
        for (int ii=0; ii<src.getSize(); ii++) {
            // boolean skp = true;
            for(int jj=0; jj<tar.N; jj++) {
                // System.out.print(" ");
                // No self-connections when src and tar are the same population.
                if (src == tar && ii==jj) {
                    // System.out.print(".");
                    continue;
                }
                if (cSpecs.rule == ConnectRule.Random) {
                    // Bernoulli connection with probability parms[0].
                    if (ThreadLocalRandom.current().nextDouble() >= cSpecs.parms[0]) {
                        // TODO: This isn't how this is used... do something _NOT_ dumb with prob dists...
                        // System.out.print(".");
                        continue;
                    }
                } else if (cSpecs.rule == ConnectRule.Distance) {
                    // Gaussian fall-off of connection probability with distance.
                    double dist = Utils.euclidean(src.getCoordinates(false)[ii],
                            tar.getCoordinates(false)[jj]);
                    double cProb = cSpecs.parms[0] * Math.exp(-((dist*dist)/(cSpecs.parms[1] * cSpecs.parms[1])));
                    if (ThreadLocalRandom.current().nextDouble() >= cProb) {
                        // System.out.print(".");
                        continue;
                    }
                } else if (cSpecs.rule == ConnectRule.Distance2) {
                    // Hard distance cutoff chosen per EE/EI/IE/II type (parms[0..3]),
                    // gated by a final probability in parms[last].
                    double dist = Utils.euclidean(src.getCoordinates(false)[ii],
                            tar.getCoordinates(false)[jj]);
                    int loc;
                    if(src.isExcitatory()) {
                        if(tar.isExcitatory()) {
                            loc = 0;
                        } else {
                            loc = 1;
                        }
                    } else {
                        if(tar.isExcitatory()) {
                            loc = 2;
                        } else {
                            loc = 3;
                        }
                    }
                    if(dist > cSpecs.parms[loc]
                            || ThreadLocalRandom.current().nextDouble() > cSpecs.parms[cSpecs.parms.length-1]) {
                        continue;
                    }
                }
                // System.out.print("X");
                SrcTarPair coo = new SrcTarPair(ii, jj);
                // w, dw
                double[] tarData = {cSpecs.getNewWt(), 0};
                SrcTarDataPack tarDatPack = new SrcTarDataPack(coo, tarData);
                double[] srcData = new double[7]; // Outbound values... delay, lastArr, U, D, F, u, R
                // Set Short term plasticity/UDF parameters -- occupies indices 2-6
                ShortTermPlasticity.setSourceDefaults(srcData, 0, type);
                // Calculate delay from distance
                srcData[0] = Default_Parameters.dt
                        * (int)(maxDly * Utils.euclidean(src.getCoordinates(false)[ii],
                        tar.getCoordinates(false)[jj])/(maxDist* Default_Parameters.dt));
                SrcTarDataPack srcDatPack = new SrcTarDataPack(coo, srcData);
                targCOOTup.add(tarDatPack);
                srcCOOTup.add(srcDatPack);
            }
            // System.out.println(" ");
        }
        int[] tRange = {0, 2};
        int[] sRange = {0, 7};
        weightsTOrd = new InterleavedSparseMatrix(targCOOTup, tRange, src.getSize(), tar.N, //offsetSrc, offsetTar,
                Ordering.TARGET);
        outDataSOrd = new InterleavedSparseMatrix(srcCOOTup, sRange, tar.N, src.getSize(), //offsetTar, offsetSrc,
                Ordering.SOURCE);
        nnz = weightsTOrd.getNnz();
        tOrdLastArrivals = new InterleavedSparseAddOn(weightsTOrd, 1);
        //pfrBuffers = new InterleavedSparseAddOn(weightsTOrd, 1);
        assert(nnz == outDataSOrd.getNnz());
        srcToTargLookup = Utils.getSortKey(targCOOTup, Ordering.orderTypeTupleComp(Ordering.SOURCE));
    }

    /**
     * At the moment a source neuron spikes calculates the UDF-psp contribution
     * and sets the outgoing last arrival time to when that spike will arrive given
     * the synapse. Does this for all local outgoing synapses from the source neuron.
     * @param noSrc index of the spiking source neuron
     * @param time current simulation time
     */
    public void calcSpikeResponses(int noSrc, double time) {
        int start = outDataSOrd.getStartIndex(noSrc);
        int end = outDataSOrd.getEndIndex(noSrc);
        for(int ii=start; ii<end; ii+=outDataSOrd.getInc()) {
            ShortTermPlasticity.getPSR_UDF(ii, time, outDataSOrd.getRawData()); // calculate UDF
            // slot 0 is the delay; slot 1 becomes the spike's arrival time.
            outDataSOrd.getRawData()[ii+1] = outDataSOrd.getRawData()[ii]+time;
        }
    }

    /**
     * Based on their arrival times, adds event data (what is necessary
     * to know when and where a spike will arrive and how much of a contribution it'll
     * make). Performs this for all local outgoing synapses from a given neuron. This
     * function directly populates the event queue and therefore performs all the
     * necessary event encoding.
     * Order for events is: {arrTime, rel tar ind, udfMultiplier, abs tar ind, src ind, hash}
     * with udfMultiplier being a float represented as int bits.
     * @param noSrc index of the source neuron
     * @param time simulation clock
     * @param dt integration time step
     * @param eventQ the node-local synaptic event queue
     */
    public void addEvents(int noSrc, double time, double dt, PriorityBlockingQueue<int []> eventQ) {
        int start = outDataSOrd.getStartIndex(noSrc);
        int end = outDataSOrd.getEndIndex(noSrc);
        int inc = outDataSOrd.getInc();
        double [] vals = outDataSOrd.getRawData();
        try {
            for (int ii = start; ii < end; ii += inc) {
                int[] evt = new int[6];
                // Arrival tick = (now + delay)/dt.
                evt[0] = (int) ((time + vals[ii]) / dt);
                // Target-ordered linear index into the weight matrix.
                evt[1] = srcToTargLookup[ii/inc] * weightsTOrd.getInc();
                // UDF response = 10 * R * u (last two interleaved slots), as float bits.
                evt[2] = Float.floatToIntBits((float) (10 * vals[ii + inc - 1] * vals[ii + inc - 2]));
                if(Float.intBitsToFloat(evt[2]) > 200) {
                    throw new IllegalStateException("Unusual UDF Response");
                }
                evt[3] = outDataSOrd.getRawOrdIndices()[ii / inc];
                evt[4] = noSrc;
                evt[5] = SrcTarPair.hashCodeGen(noSrc, evt[3]);
                eventQ.add(evt);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Processes synaptic events, that is, queued spikes which have an arrival time,
     * destination, and which contribute a specific current value. Performs STDP and adds
     * the PSP to an array meant to contain the local total incoming currents to each
     * target neuron.
     * @param eventQ the node-local event queue of spike events to be processed; events are
     * stored as integer arrays {arrivalTime/dt, linear weight index, post synaptic response
     * (float encoded in int bits), target number, source number, hash}
     * @param incCur the local incoming total currents to each target neuron
     * @param stdpRule the STDP rule used to perform STDP
     * @param lastSpkTimes the last time each post synaptic cell spiked
     * @param time current time
     * @param dt simulation delta t
     */
    public void processEventsSTDP(PriorityBlockingQueue<int[]> eventQ, double[] incCur, STDP stdpRule,
                                  BufferedDoubleArray lastSpkTimes, double time, double dt) {
        int [] event = null;
        try {
            // Drain every event whose arrival tick has been reached.
            while (!eventQ.isEmpty() && eventQ.peek()[0] * dt <= time) {
                event = eventQ.poll();
                int ind;
                if(event[event.length-1] == -1) { // invalidated
                    // Cached index no longer valid: search by (target, source).
                    ind = weightsTOrd.find(event[3], event[4]);
                } else {
                    ind = event[1];
                }
                if(ind==-1) {
                    System.out.println("Apparently there's no synapse?");
                }
                incCur[event[3]] += weightsTOrd.getRawData()[ind] * Float.intBitsToFloat(event[2]);
                stdpRule.preTriggered(weightsTOrd, event, lastSpkTimes, dt);
                // TODO: Fix the event thing to make it not dependent on increment in wts mat, so that callers can apply their own offsets without having to know wts
                tOrdLastArrivals.setValue(ind/2, time, 0);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Processes synapse events i.e. looks in the event queue for all events that arrive
     * (or should have arrived) at this time (given the time of the last pre-synaptic spike
     * and the delay of the synapse) and removes them from the event queue. Each event is
     * then processed: meaning that the appropriate amount of current (the current from the
     * event is stored in the queue) is deposited on the appropriate target neuron.
     * No STDP is performed by this variant.
     *
     * @param eventQ the node local queue containing all synaptic events
     * @param incCur the incoming currents to each of the neurons (local to a node) where
     * each synapse's contribution is stored
     * @param time current simulation clock
     * @param dt integration time step
     */
    public void processEvents(PriorityBlockingQueue<int[]> eventQ, double[] incCur, double time, double dt) {
        int [] event = null;
        try {
            while (!eventQ.isEmpty() && eventQ.peek()[0] * dt <= time) {
                event = eventQ.poll();
                incCur[event[3]] += weightsTOrd.getRawData()[event[1]] * Float.intBitsToFloat(event[2]);
                // TODO: Fix the event thing to make it not dependent on increment in wts mat, so that callers can apply their own offsets without having to know wts
                tOrdLastArrivals.setValue(event[1]/2, time, 0);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Accumulates each target neuron's in-degree into {@code inD}. */
    public void inDegrees(final int[] inD) {
        for(int ii=0; ii<noTar; ++ii) {
            inD[ii] += weightsTOrd.getRawPtrs()[ii+1] - weightsTOrd.getRawPtrs()[ii];
        }
    }

    /** Accumulates each source neuron's out-degree into {@code oD}. */
    public void outDegrees(final int[] oD) {
        for(int ii=0; ii<weightsTOrd.getNnz(); ++ii) {
            oD[weightsTOrd.getRawOrdIndices()[ii]]++;
        }
    }

    // public void normWts(double[] normVals) {
    //     weightsTOrd.mulFromArray(normVals, 0);
    // }

    /** Multiplies each target neuron's incoming weights by its entry in {@code normVals}. */
    public void scaleWts(double[] normVals) {
        weightsTOrd.mulFromArray(normVals, 0);
    }

    /** Divides each target neuron's incoming weights by its entry in {@code div}. */
    public void rightDivWts(double [] div) {
        weightsTOrd.divFromArray(div, 0);
    }

    /** Scales all incoming weights of one target neuron by {@code scale}. */
    public void scaleWeights(int noTar, double scale) {
        weightsTOrd.scaleMajor(noTar, scale, 0);
    }

    /** @return the sum of the given target neuron's incoming weights. */
    public double getIncomingSum(int noTar) {
        return weightsTOrd.getMajorSum(noTar, 0);
    }

    /** Applies accumulated weight derivatives (dw) to the weights. */
    public void updateWeights() {
        weightsTOrd.addDw2W();
    }

    /**
     * Sums incoming weights per target neuron into {@code localWtSums}.
     * @return the same array, for chaining
     */
    public double[] calcAndGetSums(double[] localWtSums) {
        weightsTOrd.sumIncoming(localWtSums, 0);
        return localWtSums;
    }

    /** @return the largest stored weight. */
    public double getMaxWeight() {
        return weightsTOrd.getMax(0);
    }

    // Placeholder — intentionally empty; purpose not evident from this file.
    public void mhpStage1() {
    }

    public InterleavedSparseMatrix getWeightsTOrd() {
        return weightsTOrd;
    }

    public InterleavedSparseAddOn gettOrdLastArrivals() {
        return tOrdLastArrivals;
    }

    /**
     * Ad-hoc smoke test: builds a random matrix, prints its CSR-style internals
     * and dense reconstructions, round-trips through COO form, and removes a few
     * entries to exercise re-construction.
     */
    public static void main(String [] args) {
        int numN = 10;
        MANANeurons src = new MANANeurons(numN, true,
                Utils.getUniformRandomArray(numN, 0, 100),
                Utils.getUniformRandomArray(numN, 0, 100),
                Utils.getUniformRandomArray(numN, 0, 100));
        MANANeurons tar = new MANANeurons(numN, true,
                Utils.getUniformRandomArray(numN, 0, 100),
                Utils.getUniformRandomArray(numN, 0, 100),
                Utils.getUniformRandomArray(numN, 0, 100));
        ConnectSpecs cSpecs = new ConnectSpecs(ConnectRule.Random,
                new double[]{0.2}, Math.sqrt(30000), 20);
        MANAMatrix mm = new MANAMatrix(src, tar, Math.sqrt(30000), 20, cSpecs);
        double[] rawVals = mm.weightsTOrd.getRawData();
        for(int ii=0; ii<rawVals.length; ++ii) {
            rawVals[ii] = Math.ceil( Math.random() * 10);
        }
        System.out.println(Arrays.toString(mm.weightsTOrd.getPtrs()));
        System.out.println();
        System.out.println(Arrays.toString(mm.weightsTOrd.getOrdIndices()));
        System.out.println();
        System.out.println(Arrays.toString(mm.weightsTOrd.getValues()));
        int[] srcInd = new int[mm.nnz];
        int[] tarInd = new int[mm.nnz];
        double [] vals = new double[mm.nnz];
        mm.weightsTOrd.getInCOO(srcInd, tarInd, vals, 0);
        double[][] mat = new double[numN][numN];
        int[][] matC = new int[numN][numN];
        for(int ii=0; ii<mm.nnz; ++ii) {
            mat[srcInd[ii]][tarInd[ii]] = vals[ii];
            matC[srcInd[ii]][tarInd[ii]]++;
        }
        System.out.println();
        for(int ii=0; ii<numN; ++ii) {
            System.out.println(Arrays.toString(mat[ii]));
        }
        System.out.println();
        for(int ii=0; ii<numN; ++ii) {
            System.out.println(Arrays.toString(matC[ii]));
        }
        double[] sus = new double[10];
        mm.calcAndGetSums(sus);
        System.out.println();
        System.out.println(Arrays.toString(sus));
        for(int ii=0; ii<numN; ++ii) {
            mm.scaleWeights(ii, 1/sus[ii]);
        }
        System.out.println();
        System.out.println(Arrays.toString(mm.weightsTOrd.getPtrs()));
        System.out.println();
        System.out.println(Arrays.toString(mm.weightsTOrd.getOrdIndices()));
        System.out.println();
        System.out.println(Arrays.toString(mm.weightsTOrd.getValues()));
        srcInd = new int[mm.nnz];
        tarInd = new int[mm.nnz];
        vals = new double[mm.nnz];
        mm.weightsTOrd.getInCOO(srcInd, tarInd, vals, 0);
        mat = new double[numN][numN];
        matC = new int[numN][numN];
        for(int ii=0; ii<mm.nnz; ++ii) {
            mat[srcInd[ii]][tarInd[ii]] = vals[ii];
            matC[srcInd[ii]][tarInd[ii]]++;
        }
        System.out.println();
        for(int ii=0; ii<numN; ++ii) {
            System.out.println(Arrays.toString(mat[ii]));
        }
        COOManaMat coomana = new COOManaMat(mm, Ordering.TARGET);
        MANAMatrix convMana = new MANAMatrix(coomana, src, tar);
        System.out.println();
        System.out.println(Arrays.toString(convMana.weightsTOrd.getPtrs()));
        System.out.println();
        System.out.println(Arrays.toString(convMana.weightsTOrd.getOrdIndices()));
        System.out.println();
        System.out.println(Arrays.toString(convMana.weightsTOrd.getValues()));
        srcInd = new int[convMana.nnz];
        tarInd = new int[convMana.nnz];
        vals = new double[convMana.nnz];
        convMana.weightsTOrd.getInCOO(srcInd, tarInd, vals, 0);
        mat = new double[numN][numN];
        matC = new int[numN][numN];
        for(int ii=0; ii<mm.nnz; ++ii) {
            mat[srcInd[ii]][tarInd[ii]] = vals[ii];
            matC[srcInd[ii]][tarInd[ii]]++;
        }
        System.out.println();
        for(int ii=0; ii<numN; ++ii) {
            System.out.println(Arrays.toString(mat[ii]));
        }
        coomana.data.removeFirst();
        coomana.data.remove(4);
        convMana = new MANAMatrix(coomana, src, tar);
        System.out.println();
        System.out.println(Arrays.toString(convMana.weightsTOrd.getPtrs()));
        System.out.println();
        System.out.println(Arrays.toString(convMana.weightsTOrd.getOrdIndices()));
        System.out.println();
        System.out.println(Arrays.toString(convMana.weightsTOrd.getValues()));
        System.out.println();
        srcInd = new int[convMana.nnz];
        tarInd = new int[convMana.nnz];
        vals = new double[convMana.nnz];
        convMana.weightsTOrd.getInCOO(srcInd, tarInd, vals, 0);
        mat = new double[numN][numN];
        matC = new int[numN][numN];
        for(int ii=0; ii<convMana.nnz; ++ii) {
            mat[srcInd[ii]][tarInd[ii]] = vals[ii];
            matC[srcInd[ii]][tarInd[ii]]++;
        }
        for(int ii=0; ii<numN; ++ii) {
            System.out.println(Arrays.toString(mat[ii]));
        }
        // TODO: Actually use JUnit instead of being lazy... so lazy!
        System.out.println("Dummy. It's a place for a breakpoint! :D");
    }
}
/*
 * Copyright 2000-2013 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jetbrains.python.editor;

import com.intellij.codeInsight.CodeInsightSettings;
import com.intellij.codeInsight.editorActions.CopyPastePreProcessor;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.CharFilter;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiWhiteSpace;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PsiTreeUtil;
import com.jetbrains.python.PyTokenTypes;
import com.jetbrains.python.PythonFileType;
import com.jetbrains.python.PythonLanguage;
import com.jetbrains.python.psi.*;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.List;

/**
 * Re-indents text pasted into Python files so the pasted block matches the
 * indentation at the caret (indentation is semantic in Python).
 *
 * User : catherine
 */
public class PythonCopyPasteProcessor implements CopyPastePreProcessor {

  @Nullable
  @Override
  public String preprocessOnCopy(PsiFile file, int[] startOffsets, int[] endOffsets, String text) {
    // No transformation on copy.
    return null;
  }

  /**
   * Adjusts the indentation of {@code text} before it is pasted at the caret.
   * Skips processing when indent-to-caret is disabled, the file is not Python,
   * or the caret sits inside a string literal. May delete a blank caret line
   * (write action) and extend the selection as a side effect.
   */
  @Override
  public String preprocessOnPaste(Project project, PsiFile file, Editor editor, String text, RawText rawText) {
    if (!CodeInsightSettings.getInstance().INDENT_TO_CARET_ON_PASTE ||
        file.getLanguage() != PythonLanguage.getInstance()) {
      return text;
    }
    final CodeStyleSettings codeStyleSettings = CodeStyleSettingsManager.getSettings(project);
    final boolean useTabs = codeStyleSettings.useTabCharacter(PythonFileType.INSTANCE);
    final int indentSize = codeStyleSettings.getIndentSize(PythonFileType.INSTANCE);
    // Accepts the first character that is NOT the configured indent character,
    // i.e. findFirst(..., NOT_INDENT_FILTER) yields the line's indent width.
    CharFilter NOT_INDENT_FILTER = new CharFilter() {
      public boolean accept(char ch) {
        return useTabs? ch != '\t' : ch != ' ';
      }
    };
    final String indentChar = useTabs ? "\t" : " ";
    final CaretModel caretModel = editor.getCaretModel();
    final SelectionModel selectionModel = editor.getSelectionModel();
    final Document document = editor.getDocument();
    // With an active selection, paste replaces it; anchor at its start.
    final int caretOffset = selectionModel.getSelectionStart() != selectionModel.getSelectionEnd() ?
                            selectionModel.getSelectionStart() : caretModel.getOffset();
    final int lineNumber = document.getLineNumber(caretOffset);
    final int lineStartOffset = getLineStartSafeOffset(document, lineNumber);
    final int lineEndOffset = document.getLineEndOffset(lineNumber);

    final PsiElement element = file.findElementAt(caretOffset);
    // Never re-indent inside string literals — whitespace there is content.
    if (PsiTreeUtil.getParentOfType(element, PyStringLiteralExpression.class) != null) return text;

    text = addLeadingSpaces(text, NOT_INDENT_FILTER, indentSize, indentChar);
    int firstLineIndent = StringUtil.findFirst(text, NOT_INDENT_FILTER);
    final String indentText = getIndentText(file, document, caretOffset, lineNumber, firstLineIndent);

    int toRemove = calculateIndentToRemove(text, NOT_INDENT_FILTER);

    final String toString = document.getText(TextRange.create(lineStartOffset, lineEndOffset));
    if (StringUtil.isEmptyOrSpaces(indentText) && isApplicable(file, text, caretOffset)) {
      caretModel.moveToOffset(lineStartOffset);
      if (StringUtil.isEmptyOrSpaces(toString)) {
        // The caret line is blank: remove it so the pasted block replaces it cleanly.
        ApplicationManager.getApplication().runWriteAction(new Runnable() {
          @Override
          public void run() {
            document.deleteString(lineStartOffset, lineEndOffset);
          }
        });
      }
      editor.getSelectionModel().setSelection(lineStartOffset, selectionModel.getSelectionEnd());
    }

    final List<String> strings = StringUtil.split(text, "\n", false);
    String newText = "";
    if (StringUtil.isEmptyOrSpaces(indentText)) {
      // Re-indent each line: strip the common indent, prepend the caret indent.
      for (String s : strings) {
        newText += indentText + StringUtil.trimStart(s, StringUtil.repeat(indentChar, toRemove));
      }
    }
    else {
      newText = text;
    }
    if (addLinebreak(text, toString, useTabs) && selectionModel.getSelectionStart() ==
                                                selectionModel.getSelectionEnd())
      newText += "\n";
    return newText;
  }

  /**
   * If the second line of {@code text} is indented more than one level deeper
   * than the first, pads the whole text so relative indentation survives the
   * later common-indent strip.
   */
  private static String addLeadingSpaces(String text, final CharFilter filter, int indentSize, String indentChar) {
    final List<String> strings = StringUtil.split(text, "\n", false);
    if (strings.size() > 1) {
      int firstLineIndent = StringUtil.findFirst(strings.get(0), filter);
      int secondLineIndent = StringUtil.findFirst(strings.get(1), filter);
      final int diff = secondLineIndent - firstLineIndent;
      if (diff > indentSize) {
        text = StringUtil.repeat(indentChar, diff - indentSize) + text;
      }
    }
    return text;
  }

  /**
   * Computes the indent string at the paste position. For dedenting keywords
   * (else/elif/except/finally) the previous line's indent is used instead, so
   * the pasted block aligns with the clause it continues.
   */
  private static String getIndentText(@NotNull final PsiFile file,
                                      @NotNull final Document document,
                                      int caretOffset,
                                      int lineNumber,
                                      int firstLineIndent) {
    PsiElement nonWS = PyUtil.findNextAtOffset(file, caretOffset, PsiWhiteSpace.class);
    if (nonWS != null) {
      final IElementType nonWSType = nonWS.getNode().getElementType();
      if (nonWSType == PyTokenTypes.ELSE_KEYWORD || nonWSType == PyTokenTypes.ELIF_KEYWORD ||
          nonWSType == PyTokenTypes.EXCEPT_KEYWORD || nonWSType == PyTokenTypes.FINALLY_KEYWORD) {
        lineNumber -= 1;
        nonWS = PyUtil.findNextAtOffset(file, getLineStartSafeOffset(document, lineNumber), PsiWhiteSpace.class);
      }
    }
    int lineStartOffset = getLineStartSafeOffset(document, lineNumber);
    String indentText = document.getText(TextRange.create(lineStartOffset, caretOffset));
    if (nonWS != null && document.getLineNumber(nonWS.getTextOffset()) == lineNumber) {
      indentText = document.getText(TextRange.create(lineStartOffset, nonWS.getTextOffset()));
    }
    else if (caretOffset == lineStartOffset) {
      final PsiElement ws = file.findElementAt(lineStartOffset);
      if (ws != null) {
        final String wsText = ws.getText();
        final List<String> strings = StringUtil.split(wsText, "\n");
        if (strings.size() >= 1) {
          indentText = strings.get(0);
        }
      }
      // Pasted text already carries this indent — nothing to add.
      if (indentText.length() == firstLineIndent) return "";
    }
    return indentText;
  }

  /**
   * Returns the smallest indent across all non-blank lines of {@code text} —
   * the common indent that can be stripped before re-indenting.
   */
  private static int calculateIndentToRemove(@NotNull String text, @NotNull final CharFilter filter) {
    final List<String> strings = StringUtil.split(text, "\n", false);
    int minIndent = StringUtil.findFirst(text, filter);
    for (String s : strings) {
      final int indent = StringUtil.findFirst(s, filter);
      if (indent < minIndent && !StringUtil.isEmptyOrSpaces(s)) minIndent = indent;
    }
    return minIndent;
  }

  /**
   * Decides whether the paste position qualifies for the line-cleanup /
   * selection-extension behavior in preprocessOnPaste.
   */
  private static boolean isApplicable(@NotNull final PsiFile file, @NotNull String text, int caretOffset) {
    final boolean useTabs = CodeStyleSettingsManager.getSettings(file.getProject()).useTabCharacter(PythonFileType.INSTANCE);
    final PsiElement nonWS = PyUtil.findNextAtOffset(file, caretOffset, PsiWhiteSpace.class);
    if (nonWS == null || text.endsWith("\n")) return true;
    if (inStatementList(file, caretOffset) &&
        (text.startsWith(useTabs ? "\t" : " ") || StringUtil.split(text, "\n").size() > 1)) return true;
    return false;
  }

  /** True when the caret sits inside a statement list, function, or class body. */
  private static boolean inStatementList(@NotNull final PsiFile file, int caretOffset) {
    final PsiElement element = file.findElementAt(caretOffset);
    return PsiTreeUtil.getParentOfType(element, PyStatementList.class) != null ||
           PsiTreeUtil.getParentOfType(element, PyFunction.class) != null ||
           PsiTreeUtil.getParentOfType(element, PyClass.class) != null;
  }

  /**
   * True when an indented or multi-line snippet without a trailing newline is
   * pasted onto a non-blank line — a newline must then be appended.
   */
  private static boolean addLinebreak(@NotNull String text, @NotNull String toString, boolean useTabs) {
    if ((text.startsWith(useTabs ? "\t" : " ") || StringUtil.split(text, "\n").size() > 1)
        && !text.endsWith("\n") && !StringUtil.isEmptyOrSpaces(toString))
      return true;
    return false;
  }

  /**
   * Like Document.getLineStartOffset but clamped: returns the text length for
   * the one-past-last line and 0 for negative line numbers.
   */
  public static int getLineStartSafeOffset(final Document document, int line) {
    if (line == document.getLineCount()) return document.getTextLength();
    if (line < 0) return 0;
    return document.getLineStartOffset(line);
  }
}
package org.cipres.treebase.util;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.lang.Thread.UncaughtExceptionHandler;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;

import org.apache.log4j.Logger;
import org.cipres.treebase.ContextManager;
import org.cipres.treebase.domain.admin.User;
import org.cipres.treebase.domain.matrix.Matrix;
import org.cipres.treebase.domain.nexus.NexusDataSet;
import org.cipres.treebase.domain.study.Study;
import org.cipres.treebase.domain.study.Submission;
import org.cipres.treebase.domain.study.UploadFileResult;
import org.cipres.treebase.domain.taxon.TaxonLabel;
import org.hibernate.Session;
import org.hibernate.Transaction;

/**
 * BulkUpload.java
 *
 * Standalone command-line tool that bulk-loads NEXUS files (or directories of
 * them) into TreeBASE under a pseudo-submission. Supports {@code -f} (force
 * re-processing of already-seen files) and {@code -l <file>} (log file).
 *
 * Created on Jun 6, 2006
 *
 * @author mjd
 */
public class BulkUpload extends AbstractStandalone implements BulkUploadInterface {

    @SuppressWarnings(value = "unused")
    private static final Logger LOGGER = Logger.getLogger(BulkUpload.class);

    // Timestamping logger; wraps either a file stream or System.err.
    private MJDLogger mjdLog = null;
    // When true, process files even if recorded as already done.
    private boolean force = false;

    /**
     * Constructor. Need default constructor for Spring container.
     */
    public BulkUpload() {
        super();
    }

    /** Exits the JVM on any uncaught exception after printing the trace. */
    @SuppressWarnings(value = { "unused" })
    private class TrivialExceptionHandler implements UncaughtExceptionHandler {
        public void uncaughtException(Thread t, Throwable e) {
            e.printStackTrace();
            // Maybe explicitly shut down mesquite here
            // TODO: A much better implementation would be to destroy and build
            // the required objects and continue gracefully
            System.exit(1);
        }
    }

    /**
     * Entry point: bootstraps the Spring context, installs the fail-fast
     * exception handler, and delegates to the Spring-managed bean so that
     * dependency injection applies.
     */
    public static void main(String[] args) {
        setupContext();
        BulkUpload bu = new BulkUpload();
        Thread.setDefaultUncaughtExceptionHandler(bu.new TrivialExceptionHandler());
        BulkUploadInterface me = (BulkUploadInterface) ContextManager.getBean("bulkUpload");
        me.doIt(args);
    }

    /**
     * Parses command-line options ({@code -f} force, {@code -l logfile}) and
     * processes each remaining argument as a file or directory. Always exits
     * the JVM with status 0 when done.
     */
    public void doIt(String[] args) {
        GetOpts<UnixOptions> go = new GetOpts<UnixOptions> (new UnixOptions("fl:"));
        UnixOptions opts = go.getOpts(args);
        setForce(opts.getBoolOpt("f"));
        if (opts.hasOpt("l")) {
            String logFileName = opts.getStringOpt("l");
            try {
                setLogger(new PrintStream(new File(logFileName)));
            } catch (FileNotFoundException e) {
                die("Log file '" + logFileName + "' not found");
            }
        } else {
            setLogger(System.err);
        }
        for (String arg : go.getArgs()) {
            try {
                doFileOrDirectory(arg);
            } catch (IOException e) {
                warn("I/O error processing file '" + arg + "': " + e.getMessage());
            }
        }
        System.exit(0);
    }

    /**
     * Processes a path: recurses (one level) into directories, uploads plain
     * files, and logs-and-skips anything missing or of another type.
     *
     * @param fileOrDirectoryName path to a NEXUS file or a directory of them
     * @throws IOException on logging or upload I/O failure
     */
    public void doFileOrDirectory(String fileOrDirectoryName) throws IOException {
        File fileOrDirectory = new File(fileOrDirectoryName);
        if (fileOrDirectory.isDirectory()) {
            log("Processing directory '" + fileOrDirectoryName + "'");
            doFiles(fileOrDirectory.listFiles());
        } else if (fileOrDirectory.isFile()) {
            doFiles(fileOrDirectory);
        } else if (! fileOrDirectory.exists()) {
            log("File '" + fileOrDirectoryName + "' does not exist; skipping");
        } else {
            log("File '" + fileOrDirectoryName + "' exists, but is not a file or a directory; skipping");
        }
    }

    /**
     * Uploads a batch of files, skipping those already recorded as done
     * (unless forced), and logs the resulting study/submission ids.
     *
     * @param files the batch to upload
     * @throws IOException on logging or upload I/O failure
     */
    public void doFiles(File... files) throws IOException {
        List<File> undoneFiles = new LinkedList<File> ();
        for (File f : files) {
            String fn = f.getAbsolutePath();
            if (didFile(fn)) {
                log("Did file " + fn + " already; skipping");
                continue;
            } else {
                log("Doing file " + fn + " in this batch");
            }
            undoneFiles.add(f);
        }
        if (undoneFiles.isEmpty()) {
            if (files.length > 1) log("Files in this batch are already done");
            return;
        } else
            log("Doing " + undoneFiles.size() + "/" + files.length + " files in this batch.");
        User submitter = null; // No submitter
        //Study s = bulkUpload.addFileSimple(new File(dirName, fn), submitter, session);
        //Study s = addFilesSimple(undoneFiles, submitter, null); //VG 2010-02-18 -- see comment at the method
        Study s = addFilesOneByOne(undoneFiles, submitter);
        if (s == null) {
            log("conversion of files failed");
        } else {
            log("conversion of files successful, saved nexus files as study id=" + s.getId()
                + " submission id=" + s.getSubmission().getId());
        }
    }

    /** True when the file was already processed and force mode is off. */
    private boolean didFile(String fn) {
        // Force processing of this file, if requested
        if (this.force)
            return false;
        else
            return DidNexusFile.didFile(fn);
    }

    /** Writes a timestamped line to the configured log. */
    void log(String s) throws IOException {
        this.mjdLog.write(s);
    }

    /** Flushes pending Hibernate state via the taxon-label home. */
    void flush() {
        ContextManager.getTaxonLabelHome().flush();
    }

    //VG this is not used: the only use is commented out
    Study addFileSimple(File f, User aUser, Session session) throws IOException {
        List<File> theFile = new LinkedList<File> ();
        theFile.add(f);
        return addFilesSimple(theFile, aUser, session);
    }

    //VG 2010-02-18 stopped using this method
    //This method commits matrices, but not trees or the files.
    //Uncommenting the commitTransaction() call does not help much,
    //since then all trees and files are bundled into a huge transaction,
    //where a single failure scraps uploads of all prior trees and files (but not matrices!)
    public Study addFilesSimple(List<File> files, User aUser, Session session) throws IOException {
        //Transaction t = session.beginTransaction();
        Submission sub = getPseudoSubmission();
        Study study = sub.getStudy();
        log("submission ID = " + sub.getId() + " study ID " + study.getId());
        UploadFileResult uploadResult =
            ContextManager.getSubmissionService().addNexusFilesJDBC(sub, files, null);
        // commitTransaction();
        log("added " + uploadResult.getMatrixCount() + " matrices and "
            + uploadResult.getTreeCount() + " trees.");
        return study;
    }

    //VG 2010-02-18 This fixes the deficiency of addFilesSimple by committing each file upload.
    /**
     * Uploads each file in its own transaction so one bad file cannot roll
     * back the whole batch.
     *
     * @return the study the files were attached to
     */
    public Study addFilesOneByOne(List<File> files, User aUser) throws IOException {
        Submission sub = getPseudoSubmission();
        Study study = sub.getStudy();
        log("submission ID = " + sub.getId() + " study ID " + study.getId());
        int countMatrices = 0, countTrees = 0;
        for (File f : files) {
            beginTransaction();
            List<File> theFile = new LinkedList<File> ();
            theFile.add(f);
            UploadFileResult uploadResult =
                ContextManager.getSubmissionService().addNexusFilesJDBC(sub, theFile, null);
            commitTransaction();
            log("added " + uploadResult.getMatrixCount() + " matrices and "
                + uploadResult.getTreeCount() + " trees.");
            countMatrices += uploadResult.getMatrixCount();
            countTrees += uploadResult.getTreeCount();
        }
        // FIX: log message previously read "in his batch" — typo for "this batch"
        // (matches the sibling messages in doFiles()).
        log("Added in this batch: " + countMatrices + " matrices, " + countTrees + " trees.");
        return study;
    }

    /** Returns the shared pseudo-submission all bulk uploads attach to. */
    private static Submission getPseudoSubmission() {
        // return PseudoSubmission.getPseudoSubmission("TEST");
        //VG2010-03-05 Changed to a more appropriate name
        return PseudoSubmission.getPseudoSubmission("UPLOAD");
    }

    /** PrintStream wrapper that prefixes each message with a timestamp. */
    private class MJDLogger extends PrintStream {
        public MJDLogger(String filename) throws IOException {
            this(new FileOutputStream(filename));
        }

        public MJDLogger(OutputStream stream) {
            super(stream);
        }

        public void write(String s) throws IOException {
            long now = System.currentTimeMillis();
            super.print(String.format("%tc ", now));
            super.print(s + "\n");
            super.flush();
        }
    }

    public boolean getForce() {
        return force;
    }

    public void setForce(boolean b) {
        force = b;
    }

    public PrintStream getLogger() {
        return mjdLog;
    }

    public void setLogger(PrintStream logger) {
        this.mjdLog = new MJDLogger(logger);
    }
}
package lx.af.demo.activity.DemoFrame;

import android.os.Bundle;
import android.support.v4.view.ViewPager;
import android.view.View;
import android.widget.ListView;
import android.widget.TextView;

import com.viewpagerindicator.PageIndicator;

import java.util.ArrayList;
import java.util.List;

import butterknife.ButterKnife;
import butterknife.BindView;
import lx.af.demo.R;
import lx.af.demo.adapter.ImagePagerAdapter;
import lx.af.demo.adapter.PostListAdapter;
import lx.af.demo.base.ActionBar;
import lx.af.demo.base.BaseActivity;
import lx.af.demo.model.PostModel;
import lx.af.manager.GlobalThreadManager;
import lx.af.test.TestImageHelper;
import lx.af.utils.ViewUtils.ActionBarScrollFadeHelper;
import lx.af.utils.ViewUtils.BufferedOnClickListener;
import lx.af.utils.ViewUtils.ViewPagerAutoFlipper;
import lx.af.view.SwipeRefresh.SwipeRefreshLayout;
import lx.af.view.SwipeRefresh.SwipeRefreshListLayout;
import lx.af.widget.LoadingBkgView;
import lx.af.widget.iconify.widget.IconTextView;

/**
 * Demo list screen: a pull-to-refresh / load-more ListView of fake posts with an
 * auto-flipping image-pager header and an action bar that fades in as the list scrolls.
 * All "network" calls are simulated with Thread.sleep on a background pool.
 *
 * author: lx
 * date: 15-12-15
 */
public class PostListActivity extends BaseActivity implements
        SwipeRefreshLayout.OnRefreshListener,
        SwipeRefreshListLayout.OnLoadMoreListener,
        ActionBar.Default.Callback.Overlay {

    // Bound by ButterKnife in onCreate.
    @BindView(R.id.activity_swipe_refresh_listview)
    ListView mListView;
    @BindView(R.id.activity_swipe_refresh_layout)
    SwipeRefreshListLayout mSwipeRefreshLayout;
    @BindView(R.id.activity_swipe_refresh_loading_view)
    LoadingBkgView mLoadingView;

    // Header banner pager + indicator, inflated from R.layout.post_list_header.
    private ViewPager mHeaderViewPager;
    private PageIndicator mPageIndicator;
    // Action-bar views captured in onActionBarCreated (Overlay callback).
    private TextView mActionBarTitle;
    private View mActionBarBack;
    private View mActionBarMenu;
    private ViewPagerAutoFlipper mPageFlipper;
    private PostListAdapter mAdapter;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_post_list);
        ButterKnife.bind(this);
        mSwipeRefreshLayout.setOnRefreshListener(this);
        mSwipeRefreshLayout.setOnLoadMoreListener(this);
        // Trigger load-more when the user is 1 item from the list end.
        mSwipeRefreshLayout.setLoadMorePreCount(1);
        View header = View.inflate(this, R.layout.post_list_header, null);
        mPageIndicator = (PageIndicator) header.findViewById(R.id.swipe_refresh_header_pager_indicator);
        mHeaderViewPager = (ViewPager) header.findViewById(R.id.swipe_refresh_header_pager);
        // Auto-advance the banner every 2.5 seconds.
        mPageFlipper = ViewPagerAutoFlipper.newInstance(mHeaderViewPager).setInterval(2500);
        mLoadingView.setRetryClickCallback(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                initData();
            }
        });
        mAdapter = new PostListAdapter(this);
        mListView.addHeaderView(header);
        mListView.setAdapter(mAdapter);
        // Fade the action bar in/out as the header scrolls off screen.
        // NOTE(review): mActionBarTitle/mActionBarMenu/mActionBarBack are assigned in
        // onActionBarCreated — this assumes BaseActivity invokes that callback before
        // onCreate reaches this point (e.g. from setContentView); TODO confirm, else NPE.
        ActionBarScrollFadeHelper
                .with(getActionBarView())
                .startOffset(mHeaderViewPager)
                .endOffset(header)
                .addFadeWithView(getActionBarDivider())
                .addFadeWithView(mActionBarTitle)
                .addFadeWithView(mActionBarMenu)
                .addFadeReverseDrawable(mActionBarBack.getBackground())
                .start(mSwipeRefreshLayout);
        initData();
    }

    @Override
    protected void onPostCreate(Bundle savedInstanceState) {
        super.onPostCreate(savedInstanceState);
    }

    /**
     * Overlay action-bar callback: captures the bar's views, styles the back/menu
     * buttons, and makes a double-tap on the title scroll to top and refresh.
     */
    @Override
    public void onActionBarCreated(View actionBar, IconTextView left, TextView title, IconTextView right) {
        mActionBarTitle = title;
        mActionBarBack = left;
        mActionBarBack.setBackgroundResource(R.drawable.swipe_activity_back_bkg);
        right.setText("{md-add}");
        right.setClickable(true);
        mActionBarMenu = right;
        mActionBarTitle.setOnClickListener(new BufferedOnClickListener() {
            @Override
            public void onBufferedClick(View v, int clickCount) {
                if (clickCount >= 2) {
                    // double click title, scroll to top and refresh list
                    mListView.smoothScrollToPositionFromTop(0, 0);
                    loadData(true);
                }
            }
        });
    }

    @Override
    public void onLoadMore() {
        loadData(false);
    }

    @Override
    public void onRefresh() {
        loadData(true);
    }

    /**
     * First load: shows the full-screen loading view, sleeps 2s on a worker thread
     * to simulate I/O, then either shows fail/empty state or populates the list
     * and the header banner (2-5 random images).
     */
    private void initData() {
        mLoadingView.loading();
        GlobalThreadManager.runInThreadPool(new Runnable() {
            @Override
            public void run() {
                try {
                    Thread.sleep(2000);
                } catch (InterruptedException ignore) {
                }
                final List<PostModel> list = PostModel.createRandomList(8, true);
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        if (list == null) {
                            mLoadingView.fail("simulate load data fail, click to reload");
                        } else if (list.size() == 0) {
                            mLoadingView.empty("simulate data list empty, click to reload");
                        } else {
                            mLoadingView.done(300);
                            mAdapter.addAll(list);
                            mAdapter.notifyDataSetChanged();
                            resetHeaderViewPager(TestImageHelper.randomImageListL(2, 5));
                        }
                    }
                });
            }
        });
    }

    /**
     * Refresh (refresh=true: reset adapter, new banner, 1.6s delay) or load-more
     * (refresh=false: append, 0.8s delay). A null result simulates failure;
     * load-more stays enabled only while pages come back non-empty.
     */
    private void loadData(final boolean refresh) {
        GlobalThreadManager.runInThreadPool(new Runnable() {
            @Override
            public void run() {
                try {
                    Thread.sleep(refresh ? 1600 : 800);
                } catch (InterruptedException ignore) {
                }
                final List<PostModel> postList = PostModel.createRandomList(10, true);
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        if (refresh) {
                            resetHeaderViewPager(TestImageHelper.randomImageListL(2, 5));
                        }
                        if (postList == null) {
                            toastShort("simulate load data fail, refresh=" + refresh);
                            mSwipeRefreshLayout.setLoadMoreFailed();
                            return;
                        }
                        if (refresh) {
                            mAdapter.reset(postList);
                        } else {
                            mAdapter.addAll(postList);
                        }
                        mAdapter.notifyDataSetChanged();
                        mSwipeRefreshLayout.setLoadMoreEnabled(postList.size() > 0);
                        mSwipeRefreshLayout.setLoading(false);
                    }
                });
            }
        });
    }

    // Swaps the banner images and restarts the auto-flipper.
    private void resetHeaderViewPager(ArrayList<String> imageList) {
        mHeaderViewPager.setAdapter(new ImagePagerAdapter(imageList));
        mPageIndicator.setViewPager(mHeaderViewPager);
        mPageFlipper.start();
    }
}
package com.linkedin.thirdeye.detector.driver;

import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.hibernate.SessionFactory;
import org.joda.time.DateTime;
import org.quartz.CronScheduleBuilder;
import org.quartz.CronTrigger;
import org.quartz.JobBuilder;
import org.quartz.JobDetail;
import org.quartz.JobKey;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.Trigger;
import org.quartz.TriggerBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.codahale.metrics.MetricRegistry;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
import com.linkedin.thirdeye.client.timeseries.TimeSeriesHandler;
import com.linkedin.thirdeye.client.timeseries.TimeSeriesResponseConverter;
import com.linkedin.thirdeye.detector.api.AnomalyFunctionSpec;
import com.linkedin.thirdeye.detector.db.AnomalyFunctionRelationDAO;
import com.linkedin.thirdeye.detector.db.AnomalyFunctionSpecDAO;
import com.linkedin.thirdeye.detector.db.AnomalyResultDAO;
import com.linkedin.thirdeye.detector.driver.TestAnomalyApplication.TestType;
import com.linkedin.thirdeye.detector.function.AnomalyFunction;
import com.linkedin.thirdeye.detector.function.AnomalyFunctionFactory;

/**
 * Schedules and runs anomaly-detection functions through Quartz.
 *
 * <p>Supports three modes: cron-scheduled jobs ({@link #start(Long)} /
 * {@link #stop(Long)}), ad-hoc runs of a persisted function spec
 * ({@link #runAdHoc}), and ad-hoc runs of a spec loaded from a YAML file
 * ({@link #runAdhocFile}). All mutation of the scheduled-job bookkeeping is
 * serialized on {@code sync}; thread-safe under that lock.
 */
public class AnomalyDetectionJobManager {
  private static final Logger LOG = LoggerFactory.getLogger(AnomalyDetectionJobManager.class);

  private final Scheduler quartzScheduler;
  // private final ThirdEyeClient thirdEyeClient;
  private final TimeSeriesHandler timeSeriesHandler;
  private final TimeSeriesResponseConverter timeSeriesResponseConverter;
  private final AnomalyFunctionSpecDAO specDAO;
  private final AnomalyFunctionRelationDAO relationDAO;
  private final AnomalyResultDAO resultDAO;
  private final SessionFactory sessionFactory;
  // Guards scheduledJobKeys and all schedule/unschedule operations.
  private final Object sync;
  // Function id -> Quartz job key, for jobs started via start(Long).
  private final Map<Long, String> scheduledJobKeys;
  private final MetricRegistry metricRegistry;
  private final AnomalyFunctionFactory anomalyFunctionFactory;
  private final FailureEmailConfiguration failureEmailConfig;

  // YAML-configured mapper used by runAdhocFile; ObjectMapper is thread-safe for reads.
  private static final ObjectMapper reader = new ObjectMapper(new YAMLFactory());

  public AnomalyDetectionJobManager(Scheduler quartzScheduler, TimeSeriesHandler timeSeriesHandler,
      TimeSeriesResponseConverter timeSeriesResponseConverter, AnomalyFunctionSpecDAO specDAO,
      AnomalyFunctionRelationDAO relationDAO, AnomalyResultDAO resultDAO,
      SessionFactory sessionFactory, MetricRegistry metricRegistry,
      AnomalyFunctionFactory anomalyFunctionFactory, FailureEmailConfiguration failureEmailConfig) {
    this.quartzScheduler = quartzScheduler;
    // this.thirdEyeClient = thirdEyeClient;
    this.timeSeriesHandler = timeSeriesHandler;
    this.timeSeriesResponseConverter = timeSeriesResponseConverter;
    this.specDAO = specDAO;
    this.relationDAO = relationDAO;
    this.resultDAO = resultDAO;
    this.sessionFactory = sessionFactory;
    this.metricRegistry = metricRegistry;
    this.sync = new Object();
    this.scheduledJobKeys = new HashMap<>();
    this.anomalyFunctionFactory = anomalyFunctionFactory;
    this.failureEmailConfig = failureEmailConfig;
  }

  /** @return sorted ids of all functions currently cron-scheduled via {@link #start(Long)}. */
  public List<Long> getActiveJobs() {
    synchronized (sync) {
      List<Long> jobs = new ArrayList<>(scheduledJobKeys.keySet());
      Collections.sort(jobs);
      return jobs;
    }
  }

  /**
   * Runs the persisted function {@code id} once, immediately, over the given window.
   * Not tracked in {@link #getActiveJobs()}.
   *
   * @throws IllegalArgumentException if no spec exists for {@code id}
   */
  public void runAdHoc(Long id, String windowStartIsoString, String windowEndIsoString)
      throws Exception {
    synchronized (sync) {
      AnomalyFunctionSpec spec = specDAO.findById(id);
      if (spec == null) {
        throw new IllegalArgumentException("No function with id " + id);
      }
      AnomalyFunction anomalyFunction = anomalyFunctionFactory.fromSpec(spec);
      String triggerKey = String.format("ad_hoc_anomaly_function_trigger_%d", spec.getId());
      Trigger trigger = TriggerBuilder.newTrigger().withIdentity(triggerKey).startNow().build();
      String jobKey = String.format("ad_hoc_anomaly_function_job_%d", spec.getId());
      buildAndScheduleJob(jobKey, trigger, anomalyFunction, spec, windowStartIsoString,
          windowEndIsoString);
    }
  }

  /**
   * Builds the Quartz JobDetail, wires all collaborators into its data map, and
   * submits it with the given trigger.
   *
   * @param jobKey unique Quartz job identity
   * @param trigger when to fire (immediate or cron)
   * @param anomalyFunction materialized function to execute
   * @param spec persisted spec (logged for traceability)
   * @param windowStartIsoString detection window start, or null to derive from fire time
   * @param windowEndIsoString detection window end, or null to derive from fire time
   * @throws SchedulerException if Quartz rejects the job
   */
  private void buildAndScheduleJob(String jobKey, Trigger trigger, AnomalyFunction anomalyFunction,
      AnomalyFunctionSpec spec, String windowStartIsoString, String windowEndIsoString)
      throws SchedulerException {
    JobDetail job = JobBuilder.newJob(AnomalyDetectionJob.class).withIdentity(jobKey).build();
    job.getJobDataMap().put(AnomalyDetectionJob.FUNCTION, anomalyFunction);
    // job.getJobDataMap().put(AnomalyDetectionJob.CLIENT, thirdEyeClient);
    job.getJobDataMap().put(AnomalyDetectionJob.TIME_SERIES_HANDLER, timeSeriesHandler);
    job.getJobDataMap().put(AnomalyDetectionJob.TIME_SERIES_RESPONSE_CONVERTER,
        timeSeriesResponseConverter);
    job.getJobDataMap().put(AnomalyDetectionJob.WINDOW_START, windowStartIsoString);
    job.getJobDataMap().put(AnomalyDetectionJob.WINDOW_END, windowEndIsoString);
    job.getJobDataMap().put(AnomalyDetectionJob.RESULT_DAO, resultDAO);
    job.getJobDataMap().put(AnomalyDetectionJob.SESSION_FACTORY, sessionFactory);
    job.getJobDataMap().put(AnomalyDetectionJob.METRIC_REGISTRY, metricRegistry);
    job.getJobDataMap().put(AnomalyDetectionJob.RELATION_DAO, relationDAO);
    job.getJobDataMap().put(FailureEmailConfiguration.FAILURE_EMAIL_CONFIG_KEY, failureEmailConfig);
    quartzScheduler.scheduleJob(job, trigger);
    LOG.info("Started {}: {}", jobKey, spec);
  }

  /**
   * Cron-schedules the persisted function {@code id} using the spec's cron expression
   * and registers it in {@link #getActiveJobs()}.
   *
   * @throws IllegalArgumentException if no spec exists for {@code id}
   */
  public void start(Long id) throws Exception {
    synchronized (sync) {
      AnomalyFunctionSpec spec = specDAO.findById(id);
      if (spec == null) {
        throw new IllegalArgumentException("No function with id " + id);
      }
      AnomalyFunction anomalyFunction = anomalyFunctionFactory.fromSpec(spec);
      String triggerKey = String.format("scheduled_anomaly_function_trigger_%d", spec.getId());
      CronTrigger trigger = TriggerBuilder.newTrigger().withIdentity(triggerKey)
          .withSchedule(CronScheduleBuilder.cronSchedule(spec.getCron())).build();
      String jobKey = String.format("scheduled_anomaly_function_job_%d", spec.getId());
      buildAndScheduleJob(jobKey, trigger, anomalyFunction, spec, null, null); // use schedule time
                                                                               // to determine
                                                                               // start/end
      // FIX: record the job only after Quartz accepted it; previously the id was
      // put in the map before scheduling, so a scheduling failure left a stale
      // entry that getActiveJobs() reported and stop() would try to delete.
      scheduledJobKeys.put(id, jobKey);
    }
  }

  /**
   * Unschedules the cron job previously started for {@code id}.
   *
   * @throws IllegalArgumentException if {@code id} was never started (or already stopped)
   */
  public void stop(Long id) throws Exception {
    synchronized (sync) {
      String jobKey = scheduledJobKeys.remove(id);
      if (jobKey == null) {
        throw new IllegalArgumentException("No scheduled job for function id " + id);
      }
      quartzScheduler.deleteJob(JobKey.jobKey(jobKey));
      LOG.info("Stopped {}", jobKey);
    }
  }

  /**
   * Available for testing, but anomalies need to be created with a valid anomaly function ID
   * (foreign key constraint).
   *
   * <p>Loads an AnomalyFunctionSpec from a YAML file, overrides its id with
   * {@code existingFunctionId}, and runs it once over the given window.
   */
  public void runAdhocFile(String filePath, int existingFunctionId, String windowStartIsoString,
      String windowEndIsoString) throws Exception {
    synchronized (sync) {
      File file = new File(filePath);
      if (!file.exists() || file.isDirectory()) {
        throw new IllegalArgumentException("File does not exist or is a directory: " + file);
      }
      AnomalyFunctionSpec spec = reader.readValue(file, AnomalyFunctionSpec.class);
      spec.setId(existingFunctionId);
      runAdhocConfig(spec, windowStartIsoString, windowEndIsoString, filePath);
    }
  }

  /**
   * Runs an in-memory spec once, immediately, identified by {@code executionName}
   * (e.g. the source file path).
   */
  // FIX: dropped the redundant "throws SchedulerException" — it is already
  // covered by "throws Exception"; callers are unaffected.
  public void runAdhocConfig(AnomalyFunctionSpec spec, String windowStartIsoString,
      String windowEndIsoString, String executionName) throws Exception {
    AnomalyFunction anomalyFunction = anomalyFunctionFactory.fromSpec(spec);
    String triggerKey = String.format("file-based_anomaly_function_trigger_%s", executionName);
    Trigger trigger = TriggerBuilder.newTrigger().withIdentity(triggerKey).startNow().build();
    String jobKey = String.format("file-based_anomaly_function_job_%s", executionName);
    buildAndScheduleJob(jobKey, trigger, anomalyFunction, spec, windowStartIsoString,
        windowEndIsoString);
  }

  /**
   * Standalone test entry point: runs one function spec file against a detector
   * config. With 2 args the window defaults to the last 4 hours offset by the
   * 3-hour data delay, compared week-over-week.
   */
  public static void main(String[] args) throws Exception {
    if (args.length != 2 && args.length != 4) {
      System.err.println(
          "Arguments must be configYml functionSpecPath [startISO endISO, both hour aligned]");
      System.exit(1);
    }
    String thirdEyeConfigDir = args[0];
    System.setProperty("dw.rootDir", thirdEyeConfigDir);
    String detectorApplicationConfigFile = thirdEyeConfigDir + "/" + "detector.yml";
    String filePath = args[1];
    String startISO = null;
    String endISO = null;
    if (args.length == 4) {
      startISO = args[2];
      endISO = args[3];
    } else {
      DateTime now = DateTime.now().minusHours(3); // data delay.
      startISO = now.minusDays(7) // subtract 7 days to set up w/w comparison
          .minusHours(4) // subtract hours to specify what the length of the comparison window is
          .toString();
      endISO = now.toString();
    }
    int existingFunctionId = 1;
    new TestAnomalyApplication(filePath, startISO, endISO, TestType.FUNCTION, existingFunctionId)
        .run(new String[] { "server", detectorApplicationConfigFile });
  }
}
package org.jcodec.codecs.prores;

import static java.lang.Math.min;
import static java.util.Arrays.fill;
import static org.jcodec.codecs.prores.ProresConsts.dcCodebooks;
import static org.jcodec.codecs.prores.ProresConsts.firstDCCodebook;
import static org.jcodec.codecs.prores.ProresConsts.interlaced_scan;
import static org.jcodec.codecs.prores.ProresConsts.levCodebooks;
import static org.jcodec.codecs.prores.ProresConsts.progressive_scan;
import static org.jcodec.codecs.prores.ProresConsts.runCodebooks;
import static org.jcodec.common.dct.SimpleIDCT10Bit.idct10;
import static org.jcodec.common.tools.MathUtil.log2;
import static org.jcodec.common.tools.MathUtil.toSigned;

import java.nio.ByteBuffer;

import org.jcodec.codecs.prores.ProresConsts.FrameHeader;
import org.jcodec.codecs.prores.ProresConsts.PictureHeader;
import org.jcodec.common.VideoCodecMeta;
import org.jcodec.common.VideoDecoder;
import org.jcodec.common.io.BitReader;
import org.jcodec.common.io.NIOUtils;
import org.jcodec.common.logging.Logger;
import org.jcodec.common.model.ColorSpace;
import org.jcodec.common.model.Picture8Bit;
import org.jcodec.common.model.Rect;
import org.jcodec.common.model.Size;
import org.jcodec.platform.Platform;

/**
 *
 * This class is part of JCodec ( www.jcodec.org ) This software is distributed
 * under FreeBSD License
 *
 * Decoder for Apple ProRes format
 *
 * As posted at http://git.videolan.org/?p=ffmpeg.git;a=commitdiff;h=5554d
 * e13b29b9bb812ee5cfd606349873ddf0945
 *
 * @author The JCodec project
 *
 */
public class ProresDecoder extends VideoDecoder {

    public ProresDecoder() {
    }

    /**
     * Factory: a downscale factor of 2/4/8 selects a thumbnailing subclass that
     * decodes at reduced resolution; any other value returns a full decoder.
     */
    public static ProresDecoder createProresDecoder(int downscale) {
        if (2 == downscale) {
            return new ProresToThumb4x4();
        } else if (4 == downscale) {
            return new ProresToThumb2x2();
        } else if (8 == downscale) {
            return new ProresToThumb();
        } else {
            return new ProresDecoder();
        }
    }

    // Lookup table: table[b] = number of leading zero bits in the byte b
    // (table[0] == 8, table[1] == 7, ..., table[128..255] == 0).
    static final int[] table = new int[] {
            8, 7, 6, 6, 5, 5, 5, 5, 4, 4, 4, 4, 4, 4, 4, 4,
            3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
            2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
            2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
            1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
            1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
            1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
            1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };

    // mask[h] == -1 only when h == 8, i.e. the high byte was all zeros and the
    // low byte's zero count must be added in nZeros().
    static final int[] mask = new int[] { 0, 0, 0, 0, 0, 0, 0, 0, -1 };

    /**
     * Number of leading zero bits in a 16-bit value, computed byte-wise from the
     * lookup table; the low byte contributes only when the high byte is zero.
     */
    public static final int nZeros(int check16Bit) {
        int low = table[check16Bit & 0xff];
        check16Bit >>= 8;
        int high = table[check16Bit];
        return high + (mask[high] & low);
    }

    /**
     * Decodes one codeword using the adaptive Rice/exp-Golomb codebook: the count
     * of leading zeros selects between the Rice branch (q <= switchBits) and the
     * Golomb branch. Logs (does not throw) on a >16-bit Golomb read.
     */
    public static final int readCodeword(BitReader reader, Codebook codebook) {
        int q = nZeros(reader.check16Bits());
        reader.skipFast(q + 1);
        if (q > codebook.switchBits) {
            int bits = codebook.golombBits + q;
            if (bits > 16)
                Logger.error("Broken prores slice");
            return ((1 << bits) | reader.readFast16(bits)) - codebook.golombOffset;
        } else if (codebook.riceOrder > 0)
            return (q << codebook.riceOrder) | reader.readFast16(codebook.riceOrder);
        else
            return q;
    }

    // Zigzag (unsigned Golomb) -> signed mapping: 0,1,2,3,... -> 0,-1,1,-2,...
    public static final int golumbToSigned(int val) {
        return (val >> 1) ^ golumbSign(val);
    }

    // -1 for odd values, 0 for even (all-ones / all-zeros sign word).
    public static final int golumbSign(int val) {
        return -(val & 1);
    }

    /**
     * Reads the DC coefficient of every block in the slice. DCs are delta-coded
     * with a sign carried across blocks; each output is biased by 4096 (decoder
     * works at 10-bit levels). A negative codeword means damaged data: decoding
     * stops silently, leaving the remaining DCs untouched.
     */
    public static final void readDCCoeffs(BitReader bits, int[] qMat, int[] out, int blocksPerSlice, int blkSize) {
        int c = readCodeword(bits, firstDCCodebook);
        if (c < 0) {
            return; // throw new RuntimeException("DC tex damaged");
        }
        int prevDc = golumbToSigned(c);
        out[0] = 4096 + qScale(qMat, 0, prevDc);
        int code = 5, sign = 0, idx = blkSize;
        for (int i = 1; i < blocksPerSlice; i++, idx += blkSize) {
            code = readCodeword(bits, dcCodebooks[min(code, 6)]);
            if (code < 0) {
                return; // throw new RuntimeException("DC tex damaged");
            }
            if (code != 0)
                sign ^= golumbSign(code);
            else
                sign = 0;
            prevDc += toSigned((code + 1) >> 1, sign);
            out[idx] = 4096 + qScale(qMat, 0, prevDc);
        }
    }

    /**
     * Reads run/level-coded AC coefficients for the whole slice. Coefficients are
     * interleaved across blocks: position p maps to block (p & blockMask) at scan
     * index (p >> log2BlocksPerSlice). Run and level codebooks adapt to the
     * previous symbol. Damaged runs/levels terminate decoding silently.
     *
     * @param max upper bound on the scan index actually stored (subclasses decode
     *            fewer coefficients for thumbnails)
     * @param log2blkSize log2 of a block's coefficient count (6 for 8x8)
     */
    protected static final void readACCoeffs(BitReader bits, int[] qMat, int[] out, int blocksPerSlice, int[] scan,
            int max, int log2blkSize) {
        int run = 4;
        int level = 2;
        int blockMask = blocksPerSlice - 1; // since blocksPerSlice is 1 << n
        int log2BlocksPerSlice = log2(blocksPerSlice);
        int maxCoeffs = 64 << log2BlocksPerSlice;
        int pos = blockMask;
        while (bits.remaining() > 32 || bits.checkAllBits() != 0) {
            run = readCodeword(bits, runCodebooks[min(run, 15)]);
            if (run < 0 || run >= maxCoeffs - pos - 1) {
                return; // throw new RuntimeException("AC tex damaged, RUN");
            }
            pos += run + 1;
            level = readCodeword(bits, levCodebooks[min(level, 9)]) + 1;
            if (level < 0 || level > 65535) {
                return; // throw new RuntimeException("DC tex damaged, LEV");
            }
            int sign = -bits.read1Bit();
            int ind = pos >> log2BlocksPerSlice;
            if (ind >= max)
                break;
            out[((pos & blockMask) << log2blkSize) + scan[ind]] = qScale(qMat, ind, toSigned(level, sign));
        }
    }

    // Dequantize one coefficient; >> 2 compensates the x4 scale in scaleMat/qMat.
    private static final int qScale(int[] qMat, int ind, int val) {
        return ((val * qMat[ind]) >> 2);
    }

    /**
     * Decodes DC + AC coefficients for one plane of a slice and runs the 10-bit
     * IDCT on each 8x8 block in place. Slice-level errors are reported and
     * suppressed so one bad slice cannot abort the frame.
     */
    protected void decodeOnePlane(BitReader bits, int blocksPerSlice, int[] out, int[] qMat, int[] scan, int mbX,
            int mbY, int plane) {
        try {
            readDCCoeffs(bits, qMat, out, blocksPerSlice, 64);
            readACCoeffs(bits, qMat, out, blocksPerSlice, scan, 64, 6);
        } catch (RuntimeException e) {
            System.err.println("Suppressing slice error at [" + mbX + ", " + mbY + "].");
        }
        for (int i = 0; i < blocksPerSlice; i++) {
            idct10(out, i << 6);
        }
    }

    /**
     * Decodes a whole frame into caller-supplied planes (Y, Cb, Cr). Dimensions
     * are rounded up to 16-pixel macroblocks; interlaced frames are decoded as
     * two interleaved fields in top-field-first or bottom-field-first order.
     *
     * @throws RuntimeException if the target buffers are too small for the frame
     */
    public Picture8Bit decodeFrame8Bit(ByteBuffer data, byte[][] target) {
        FrameHeader fh = readFrameHeader(data);
        int codedWidth = (fh.width + 15) & ~0xf;
        int codedHeight = (fh.height + 15) & ~0xf;
        int lumaSize = codedWidth * codedHeight;
        // chromaType 2 -> 4:2:2 (half), chromaType 3 -> 4:4:4 (full).
        int chromaSize = lumaSize >> (3 - fh.chromaType);
        if (target == null || target[0].length < lumaSize || target[1].length < chromaSize
                || target[2].length < chromaSize) {
            throw new RuntimeException("Provided output picture won't fit into provided buffer");
        }
        if (fh.frameType == 0) {
            decodePicture(data, target, fh.width, fh.height, codedWidth >> 4, fh.qMatLuma, fh.qMatChroma, fh.scan, 0,
                    fh.chromaType);
        } else {
            decodePicture(data, target, fh.width, fh.height >> 1, codedWidth >> 4, fh.qMatLuma, fh.qMatChroma,
                    fh.scan, fh.topFieldFirst ? 1 : 2, fh.chromaType);
            decodePicture(data, target, fh.width, fh.height >> 1, codedWidth >> 4, fh.qMatLuma, fh.qMatChroma,
                    fh.scan, fh.topFieldFirst ? 2 : 1, fh.chromaType);
        }
        return new Picture8Bit(codedWidth, codedHeight, target, fh.chromaType == 2 ? ColorSpace.YUV422
                : ColorSpace.YUV444, new Rect(0, 0, fh.width, fh.height));
    }

    /**
     * Decodes a frame as separate field pictures: one picture for progressive
     * input, two half-height pictures (in temporal order) for interlaced input.
     * NOTE(review): this path sizes chroma as 4:2:2 regardless of chromaType,
     * unlike decodeFrame8Bit — presumably intentional for this API; confirm.
     */
    public Picture8Bit[] decodeFields8Bit(ByteBuffer data, byte[][][] target) {
        FrameHeader fh = readFrameHeader(data);
        int codedWidth = (fh.width + 15) & ~0xf;
        int codedHeight = (fh.height + 15) & ~0xf;
        int lumaSize = codedWidth * codedHeight;
        int chromaSize = lumaSize >> 1;
        if (fh.frameType == 0) {
            if (target == null || target[0][0].length < lumaSize || target[0][1].length < chromaSize
                    || target[0][2].length < chromaSize) {
                throw new RuntimeException("Provided output picture won't fit into provided buffer");
            }
            decodePicture(data, target[0], fh.width, fh.height, codedWidth >> 4, fh.qMatLuma, fh.qMatChroma, fh.scan,
                    0, fh.chromaType);
            return new Picture8Bit[] { Picture8Bit.createPicture8Bit(codedWidth, codedHeight, target[0],
                    ColorSpace.YUV422) };
        } else {
            lumaSize >>= 1;
            chromaSize >>= 1;
            if (target == null || target[0][0].length < lumaSize || target[0][1].length < chromaSize
                    || target[0][2].length < chromaSize || target[1][0].length < lumaSize
                    || target[1][1].length < chromaSize || target[1][2].length < chromaSize) {
                throw new RuntimeException("Provided output picture won't fit into provided buffer");
            }
            decodePicture(data, target[fh.topFieldFirst ? 0 : 1], fh.width, fh.height >> 1, codedWidth >> 4,
                    fh.qMatLuma, fh.qMatChroma, fh.scan, 0, fh.chromaType);
            decodePicture(data, target[fh.topFieldFirst ? 1 : 0], fh.width, fh.height >> 1, codedWidth >> 4,
                    fh.qMatLuma, fh.qMatChroma, fh.scan, 0, fh.chromaType);
            return new Picture8Bit[] {
                    Picture8Bit.createPicture8Bit(codedWidth, codedHeight >> 1, target[0], ColorSpace.YUV422),
                    Picture8Bit.createPicture8Bit(codedWidth, codedHeight >> 1, target[1], ColorSpace.YUV422) };
        }
    }

    /**
     * Parses the frame ("icpf") header: size, dimensions, frame/chroma type,
     * field order, and the optional luma/chroma quantization matrices (flags2
     * bit 1 = luma present, bit 0 = chroma present; absent matrices default to
     * flat 4s). Positions the buffer at the first picture.
     *
     * @throws RuntimeException if the "icpf" signature is missing
     */
    @SuppressWarnings("unused")
    public static FrameHeader readFrameHeader(ByteBuffer inp) {
        int frameSize = inp.getInt();
        String sig = readSig(inp);
        if (!"icpf".equals(sig))
            throw new RuntimeException("Not a prores frame");
        short hdrSize = inp.getShort();
        short version = inp.getShort();
        int res1 = inp.getInt();
        short width = inp.getShort();
        short height = inp.getShort();
        int flags1 = inp.get();
        int frameType = (flags1 >> 2) & 3;
        int chromaType = (flags1 >> 6) & 3;
        int[] scan;
        boolean topFieldFirst = false;
        if (frameType == 0) {
            scan = progressive_scan;
        } else {
            scan = interlaced_scan;
            if (frameType == 1)
                topFieldFirst = true;
        }
        byte res2 = inp.get();
        byte prim = inp.get();
        byte transFunc = inp.get();
        byte matrix = inp.get();
        byte pixFmt = inp.get();
        byte res3 = inp.get();
        int flags2 = inp.get() & 0xff;
        int[] qMatLuma = new int[64];
        int[] qMatChroma = new int[64];
        if (hasQMatLuma(flags2)) {
            readQMat(inp, qMatLuma, scan);
        } else {
            fill(qMatLuma, 4);
        }
        if (hasQMatChroma(flags2)) {
            readQMat(inp, qMatChroma, scan);
        } else {
            fill(qMatChroma, 4);
        }
        // Skip any remaining header bytes (20 fixed bytes + 64 per present matrix).
        inp.position(inp.position() + hdrSize
                - (20 + (hasQMatLuma(flags2) ? 64 : 0) + (hasQMatChroma(flags2) ? 64 : 0)));
        return new FrameHeader(frameSize - hdrSize - 8, width, height, frameType, topFieldFirst, scan, qMatLuma,
                qMatChroma, chromaType);
    }

    // Reads the 4-byte fourcc signature as a String.
    static final String readSig(ByteBuffer inp) {
        byte[] sig = new byte[4];
        inp.get(sig);
        return Platform.stringFromBytes(sig);
    }

    /**
     * Decodes one picture (frame or field): walks the slice table left-to-right,
     * top-to-bottom; the rightmost slices of a row shrink (halving) when the row
     * width is not a multiple of the slice width.
     */
    protected void decodePicture(ByteBuffer data, byte[][] result, int width, int height, int mbWidth, int[] qMatLuma,
            int[] qMatChroma, int[] scan, int pictureType, int chromaType) {
        ProresConsts.PictureHeader ph = readPictureHeader(data);
        int mbX = 0, mbY = 0;
        int sliceMbCount = 1 << ph.log2SliceMbWidth;
        for (int i = 0; i < ph.sliceSizes.length; i++) {
            while (mbWidth - mbX < sliceMbCount)
                sliceMbCount >>= 1;
            decodeSlice(NIOUtils.read(data, ph.sliceSizes[i]), qMatLuma, qMatChroma, scan, sliceMbCount, mbX, mbY,
                    ph.sliceSizes[i], result, width, pictureType, chromaType);
            mbX += sliceMbCount;
            if (mbX == mbWidth) {
                sliceMbCount = 1 << ph.log2SliceMbWidth;
                mbX = 0;
                mbY++;
            }
        }
    }

    // Parses the picture header: slice count, log2 slice width (in MBs) and the
    // per-slice byte sizes.
    @SuppressWarnings("unused")
    public static PictureHeader readPictureHeader(ByteBuffer inp) {
        int hdrSize = (inp.get() & 0xff) >> 3;
        inp.getInt();
        int sliceCount = inp.getShort();
        int a = inp.get() & 0xff;
        int log2SliceMbWidth = a >> 4;
        short[] sliceSizes = new short[sliceCount];
        for (int i = 0; i < sliceCount; i++) {
            sliceSizes[i] = inp.getShort();
        }
        return new PictureHeader(log2SliceMbWidth, sliceSizes);
    }

    /**
     * Decodes one slice: reads the slice header (quantizer scale, per-plane data
     * sizes — the V size is implicit in older 6-field headers), decodes the Y, U
     * and V planes, then writes the pixels into the output planes.
     */
    private void decodeSlice(ByteBuffer data, int[] qMatLuma, int[] qMatChroma, int[] scan, int sliceMbCount,
            int mbX, int mbY, short sliceSize, byte[][] result, int lumaStride, int pictureType, int chromaType) {
        int hdrSize = (data.get() & 0xff) >> 3;
        int qScale = clip(data.get() & 0xff, 1, 224);
        // Values above 128 encode larger steps: 129..224 -> 132..512 (x4 in steps).
        qScale = qScale > 128 ? qScale - 96 << 2 : qScale;
        int yDataSize = data.getShort();
        int uDataSize = data.getShort();
        int vDataSize = hdrSize > 7 ? data.getShort() : sliceSize - uDataSize - yDataSize - hdrSize;
        int[] y = new int[sliceMbCount << 8];
        decodeOnePlane(bitstream(data, yDataSize), sliceMbCount << 2, y, scaleMat(qMatLuma, qScale), scan, mbX, mbY,
                0);
        // 4:2:2 (chromaType 2): 2 chroma blocks per MB; 4:4:4 (3): 4 per MB.
        int chromaBlkCount = (sliceMbCount << chromaType) >> 1;
        int[] u = new int[chromaBlkCount << 6];
        decodeOnePlane(bitstream(data, uDataSize), chromaBlkCount, u, scaleMat(qMatChroma, qScale), scan, mbX, mbY,
                1);
        int[] v = new int[chromaBlkCount << 6];
        decodeOnePlane(bitstream(data, vDataSize), chromaBlkCount, v, scaleMat(qMatChroma, qScale), scan, mbX, mbY,
                2);
        putSlice(result, lumaStride, mbX, mbY, y, u, v, pictureType == 0 ? 0 : 1, pictureType == 2 ? 1 : 0,
                chromaType, sliceMbCount);
    }

    // Multiplies the whole quantization matrix by the slice quantizer scale.
    public static final int[] scaleMat(int[] qMatLuma, int qScale) {
        int[] res = new int[qMatLuma.length];
        for (int i = 0; i < qMatLuma.length; i++)
            res[i] = qMatLuma[i] * qScale;
        return res;
    }

    // Wraps the next dataSize bytes of the slice in a BitReader.
    static final BitReader bitstream(ByteBuffer data, int dataSize) {
        return BitReader.createBitReader(NIOUtils.read(data, dataSize));
    }

    // Clamp a 10-bit level to [min, max], drop to 8 bits, recenter to signed byte.
    byte clipTo8Bit(int val, int min, int max) {
        return (byte) (((val < min ? min : (val > max ? max : val)) >> 2) - 128);
    }

    static final int clip(int val, int min, int max) {
        return val < min ? min : (val > max ? max : val);
    }

    /**
     * Copies a decoded slice into the output planes. dist doubles the row stride
     * for field pictures; shift selects the starting field row. 4:2:2 chroma is
     * written half-width; 4:4:4 chroma reuses the luma layout.
     */
    protected void putSlice(byte[][] result, int lumaStride, int mbX, int mbY, int[] y, int[] u, int[] v, int dist,
            int shift, int chromaType, int sliceMbCount) {
        int chromaStride = lumaStride >> 1;
        putLuma(result[0], shift * lumaStride, lumaStride << dist, mbX, mbY, y, sliceMbCount, dist, shift);
        if (chromaType == 2) {
            putChroma(result[1], shift * chromaStride, chromaStride << dist, mbX, mbY, u, sliceMbCount, dist, shift);
            putChroma(result[2], shift * chromaStride, chromaStride << dist, mbX, mbY, v, sliceMbCount, dist, shift);
        } else {
            putLuma(result[1], shift * lumaStride, lumaStride << dist, mbX, mbY, u, sliceMbCount, dist, shift);
            putLuma(result[2], shift * lumaStride, lumaStride << dist, mbX, mbY, v, sliceMbCount, dist, shift);
        }
    }

    // A 16x16 luma macroblock is four 8x8 blocks: TL, TR, BL, BR.
    private void putLuma(byte[] y, int off, int stride, int mbX, int mbY, int[] luma, int mbPerSlice, int dist,
            int shift) {
        off += (mbX << 4) + (mbY << 4) * stride;
        for (int k = 0; k < mbPerSlice; k++) {
            putBlock(y, off, stride, luma, k << 8, dist, shift);
            putBlock(y, off + 8, stride, luma, (k << 8) + 64, dist, shift);
            putBlock(y, off + 8 * stride, stride, luma, (k << 8) + 128, dist, shift);
            putBlock(y, off + 8 * stride + 8, stride, luma, (k << 8) + 192, dist, shift);
            off += 16;
        }
    }

    // A 4:2:2 chroma macroblock is two stacked 8x8 blocks (8 wide, 16 tall).
    private void putChroma(byte[] y, int off, int stride, int mbX, int mbY, int[] chroma, int mbPerSlice, int dist,
            int shift) {
        off += (mbX << 3) + (mbY << 4) * stride;
        for (int k = 0; k < mbPerSlice; k++) {
            putBlock(y, off, stride, chroma, k << 7, dist, shift);
            putBlock(y, off + 8 * stride, stride, chroma, (k << 7) + 64, dist, shift);
            off += 8;
        }
    }

    // Copies one 8x8 block from the flat coefficient array into the plane,
    // clamping each 10-bit sample into a signed byte.
    private void putBlock(byte[] square, int sqOff, int sqStride, int[] flat, int flOff, int dist, int shift) {
        for (int i = 0; i < 8; i++) {
            for (int j = 0; j < 8; j++)
                square[j + sqOff] = clipTo8Bit(flat[j + flOff], 4, 1019);
            sqOff += sqStride;
            flOff += 8;
        }
    }

    // flags2 bit 0: chroma quantization matrix present in the frame header.
    static final boolean hasQMatChroma(int flags2) {
        return (flags2 & 1) != 0;
    }

    // Reads a 64-byte quantization matrix, de-zigzagged through the scan order.
    static final void readQMat(ByteBuffer inp, int[] qMatLuma, int[] scan) {
        byte[] b = new byte[64];
        inp.get(b);
        for (int i = 0; i < 64; i++) {
            qMatLuma[i] = b[scan[i]] & 0xff;
        }
    }

    // flags2 bit 1: luma quantization matrix present in the frame header.
    static final boolean hasQMatLuma(int flags2) {
        return (flags2 & 2) != 0;
    }

    // Peeks the frameType bits (byte 20) without consuming the buffer.
    public boolean isProgressive(ByteBuffer data) {
        return (((data.get(20) & 0xff) >> 2) & 3) == 0;
    }

    // Probe score: 100 when the "icpf" fourcc is at offset 4, else 0.
    public static int probe(ByteBuffer data) {
        if (data.get(4) == 'i' && data.get(5) == 'c' && data.get(6) == 'p' && data.get(7) == 'f')
            return 100;
        return 0;
    }

    @Override
    public VideoCodecMeta getCodecMeta(ByteBuffer data) {
        FrameHeader fh = readFrameHeader(data);
        return new VideoCodecMeta(new Size(fh.width, fh.height));
    }
}
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.refactoring.rename;

import com.intellij.lang.findUsages.DescriptiveNameUtil;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.MessageType;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.IdeFrame;
import com.intellij.openapi.wm.WindowManager;
import com.intellij.openapi.wm.ex.StatusBarEx;
import com.intellij.psi.*;
import com.intellij.psi.impl.light.LightElement;
import com.intellij.refactoring.BaseRefactoringProcessor;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.copy.CopyFilesOrDirectoriesHandler;
import com.intellij.refactoring.listeners.RefactoringElementListener;
import com.intellij.refactoring.listeners.RefactoringEventData;
import com.intellij.refactoring.listeners.RefactoringEventListener;
import com.intellij.refactoring.rename.naming.AutomaticRenamer;
import com.intellij.refactoring.rename.naming.AutomaticRenamerFactory;
import com.intellij.refactoring.ui.ConflictsDialog;
import com.intellij.refactoring.util.CommonRefactoringUtil;
import com.intellij.refactoring.util.MoveRenameUsageInfo;
import com.intellij.refactoring.util.NonCodeUsageInfo;
import com.intellij.refactoring.util.RelatedUsageInfo;
import com.intellij.usageView.UsageInfo;
import com.intellij.usageView.UsageViewDescriptor;
import com.intellij.usageView.UsageViewUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.event.HyperlinkEvent;
import javax.swing.event.HyperlinkListener;
import java.util.*;

/**
 * Refactoring processor that renames a primary PSI element and any additional
 * elements contributed by {@link RenamePsiElementProcessor}s and
 * {@link AutomaticRenamerFactory}-created renamers (e.g. related variables).
 * <p>
 * All pending renames are kept in {@link #myAllRenames} (element -> new name);
 * the primary element is always an entry in that map. The inherited
 * {@link BaseRefactoringProcessor} lifecycle drives the phases:
 * {@link #doRun()} -> {@link #findUsages()} -> {@link #preprocessUsages(Ref)}
 * -> {@link #performRefactoring(UsageInfo[])} -> {@link #performPsiSpoilingRefactoring()}.
 */
public class RenameProcessor extends BaseRefactoringProcessor {
  private static final Logger LOG = Logger.getInstance("#com.intellij.refactoring.rename.RenameProcessor");

  // Insertion-ordered map of every element scheduled for rename -> its new name.
  // LinkedHashMap: rename order follows registration order (primary element first).
  protected final LinkedHashMap<PsiElement, String> myAllRenames = new LinkedHashMap<>();

  // The element the user originally asked to rename; may be re-pointed by refreshElements().
  private @NotNull PsiElement myPrimaryElement;
  private String myNewName = null;

  private boolean mySearchInComments;
  private boolean mySearchTextOccurrences;
  // Set in prepareRenaming(): true when any contributing processor forces the usage preview.
  protected boolean myForceShowPreview;

  private String myCommandName;

  // Non-code usages (comments/strings) collected in performRefactoring and
  // applied later in performPsiSpoilingRefactoring.
  private NonCodeUsageInfo[] myNonCodeUsages = new NonCodeUsageInfo[0];
  private final List<AutomaticRenamerFactory> myRenamerFactories = new ArrayList<>();
  // Renamers built during findUsages(); consumed in preprocessUsages().
  private final List<AutomaticRenamer> myRenamers = new ArrayList<>();
  // Usages skipped because of unresolvable collisions; reported via a balloon at the end.
  private final List<UnresolvableCollisionUsageInfo> mySkippedUsages = new ArrayList<>();

  /**
   * @param project                 project the rename runs in
   * @param element                 element to rename; must not be a compiled (read-only) element
   * @param newName                 the new name for {@code element}
   * @param isSearchInComments      also search for the old name in comments
   * @param isSearchTextOccurrences also search for the old name in plain-text occurrences
   */
  public RenameProcessor(Project project,
                         @NotNull PsiElement element,
                         @NotNull @NonNls String newName,
                         boolean isSearchInComments,
                         boolean isSearchTextOccurrences) {
    super(project);
    myPrimaryElement = element;

    assertNonCompileElement(element);
    //assertValidName(element, newName);

    mySearchInComments = isSearchInComments;
    mySearchTextOccurrences = isSearchTextOccurrences;

    // Also registers the primary element in myAllRenames and computes the command name.
    setNewName(newName);
  }

  /** @return read-only view of all elements currently scheduled for rename. */
  public Set<PsiElement> getElements() {
    return Collections.unmodifiableSet(myAllRenames.keySet());
  }

  /** @return the scheduled new name for {@code element}, or null if it is not being renamed. */
  public String getNewName(PsiElement element) {
    return myAllRenames.get(element);
  }

  /** Registers a factory for automatic renamers (duplicates are ignored). */
  public void addRenamerFactory(AutomaticRenamerFactory factory) {
    if (!myRenamerFactories.contains(factory)) {
      myRenamerFactories.add(factory);
    }
  }

  public void removeRenamerFactory(AutomaticRenamerFactory factory) {
    myRenamerFactories.remove(factory);
  }

  @Override
  public void doRun() {
    // Element may have been invalidated between construction and execution.
    if (!myPrimaryElement.isValid()) return;
    // Let RenamePsiElementProcessors add secondary elements (getters/setters, files, ...).
    prepareRenaming(myPrimaryElement, myNewName, myAllRenames);

    super.doRun();
  }

  /**
   * Asks every applicable {@link RenamePsiElementProcessor} to contribute additional
   * element->name pairs into {@code allRenames}. Also (re)computes {@link #myForceShowPreview}.
   */
  public void prepareRenaming(@NotNull final PsiElement element, final String newName, final LinkedHashMap<PsiElement, String> allRenames) {
    final List<RenamePsiElementProcessor> processors = RenamePsiElementProcessor.allForElement(element);
    myForceShowPreview = false;
    for (RenamePsiElementProcessor processor : processors) {
      if (processor.canProcessElement(element)) {
        processor.prepareRenaming(element, newName, allRenames);
        myForceShowPreview |= processor.forcesShowPreview();
      }
    }
  }

  @Nullable
  private String getHelpID() {
    return RenamePsiElementProcessor.forElement(myPrimaryElement).getHelpID(myPrimaryElement);
  }

  /**
   * Conflict detection and automatic-renamer expansion phase.
   * <ol>
   *   <li>Collects conflict descriptions; publishes them on the message bus, throws in
   *       unit-test mode, or shows a conflicts dialog the user can cancel.</li>
   *   <li>Runs the automatic renamers gathered by {@link #findUsages()}: lets the user
   *       confirm them, registers their renames, and finds their usages under a
   *       modal progress (cancellable).</li>
   *   <li>Merges all usages back into {@code refUsages}, moving unresolvable-collision
   *       usages into {@link #mySkippedUsages}.</li>
   * </ol>
   *
   * @return false if the user cancelled or the element cannot be renamed.
   */
  @Override
  public boolean preprocessUsages(@NotNull final Ref<UsageInfo[]> refUsages) {
    UsageInfo[] usagesIn = refUsages.get();
    MultiMap<PsiElement, String> conflicts = new MultiMap<>();

    RenameUtil.addConflictDescriptions(usagesIn, conflicts);
    RenamePsiElementProcessor.forElement(myPrimaryElement).findExistingNameConflicts(myPrimaryElement, myNewName, conflicts, myAllRenames);
    if (!conflicts.isEmpty()) {

      final RefactoringEventData conflictData = new RefactoringEventData();
      conflictData.putUserData(RefactoringEventData.CONFLICTS_KEY, conflicts.values());
      myProject.getMessageBus().syncPublisher(RefactoringEventListener.REFACTORING_EVENT_TOPIC)
        .conflictsDetected("refactoring.rename", conflictData);

      if (ApplicationManager.getApplication().isUnitTestMode()) {
        throw new ConflictsInTestsException(conflicts.values());
      }
      ConflictsDialog conflictsDialog = prepareConflictsDialog(conflicts, refUsages.get());
      if (!conflictsDialog.showAndGet()) {
        if (conflictsDialog.isShowConflicts()) prepareSuccessful();
        return false;
      }
    }

    final List<UsageInfo> variableUsages = new ArrayList<>();
    if (!myRenamers.isEmpty()) {
      if (!findRenamedVariables(variableUsages)) return false;
      final LinkedHashMap<PsiElement, String> renames = new LinkedHashMap<>();
      for (final AutomaticRenamer renamer : myRenamers) {
        final List<? extends PsiNamedElement> variables = renamer.getElements();
        for (final PsiNamedElement variable : variables) {
          final String newName = renamer.getNewName(variable);
          if (newName != null) {
            addElement(variable, newName);
            // Secondary renames may themselves pull in further elements.
            prepareRenaming(variable, newName, renames);
          }
        }
      }
      if (!renames.isEmpty()) {
        for (PsiElement element : renames.keySet()) {
          assertNonCompileElement(element);
        }
        myAllRenames.putAll(renames);
        final Runnable runnable = () -> {
          for (final Map.Entry<PsiElement, String> entry : renames.entrySet()) {
            // Usage search must happen inside a read action.
            final UsageInfo[] usages =
              ApplicationManager.getApplication().runReadAction(new Computable<UsageInfo[]>() {
                @Override
                public UsageInfo[] compute() {
                  return RenameUtil.findUsages(entry.getKey(), entry.getValue(), mySearchInComments, mySearchTextOccurrences, myAllRenames);
                }
              });
            Collections.addAll(variableUsages, usages);
          }
        };
        if (!ProgressManager.getInstance()
          .runProcessWithProgressSynchronously(runnable, RefactoringBundle.message("searching.for.variables"), true, myProject)) {
          return false;
        }
      }
    }

    // Linked set: de-duplicates while preserving order of the original usages.
    final Set<UsageInfo> usagesSet = ContainerUtil.newLinkedHashSet(usagesIn);
    usagesSet.addAll(variableUsages);
    final List<UnresolvableCollisionUsageInfo> conflictUsages = RenameUtil.removeConflictUsages(usagesSet);
    if (conflictUsages != null) {
      mySkippedUsages.addAll(conflictUsages);
    }
    refUsages.set(usagesSet.toArray(new UsageInfo[usagesSet.size()]));

    prepareSuccessful();
    return PsiElementRenameHandler.canRename(myProject, null, myPrimaryElement);
  }

  /** Fails (logged assertion) when {@code element} is a compiled element, which cannot be renamed. */
  public static void assertNonCompileElement(PsiElement element) {
    LOG.assertTrue(!(element instanceof PsiCompiledElement), element);
  }

  // Currently unused: its only call site in the constructor is commented out.
  private void assertValidName(PsiElement element, String newName) {
    LOG.assertTrue(RenameUtil.isValidName(myProject, element, newName), "element: " + element + ", newName: " + newName);
  }

  /**
   * Lets the user confirm each automatic renamer (dropping declined ones from
   * {@link #myRenamers}), then collects their usages under a modal progress.
   *
   * @return false if the user cancelled the progress.
   */
  private boolean findRenamedVariables(final List<UsageInfo> variableUsages) {
    for (Iterator<AutomaticRenamer> iterator = myRenamers.iterator(); iterator.hasNext(); ) {
      AutomaticRenamer automaticVariableRenamer = iterator.next();
      if (!automaticVariableRenamer.hasAnythingToRename()) continue;
      if (!showAutomaticRenamingDialog(automaticVariableRenamer)) {
        iterator.remove();
      }
    }

    final Runnable runnable = () -> ApplicationManager.getApplication().runReadAction(() -> {
      for (final AutomaticRenamer renamer : myRenamers) {
        renamer.findUsages(variableUsages, mySearchInComments, mySearchTextOccurrences, mySkippedUsages, myAllRenames);
      }
    });

    return ProgressManager.getInstance()
      .runProcessWithProgressSynchronously(runnable, RefactoringBundle.message("searching.for.variables"), true, myProject);
  }

  /**
   * Shows the confirmation dialog for one automatic renamer; in unit-test mode
   * silently accepts all suggested names instead.
   */
  protected boolean showAutomaticRenamingDialog(AutomaticRenamer automaticVariableRenamer) {
    if (ApplicationManager.getApplication().isUnitTestMode()) {
      for (PsiNamedElement element : automaticVariableRenamer.getElements()) {
        automaticVariableRenamer.setRename(element, automaticVariableRenamer.getNewName(element));
      }
      return true;
    }
    final AutomaticRenamingDialog dialog = new AutomaticRenamingDialog(myProject, automaticVariableRenamer);
    return dialog.showAndGet();
  }

  /** Schedules one more element for rename (must not be a compiled element). */
  public void addElement(@NotNull PsiElement element, @NotNull String newName) {
    assertNonCompileElement(element);
    myAllRenames.put(element, newName);
  }

  private void setNewName(@NotNull String newName) {
    myNewName = newName;
    myAllRenames.put(myPrimaryElement, newName);
    myCommandName = RefactoringBundle
      .message("renaming.0.1.to.2", UsageViewUtil.getType(myPrimaryElement), DescriptiveNameUtil.getDescriptiveName(myPrimaryElement), newName);
  }

  @Override
  @NotNull
  protected UsageViewDescriptor createUsageViewDescriptor(@NotNull UsageInfo[] usages) {
    return new RenameViewDescriptor(myAllRenames);
  }

  /**
   * Finds usages of every scheduled element and (re)builds {@link #myRenamers}
   * from the registered factories plus all applicable extension-point factories
   * without an option name. Returns the de-duplicated usages.
   */
  @Override
  @NotNull
  public UsageInfo[] findUsages() {
    myRenamers.clear();
    ArrayList<UsageInfo> result = new ArrayList<>();

    // Snapshot the keys: addElement() may grow myAllRenames while we iterate.
    List<PsiElement> elements = new ArrayList<>(myAllRenames.keySet());
    //noinspection ForLoopReplaceableByForEach
    for (int i = 0; i < elements.size(); i++) {
      PsiElement element = elements.get(i);
      if (element == null) {
        LOG.error("primary: " + myPrimaryElement + "; renamers: " + myRenamers);
        continue;
      }
      final String newName = myAllRenames.get(element);
      final UsageInfo[] usages = RenameUtil.findUsages(element, newName, mySearchInComments, mySearchTextOccurrences, myAllRenames);
      final List<UsageInfo> usagesList = Arrays.asList(usages);
      result.addAll(usagesList);

      for (AutomaticRenamerFactory factory : myRenamerFactories) {
        if (factory.isApplicable(element)) {
          myRenamers.add(factory.createRenamer(element, newName, usagesList));
        }
      }

      for (AutomaticRenamerFactory factory : Extensions.getExtensions(AutomaticRenamerFactory.EP_NAME)) {
        // Factories with an option name are user-configurable and added explicitly elsewhere.
        if (factory.getOptionName() == null && factory.isApplicable(element)) {
          myRenamers.add(factory.createRenamer(element, newName, usagesList));
        }
      }
    }

    UsageInfo[] usageInfos = result.toArray(new UsageInfo[result.size()]);
    usageInfos = UsageViewUtil.removeDuplicatedUsages(usageInfos);
    return usageInfos;
  }

  /**
   * Re-points the scheduled renames at freshly resolved elements, pairing them
   * positionally with the existing new names (relies on LinkedHashMap order).
   */
  @Override
  protected void refreshElements(@NotNull PsiElement[] elements) {
    LOG.assertTrue(elements.length > 0);
    myPrimaryElement = elements[0];

    final Iterator<String> newNames = myAllRenames.values().iterator();
    LinkedHashMap<PsiElement, String> newAllRenames = new LinkedHashMap<>();
    for (PsiElement resolved : elements) {
      newAllRenames.put(resolved, newNames.next());
    }
    myAllRenames.clear();
    myAllRenames.putAll(newAllRenames);
  }

  @Override
  protected boolean isPreviewUsages(@NotNull UsageInfo[] usages) {
    if (myForceShowPreview) return true;
    if (super.isPreviewUsages(usages)) return true;
    if (UsageViewUtil.reportNonRegularUsages(usages, myProject)) return true;
    return false;
  }

  @Nullable
  @Override
  protected String getRefactoringId() {
    return "refactoring.rename";
  }

  @Nullable
  @Override
  protected RefactoringEventData getBeforeData() {
    final RefactoringEventData data = new RefactoringEventData();
    data.addElement(myPrimaryElement);
    return data;
  }

  @Nullable
  @Override
  protected RefactoringEventData getAfterData(@NotNull UsageInfo[] usages) {
    final RefactoringEventData data = new RefactoringEventData();
    data.addElement(myPrimaryElement);
    return data;
  }

  /**
   * Performs the actual renames: validates each pending rename first (dropping
   * file renames the user declined to overwrite), then renames every element
   * with its classified usages, runs post-rename callbacks, stashes non-code
   * usages for {@link #performPsiSpoilingRefactoring()}, and finally reports
   * skipped usages via a status-bar balloon.
   */
  @Override
  public void performRefactoring(@NotNull UsageInfo[] usages) {
    // Shared "apply to all" answer for the file-overwrite dialog; only needed for multi-renames.
    final int[] choice = myAllRenames.size() > 1 ? new int[]{-1} : null;
    String message = null;
    try {
      for (Iterator<Map.Entry<PsiElement, String>> iterator = myAllRenames.entrySet().iterator(); iterator.hasNext(); ) {
        Map.Entry<PsiElement, String> entry = iterator.next();
        if (entry.getKey() instanceof PsiFile) {
          final PsiFile file = (PsiFile)entry.getKey();
          final PsiDirectory containingDirectory = file.getContainingDirectory();
          if (CopyFilesOrDirectoriesHandler.checkFileExist(containingDirectory, choice, file, entry.getValue(), "Rename")) {
            // Target file already exists and the user chose not to proceed for this entry.
            iterator.remove();
            continue;
          }
        }
        RenameUtil.checkRename(entry.getKey(), entry.getValue());
      }
    }
    catch (IncorrectOperationException e) {
      message = e.getMessage();
    }

    if (message != null) {
      CommonRefactoringUtil.showErrorMessage(RefactoringBundle.message("rename.title"), message, getHelpID(), myProject);
      return;
    }

    List<Runnable> postRenameCallbacks = new ArrayList<>();

    final MultiMap<PsiElement, UsageInfo> classified = classifyUsages(myAllRenames.keySet(), usages);
    for (final PsiElement element : myAllRenames.keySet()) {
      String newName = myAllRenames.get(element);

      final RefactoringElementListener elementListener = getTransaction().getElementListener(element);
      final RenamePsiElementProcessor renamePsiElementProcessor = RenamePsiElementProcessor.forElement(element);
      // Capture the callback before the rename: the processor may inspect the element's old state.
      Runnable postRenameCallback = renamePsiElementProcessor.getPostRenameCallback(element, newName, elementListener);
      final Collection<UsageInfo> infos = classified.get(element);
      try {
        RenameUtil.doRename(element, newName, infos.toArray(new UsageInfo[infos.size()]), myProject, elementListener);
      }
      catch (final IncorrectOperationException e) {
        RenameUtil.showErrorMessage(e, element, myProject);
        return;
      }
      if (postRenameCallback != null) {
        postRenameCallbacks.add(postRenameCallback);
      }
    }

    // Run callbacks only after ALL elements were renamed.
    for (Runnable runnable : postRenameCallbacks) {
      runnable.run();
    }

    List<NonCodeUsageInfo> nonCodeUsages = new ArrayList<>();
    for (UsageInfo usage : usages) {
      if (usage instanceof NonCodeUsageInfo) {
        nonCodeUsages.add((NonCodeUsageInfo)usage);
      }
    }
    myNonCodeUsages = nonCodeUsages.toArray(new NonCodeUsageInfo[nonCodeUsages.size()]);
    if (!mySkippedUsages.isEmpty()) {
      if (!ApplicationManager.getApplication().isUnitTestMode() && !ApplicationManager.getApplication().isHeadlessEnvironment()) {
        // UI notification must happen on the EDT.
        ApplicationManager.getApplication().invokeLater(() -> {
          final IdeFrame ideFrame = WindowManager.getInstance().getIdeFrame(myProject);
          if (ideFrame != null) {

            StatusBarEx statusBar = (StatusBarEx)ideFrame.getStatusBar();
            HyperlinkListener listener = new HyperlinkListener() {
              @Override
              public void hyperlinkUpdate(HyperlinkEvent e) {
                if (e.getEventType() != HyperlinkEvent.EventType.ACTIVATED) return;
                Messages.showMessageDialog("<html>Following usages were safely skipped:<br>" +
                                           StringUtil.join(mySkippedUsages, unresolvableCollisionUsageInfo -> unresolvableCollisionUsageInfo.getDescription(), "<br>") +
                                           "</html>", "Not All Usages Were Renamed", null);
              }
            };
            statusBar.notifyProgressByBalloon(MessageType.WARNING, "<html><body>Unable to rename certain usages. <a href=\"\">Browse</a></body></html>", null, listener);
          }
        }, ModalityState.NON_MODAL);
      }
    }
  }

  /** Renames occurrences in comments/strings after the PSI renames are done. */
  @Override
  protected void performPsiSpoilingRefactoring() {
    RenameUtil.renameNonCodeUsages(myProject, myNonCodeUsages);
  }

  @Override
  protected String getCommandName() {
    return myCommandName;
  }

  /**
   * Groups usages by the scheduled element they belong to: directly via the
   * referenced element, via {@link RelatedUsageInfo#getRelatedElement()}, or
   * indirectly via the referenced element's navigation element. Usages whose
   * reference is a {@link LightElement} are dropped.
   */
  public static MultiMap<PsiElement, UsageInfo> classifyUsages(Collection<? extends PsiElement> elements, UsageInfo[] usages) {
    final MultiMap<PsiElement, UsageInfo> result = new MultiMap<>();
    for (UsageInfo usage : usages) {
      LOG.assertTrue(usage instanceof MoveRenameUsageInfo);
      if (usage.getReference() instanceof LightElement) {
        continue; //filter out implicit references (e.g. from derived class to super class' default constructor)
      }
      MoveRenameUsageInfo usageInfo = (MoveRenameUsageInfo)usage;
      if (usage instanceof RelatedUsageInfo) {
        final PsiElement relatedElement = ((RelatedUsageInfo)usage).getRelatedElement();
        if (elements.contains(relatedElement)) {
          result.putValue(relatedElement, usage);
        }
      }
      else {
        PsiElement referenced = usageInfo.getReferencedElement();
        if (elements.contains(referenced)) {
          result.putValue(referenced, usage);
        }
        else if (referenced != null) {
          PsiElement indirect = referenced.getNavigationElement();
          if (elements.contains(indirect)) {
            result.putValue(indirect, usage);
          }
        }
      }
    }
    return result;
  }

  /** @return all scheduled new names, in registration order. */
  public Collection<String> getNewNames() {
    return myAllRenames.values();
  }

  public void setSearchInComments(boolean value) {
    mySearchInComments = value;
  }

  public void setSearchTextOccurrences(boolean searchTextOccurrences) {
    mySearchTextOccurrences = searchTextOccurrences;
  }

  public boolean isSearchInComments() {
    return mySearchInComments;
  }

  public boolean isSearchTextOccurrences() {
    return mySearchTextOccurrences;
  }

  public void setCommandName(final String commandName) {
    myCommandName = commandName;
  }
}
/**
 * GnucashObjectImpl.java
 * License: GPLv3 or later
 * created: 01.10.2005 13:32:15
 * (c) 2005 by <a href="http://Wolschon.biz">Wolschon Softwaredesign und Beratung</a>
 */
package biz.wolschon.fileformats.gnucash.jwsdpimpl;

import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import javax.xml.bind.JAXBException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import biz.wolschon.fileformats.gnucash.GnucashFile;
import biz.wolschon.fileformats.gnucash.jwsdpimpl.generated.ObjectFactory;
import biz.wolschon.fileformats.gnucash.jwsdpimpl.generated.Slot;
import biz.wolschon.fileformats.gnucash.jwsdpimpl.generated.SlotsType;
import biz.wolschon.fileformats.gnucash.jwsdpimpl.generated.SlotValueType;

/**
 * (c) 2005 by <a href="http://Wolschon.biz>Wolschon Softwaredesign und Beratung</a>.<br/>
 * Project: gnucashReader<br/>
 * GnucashObjectImpl.java<br/>
 * created: 01.10.2005 13:32:15 <br/>
 * <br/><br/>
 * Helper class used to implement functions all gnucash-objects support:
 * user-defined attributes stored in JWSDP "slots" plus PropertyChangeListener
 * plumbing.
 *
 * @author <a href="mailto:Marcus@Wolschon.biz">Marcus Wolschon</a>
 */
public class GnucashObjectImpl implements GnucashObject {

    /**
     * Automatically created logger for debug and error-output.
     */
    private static final Log LOGGER = LogFactory.getLog(GnucashObjectImpl.class);

    /**
     * The user-defined values (JWSDP slot list).
     * Never null after construction: {@link #setSlots(SlotsType)} rejects null.
     */
    private SlotsType mySlots;

    /**
     * The file we belong to.
     */
    private final GnucashFile myFile;

    /**
     * @param slots  the user-defined values ({@link #mySlots}); must not be null
     * @param myFile the file we belong to
     */
    public GnucashObjectImpl(final SlotsType slots, final GnucashFile myFile) {
        super();
        this.myFile = myFile;
        setSlots(slots);
    }

    /**
     * @return all keys that can be used with {@link #getUserDefinedAttribute(String)}
     */
    @SuppressWarnings("unchecked")
    public Collection<String> getUserDefinedAttributeKeys() {
        List<Slot> slots = getSlots().getSlot();
        List<String> retval = new ArrayList<String>(slots.size());
        for (Slot slot : slots) {
            retval.add(slot.getSlotKey());
        }
        return retval;
    }

    /**
     * Looks up a user-defined attribute by its slot key.
     *
     * @param name the name of the user-defined attribute
     * @return the value, or null if the key is not set or the slot carries no value
     */
    @SuppressWarnings("unchecked")
    public String getUserDefinedAttribute(final String name) {
        List<Slot> slots = getSlots().getSlot();
        for (Slot slot : slots) {
            if (slot.getSlotKey().equals(name)) {
                List<Object> content = slot.getSlotValue().getContent();
                // Guard against an empty content list: the original get(0) threw
                // IndexOutOfBoundsException for a slot that exists but has no value.
                if (content.isEmpty()) {
                    return null;
                }
                Object value = content.get(0);
                if (value == null) {
                    return null;
                }
                if (!(value instanceof String)) {
                    // Unexpected payload type; we still return its string form below.
                    LOGGER.error("User-defined attribute for key '" + name + "' may not be a String."
                            + " It is of type [" + value.getClass().getName() + "]");
                }
                return value.toString();
            }
        }
        return null;
    }

    // ------------------------ support for propertyChangeListeners ------------------

    /**
     * Support for firing PropertyChangeEvents.
     * (gets initialized only if we really have listeners)
     */
    private volatile PropertyChangeSupport myPropertyChange = null;

    /**
     * Returned value may be null if we never had listeners.
     *
     * @return our support for firing PropertyChangeEvents
     */
    protected PropertyChangeSupport getPropertyChangeSupport() {
        return myPropertyChange;
    }

    /**
     * Add a PropertyChangeListener to the listener list.
     * The listener is registered for all properties.
     * <p>
     * Synchronized to make the lazy init of {@link #myPropertyChange} a safe
     * check-then-act, consistent with the synchronized remove method below.
     *
     * @param listener The PropertyChangeListener to be added
     */
    public final synchronized void addPropertyChangeListener(
            final PropertyChangeListener listener) {
        if (myPropertyChange == null) {
            myPropertyChange = new PropertyChangeSupport(this);
        }
        myPropertyChange.addPropertyChangeListener(listener);
    }

    /**
     * Add a PropertyChangeListener for a specific property. The listener
     * will be invoked only when a call on firePropertyChange names that
     * specific property.
     *
     * @param propertyName The name of the property to listen on.
     * @param listener     The PropertyChangeListener to be added
     */
    public final synchronized void addPropertyChangeListener(
            final String propertyName,
            final PropertyChangeListener listener) {
        if (myPropertyChange == null) {
            myPropertyChange = new PropertyChangeSupport(this);
        }
        myPropertyChange.addPropertyChangeListener(propertyName, listener);
    }

    /**
     * Remove a PropertyChangeListener for a specific property.
     *
     * @param propertyName The name of the property that was listened on.
     * @param listener     The PropertyChangeListener to be removed
     */
    public final void removePropertyChangeListener(
            final String propertyName,
            final PropertyChangeListener listener) {
        if (myPropertyChange != null) {
            myPropertyChange.removePropertyChangeListener(propertyName, listener);
        }
    }

    /**
     * Remove a PropertyChangeListener from the listener list.
     * This removes a PropertyChangeListener that was registered
     * for all properties.
     *
     * @param listener The PropertyChangeListener to be removed
     */
    public synchronized void removePropertyChangeListener(
            final PropertyChangeListener listener) {
        if (myPropertyChange != null) {
            myPropertyChange.removePropertyChangeListener(listener);
        }
    }

    // -------------------------------------------------------

    /**
     * Just an overridden toString to return this class's name and hashCode.
     *
     * @return className and hashCode
     */
    @Override
    public String toString() {
        return "GnucashObjectImpl@" + hashCode();
    }

    /**
     * @return Returns the slots.
     * @see #mySlots
     */
    public SlotsType getSlots() {
        return mySlots;
    }

    /**
     * @param slots The slots to set; must not be null.
     * @see #mySlots
     */
    @SuppressWarnings("unchecked")
    public void setSlots(final SlotsType slots) {
        if (slots == null) {
            throw new IllegalArgumentException("null 'slots' given!");
        }

        Object old = mySlots;
        if (old == slots) {
            return; // nothing has changed
        }
        mySlots = slots;

        // we have an xsd-problem saving empty slots so we add a dummy-value
        if (slots.getSlot().isEmpty()) {
            try {
                ObjectFactory objectFactory = new ObjectFactory();
                Slot slot = objectFactory.createSlot();
                slot.setSlotKey("dummy");
                SlotValueType value = objectFactory.createSlotValueType();
                value.setType("string");
                value.getContent().add("dummy");
                slot.setSlotValue(value);
                slots.getSlot().add(slot);
            } catch (JAXBException e) {
                LOGGER.error("[JAXBException] Problem in " + getClass().getName(), e);
            }
        }

        // <<insert code to react further to this change here
        PropertyChangeSupport propertyChangeFirer = getPropertyChangeSupport();
        if (propertyChangeFirer != null) {
            propertyChangeFirer.firePropertyChange("slots", old, slots);
        }
    }

    /**
     * @return Returns the file.
     * @see #myFile
     */
    public GnucashFile getGnucashFile() {
        return myFile;
    }
}
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.siyeh.ig.testFrameworks; import com.intellij.codeInspection.ProblemDescriptor; import com.intellij.openapi.project.Project; import com.intellij.psi.*; import com.intellij.psi.tree.IElementType; import com.intellij.psi.util.*; import com.siyeh.InspectionGadgetsBundle; import com.siyeh.ig.BaseInspection; import com.siyeh.ig.BaseInspectionVisitor; import com.siyeh.ig.InspectionGadgetsFix; import com.siyeh.ig.PsiReplacementUtil; import com.siyeh.ig.callMatcher.CallMatcher; import com.siyeh.ig.psiutils.BoolUtils; import com.siyeh.ig.psiutils.ComparisonUtils; import com.siyeh.ig.psiutils.EqualityCheck; import com.siyeh.ig.psiutils.ImportUtils; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; public abstract class SimplifiableAssertionInspection extends BaseInspection { @Override @NotNull protected String buildErrorString(Object... infos) { return InspectionGadgetsBundle.message("simplifiable.junit.assertion.problem.descriptor", infos[0]); } @Override public InspectionGadgetsFix buildFix(Object... infos) { return new SimplifyAssertFix(); } @Override public BaseInspectionVisitor buildVisitor() { return new SimplifiableJUnitAssertionVisitor(); } protected abstract boolean checkTestNG(); static boolean isAssertThatCouldBeFail(PsiExpression position, boolean checkTrue) { return (checkTrue ? 
PsiKeyword.TRUE : PsiKeyword.FALSE).equals(position.getText()); } boolean isAssertEqualsThatCouldBeAssertLiteral(AssertHint assertHint) { final PsiExpression firstTestArgument = assertHint.getFirstArgument(); final PsiExpression secondTestArgument = assertHint.getSecondArgument(); return isSimpleLiteral(firstTestArgument, secondTestArgument) || isSimpleLiteral(secondTestArgument, firstTestArgument); } static boolean isSimpleLiteral(PsiExpression expression1, PsiExpression expression2) { if (!(expression1 instanceof PsiLiteralExpression) || expression2 == null) { return false; } final String text = expression1.getText(); if (PsiKeyword.NULL.equals(text)) { return true; } if (!PsiKeyword.TRUE.equals(text) && !PsiKeyword.FALSE.equals(text)) { return false; } final PsiType type = expression2.getType(); return PsiType.BOOLEAN.equals(type); } static boolean isEqualityComparison(PsiExpression expression) { if (expression instanceof PsiBinaryExpression) { final PsiBinaryExpression binaryExpression = (PsiBinaryExpression)expression; final IElementType tokenType = binaryExpression.getOperationTokenType(); if (!tokenType.equals(JavaTokenType.EQEQ)) { return false; } final PsiExpression lhs = binaryExpression.getLOperand(); final PsiExpression rhs = binaryExpression.getROperand(); if (rhs == null) { return false; } final PsiType type = lhs.getType(); return type != null && TypeConversionUtil.isPrimitiveAndNotNullOrWrapper(type); } return EqualityCheck.from(expression) != null; } static final CallMatcher ARRAYS_EQUALS = CallMatcher.staticCall("java.util.Arrays", "equals").parameterCount(2); static boolean isArrayEqualityComparison(PsiExpression expression) { return expression instanceof PsiMethodCallExpression && ARRAYS_EQUALS.test((PsiMethodCallExpression)expression); } static boolean isIdentityComparison(PsiExpression expression) { if (!(expression instanceof PsiBinaryExpression)) { return false; } final PsiBinaryExpression binaryExpression = (PsiBinaryExpression)expression; 
if (!ComparisonUtils.isEqualityComparison(binaryExpression)) { return false; } final PsiExpression rhs = binaryExpression.getROperand(); if (rhs == null) { return false; } final PsiExpression lhs = binaryExpression.getLOperand(); final PsiType lhsType = lhs.getType(); if (lhsType instanceof PsiPrimitiveType) { return false; } final PsiType rhsType = rhs.getType(); return !(rhsType instanceof PsiPrimitiveType); } private class SimplifyAssertFix extends InspectionGadgetsFix { @Override @NotNull public String getFamilyName() { return InspectionGadgetsBundle.message("simplify.junit.assertion.simplify.quickfix"); } @Override public void doFix(Project project, ProblemDescriptor descriptor) { final PsiElement methodNameIdentifier = descriptor.getPsiElement(); final PsiElement parent = methodNameIdentifier.getParent(); if (parent == null) { return; } final PsiMethodCallExpression callExpression = (PsiMethodCallExpression)parent.getParent(); final AssertHint assertHint = AssertHint.createAssertEqualsHint(callExpression, checkTestNG()); if (assertHint != null && isAssertEqualsThatCouldBeAssertLiteral(assertHint)) { replaceAssertEqualsWithAssertLiteral(assertHint); } else { final AssertHint assertTrueFalseHint = AssertHint.createAssertTrueFalseHint(callExpression, checkTestNG()); if (assertTrueFalseHint == null) { return; } final boolean assertTrue = assertTrueFalseHint.isAssertTrue(); final PsiExpression argument = assertTrueFalseHint.getFirstArgument(); if (ComparisonUtils.isNullComparison(argument)) { replaceAssertWithAssertNull(assertTrueFalseHint); } else if (isIdentityComparison(argument)) { replaceWithAssertSame(assertTrueFalseHint); } else if (assertTrue && isEqualityComparison(argument)) { replaceWithAssertEquals(assertTrueFalseHint, "assertEquals"); } else if (isAssertThatCouldBeFail(argument, !assertTrue)) { replaceWithFail(assertTrueFalseHint); } else if (isEqualityComparison(argument)) { replaceWithAssertEquals(assertTrueFalseHint, "assertNotEquals"); } else if 
(assertTrue && !checkTestNG() && isArrayEqualityComparison(argument)) { replaceWithAssertEquals(assertTrueFalseHint, "assertArrayEquals"); } else if (BoolUtils.isNegation(argument)) { replaceWithNegatedBooleanAssertion(assertTrueFalseHint); } } } private void addStaticImportOrQualifier(String methodName, AssertHint assertHint, StringBuilder out) { final PsiMethodCallExpression originalMethodCall = (PsiMethodCallExpression)assertHint.getOriginalExpression(); final PsiReferenceExpression methodExpression = originalMethodCall.getMethodExpression(); final PsiExpression qualifier = methodExpression.getQualifierExpression(); if (qualifier == null) { final PsiMethod method = assertHint.getMethod(); if (method == null) { return; } final PsiClass containingClass = method.getContainingClass(); if (containingClass == null) { return; } final String className = containingClass.getQualifiedName(); if (className == null) { return; } if (!ImportUtils.addStaticImport(className, methodName, originalMethodCall)) { // add qualifier if old call was to JUnit4 method and adding static import failed out.append(className).append("."); } } else { // apparently not statically imported, keep old qualifier in new assert call out.append(qualifier.getText()).append('.'); } } private void replaceWithFail(AssertHint assertHint) { @NonNls final StringBuilder newExpression = new StringBuilder(); addStaticImportOrQualifier("fail", assertHint, newExpression); newExpression.append("fail("); final PsiExpression message = assertHint.getMessage(); if (message != null) { newExpression.append(message.getText()); } newExpression.append(')'); PsiReplacementUtil.replaceExpressionAndShorten(assertHint.getOriginalExpression(), newExpression.toString()); } /** * <code>assertTrue</code> -> <code>assertEquals</code> * <p/ * <code>assertFalse</code> -> <code>assertNotEquals</code> (do not replace for junit 5 Assertions * as there is no primitive overloads for <code>assertNotEquals</code> and boxing would be enforced 
if replaced) */ private void replaceWithAssertEquals(AssertHint assertHint, final String methodName) { final PsiExpression firstArgument = assertHint.getFirstArgument(); PsiExpression lhs = null; PsiExpression rhs = null; if (firstArgument instanceof PsiBinaryExpression) { final PsiBinaryExpression binaryExpression = (PsiBinaryExpression)firstArgument; lhs = binaryExpression.getLOperand(); rhs = binaryExpression.getROperand(); } else { final EqualityCheck check = EqualityCheck.from(firstArgument); if (check != null) { lhs = check.getLeft(); rhs = check.getRight(); } else if (firstArgument instanceof PsiMethodCallExpression && ARRAYS_EQUALS.test((PsiMethodCallExpression)firstArgument)) { final PsiExpression[] args = ((PsiMethodCallExpression)firstArgument).getArgumentList().getExpressions(); lhs = args[0]; rhs = args[1]; } } if (!(lhs instanceof PsiLiteralExpression) && rhs instanceof PsiLiteralExpression) { final PsiExpression temp = lhs; lhs = rhs; rhs = temp; } if (lhs == null || rhs == null) { return; } if (checkTestNG()) { final PsiExpression temp = lhs; lhs = rhs; rhs = temp; } final StringBuilder buf = new StringBuilder(); final PsiType lhsType = lhs.getType(); final PsiType rhsType = rhs.getType(); if (lhsType != null && rhsType != null && PsiUtil.isLanguageLevel5OrHigher(lhs)) { final PsiPrimitiveType rhsUnboxedType = PsiPrimitiveType.getUnboxedType(rhsType); if (isPrimitiveAndBoxedWithOverloads(lhsType, rhsType) && rhsUnboxedType != null) { buf.append(lhs.getText()).append(",(").append(rhsUnboxedType.getCanonicalText()).append(')').append(rhs.getText()); } else { final PsiPrimitiveType unboxedType = PsiPrimitiveType.getUnboxedType(lhsType); if (isPrimitiveAndBoxedWithOverloads(rhsType, lhsType) && unboxedType != null) { buf.append('(').append(unboxedType.getCanonicalText()).append(')').append(lhs.getText()).append(',').append(rhs.getText()); } else { buf.append(lhs.getText()).append(',').append(rhs.getText()); } } } else { 
buf.append(lhs.getText()).append(',').append(rhs.getText()); } final PsiExpression originalExpression = assertHint.getOriginalExpression(); if (lhsType != null && TypeConversionUtil.isFloatOrDoubleType(lhsType.getDeepComponentType()) || rhsType != null && TypeConversionUtil.isFloatOrDoubleType(rhsType.getDeepComponentType()) || isPrimitiveAndBoxedFloat(lhsType, rhsType) || isPrimitiveAndBoxedFloat(rhsType, lhsType)) { final String noDelta = compoundMethodCall(methodName, assertHint, buf.toString()); final PsiElementFactory factory = JavaPsiFacade.getElementFactory(originalExpression.getProject()); final PsiExpression expression = methodName.equals("assertNotEquals") ? null : factory.createExpressionFromText(noDelta, originalExpression); final PsiMethod method = expression instanceof PsiMethodCallExpression ? ((PsiMethodCallExpression)expression).resolveMethod() : null; if (method == null || method.isDeprecated()) { buf.append(",0.0"); } } final String newExpression = compoundMethodCall(methodName, assertHint, buf.toString()); PsiReplacementUtil.replaceExpressionAndShorten(originalExpression, newExpression); } private boolean isPrimitiveAndBoxedWithOverloads(PsiType lhsType, PsiType rhsType) { if (lhsType instanceof PsiPrimitiveType && !PsiType.FLOAT.equals(lhsType) && !PsiType.DOUBLE.equals(lhsType)) { return rhsType instanceof PsiClassType; } return false; } private boolean isPrimitiveAndBoxedFloat(PsiType lhsType, PsiType rhsType) { return lhsType instanceof PsiPrimitiveType && rhsType instanceof PsiClassType && (PsiType.DOUBLE.equals(rhsType) && PsiType.FLOAT.equals(rhsType)); } private void replaceWithNegatedBooleanAssertion(AssertHint assertHint) { final PsiPrefixExpression expression = (PsiPrefixExpression)assertHint.getFirstArgument(); final PsiExpression operand = PsiUtil.skipParenthesizedExprDown(expression.getOperand()); if (operand == null) { return; } final String newMethodName = assertHint.isAssertTrue() ? 
"assertFalse" : "assertTrue"; final String newExpression = compoundMethodCall(newMethodName, assertHint, operand.getText()); PsiReplacementUtil.replaceExpressionAndShorten(assertHint.getOriginalExpression(), newExpression); } private void replaceAssertWithAssertNull(AssertHint assertHint) { final PsiBinaryExpression binaryExpression = (PsiBinaryExpression)assertHint.getFirstArgument(); final PsiExpression lhs = binaryExpression.getLOperand(); PsiExpression rhs = binaryExpression.getROperand(); if (rhs == null) { return; } final IElementType tokenType = binaryExpression.getOperationTokenType(); if (!(lhs instanceof PsiLiteralExpression) && rhs instanceof PsiLiteralExpression) { rhs = lhs; } @NonNls final String methodName = assertHint.getMethod().getName(); @NonNls final String memberName; if ("assertFalse".equals(methodName) ^ tokenType.equals(JavaTokenType.NE)) { memberName = "assertNotNull"; } else { memberName = "assertNull"; } final String newExpression = compoundMethodCall(memberName, assertHint, rhs.getText()); PsiReplacementUtil.replaceExpressionAndShorten(assertHint.getOriginalExpression(), newExpression); } private String compoundMethodCall(String methodName, AssertHint assertHint, String args) { final PsiExpression message = assertHint.getMessage(); final StringBuilder newExpression = new StringBuilder(); addStaticImportOrQualifier(methodName, assertHint, newExpression); newExpression.append(methodName).append('('); final int index = assertHint.getArgIndex(); if (message != null && index != 0) { newExpression.append(message.getText()).append(','); } newExpression.append(args); if (message != null && index == 0) { newExpression.append(',').append(message.getText()); } newExpression.append(')'); return newExpression.toString(); } private void replaceWithAssertSame(AssertHint assertHint) { final PsiBinaryExpression firstArgument = (PsiBinaryExpression)assertHint.getFirstArgument(); PsiExpression lhs = firstArgument.getLOperand(); PsiExpression rhs = 
firstArgument.getROperand(); final IElementType tokenType = firstArgument.getOperationTokenType(); if (!(lhs instanceof PsiLiteralExpression) && rhs instanceof PsiLiteralExpression) { final PsiExpression temp = lhs; lhs = rhs; rhs = temp; } if (rhs == null) { return; } @NonNls final String methodName = assertHint.getMethod().getName(); @NonNls final String memberName; if ("assertFalse".equals(methodName) ^ tokenType.equals(JavaTokenType.NE)) { memberName = "assertNotSame"; } else { memberName = "assertSame"; } final String newExpression = compoundMethodCall(memberName, assertHint, lhs.getText() + "," + rhs.getText()); PsiReplacementUtil.replaceExpressionAndShorten(assertHint.getOriginalExpression(), newExpression); } private void replaceAssertEqualsWithAssertLiteral(AssertHint assertHint) { final PsiExpression firstTestArgument = assertHint.getFirstArgument(); final PsiExpression secondTestArgument = assertHint.getSecondArgument(); final String literalValue; final String compareValue; if (isSimpleLiteral(firstTestArgument, secondTestArgument)) { literalValue = firstTestArgument.getText(); compareValue = secondTestArgument.getText(); } else { literalValue = secondTestArgument.getText(); compareValue = firstTestArgument.getText(); } final String uppercaseLiteralValue = Character.toUpperCase(literalValue.charAt(0)) + literalValue.substring(1); @NonNls final String methodName = "assert" + uppercaseLiteralValue; final String newExpression = compoundMethodCall(methodName, assertHint, compareValue); PsiReplacementUtil.replaceExpressionAndShorten(assertHint.getOriginalExpression(), newExpression); } } private class SimplifiableJUnitAssertionVisitor extends BaseInspectionVisitor { @Override public void visitMethodCallExpression(@NotNull PsiMethodCallExpression expression) { super.visitMethodCallExpression(expression); final AssertHint assertHint = AssertHint.createAssertEqualsHint(expression, checkTestNG()); if (assertHint != null && 
isAssertEqualsThatCouldBeAssertLiteral(assertHint)) { registerMethodCallError(expression, getReplacementMethodName(assertHint)); } else { final AssertHint assertTrueFalseHint = AssertHint.createAssertTrueFalseHint(expression, checkTestNG()); if (assertTrueFalseHint == null) { return; } final boolean assertTrue = assertTrueFalseHint.isAssertTrue(); final PsiExpression firstArgument = assertTrueFalseHint.getFirstArgument(); if (ComparisonUtils.isNullComparison(firstArgument)) { registerMethodCallError(expression, assertTrue == isEqEqExpression(firstArgument) ? "assertNull()" : "assertNotNull()"); } else if (isIdentityComparison(firstArgument)) { registerMethodCallError(expression, assertTrue == isEqEqExpression(firstArgument) ? "assertSame()" : "assertNotSame()"); } else { if (isEqualityComparison(firstArgument)) { if (assertTrue) { registerMethodCallError(expression, "assertEquals()"); } else if (firstArgument instanceof PsiMethodCallExpression || hasPrimitiveOverload(assertTrueFalseHint)) { registerMethodCallError(expression, "assertNotEquals()"); } } else if (isAssertThatCouldBeFail(firstArgument, !assertTrue)) { registerMethodCallError(expression, "fail()"); } else if (assertTrue && !checkTestNG() && isArrayEqualityComparison(firstArgument)) { registerMethodCallError(expression, "assertArrayEquals()"); } else if (BoolUtils.isNegation(firstArgument)) { registerMethodCallError(expression, assertTrue ? 
"assertFalse()" : "assertTrue()"); } } } } private boolean hasPrimitiveOverload(AssertHint assertHint) { final PsiClass containingClass = assertHint.getMethod().getContainingClass(); if (containingClass == null) { return false; } final PsiMethod primitiveOverload = CachedValuesManager.getCachedValue(containingClass, () -> { final PsiMethod patternMethod = JavaPsiFacade.getElementFactory(containingClass.getProject()) .createMethodFromText("public static void assertNotEquals(long a, long b){}", containingClass); return new CachedValueProvider.Result<>(containingClass.findMethodBySignature(patternMethod, true), PsiModificationTracker.JAVA_STRUCTURE_MODIFICATION_COUNT); }); return primitiveOverload != null; } @NonNls private String getReplacementMethodName(AssertHint assertHint) { final PsiExpression firstArgument = assertHint.getFirstArgument(); final PsiExpression secondArgument = assertHint.getSecondArgument(); final PsiLiteralExpression literalExpression; if (firstArgument instanceof PsiLiteralExpression) { literalExpression = (PsiLiteralExpression)firstArgument; } else if (secondArgument instanceof PsiLiteralExpression) { literalExpression = (PsiLiteralExpression)secondArgument; } else { return ""; } final Object value = literalExpression.getValue(); if (value == Boolean.TRUE) { return "assertTrue()"; } else if (value == Boolean.FALSE) { return "assertFalse()"; } else if (value == null) { return "assertNull()"; } return ""; } private boolean isEqEqExpression(PsiExpression argument) { if (!(argument instanceof PsiBinaryExpression)) { return false; } final PsiBinaryExpression binaryExpression = (PsiBinaryExpression)argument; final IElementType tokenType = binaryExpression.getOperationTokenType(); return JavaTokenType.EQEQ.equals(tokenType); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.codehaus.groovy.transform.sc; import groovy.lang.Reference; import groovy.transform.CompileStatic; import groovy.transform.TypeChecked; import org.codehaus.groovy.ast.*; import org.codehaus.groovy.ast.expr.*; import org.codehaus.groovy.ast.stmt.EmptyStatement; import org.codehaus.groovy.ast.stmt.ExpressionStatement; import org.codehaus.groovy.ast.stmt.ForStatement; import org.codehaus.groovy.ast.stmt.Statement; import org.codehaus.groovy.ast.tools.GeneralUtils; import org.codehaus.groovy.classgen.asm.*; import org.codehaus.groovy.classgen.asm.sc.StaticCompilationMopWriter; import org.codehaus.groovy.classgen.asm.sc.StaticTypesTypeChooser; import org.codehaus.groovy.control.SourceUnit; import org.codehaus.groovy.syntax.Token; import org.codehaus.groovy.transform.stc.StaticTypeCheckingSupport; import org.codehaus.groovy.transform.stc.StaticTypeCheckingVisitor; import org.codehaus.groovy.transform.stc.StaticTypesMarker; import org.objectweb.asm.Opcodes; import java.util.*; import static org.codehaus.groovy.ast.tools.GenericsUtils.*; import static org.codehaus.groovy.transform.sc.StaticCompilationMetadataKeys.*; import static 
org.codehaus.groovy.transform.stc.StaticTypesMarker.DIRECT_METHOD_CALL_TARGET;
import static org.objectweb.asm.Opcodes.ACC_PUBLIC;

/**
 * This visitor is responsible for amending the AST with static compilation metadata or transforming
 * the AST so that a class or a method can be statically compiled. It may also throw errors specific
 * to static compilation which are not considered as an error at the type check pass. For example,
 * usage of the spread operator is not allowed in statically compiled portions of code, while it may
 * be statically checked.
 *
 * Static compilation relies on static type checking, which explains why this visitor extends the
 * type checker visitor.
 *
 * @author Cedric Champeau
 */
public class StaticCompilationVisitor extends StaticTypeCheckingVisitor {

    // Marker annotations this visitor reacts to (checked via getTypeCheckingAnnotations()).
    private static final ClassNode TYPECHECKED_CLASSNODE = ClassHelper.make(TypeChecked.class);
    private static final ClassNode COMPILESTATIC_CLASSNODE = ClassHelper.make(CompileStatic.class);
    private static final ClassNode[] TYPECHECKED_ANNOTATIONS = {TYPECHECKED_CLASSNODE, COMPILESTATIC_CLASSNODE};

    // Pre-resolved java.util.ArrayList members, shared by code generation for statically
    // compiled constructs.
    public static final ClassNode ARRAYLIST_CLASSNODE = ClassHelper.make(ArrayList.class);
    public static final MethodNode ARRAYLIST_CONSTRUCTOR;
    public static final MethodNode ARRAYLIST_ADD_METHOD =
            ARRAYLIST_CLASSNODE.getMethod("add", new Parameter[]{new Parameter(ClassHelper.OBJECT_TYPE, "o")});

    static {
        // Synthetic constructor node standing in for ArrayList's public no-arg constructor.
        ARRAYLIST_CONSTRUCTOR = new ConstructorNode(ACC_PUBLIC, Parameter.EMPTY_ARRAY, ClassNode.EMPTY_ARRAY, EmptyStatement.INSTANCE);
        ARRAYLIST_CONSTRUCTOR.setDeclaringClass(StaticCompilationVisitor.ARRAYLIST_CLASSNODE);
    }

    // Resolves the statically inferred type of an expression.
    private final TypeChooser typeChooser = new StaticTypesTypeChooser();

    // Class currently being visited; saved and restored around nested visits in visitClass.
    private ClassNode classNode;

    public StaticCompilationVisitor(final SourceUnit unit, final ClassNode node) {
        super(unit, node);
    }

    @Override
    protected ClassNode[] getTypeCheckingAnnotations() {
        return TYPECHECKED_ANNOTATIONS;
    }

    /**
     * Returns whether the given node is subject to static compilation, consulting the node's own
     * STATIC_COMPILE_NODE metadata first and then falling back to its declaring/outer class.
     */
    public static boolean isStaticallyCompiled(AnnotatedNode node) {
        if
(node.getNodeMetaData(STATIC_COMPILE_NODE)!=null) return (Boolean)node.getNodeMetaData(STATIC_COMPILE_NODE); if (node instanceof MethodNode) { return isStaticallyCompiled(node.getDeclaringClass()); } if (node instanceof InnerClassNode) { return isStaticallyCompiled(((InnerClassNode)node).getOuterClass()); } return false; } private void addPrivateFieldAndMethodAccessors(ClassNode node) { addPrivateBridgeMethods(node); addPrivateFieldsAccessors(node); Iterator<InnerClassNode> it = node.getInnerClasses(); while (it.hasNext()) { addPrivateFieldAndMethodAccessors(it.next()); } } @Override public void visitClass(final ClassNode node) { boolean skip = shouldSkipClassNode(node); if (!skip && !anyMethodSkip(node)) { node.putNodeMetaData(MopWriter.Factory.class, StaticCompilationMopWriter.FACTORY); } ClassNode oldCN = classNode; classNode = node; Iterator<InnerClassNode> innerClasses = classNode.getInnerClasses(); while (innerClasses.hasNext()) { InnerClassNode innerClassNode = innerClasses.next(); boolean innerStaticCompile = !(skip || isSkippedInnerClass(innerClassNode)); innerClassNode.putNodeMetaData(STATIC_COMPILE_NODE, innerStaticCompile); innerClassNode.putNodeMetaData(WriterControllerFactory.class, node.getNodeMetaData(WriterControllerFactory.class)); if (innerStaticCompile && !anyMethodSkip(innerClassNode)) { innerClassNode.putNodeMetaData(MopWriter.Factory.class, StaticCompilationMopWriter.FACTORY); } } super.visitClass(node); addPrivateFieldAndMethodAccessors(node); classNode = oldCN; } private boolean anyMethodSkip(final ClassNode node) { for (MethodNode methodNode : node.getMethods()) { if (isSkipMode(methodNode)) return true; } return false; } /** * If we are in a constructor, that is static compiled, but in a class, that * is not, it may happen that init code from object initializers, fields * or properties is added into the constructor code. The backend assumes * a purely static contructor, so it may fail if it encounters dynamic * code here. 
Thus we make this kind of code fail. */
    private void checkForConstructorWithCSButClassWithout(MethodNode node) {
        // Only relevant for constructors that are themselves marked for static compilation.
        if (!(node instanceof ConstructorNode)) return;
        Object meta = node.getNodeMetaData(STATIC_COMPILE_NODE);
        if (!Boolean.TRUE.equals(meta)) return;
        // If the enclosing class is statically compiled too, the backend can handle it.
        ClassNode clz = typeCheckingContext.getEnclosingClassNode();
        meta = clz.getNodeMetaData(STATIC_COMPILE_NODE);
        if (Boolean.TRUE.equals(meta)) return;
        // No object initializers, fields or properties means no dynamic init code can leak
        // into the constructor, so this combination is safe.
        if (    clz.getObjectInitializerStatements().isEmpty() &&
                clz.getFields().isEmpty() &&
                clz.getProperties().isEmpty()) {
            return;
        }
        addStaticTypeError("Cannot statically compile constructor implicitly including non static elements from object initializers, properties or fields.",node);
    }

    @Override
    public void visitMethod(final MethodNode node) {
        // Mark skipped methods explicitly so downstream phases see an authoritative "false".
        if (isSkipMode(node)) {
            node.putNodeMetaData(STATIC_COMPILE_NODE, false);
        }
        super.visitMethod(node);
        checkForConstructorWithCSButClassWithout(node);
    }

    /**
     * Adds special accessors and mutators for private fields so that inner classes can get/set them
     */
    @SuppressWarnings("unchecked")
    private void addPrivateFieldsAccessors(ClassNode node) {
        // Fields recorded by the type checker as accessed/mutated from inner classes.
        Set<ASTNode> accessedFields = (Set<ASTNode>) node.getNodeMetaData(StaticTypesMarker.PV_FIELDS_ACCESS);
        Set<ASTNode> mutatedFields = (Set<ASTNode>) node.getNodeMetaData(StaticTypesMarker.PV_FIELDS_MUTATION);
        if (accessedFields == null && mutatedFields == null) return;
        Map<String, MethodNode> privateFieldAccessors = (Map<String, MethodNode>) node.getNodeMetaData(PRIVATE_FIELDS_ACCESSORS);
        Map<String, MethodNode> privateFieldMutators = (Map<String, MethodNode>) node.getNodeMetaData(PRIVATE_FIELDS_MUTATORS);
        if (privateFieldAccessors != null || privateFieldMutators != null) {
            // already added
            return;
        }
        // Counter used to build unique pfaccess$ method names; incremented before first use.
        int acc = -1;
        privateFieldAccessors = accessedFields != null ? new HashMap<String, MethodNode>() : null;
        privateFieldMutators = mutatedFields != null ?
new HashMap<String, MethodNode>() : null; final int access = Opcodes.ACC_STATIC | Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC; for (FieldNode fieldNode : node.getFields()) { boolean generateAccessor = accessedFields != null && accessedFields.contains(fieldNode); boolean generateMutator = mutatedFields != null && mutatedFields.contains(fieldNode); if (generateAccessor) { acc++; Parameter param = new Parameter(node.getPlainNodeReference(), "$that"); Expression receiver = fieldNode.isStatic() ? new ClassExpression(node) : new VariableExpression(param); Statement stmt = new ExpressionStatement(new PropertyExpression( receiver, fieldNode.getName() )); MethodNode accessor = node.addMethod("pfaccess$" + acc, access, fieldNode.getOriginType(), new Parameter[]{param}, ClassNode.EMPTY_ARRAY, stmt); privateFieldAccessors.put(fieldNode.getName(), accessor); } if (generateMutator) { //increment acc if it hasn't been incremented in the current iteration if (!generateAccessor) acc++; Parameter param = new Parameter(node.getPlainNodeReference(), "$that"); Expression receiver = fieldNode.isStatic() ? new ClassExpression(node) : new VariableExpression(param); Parameter value = new Parameter(fieldNode.getOriginType(), "$value"); Statement stmt = GeneralUtils.assignS( new PropertyExpression(receiver, fieldNode.getName()), new VariableExpression(value) ); MethodNode mutator = node.addMethod("pfaccess$0" + acc, access, fieldNode.getOriginType(), new Parameter[]{param, value}, ClassNode.EMPTY_ARRAY, stmt); privateFieldMutators.put(fieldNode.getName(), mutator); } } if (privateFieldAccessors != null) node.setNodeMetaData(PRIVATE_FIELDS_ACCESSORS, privateFieldAccessors); if (privateFieldMutators != null) node.setNodeMetaData(PRIVATE_FIELDS_MUTATORS, privateFieldMutators); } /** * This method is used to add "bridge" methods for private methods of an inner/outer * class, so that the outer class is capable of calling them. It does basically * the same job as access$000 like methods in Java. 
* * @param node an inner/outer class node for which to generate bridge methods */ @SuppressWarnings("unchecked") private void addPrivateBridgeMethods(final ClassNode node) { Set<ASTNode> accessedMethods = (Set<ASTNode>) node.getNodeMetaData(StaticTypesMarker.PV_METHODS_ACCESS); if (accessedMethods==null) return; List<MethodNode> methods = new ArrayList<MethodNode>(node.getAllDeclaredMethods()); Map<MethodNode, MethodNode> privateBridgeMethods = (Map<MethodNode, MethodNode>) node.getNodeMetaData(PRIVATE_BRIDGE_METHODS); if (privateBridgeMethods!=null) { // private bridge methods already added return; } privateBridgeMethods = new HashMap<MethodNode, MethodNode>(); int i=-1; final int access = Opcodes.ACC_STATIC | Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC; for (MethodNode method : methods) { if (accessedMethods.contains(method)) { List<String> methodSpecificGenerics = methodSpecificGenerics(method); i++; ClassNode declaringClass = method.getDeclaringClass(); Map<String,ClassNode> genericsSpec = createGenericsSpec(node); genericsSpec = addMethodGenerics(method, genericsSpec); extractSuperClassGenerics(node, declaringClass, genericsSpec); Parameter[] methodParameters = method.getParameters(); Parameter[] newParams = new Parameter[methodParameters.length+1]; for (int j = 1; j < newParams.length; j++) { Parameter orig = methodParameters[j-1]; newParams[j] = new Parameter( correctToGenericsSpecRecurse(genericsSpec, orig.getOriginType(), methodSpecificGenerics), orig.getName() ); } newParams[0] = new Parameter(node.getPlainNodeReference(), "$that"); Expression arguments; if (method.getParameters()==null || method.getParameters().length==0) { arguments = ArgumentListExpression.EMPTY_ARGUMENTS; } else { List<Expression> args = new LinkedList<Expression>(); for (Parameter parameter : methodParameters) { args.add(new VariableExpression(parameter)); } arguments = new ArgumentListExpression(args); } Expression receiver = method.isStatic()?new ClassExpression(node):new 
VariableExpression(newParams[0]); MethodCallExpression mce = new MethodCallExpression(receiver, method.getName(), arguments); mce.setMethodTarget(method); ExpressionStatement returnStatement = new ExpressionStatement(mce); MethodNode bridge = node.addMethod( "access$"+i, access, correctToGenericsSpecRecurse(genericsSpec, method.getReturnType(), methodSpecificGenerics), newParams, method.getExceptions(), returnStatement); GenericsType[] origGenericsTypes = method.getGenericsTypes(); if (origGenericsTypes !=null) { bridge.setGenericsTypes(applyGenericsContextToPlaceHolders(genericsSpec,origGenericsTypes)); } privateBridgeMethods.put(method, bridge); bridge.addAnnotation(new AnnotationNode(COMPILESTATIC_CLASSNODE)); } } if (!privateBridgeMethods.isEmpty()) { node.setNodeMetaData(PRIVATE_BRIDGE_METHODS, privateBridgeMethods); } } private static List<String> methodSpecificGenerics(final MethodNode method) { List<String> genericTypeTokens = new ArrayList<String>(); GenericsType[] candidateGenericsTypes = method.getGenericsTypes(); if (candidateGenericsTypes != null) { for (GenericsType gt : candidateGenericsTypes) { genericTypeTokens.add(gt.getName()); } } return genericTypeTokens; } private void memorizeInitialExpressions(final MethodNode node) { // add node metadata for default parameters because they are erased by the Verifier if (node.getParameters()!=null) { for (Parameter parameter : node.getParameters()) { parameter.putNodeMetaData(StaticTypesMarker.INITIAL_EXPRESSION, parameter.getInitialExpression()); } } } @Override public void visitSpreadExpression(final SpreadExpression expression) { } @Override public void visitMethodCallExpression(final MethodCallExpression call) { super.visitMethodCallExpression(call); MethodNode target = (MethodNode) call.getNodeMetaData(DIRECT_METHOD_CALL_TARGET); if (target!=null) { call.setMethodTarget(target); memorizeInitialExpressions(target); } if (call.getMethodTarget()==null && call.getLineNumber()>0) { addError("Target method 
for method call expression hasn't been set", call); } } @Override public void visitConstructorCallExpression(final ConstructorCallExpression call) { super.visitConstructorCallExpression(call); MethodNode target = (MethodNode) call.getNodeMetaData(DIRECT_METHOD_CALL_TARGET); if (target==null && call.getLineNumber()>0) { addError("Target constructor for constructor call expression hasn't been set", call); } else { if (target==null) { // try to find a target ArgumentListExpression argumentListExpression = InvocationWriter.makeArgumentList(call.getArguments()); List<Expression> expressions = argumentListExpression.getExpressions(); ClassNode[] args = new ClassNode[expressions.size()]; for (int i = 0; i < args.length; i++) { args[i] = typeChooser.resolveType(expressions.get(i), classNode); } MethodNode constructor = findMethodOrFail(call, call.isSuperCall() ? classNode.getSuperClass() : classNode, "<init>", args); call.putNodeMetaData(DIRECT_METHOD_CALL_TARGET, constructor); target = constructor; } } if (target!=null) { memorizeInitialExpressions(target); } } @Override public void visitForLoop(final ForStatement forLoop) { super.visitForLoop(forLoop); Expression collectionExpression = forLoop.getCollectionExpression(); if (!(collectionExpression instanceof ClosureListExpression)) { final ClassNode collectionType = getType(forLoop.getCollectionExpression()); ClassNode componentType = inferLoopElementType(collectionType); forLoop.getVariable().setType(componentType); forLoop.getVariable().setOriginType(componentType); } } @Override protected MethodNode findMethodOrFail(final Expression expr, final ClassNode receiver, final String name, final ClassNode... 
args) { MethodNode methodNode = super.findMethodOrFail(expr, receiver, name, args); if (expr instanceof BinaryExpression && methodNode!=null) { expr.putNodeMetaData(BINARY_EXP_TARGET, new Object[] {methodNode, name}); } return methodNode; } @Override protected boolean existsProperty(final PropertyExpression pexp, final boolean checkForReadOnly, final ClassCodeVisitorSupport visitor) { Expression objectExpression = pexp.getObjectExpression(); ClassNode objectExpressionType = getType(objectExpression); final Reference<ClassNode> rType = new Reference<ClassNode>(objectExpressionType); ClassCodeVisitorSupport receiverMemoizer = new ClassCodeVisitorSupport() { @Override protected SourceUnit getSourceUnit() { return null; } public void visitField(final FieldNode node) { if (visitor!=null) visitor.visitField(node); ClassNode declaringClass = node.getDeclaringClass(); if (declaringClass!=null) { if (StaticTypeCheckingSupport.implementsInterfaceOrIsSubclassOf(declaringClass, ClassHelper.LIST_TYPE)) { boolean spread = declaringClass.getDeclaredField(node.getName()) != node; pexp.setSpreadSafe(spread); } rType.set(declaringClass); } } public void visitMethod(final MethodNode node) { if (visitor!=null) visitor.visitMethod(node); ClassNode declaringClass = node.getDeclaringClass(); if (declaringClass!=null){ if (StaticTypeCheckingSupport.implementsInterfaceOrIsSubclassOf(declaringClass, ClassHelper.LIST_TYPE)) { List<MethodNode> properties = declaringClass.getDeclaredMethods(node.getName()); boolean spread = true; for (MethodNode mn : properties) { if (node==mn) { spread = false; break; } } // it's no real property but a property of the component pexp.setSpreadSafe(spread); } rType.set(declaringClass); } } @Override public void visitProperty(final PropertyNode node) { if (visitor!=null) visitor.visitProperty(node); ClassNode declaringClass = node.getDeclaringClass(); if (declaringClass!=null) { if (StaticTypeCheckingSupport.implementsInterfaceOrIsSubclassOf(declaringClass, 
ClassHelper.LIST_TYPE)) { List<PropertyNode> properties = declaringClass.getProperties(); boolean spread = true; for (PropertyNode propertyNode : properties) { if (propertyNode==node) { spread = false; break; } } // it's no real property but a property of the component pexp.setSpreadSafe(spread); } rType.set(declaringClass); } } }; boolean exists = super.existsProperty(pexp, checkForReadOnly, receiverMemoizer); if (exists) { if (objectExpression.getNodeMetaData(StaticCompilationMetadataKeys.PROPERTY_OWNER)==null) { objectExpression.putNodeMetaData(StaticCompilationMetadataKeys.PROPERTY_OWNER, rType.get()); } if (StaticTypeCheckingSupport.implementsInterfaceOrIsSubclassOf(objectExpressionType, ClassHelper.LIST_TYPE)) { objectExpression.putNodeMetaData(COMPONENT_TYPE, inferComponentType(objectExpressionType, ClassHelper.int_TYPE)); } } return exists; } @Override public void visitPropertyExpression(final PropertyExpression pexp) { super.visitPropertyExpression(pexp); Object dynamic = pexp.getNodeMetaData(StaticTypesMarker.DYNAMIC_RESOLUTION); if (dynamic !=null) { pexp.getObjectExpression().putNodeMetaData(StaticCompilationMetadataKeys.RECEIVER_OF_DYNAMIC_PROPERTY, dynamic); } } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.youtube.model; /** * Model definition for SearchListResponse. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the YouTube Data API. For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class SearchListResponse extends com.google.api.client.json.GenericJson { /** * Etag of this resource. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String etag; /** * Serialized EventId of the request which produced this response. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String eventId; /** * A list of results that match the search criteria. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<SearchResult> items; /** * Identifies what kind of resource this is. Value: the fixed string "youtube#searchListResponse". * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.lang.String kind; /** * The token that can be used as the value of the pageToken parameter to retrieve the next page in * the result set. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String nextPageToken; /** * The value may be {@code null}. */ @com.google.api.client.util.Key private PageInfo pageInfo; /** * The token that can be used as the value of the pageToken parameter to retrieve the previous * page in the result set. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String prevPageToken; /** * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String regionCode; /** * The value may be {@code null}. */ @com.google.api.client.util.Key private TokenPagination tokenPagination; /** * The visitorId identifies the visitor. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String visitorId; /** * Etag of this resource. * @return value or {@code null} for none */ public java.lang.String getEtag() { return etag; } /** * Etag of this resource. * @param etag etag or {@code null} for none */ public SearchListResponse setEtag(java.lang.String etag) { this.etag = etag; return this; } /** * Serialized EventId of the request which produced this response. * @return value or {@code null} for none */ public java.lang.String getEventId() { return eventId; } /** * Serialized EventId of the request which produced this response. * @param eventId eventId or {@code null} for none */ public SearchListResponse setEventId(java.lang.String eventId) { this.eventId = eventId; return this; } /** * A list of results that match the search criteria. * @return value or {@code null} for none */ public java.util.List<SearchResult> getItems() { return items; } /** * A list of results that match the search criteria. 
* @param items items or {@code null} for none */ public SearchListResponse setItems(java.util.List<SearchResult> items) { this.items = items; return this; } /** * Identifies what kind of resource this is. Value: the fixed string "youtube#searchListResponse". * @return value or {@code null} for none */ public java.lang.String getKind() { return kind; } /** * Identifies what kind of resource this is. Value: the fixed string "youtube#searchListResponse". * @param kind kind or {@code null} for none */ public SearchListResponse setKind(java.lang.String kind) { this.kind = kind; return this; } /** * The token that can be used as the value of the pageToken parameter to retrieve the next page in * the result set. * @return value or {@code null} for none */ public java.lang.String getNextPageToken() { return nextPageToken; } /** * The token that can be used as the value of the pageToken parameter to retrieve the next page in * the result set. * @param nextPageToken nextPageToken or {@code null} for none */ public SearchListResponse setNextPageToken(java.lang.String nextPageToken) { this.nextPageToken = nextPageToken; return this; } /** * @return value or {@code null} for none */ public PageInfo getPageInfo() { return pageInfo; } /** * @param pageInfo pageInfo or {@code null} for none */ public SearchListResponse setPageInfo(PageInfo pageInfo) { this.pageInfo = pageInfo; return this; } /** * The token that can be used as the value of the pageToken parameter to retrieve the previous * page in the result set. * @return value or {@code null} for none */ public java.lang.String getPrevPageToken() { return prevPageToken; } /** * The token that can be used as the value of the pageToken parameter to retrieve the previous * page in the result set. 
* @param prevPageToken prevPageToken or {@code null} for none */ public SearchListResponse setPrevPageToken(java.lang.String prevPageToken) { this.prevPageToken = prevPageToken; return this; } /** * @return value or {@code null} for none */ public java.lang.String getRegionCode() { return regionCode; } /** * @param regionCode regionCode or {@code null} for none */ public SearchListResponse setRegionCode(java.lang.String regionCode) { this.regionCode = regionCode; return this; } /** * @return value or {@code null} for none */ public TokenPagination getTokenPagination() { return tokenPagination; } /** * @param tokenPagination tokenPagination or {@code null} for none */ public SearchListResponse setTokenPagination(TokenPagination tokenPagination) { this.tokenPagination = tokenPagination; return this; } /** * The visitorId identifies the visitor. * @return value or {@code null} for none */ public java.lang.String getVisitorId() { return visitorId; } /** * The visitorId identifies the visitor. * @param visitorId visitorId or {@code null} for none */ public SearchListResponse setVisitorId(java.lang.String visitorId) { this.visitorId = visitorId; return this; } @Override public SearchListResponse set(String fieldName, Object value) { return (SearchListResponse) super.set(fieldName, value); } @Override public SearchListResponse clone() { return (SearchListResponse) super.clone(); } }
/*
 * Zed Attack Proxy (ZAP) and its related class files.
 *
 * ZAP is an HTTP/HTTPS proxy for assessing web application security.
 *
 * Copyright 2017 The ZAP Development Team
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.parosproxy.paros.core.scanner;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.parosproxy.paros.network.HttpHeader;
import org.parosproxy.paros.network.HttpMessage;
import org.zaproxy.zap.core.scanner.InputVector;

/**
 * Variant used for "multipart/form-data" POST request handling. Takes all parameters passed inside
 * the form-data structure and sets them for injection.
 *
 * <p>For each part of the body the parameter value is exposed; for file-upload parts the filename
 * and the part's Content-Type are additionally exposed as injectable parameters.
 */
public class VariantMultipartFormParameters implements Variant {

    private static final Logger LOGGER =
            LogManager.getLogger(VariantMultipartFormParameters.class);

    // Extracts the "name" attribute from a part's Content-Disposition header line.
    private static final Pattern FIELD_NAME_PATTERN =
            Pattern.compile(
                    "\\s*content-disposition\\s*:.*\\s+name\\s*\\=?\\s*\\\"?(?<name>.[^;\\\"\\n]*)\\\"?\\;?.*",
                    Pattern.CASE_INSENSITIVE);
    // Extracts the part content: everything after the first CRLFCRLF separator.
    private static final Pattern FIELD_VALUE_PATTERN = Pattern.compile("[\\r\\n]{2}(?<value>.*)");
    // Extracts the "filename" attribute from a part's Content-Disposition header line.
    private static final Pattern FILENAME_PART_PATTERN =
            Pattern.compile(
                    "\\s*content-disposition\\s*:.*filename\\s*\\=?\\s*\\\"?(?<filename>.[^;\"\\n]*)\\\"?\\;?.*",
                    Pattern.CASE_INSENSITIVE);
    // http://fiddle.re/etxbnd (Click Java, set case insensitive, and hit "test")
    // Extracts the Content-Type header of a file-upload part.
    private static final Pattern CONTENTTYPE_PART_PATTERN =
            Pattern.compile(
                    "\\s*content-disposition.*content-type\\s*:\\s*\\s*\\\"?(?<contenttype>.[^;\"\\r\\n]*)\\\"?\\;?.*",
                    Pattern.CASE_INSENSITIVE | Pattern.DOTALL);
    // http://www.regexplanet.com/share/index.html?share=yyyyyythear (Click Java, set case
    // insensitive & DOTALL, and hit "test")

    /** Parameters exposed to the scanner; unmodifiable once extracted. */
    private List<NameValuePair> params = Collections.emptyList();

    /** Internal bookkeeping: body offsets of each parameter, indexed by (position - 1). */
    private List<MultipartFormParameter> multiPartParams = new ArrayList<>();

    /**
     * Extracts the multipart form parameters from the given message.
     *
     * <p>Does nothing if the message's Content-Type is not {@code multipart/form-data} or if it
     * has no usable boundary.
     *
     * @param msg the message to extract the parameters from, must not be {@code null}
     * @throws IllegalArgumentException if {@code msg} is {@code null}
     */
    @Override
    public void setMessage(HttpMessage msg) {
        if (msg == null) {
            throw new IllegalArgumentException("Parameter message must not be null.");
        }

        String contentType = msg.getRequestHeader().getHeader(HttpHeader.CONTENT_TYPE);
        if (contentType == null || !contentType.toLowerCase().startsWith("multipart/form-data")) {
            return;
        }

        String rawBoundary = getBoundary(contentType);
        if (rawBoundary == null) {
            // FIX: previously a multipart Content-Type without a boundary parameter made
            // getBoundary(...) return null, and the body was then split on the literal
            // string "null\r\n". Such malformed messages are now skipped.
            LOGGER.debug("Multipart Content-Type without boundary, skipping: {}", contentType);
            return;
        }

        // FIX: reset previous state, otherwise a second setMessage(...) call would append to
        // multiPartParams while params is replaced, breaking the position -> index mapping
        // relied upon by setParameter(...).
        multiPartParams = new ArrayList<>();

        ArrayList<NameValuePair> extractedParameters = new ArrayList<>();
        int position = 0;
        int offset = 0;
        String boundary = rawBoundary + HttpHeader.CRLF;
        for (String part : msg.getRequestBody().toString().split(boundary)) {
            if (!StringUtils.isBlank(part)) {
                int headersEnd = part.indexOf(HttpHeader.CRLF + HttpHeader.CRLF);
                String partHeaderLine = headersEnd == -1 ? null : part.substring(0, headersEnd);
                // Re-prepend the boundary consumed by split() so offsets computed from this
                // part line up with absolute offsets in the original body.
                part = boundary + part;
                if (partHeaderLine == null) {
                    // FIX: a part without the CRLFCRLF header/content separator previously
                    // caused a StringIndexOutOfBoundsException; it is now skipped.
                    LOGGER.debug("Skipping malformed multipart part (no header separator).");
                } else {
                    boolean isFileParam = partHeaderLine.contains("filename=");
                    Matcher nameMatcher = FIELD_NAME_PATTERN.matcher(partHeaderLine);
                    Matcher valueMatcher = FIELD_VALUE_PATTERN.matcher(part);
                    nameMatcher.find();
                    valueMatcher.find();
                    if (StringUtils.isBlank(valueMatcher.group("value"))) {
                        valueMatcher.find(); // Need to skip one find for some reason...
                        // https://regex101.com/r/4ig6Wk/1
                        // http://fiddle.re/23cudd (Click Java, hit "test")
                    }
                    String name = nameMatcher.group("name");
                    // Value doesn't include boundary, headerline, or double CRLF
                    String value =
                            part.replaceAll(
                                    Pattern.quote(boundary + partHeaderLine)
                                            + HttpHeader.CRLF
                                            + HttpHeader.CRLF,
                                    "");
                    value =
                            value.replaceAll(
                                    HttpHeader.CRLF
                                            + "("
                                            + Pattern.quote(rawBoundary)
                                            + "--"
                                            + HttpHeader.CRLF
                                            + ")?$",
                                    ""); // Strip final boundary
                    if (isFileParam) {
                        // Reserve two extra positions: the filename and content-type entries
                        // inserted below share the numbering of this file part.
                        position += 2;
                        extractedParameters.add(
                                new NameValuePair(
                                        NameValuePair.TYPE_MULTIPART_DATA_FILE_PARAM,
                                        name,
                                        value,
                                        position));
                    } else {
                        extractedParameters.add(
                                new NameValuePair(
                                        NameValuePair.TYPE_MULTIPART_DATA_PARAM,
                                        name,
                                        value,
                                        position));
                    }
                    int start =
                            offset
                                    + part.indexOf(HttpHeader.CRLF + HttpHeader.CRLF)
                                    + 4; // 4 for two CRLFs
                    int end = start + value.length();
                    LOGGER.debug(
                            "Name: {} O: {} S: {} E: {} Pos: {}",
                            name,
                            offset,
                            start,
                            end,
                            position);
                    multiPartParams.add(
                            new MultipartFormParameter(
                                    name,
                                    valueMatcher.group("value"),
                                    start,
                                    end,
                                    position,
                                    MultipartFormParameter.Type.GENERAL));
                    LOGGER.debug("Name: {} value: {}", name, valueMatcher.group("value"));
                    if (isFileParam) {
                        position -= 2;
                        // Extract the filename
                        Matcher fnValueMatcher = FILENAME_PART_PATTERN.matcher(part);
                        fnValueMatcher.find();
                        String fnValue = fnValueMatcher.group("filename");
                        extractedParameters.add(
                                extractedParameters.size() - 1,
                                new NameValuePair(
                                        NameValuePair.TYPE_MULTIPART_DATA_FILE_NAME,
                                        name,
                                        fnValue,
                                        position));
                        int fnStart = offset + part.indexOf(fnValue);
                        int fnEnd = fnStart + fnValue.length();
                        LOGGER.debug(
                                "Name: {} O: {} S: {} E: {} Pos: {}",
                                name,
                                offset,
                                fnStart,
                                fnEnd,
                                position);
                        multiPartParams.add(
                                multiPartParams.size() - 1,
                                new MultipartFormParameter(
                                        name,
                                        fnValue,
                                        fnStart,
                                        fnEnd,
                                        position,
                                        MultipartFormParameter.Type.FILE_NAME));
                        // Extract the content-type
                        Matcher ctValueMatcher = CONTENTTYPE_PART_PATTERN.matcher(part);
                        ctValueMatcher.find();
                        String ctValue = ctValueMatcher.group("contenttype");
                        extractedParameters.add(
                                extractedParameters.size() - 1,
                                new NameValuePair(
                                        NameValuePair.TYPE_MULTIPART_DATA_FILE_CONTENTTYPE,
                                        name,
                                        ctValue,
                                        ++position));
                        int ctStart = offset + part.indexOf(ctValue);
                        int ctEnd = ctStart + ctValue.length();
                        LOGGER.debug(
                                "Name: {} O: {} S: {} E: {} Pos: {}",
                                name,
                                offset,
                                ctStart,
                                ctEnd,
                                position);
                        multiPartParams.add(
                                multiPartParams.size() - 1,
                                new MultipartFormParameter(
                                        name,
                                        ctValue,
                                        ctStart,
                                        ctEnd,
                                        position,
                                        MultipartFormParameter.Type.FILE_CONTENT_TYPE));
                    }
                }
            }
            // Position and offset advance for every part, including the blank preamble,
            // so that (position - 1) keeps indexing multiPartParams correctly.
            position++;
            offset = offset + part.length();
        }
        params = Collections.unmodifiableList(extractedParameters);
    }

    /**
     * Returns the extracted parameters; empty if {@link #setMessage(HttpMessage)} was not called
     * or the message was not a usable multipart request.
     */
    @Override
    public List<NameValuePair> getParamList() {
        return params;
    }

    /**
     * Replaces the parameter at {@code originalPair}'s position with {@code value} in the request
     * body.
     *
     * @return the new request body
     */
    @Override
    public String setParameter(
            HttpMessage msg, NameValuePair originalPair, String name, String value) {
        return setParameter(
                msg,
                Collections.singletonList(originalPair.getPosition()),
                Collections.singletonList(value));
    }

    /**
     * Same as {@link #setParameter(HttpMessage, NameValuePair, String, String)} — multipart
     * content is injected verbatim, no escaping is applied.
     */
    @Override
    public String setEscapedParameter(
            HttpMessage msg, NameValuePair originalPair, String name, String value) {
        return setParameter(
                msg,
                Collections.singletonList(originalPair.getPosition()),
                Collections.singletonList(value));
    }

    /** Injects all the given vectors' values into the request body in a single pass. */
    @Override
    public void setParameters(HttpMessage msg, List<InputVector> inputVectors) {
        this.setParameter(
                msg,
                inputVectors.stream().map(InputVector::getPosition).collect(Collectors.toList()),
                inputVectors.stream().map(InputVector::getValue).collect(Collectors.toList()));
    }

    /**
     * Rewrites the request body, replacing each parameter identified by its 1-based position with
     * the corresponding value.
     *
     * <p>An incremental {@code offset} tracks how much earlier replacements grew or shrank the
     * body so later replacements land on the right characters.
     *
     * @return the new request body
     */
    private String setParameter(
            HttpMessage msg, List<Integer> nameValuePairPositions, List<String> values) {
        StringBuilder newBodyBuilder = new StringBuilder(msg.getRequestBody().toString());
        int offset = 0;
        for (int index = 0; index < nameValuePairPositions.size(); index++) {
            int originalPosition = nameValuePairPositions.get(index);
            String value = values.get(index);
            int idx = originalPosition - 1;
            MultipartFormParameter mpPart = this.multiPartParams.get(idx);
            LOGGER.debug(
                    "i: {} pos: {} S: {} E: {} O: {}",
                    idx,
                    originalPosition,
                    mpPart.getStart(),
                    mpPart.getEnd(),
                    offset);
            newBodyBuilder.replace(mpPart.getStart() + offset, mpPart.getEnd() + offset, value);
            offset = offset + value.length() - mpPart.getEnd() + mpPart.getStart();
        }
        String newBody = newBodyBuilder.toString();
        msg.getRequestBody().setBody(newBody);
        return newBody;
    }

    /**
     * Extracts the multipart boundary token from the Content-Type header value, prefixed with the
     * leading {@code "--"} used in the body.
     *
     * @param contentTypeHeader the Content-Type header value
     * @return the boundary (including the leading {@code "--"}), or {@code null} if the header has
     *     no {@code boundary=} parameter
     */
    private String getBoundary(String contentTypeHeader) {
        int index = contentTypeHeader.lastIndexOf("boundary=");
        if (index == -1) {
            return null;
        }
        String boundary = contentTypeHeader.substring(index + 9); // "boundary=" is 9
        if (boundary.charAt(0) == '"') {
            // Quoted boundary: strip the surrounding quotes.
            index = boundary.lastIndexOf('"');
            boundary = boundary.substring(1, index);
        }
        // The real token is always preceded by an extra "--"
        boundary = "--" + boundary;
        return boundary;
    }
}
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.openapi.editor.impl; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.impl.ApplicationInfoImpl; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.ex.MarkupIterator; import com.intellij.openapi.editor.ex.RangeMarkerEx; import com.intellij.openapi.util.Getter; import com.intellij.util.IncorrectOperationException; import com.intellij.util.Processor; import com.intellij.util.SmartList; import com.intellij.util.WalkingState; import com.intellij.util.concurrency.AtomicFieldUpdater; import gnu.trove.TLongHashSet; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.lang.ref.ReferenceQueue; import java.lang.ref.WeakReference; import java.util.Comparator; import java.util.ConcurrentModificationException; import java.util.List; import java.util.NoSuchElementException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; abstract class IntervalTreeImpl<T> extends RedBlackTree<T> implements IntervalTree<T> { static final Logger LOG = Logger.getInstance("#com.intellij.openapi.editor.impl.RangeMarkerTree"); static final boolean DEBUG = LOG.isDebugEnabled() || ApplicationManager.getApplication() != null && ApplicationManager.getApplication().isUnitTestMode(); private int keySize; // number of all intervals, counting all duplicates, some of them maybe gced final ReadWriteLock l = new ReentrantReadWriteLock(); protected abstract int compareEqualStartIntervals(@NotNull IntervalNode<T> i1, @NotNull IntervalNode<T> i2); private final ReferenceQueue<T> myReferenceQueue = new ReferenceQueue<>(); private int 
deadReferenceCount; static class IntervalNode<E> extends RedBlackTree.Node<E> implements MutableInterval { private volatile int myStart; private volatile int myEnd; private static final byte ATTACHED_TO_TREE_FLAG = COLOR_MASK <<1; // true if the node is inserted to the tree final List<Getter<E>> intervals; int maxEnd; // max of all intervalEnd()s among all children. int delta; // delta of startOffset. getStartOffset() = myStartOffset + Sum of deltas up to root private volatile long cachedDeltaUpToRoot; // field (packed to long for atomicity) containing deltaUpToRoot, node modCount and allDeltasUpAreNull flag // fields are packed as following // private int modCount; // if it equals to the com.intellij.openapi.editor.impl.RedBlackTree.modCount then deltaUpToRoot can be used, otherwise it is expired // private int deltaUpToRoot; // sum of all deltas up to the root (including this node' delta). Has valid value only if modCount == IntervalTreeImpl.this.modCount // private boolean allDeltasUpAreNull; // true if all deltas up the tree (including this node) are 0. Has valid value only if modCount == IntervalTreeImpl.this.modCount @NotNull private final IntervalTreeImpl<E> myIntervalTree; IntervalNode(@NotNull IntervalTreeImpl<E> intervalTree, @NotNull E key, int start, int end) { // maxEnd == 0 so to not disrupt existing maxes myIntervalTree = intervalTree; myStart = start; myEnd = end; intervals = new SmartList<>(createGetter(key)); setValid(true); } @Override public IntervalNode<E> getLeft() { return (IntervalNode<E>)left; } @Override public IntervalNode<E> getRight() { return (IntervalNode<E>)right; } @Override public IntervalNode<E> getParent() { return (IntervalNode<E>)parent; } @Override public boolean processAliveKeys(@NotNull Processor<? 
super E> processor) { //noinspection ForLoopReplaceableByForEach for (int i = 0; i < intervals.size(); i++) { Getter<E> interval = intervals.get(i); E key = interval.get(); if (key != null && !processor.process(key)) return false; } return true; } @Override public boolean hasAliveKey(boolean purgeDead) { boolean hasAliveInterval = false; for (int i = intervals.size() - 1; i >= 0; i--) { Getter<E> interval = intervals.get(i); if (interval.get() != null) { hasAliveInterval = true; if (purgeDead) { continue; } else { break; } } if (purgeDead) { myIntervalTree.assertUnderWriteLock(); removeIntervalInternal(i); } } return hasAliveInterval; } // removes interval and the node, if node became empty // returns true if node was removed private boolean removeInterval(@NotNull E key) { myIntervalTree.checkBelongsToTheTree(key, true); myIntervalTree.assertUnderWriteLock(); for (int i = intervals.size() - 1; i >= 0; i--) { Getter<E> interval = intervals.get(i); E t = interval.get(); if (t == key) { removeIntervalInternal(i); if (intervals.isEmpty()) { myIntervalTree.removeNode(this); return true; } return false; } } assert false: "interval not found: "+key +"; "+ intervals; return false; } private boolean isAttachedToTree() { return isFlagSet(ATTACHED_TO_TREE_FLAG); } private void setAttachedToTree(boolean attached) { setFlag(ATTACHED_TO_TREE_FLAG, attached); } void removeIntervalInternal(int i) { intervals.remove(i); if (isAttachedToTree()) { // for detached node, do not update tree node count assert myIntervalTree.keySize > 0 : myIntervalTree.keySize; myIntervalTree.keySize--; } } void addInterval(@NotNull E interval) { myIntervalTree.assertUnderWriteLock(); intervals.add(createGetter(interval)); if (isAttachedToTree()) { // for detached node, do not update tree node count myIntervalTree.keySize++; myIntervalTree.setNode(interval, this); } } void addIntervalsFrom(@NotNull IntervalNode<E> otherNode) { for (Getter<E> key : otherNode.intervals) { E interval = key.get(); if 
(interval != null) addInterval(interval); } } protected Getter<E> createGetter(@NotNull E interval) { return new WeakReferencedGetter<>(interval, myIntervalTree.myReferenceQueue); } private static class WeakReferencedGetter<T> extends WeakReference<T> implements Getter<T> { private WeakReferencedGetter(@NotNull T referent, @NotNull ReferenceQueue<? super T> q) { super(referent, q); } @NonNls @Override public String toString() { return "Ref: " + get(); } } int computeDeltaUpToRoot() { restart: while (true) { // have to restart on failure to update cached offsets in case of concurrent modification if (!isValid()) return 0; int treeModCount = myIntervalTree.getModCount(); long packedOffsets = cachedDeltaUpToRoot; if (modCount(packedOffsets) == treeModCount) { return deltaUpToRoot(packedOffsets); } try { myIntervalTree.l.readLock().lock(); IntervalNode<E> node = this; IntervalNode<E> treeRoot = myIntervalTree.getRoot(); if (treeRoot == null) return delta; // someone modified the tree in the meantime int deltaUp = 0; boolean allDeltasAreNull = true; int height = 0; long path = 0; // path to this node from the root; 0 bit means we choose left subtree, 1 bit means we choose right subtree while (node != treeRoot) { long nodePackedOffsets = node.cachedDeltaUpToRoot; if (node.isValid() && modCount(nodePackedOffsets) == treeModCount) { deltaUp = deltaUpToRoot(nodePackedOffsets) - node.delta; allDeltasAreNull = allDeltasUpAreNull(nodePackedOffsets); break; } IntervalNode<E> parent = node.getParent(); if (parent == null) { return deltaUp; // can happen when remove node and explicitly set valid to true (e.g. in RangeMarkerTree) } path = (path << 1) | (parent.getLeft() == node ? 
0 : 1); node = parent; height++; } // path to this node fits to long assert height < 63 : height; // cache deltas in every node from the root down this while (true) { if (node.isValid()) { int nodeDelta = node.delta; deltaUp += nodeDelta; allDeltasAreNull &= nodeDelta == 0; if (!node.tryToSetCachedValues(deltaUp, allDeltasAreNull, treeModCount)) { continue restart; } } if (node == this) break; node = (path & 1) == 0 ? node.getLeft() : node.getRight(); path >>= 1; if (node == null) return deltaUp; // can only happen in case of concurrently modification } assert deltaUp == 0 || !allDeltasAreNull; return deltaUp; } finally { myIntervalTree.l.readLock().unlock(); } } } int changeDelta(int change) { if (change != 0) { setCachedValues(0, false, 0); // deltaUpToRoot is not valid anymore return delta += change; } return delta; } void clearDelta() { if (delta != 0) { setCachedValues(0, false, 0); // deltaUpToRoot is not valid anymore delta = 0; } } @Override public int setIntervalStart(int start) { return myStart = start; } @Override public int setIntervalEnd(int end) { return myEnd = end; } static final byte VALID_FLAG = ATTACHED_TO_TREE_FLAG << 1; @Override public boolean isValid() { return isFlagSet(VALID_FLAG); } @Override public boolean setValid(boolean value) { setFlag(VALID_FLAG, value); return value; } @Override public int intervalStart() { return myStart; } @Override public int intervalEnd() { return myEnd; } @NotNull public IntervalTreeImpl<E> getTree() { return myIntervalTree; } /** * packing/unpacking cachedDeltaUpToRoot field parts * Bits layout: * XXXXXXXXNMMMMMMMM where * XXXXXXXX - 31bit int containing cached delta up to root * N - 1bit flag. 
if set then all deltas up to root are null * MMMMMMMM - 32bit int containing this node modification count */ private static final AtomicFieldUpdater<IntervalNode, Long> cachedDeltaUpdater = AtomicFieldUpdater.forLongFieldIn(IntervalNode.class); private void setCachedValues(int deltaUpToRoot, boolean allDeltaUpToRootAreNull, int modCount) { cachedDeltaUpToRoot = packValues(deltaUpToRoot, allDeltaUpToRootAreNull, modCount); } private static long packValues(long deltaUpToRoot, boolean allDeltaUpToRootAreNull, int modCount) { return deltaUpToRoot << 33 | (allDeltaUpToRootAreNull ? 0x100000000L : 0) | modCount; } private boolean tryToSetCachedValues(int deltaUpToRoot, boolean allDeltasUpAreNull, int treeModCount) { if (myIntervalTree.getModCount() != treeModCount) return false; long newValue = packValues(deltaUpToRoot, allDeltasUpAreNull, treeModCount); long oldValue = cachedDeltaUpToRoot; return cachedDeltaUpdater.compareAndSetLong(this, oldValue, newValue); } private static boolean allDeltasUpAreNull(long packedOffsets) { return ((packedOffsets >> 32) & 1) != 0; } private static int modCount(long packedOffsets) { return (int)packedOffsets; } private static int deltaUpToRoot(long packedOffsets) { return (int)(packedOffsets >> 33); } // finds previous in the in-order traversal IntervalNode<E> previous() { IntervalNode<E> left = getLeft(); if (left != null) { while (left.getRight() != null) { left = left.getRight(); } return left; } IntervalNode<E> parent = getParent(); IntervalNode<E> prev = this; while (parent != null) { if (parent.getRight() == prev) break; prev = parent; parent = parent.getParent(); } return parent; } // finds next node in the in-order traversal IntervalNode<E> next() { IntervalNode<E> right = getRight(); if (right != null) { while (right.getLeft() != null) { right = right.getLeft(); } return right; } IntervalNode<E> parent = getParent(); IntervalNode<E> prev = this; while (parent != null) { if (parent.getLeft() == prev) break; prev = parent; parent 
= parent.getParent(); } return parent; } @NonNls @Override public String toString() { return "Node: " + intervals; } } private void assertUnderWriteLock() { if (DEBUG && !ApplicationInfoImpl.isInStressTest()) { assert isAcquired(l.writeLock()) : l.writeLock(); } } private static boolean isAcquired(@NotNull Lock l) { String s = l.toString(); return s.contains("Locked by thread"); } private void pushDeltaFromRoot(@Nullable IntervalNode<T> node) { if (node != null) { long packedOffsets = node.cachedDeltaUpToRoot; if (IntervalNode.allDeltasUpAreNull(packedOffsets) && node.isValid() && IntervalNode.modCount(packedOffsets) == getModCount()) return; pushDeltaFromRoot(node.getParent()); pushDelta(node); } } @NotNull protected abstract IntervalNode<T> createNewNode(@NotNull T key, int start, int end, boolean greedyToLeft, boolean greedyToRight, boolean stickingToRight, int layer); protected abstract IntervalNode<T> lookupNode(@NotNull T key); protected abstract void setNode(@NotNull T key, @Nullable IntervalNode<T> node); private int compareNodes(@NotNull IntervalNode<T> i1, int delta1, @NotNull IntervalNode<T> i2, int delta2, @NotNull List<? super IntervalNode<T>> invalid) { if (!i2.hasAliveKey(false)) { invalid.add(i2); //gced } int start1 = i1.intervalStart() + delta1; int start2 = i2.intervalStart() + delta2; if (start1 != start2) return start1 - start2; return compareEqualStartIntervals(i1, i2); } protected IntervalNode<T> getRoot() { return (IntervalNode<T>)root; } @Override public boolean processAll(@NotNull Processor<? super T> processor) { try { l.readLock().lock(); checkMax(true); return process(getRoot(), getModCount(), processor); } finally { l.readLock().unlock(); } } private boolean process(@Nullable IntervalNode<T> root, final int modCountBefore, @NotNull final Processor<? 
super T> processor) { if (root == null) return true; WalkingState.TreeGuide<IntervalNode<T>> guide = getGuide(); return WalkingState.processAll(root, guide, node -> { if (!node.processAliveKeys(processor)) return false; if (getModCount() != modCountBefore) throw new ConcurrentModificationException(); return true; }); } @Override public boolean processOverlappingWith(int start, int end, @NotNull Processor<? super T> processor) { try { l.readLock().lock(); checkMax(true); return processOverlappingWith(getRoot(), start, end, getModCount(), 0, processor); } finally { l.readLock().unlock(); } } private boolean processOverlappingWith(@Nullable IntervalNode<T> root, int start, int end, int modCountBefore, int deltaUpToRootExclusive, @NotNull Processor<? super T> processor) { if (root == null) { return true; } assert root.isValid(); int delta = deltaUpToRootExclusive + root.delta; if (start > maxEndOf(root, deltaUpToRootExclusive)) { return true; // right of the rightmost interval in the subtree } if (!processOverlappingWith(root.getLeft(), start, end, modCountBefore, delta, processor)) return false; int myStartOffset = root.intervalStart() + delta; int myEndOffset = root.intervalEnd() + delta; boolean overlaps = Math.max(myStartOffset, start) <= Math.min(myEndOffset, end); if (overlaps) { if (!root.processAliveKeys(processor)) return false; if (getModCount() != modCountBefore) throw new ConcurrentModificationException(); } if (end < myStartOffset) { return true; // left of the root, cant be in the right subtree } return processOverlappingWith(root.getRight(), start, end, modCountBefore, delta, processor); } @Override public boolean processOverlappingWithOutside(int start, int end, @NotNull Processor<? 
super T> processor) { try { l.readLock().lock(); checkMax(true); return processOverlappingWithOutside(getRoot(), start, end, getModCount(), 0, processor); } finally { l.readLock().unlock(); } } private boolean processOverlappingWithOutside(@Nullable IntervalNode<T> root, int start, int end, int modCountBefore, int deltaUpToRootExclusive, @NotNull Processor<? super T> processor) { if (root == null) { return true; } assert root.isValid(); int delta = deltaUpToRootExclusive + root.delta; int rootMaxEnd = maxEndOf(root, deltaUpToRootExclusive); int rootStartOffset = root.intervalStart() + delta; int rootEndOffset = root.intervalEnd() + delta; if (!processOverlappingWithOutside(root.getLeft(), start, end, modCountBefore, delta, processor)) return false; boolean toProcess = rootStartOffset < start || rootEndOffset > end; if (toProcess) { if (!root.processAliveKeys(processor)) return false; if (getModCount() != modCountBefore) throw new ConcurrentModificationException(); } if (rootStartOffset >= start && rootMaxEnd <= end) return true; // cant intersect outside return processOverlappingWithOutside(root.getRight(), start, end, modCountBefore, delta, processor); } @Override public boolean processContaining(int offset, @NotNull Processor<? super T> processor) { try { l.readLock().lock(); checkMax(true); return processContaining(getRoot(), offset, getModCount(), 0, processor); } finally { l.readLock().unlock(); } } private boolean processContaining(@Nullable IntervalNode<T> root, int offset, int modCountBefore, int deltaUpToRootExclusive, @NotNull Processor<? 
super T> processor) { if (root == null) { return true; } assert root.isValid(); int delta = deltaUpToRootExclusive + root.delta; if (offset > maxEndOf(root, deltaUpToRootExclusive)) { return true; // right of the rightmost interval in the subtree } if (!processContaining(root.getLeft(), offset, modCountBefore, delta, processor)) return false; int myStartOffset = root.intervalStart() + delta; int myEndOffset = root.intervalEnd() + delta; boolean overlaps = myStartOffset <= offset && offset < myEndOffset; if (overlaps) { if (!root.processAliveKeys(processor)) return false; if (getModCount() != modCountBefore) throw new ConcurrentModificationException(); } if (offset < myStartOffset) { return true; // left of the root, cant be in the right subtree } return processContaining(root.getRight(), offset, modCountBefore, delta, processor); } @NotNull private MarkupIterator<T> overlappingIterator(@NotNull final TextRangeInterval rangeInterval) { l.readLock().lock(); try { final int startOffset = rangeInterval.getStartOffset(); final int endOffset = rangeInterval.getEndOffset(); final IntervalNode<T> firstOverlap = findMinOverlappingWith(getRoot(), rangeInterval, getModCount(), 0); if (firstOverlap == null) { l.readLock().unlock(); //noinspection unchecked return MarkupIterator.EMPTY; } final int firstOverlapDelta = firstOverlap.computeDeltaUpToRoot(); final int firstOverlapStart = firstOverlap.intervalStart() + firstOverlapDelta; final int modCountBefore = getModCount(); return new MarkupIterator<T>() { private IntervalNode<T> currentNode = firstOverlap; private int deltaUpToRootExclusive = firstOverlapDelta-firstOverlap.delta; private int indexInCurrentList; private T current; @Override public boolean hasNext() { if (current != null) return true; if (currentNode == null) return false; if (getModCount() != modCountBefore) throw new ConcurrentModificationException(); while (indexInCurrentList != currentNode.intervals.size()) { T t = 
currentNode.intervals.get(indexInCurrentList++).get(); if (t != null) { current = t; return true; } } indexInCurrentList = 0; while (true) { currentNode = nextNode(currentNode); if (currentNode == null) { return false; } if (overlaps(currentNode, rangeInterval, deltaUpToRootExclusive)) { assert currentNode.intervalStart() + deltaUpToRootExclusive + currentNode.delta >= firstOverlapStart; indexInCurrentList = 0; while (indexInCurrentList != currentNode.intervals.size()) { T t = currentNode.intervals.get(indexInCurrentList++).get(); if (t != null) { current = t; return true; } } indexInCurrentList = 0; } } } @Override public T next() { if (!hasNext()) throw new NoSuchElementException(); T t = current; current = null; return t; } @Override public T peek() { if (!hasNext()) throw new NoSuchElementException(); return current; } @Override public void remove() { throw new IncorrectOperationException(); } @Override public void dispose() { l.readLock().unlock(); } // next node in in-order traversal private IntervalNode<T> nextNode(@NotNull IntervalNode<T> root) { assert root.isValid() : root; int delta = deltaUpToRootExclusive + root.delta; int myMaxEnd = maxEndOf(root, deltaUpToRootExclusive); if (startOffset > myMaxEnd) return null; // tree changed // try to go right down IntervalNode<T> right = root.getRight(); if (right != null) { int rightMaxEnd = maxEndOf(right, delta); if (startOffset <= rightMaxEnd) { int rightDelta = delta + right.delta; while (right.getLeft() != null && startOffset <= maxEndOf(right.getLeft(), rightDelta)) { right = right.getLeft(); rightDelta += right.delta; } deltaUpToRootExclusive = rightDelta - right.delta; return right; } } // go up while (true) { IntervalNode<T> parent = root.getParent(); if (parent == null) return null; if (parent.intervalStart() + deltaUpToRootExclusive > endOffset) return null; // can't move right deltaUpToRootExclusive -= parent.delta; if (parent.getLeft() == root) { return parent; } root = parent; } } }; } catch 
(RuntimeException | Error e) { l.readLock().unlock(); throw e; } } private boolean overlaps(@Nullable IntervalNode<T> root, @NotNull TextRangeInterval rangeInterval, int deltaUpToRootExclusive) { if (root == null) return false; int delta = root.delta + deltaUpToRootExclusive; int start = root.intervalStart() + delta; int end = root.intervalEnd() + delta; return rangeInterval.intersects(start, end); } @NotNull IntervalNode<T> findOrInsert(@NotNull IntervalNode<T> node) { assertUnderWriteLock(); node.setRed(); node.setParent(null); node.setValid(true); node.maxEnd = 0; node.clearDelta(); node.setLeft(null); node.setRight(null); List<IntervalNode<T>> gced = new SmartList<>(); if (root == null) { root = node; } else { IntervalNode<T> current = getRoot(); while (true) { pushDelta(current); int compResult = compareNodes(node, 0, current, 0, gced); if (compResult == 0) { return current; } if (compResult < 0) { if (current.getLeft() == null) { current.setLeft(node); break; } current = current.getLeft(); } else /*if (compResult > 0)*/ { if (current.getRight() == null) { current.setRight(node); break; } current = current.getRight(); } } node.setParent(current); } node.setCachedValues(0, true, getModCount()); correctMaxUp(node); onInsertNode(); keySize += node.intervals.size(); insertCase1(node); node.setAttachedToTree(true); verifyProperties(); deleteNodes(gced); return node; } private void deleteNodes(@NotNull List<? 
// NOTE(review): this chunk begins inside deleteNodes(List<? extends IntervalNode<T>> collectedAway);
// the start of the signature is above this view. Removes every collected node under the write lock.
extends IntervalNode<T>> collectedAway) {
    if (collectedAway.isEmpty()) return;
    try {
      l.writeLock().lock();
      for (IntervalNode<T> node : collectedAway) {
        removeNode(node);
      }
    }
    finally {
      l.writeLock().unlock();
    }
  }

  // Inserts the given interval into the tree under the write lock and returns the node now holding it.
  // If a node with an equal range already exists, the interval is merged into that node instead of
  // creating a new one. Must not be called re-entrantly from a beforeRemoved listener.
  @NotNull
  public IntervalTreeImpl.IntervalNode<T> addInterval(@NotNull T interval, int start, int end,
                                                      boolean greedyToLeft, boolean greedyToRight,
                                                      boolean stickingToRight, int layer) {
    try {
      l.writeLock().lock();
      if (firingBeforeRemove) {
        throw new IncorrectOperationException("Must not add rangemarker from within beforeRemoved listener");
      }
      checkMax(true);
      processReferenceQueue();
      incModCount();
      IntervalNode<T> newNode = createNewNode(interval, start, end, greedyToLeft, greedyToRight, stickingToRight, layer);
      IntervalNode<T> insertedNode = findOrInsert(newNode);
      if (insertedNode == newNode) {
        setNode(interval, insertedNode);
      }
      else {
        // merged into an existing node with the same range
        insertedNode.addInterval(interval);
      }
      checkMax(true);
      checkBelongsToTheTree(interval, true);
      return insertedNode;
    }
    finally {
      l.writeLock().unlock();
    }
  }

  // returns true if all markers are valid; a no-op unless the VERIFY debug flag is on
  boolean checkMax(boolean assertInvalid) {
    return VERIFY && doCheckMax(assertInvalid);
  }

  // Full-tree consistency check (sizes, maxEnd invariants, id uniqueness) under the read lock.
  private boolean doCheckMax(boolean assertInvalid) {
    try {
      l.readLock().lock();
      AtomicBoolean allValid = new AtomicBoolean(true);
      int[] keyCounter = new int[1];
      int[] nodeCounter = new int[1];
      TLongHashSet ids = new TLongHashSet(keySize);
      checkMax(getRoot(), 0, assertInvalid, allValid, keyCounter, nodeCounter, ids, true);
      if (assertInvalid) {
        assert nodeSize() == nodeCounter[0] : "node size: "+ nodeSize() +"; actual: "+nodeCounter[0];
        assert keySize == keyCounter[0] : "key size: "+ keySize +"; actual: "+keyCounter[0];
        assert keySize >= nodeSize() : keySize + "; "+nodeSize();
      }
      return allValid.get();
    }
    finally {
      l.readLock().unlock();
    }
  }

  // Immutable triple of ints; used by the recursive checkMax to return (minStart, maxStart, maxEnd).
  private static class IntTrinity {
    private final int first;
    private final int second;
    private final int third;

    private IntTrinity(int first, int second, int third) {
      this.first = first;
      this.second = second;
      this.third = third;
    }
  }

  // returns real (minStart, maxStart, maxEnd)
  // Recursively verifies BST ordering, cached delta consistency and the maxEnd augmentation for the
  // subtree rooted at 'root'; 'deltaUpToRootExclusive' is the accumulated offset of all ancestors.
  private IntTrinity checkMax(@Nullable IntervalNode<T> root,
                              int deltaUpToRootExclusive,
                              boolean assertInvalid,
                              @NotNull AtomicBoolean allValid,
                              @NotNull int[] keyCounter,
                              @NotNull int[] nodeCounter,
                              @NotNull TLongHashSet ids,
                              boolean allDeltasUpAreNull) {
    if (root == null) return new IntTrinity(Integer.MAX_VALUE,Integer.MIN_VALUE,Integer.MIN_VALUE);
    long packedOffsets = root.cachedDeltaUpToRoot;
    if (IntervalNode.modCount(packedOffsets) == getModCount()) {
      // the cached delta is only trustworthy when its modCount stamp is current
      assert IntervalNode.allDeltasUpAreNull(packedOffsets) == (root.delta == 0 && allDeltasUpAreNull);
      assert IntervalNode.deltaUpToRoot(packedOffsets) == root.delta + deltaUpToRootExclusive;
    }
    T liveInterval = null;
    for (int i = root.intervals.size() - 1; i >= 0; i--) {
      T t = root.intervals.get(i).get();
      if (t == null) continue; // weakly-referenced interval was collected
      liveInterval = t;
      checkBelongsToTheTree(t, false);
      boolean added = ids.add(((RangeMarkerImpl)t).getId());
      assert added : t; // each marker id must appear in the tree at most once
    }
    if (assertInvalid && liveInterval != null) {
      checkBelongsToTheTree(liveInterval, true);
    }
    keyCounter[0]+= root.intervals.size();
    nodeCounter[0]++;
    int delta = deltaUpToRootExclusive + (root.isValid() ? root.delta : 0);
    IntTrinity l = checkMax(root.getLeft(), delta, assertInvalid, allValid, keyCounter, nodeCounter, ids, root.delta == 0 && allDeltasUpAreNull);
    int minLeftStart = l.first;
    int maxLeftStart = l.second;
    int maxLeftEnd = l.third;
    IntTrinity r = checkMax(root.getRight(), delta, assertInvalid, allValid, keyCounter, nodeCounter, ids, root.delta == 0 && allDeltasUpAreNull);
    int maxRightEnd = r.third;
    int minRightStart = r.first;
    int maxRightStart = r.second;
    if (!root.isValid()) {
      allValid.set(false);
      if (assertInvalid) assert false : root;
      return new IntTrinity(Math.min(minLeftStart, minRightStart),
                            Math.max(maxLeftStart, maxRightStart),
                            Math.max(maxRightEnd, maxLeftEnd));
    }
    IntervalNode<T> parent = root.getParent();
    if (parent != null && assertInvalid && root.hasAliveKey(false)) {
      int c = compareNodes(root, delta, parent, delta - root.delta, new SmartList<>());
      assert c != 0;
      assert c < 0 && parent.getLeft() == root || c > 0 && parent.getRight() == root;
    }
    // the augmented maxEnd must equal the real maximum end offset of this subtree
    assert delta + root.maxEnd == Math.max(maxLeftEnd, Math.max(maxRightEnd, delta + root.intervalEnd()));
    int myStartOffset = delta + root.intervalStart();
    assert maxLeftStart <= myStartOffset;
    assert minRightStart >= myStartOffset;
    assert myStartOffset >= 0;
    assert minLeftStart == Integer.MAX_VALUE || minLeftStart <= myStartOffset;
    assert maxRightStart == Integer.MIN_VALUE || maxRightStart >= myStartOffset;
    int minStart = Math.min(minLeftStart, myStartOffset);
    int maxStart = Math.max(myStartOffset, Math.max(maxLeftStart, maxRightStart));
    assert minStart <= maxStart;
    return new IntTrinity(minStart, maxStart, root.maxEnd + delta);
  }

  // Rightmost node of the subtree; pushes pending deltas down on the way so offsets are materialized.
  @NotNull
  @Override
  protected Node<T> maximumNode(@NotNull Node<T> n) {
    IntervalNode<T> root = (IntervalNode<T>)n;
    pushDelta(root.getParent());
    pushDelta(root);
    while (root.getRight() != null) {
      root = root.getRight();
      pushDelta(root);
    }
    return root;
  }

  // Debug check that the interval's node is reachable from this tree's root and (optionally)
  // that the interval itself is stored in that node.
  private void checkBelongsToTheTree(@NotNull T interval, boolean assertInvalid) {
    IntervalNode<T> root = lookupNode(interval);
    if (root == null) return;
    //noinspection NumberEquality
    assert root.getTree() == this : root.getTree() +"; this: "+this;
    if (!VERIFY) return;

    if (assertInvalid) {
      assert !root.intervals.isEmpty();
      boolean contains = false;
      for (int i = root.intervals.size() - 1; i >= 0; i--) {
        T key = root.intervals.get(i).get();
        if (key == null) continue;
        contains |= key == interval;
        IntervalNode<T> node = lookupNode(key);
        assert node == root : node;
        //noinspection NumberEquality
        assert node.getTree() == this : node;
      }
      assert contains : root.intervals + "; " + interval;
    }

    IntervalNode<T> e = root;
    while (e.getParent() != null) e = e.getParent();
    assert e == getRoot(); // assert the node belongs to our tree
  }

  // Removes the interval from the tree under the write lock; returns false if it was already gone.
  @Override
  public boolean removeInterval(@NotNull T interval) {
    if (!((RangeMarkerEx)interval).isValid()) return false;
    try {
      l.writeLock().lock();
      incModCount();

      // re-check validity under the lock: it may have been invalidated in the meantime
      if (!((RangeMarkerEx)interval).isValid()) return false;
      checkBelongsToTheTree(interval, true);
      checkMax(true);
      processReferenceQueue();

      IntervalNode<T> node = lookupNode(interval);
      if (node == null) return false;

      beforeRemove(interval, "Explicit Dispose");

      node.removeInterval(interval);
      setNode(interval, null);

      checkMax(true);
      return true;
    }
    finally {
      l.writeLock().unlock();
    }
  }

  // run under write lock
  void removeNode(@NotNull IntervalNode<T> node) {
    deleteNode(node);
    IntervalNode<T> parent = node.getParent();
    correctMaxUp(parent); // restore the maxEnd augmentation on the ancestor chain
  }

  @Override
  protected void deleteNode(@NotNull Node<T> n) {
    assertUnderWriteLock();
    IntervalNode<T> node = (IntervalNode<T>)n;
    pushDeltaFromRoot(node); // materialize pending offsets before structural removal
    assertAllDeltasAreNull(node);
    super.deleteNode(n);

    keySize -= node.intervals.size();
    assert keySize >= 0 : keySize;
    node.setAttachedToTree(false);
  }

  @Override
  public int size() {
    return keySize; // number of stored intervals, not tree nodes
  }

  // returns true if all deltas involved are still 0
  // Pushes this node's pending delta into its own offsets and down to its children.
  boolean pushDelta(@Nullable IntervalNode<T> root) {
    if (root == null || !root.isValid()) return true;
    IntervalNode<T> parent = root.getParent();
    assertAllDeltasAreNull(parent);
    int delta = root.delta;
    root.setCachedValues(0, true, 0);
    if (delta != 0) {
      root.setIntervalStart(root.intervalStart() + delta);
      root.setIntervalEnd(root.intervalEnd() + delta);
      root.maxEnd += delta;
      root.delta = 0;
      //noinspection NonShortCircuitBooleanExpression
      return incDelta(root.getLeft(), delta) & incDelta(root.getRight(), delta);
    }
    root.setCachedValues(0, true, getModCount());
    return true;
  }

  // returns true if all deltas involved are still 0
  private boolean incDelta(@Nullable IntervalNode<T> root, int delta) {
    if (root == null) return true;
    if (root.isValid()) {
      int newDelta = root.changeDelta(delta);
      return newDelta == 0;
    }
    else {
      // invalid node: skip its own delta and propagate directly to children
      //noinspection NonShortCircuitBooleanExpression
      return incDelta(root.getLeft(), delta) & incDelta(root.getRight(), delta);
    }
  }

  // Swaps the node to delete with its in-order maximum predecessor, preserving colors and maxEnd.
  @Override
  @NotNull
  protected IntervalNode<T> swapWithMaxPred(@NotNull Node<T> root, @NotNull Node<T> maxPred) {
    checkMax(false);
    IntervalNode<T> a = (IntervalNode<T>)root;
    IntervalNode<T> d = (IntervalNode<T>)maxPred;
    boolean acolor = a.isBlack();
    boolean dcolor = d.isBlack();
    assert !a.isValid() || a.delta == 0 : a.delta;
    for (IntervalNode<T> n = a.getLeft(); n != null; n = n.getRight()) {
      assert !n.isValid() || n.delta == 0 : n.delta;
    }
    swapNodes(a, d);

    // set range of the key to be deleted so it won't disrupt maxes
    a.setValid(false);
    //a.key.setIntervalStart(d.key.intervalStart());
    //a.key.setIntervalEnd(d.key.intervalEnd());

    //correctMaxUp(a);
    a.setColor(dcolor);
    d.setColor(acolor);
    correctMaxUp(a);

    checkMax(false);
    assert a.delta == 0 : a.delta;
    assert d.delta == 0 : d.delta;
    return a;
  }

  // Exchanges the structural positions of n1 and n2, handling the special case of n2 being n1's child.
  private void swapNodes(@NotNull IntervalNode<T> n1, @NotNull IntervalNode<T> n2) {
    IntervalNode<T> l1 = n1.getLeft();
    IntervalNode<T> r1 = n1.getRight();
    IntervalNode<T> p1 = n1.getParent();
    IntervalNode<T> l2 = n2.getLeft();
    IntervalNode<T> r2 = n2.getRight();
    IntervalNode<T> p2 = n2.getParent();

    if (p1 != null) {
      if (p1.getLeft() == n1) p1.setLeft(n2); else p1.setRight(n2);
    }
    else {
      root = n2;
    }
    if (p2 != null) {
      if (p2.getLeft() == n2) p2.setLeft(p2 == n1 ? l2 : n1);
      else p2.setRight(p2 == n1 ? r2 : n1);
    }
    else {
      root = n1;
    }
    n1.setParent(p2 == n1 ? n2 : p2);
    n2.setParent(p1);

    n1.setLeft(l2);
    n2.setLeft(l1 == n2 ? n1 : l1);
    if (l1 != null) l1.setParent(n2 == l1 ? p1 : n2);
    if (r1 != null) r1.setParent(n2);
    n1.setRight(r2);
    n2.setRight(r1);
    if (l2 != null) l2.setParent(n1);
    if (r2 != null) r2.setParent(n1);
  }

  // returns real max endOffset of all intervals below
  private int maxEndOf(@Nullable IntervalNode<T> node, int deltaUpToRootExclusive) {
    if (node == null) {
      return 0;
    }
    if (node.isValid()) {
      return node.maxEnd + node.delta + deltaUpToRootExclusive;
    }
    // since node is invalid, ignore node.delta
    return Math.max(maxEndOf(node.getLeft(), deltaUpToRootExclusive), maxEndOf(node.getRight(), deltaUpToRootExclusive));
  }

  // max of n.left's maxend, n.right's maxend and its own interval endOffset
  void correctMax(@NotNull IntervalNode<T> node, int deltaUpToRoot) {
    if (!node.isValid()) return;
    int realMax = Math.max(Math.max(maxEndOf(node.getLeft(), deltaUpToRoot), maxEndOf(node.getRight(), deltaUpToRoot)),
                           deltaUpToRoot + node.intervalEnd());
    node.maxEnd = realMax - deltaUpToRoot;
  }

  // Recomputes maxEnd from 'node' up to the root, keeping track of the accumulated delta.
  private void correctMaxUp(@Nullable IntervalNode<T> node) {
    int delta = node == null ? 0 : node.computeDeltaUpToRoot();
    assert delta == 0 : delta;
    while (node != null) {
      if (node.isValid()) {
        int d = node.delta;
        correctMax(node, delta);
        delta -= d;
      }
      node = node.getParent();
    }
    assert delta == 0 : delta;
  }

  // Red-black rotation with delta/maxEnd maintenance: all pending deltas on the three affected
  // nodes are pushed down first, then maxEnd is recomputed bottom-up after the structural rotation.
  @Override
  protected void rotateRight(@NotNull Node<T> n) {
    checkMax(false);
    IntervalNode<T> node1 = (IntervalNode<T>)n;
    IntervalNode<T> node2 = node1.getLeft();
    IntervalNode<T> node3 = node1.getRight();

    IntervalNode<T> parent = node1.getParent();
    int deltaUp = parent == null ? 0 : parent.computeDeltaUpToRoot();
    pushDelta(node1);
    pushDelta(node2);
    pushDelta(node3);

    super.rotateRight(node1);

    if (node3 != null) {
      correctMax(node3, deltaUp);
    }
    correctMax(node1, deltaUp);
    correctMax(node2, deltaUp);
    assertAllDeltasAreNull(node1);
    assertAllDeltasAreNull(node2);
    assertAllDeltasAreNull(node3);
    checkMax(false);
  }

  // Mirror image of rotateRight; see that method for the delta/maxEnd maintenance scheme.
  @Override
  protected void rotateLeft(@NotNull Node<T> n) {
    checkMax(false);
    IntervalNode<T> node1 = (IntervalNode<T>)n;
    IntervalNode<T> node2 = node1.getLeft();
    IntervalNode<T> node3 = node1.getRight();

    IntervalNode<T> parent = node1.getParent();
    int deltaUp = parent == null ? 0 : parent.computeDeltaUpToRoot();
    pushDelta(node1);
    pushDelta(node2);
    pushDelta(node3);
    checkMax(false);
    super.rotateLeft(node1);

    if (node2 != null) {
      correctMax(node2, deltaUp);
    }
    correctMax(node1, deltaUp);
    correctMax(node3, deltaUp);

    assertAllDeltasAreNull(node1);
    assertAllDeltasAreNull(node2);
    assertAllDeltasAreNull(node3);

    checkMax(false);
  }

  @Override
  protected void replaceNode(@NotNull Node<T> node, Node<T> child) {
    IntervalNode<T> myNode = (IntervalNode<T>)node;
    pushDelta(myNode);
    pushDelta((IntervalNode<T>)child);

    super.replaceNode(node, child);
    if (child != null && myNode.isValid()) {
      ((IntervalNode<T>)child).changeDelta(myNode.delta);
      //todo correct max up to root??
    }
  }

  // Asserts the node carries no pending delta (either directly or via a stale cached value).
  private void assertAllDeltasAreNull(@Nullable IntervalNode<T> node) {
    if (node == null) return;
    if (!node.isValid()) return;
    assert node.delta == 0;
    long packedOffsets = node.cachedDeltaUpToRoot;
    assert IntervalNode.modCount(packedOffsets) != getModCount() || IntervalNode.allDeltasUpAreNull(packedOffsets);
  }

  // Finds the leftmost node whose interval overlaps 'interval'; throws CME if the tree was modified
  // concurrently (detected via modCount).
  private IntervalNode<T> findMinOverlappingWith(@Nullable IntervalNode<T> root,
                                                 @NotNull Interval interval,
                                                 int modCountBefore,
                                                 int deltaUpToRootExclusive) {
    if (root == null) {
      return null;
    }
    assert root.isValid();

    int delta = deltaUpToRootExclusive + root.delta;
    if (interval.intervalStart() > maxEndOf(root, deltaUpToRootExclusive)) {
      return null; // right of the rightmost interval in the subtree
    }

    IntervalNode<T> inLeft = findMinOverlappingWith(root.getLeft(), interval, modCountBefore, delta);
    if (inLeft != null) return inLeft;
    int myStartOffset = root.intervalStart() + delta;
    int myEndOffset = root.intervalEnd() + delta;
    boolean overlaps = Math.max(myStartOffset, interval.intervalStart()) <= Math.min(myEndOffset, interval.intervalEnd());
    if (overlaps) return root;
    if (getModCount() != modCountBefore) throw new ConcurrentModificationException();

    if (interval.intervalEnd() < myStartOffset) {
      return null; // left of the root, cant be in the right subtree
    }

    return findMinOverlappingWith(root.getRight(), interval, modCountBefore, delta);
  }

  // Re-keys an existing interval: removes it from its current node and re-adds with the new data.
  void changeData(@NotNull T interval, int start, int end,
                  boolean greedyToLeft, boolean greedyToRight, boolean stickingToRight, int layer) {
    try {
      l.writeLock().lock();

      IntervalNode<T> node = lookupNode(interval);
      if (node == null) return;
      int before = size();
      boolean nodeRemoved = node.removeInterval(interval);
      assert nodeRemoved || !node.intervals.isEmpty();

      IntervalNode<T> insertedNode = addInterval(interval, start, end, greedyToLeft, greedyToRight, stickingToRight, layer);
      assert node != insertedNode;

      int after = size();
      // can be gced
      assert before >= after : before +";" + after;
      checkBelongsToTheTree(interval, true);
      checkMax(true);
    }
    finally {
      l.writeLock().unlock();
    }
  }


  // called under write lock
  // Drains the reference queue of collected markers; triggers a purge once enough have died.
  private void processReferenceQueue() {
    int dead = 0;
    while (myReferenceQueue.poll() != null) {
      dead++;
    }

    deadReferenceCount += dead;
    if (deadReferenceCount > Math.max(1, size() / 3)) {
      purgeDeadNodes();
      deadReferenceCount = 0;
    }
  }

  private void purgeDeadNodes() {
    assertUnderWriteLock();
    List<IntervalNode<T>> gced = new SmartList<>();
    collectGced(getRoot(), gced);
    deleteNodes(gced);
    checkMax(true);
  }

  // NOTE(review): the write lock is acquired before the try block, and processAll(...) runs outside
  // it — if processAll throws, the lock is never released. Consider moving lock() directly before try.
  @Override
  public void clear() {
    l.writeLock().lock();
    processAll(t -> {
      beforeRemove(t, "Clear all");
      return true;
    });
    try {
      super.clear();
      keySize = 0;
    }
    finally {
      l.writeLock().unlock();
    }
  }

  // Collects nodes all of whose weakly-referenced intervals have been garbage-collected.
  private void collectGced(@Nullable IntervalNode<T> root, @NotNull List<? super IntervalNode<T>> gced) {
    if (root == null) return;
    if (!root.hasAliveKey(true)) {
      gced.add(root);
    }
    collectGced(root.getLeft(), gced);
    collectGced(root.getRight(), gced);
  }

  // Hook for subclasses; called (under the write lock) just before a marker is removed.
  void fireBeforeRemoved(@NotNull T markerEx, @NotNull @NonNls Object reason) {
  }

  private boolean firingBeforeRemove; // accessed under l.writeLock() only

  // must be called under l.writeLock()
  void beforeRemove(@NotNull T markerEx, @NonNls @NotNull Object reason) {
    if (firingBeforeRemove) {
      throw new IllegalStateException(); // no re-entrant notification
    }
    firingBeforeRemove = true;
    try {
      fireBeforeRemoved(markerEx, reason);
    }
    finally {
      firingBeforeRemove = false;
    }
  }

  // Tree-walking adapter: exposes the interval tree's parent/child links to WalkingState.
  private static class IntervalTreeGuide<T extends MutableInterval> implements WalkingState.TreeGuide<IntervalNode<T>> {
    @Override
    public IntervalNode<T> getNextSibling(@NotNull IntervalNode<T> element) {
      IntervalNode<T> parent = element.getParent();
      if (parent == null) return null;
      return parent.getLeft() == element ? parent.getRight() : null;
    }

    @Override
    public IntervalNode<T> getPrevSibling(@NotNull IntervalNode<T> element) {
      IntervalNode<T> parent = element.getParent();
      if (parent == null) return null;
      return parent.getRight() == element ? parent.getLeft() : null;
    }

    @Override
    public IntervalNode<T> getFirstChild(@NotNull IntervalNode<T> element) {
      IntervalNode<T> left = element.getLeft();
      return left == null ? element.getRight() : left;
    }

    @Override
    public IntervalNode<T> getParent(@NotNull IntervalNode<T> element) {
      return element.getParent();
    }
  }

  private static final IntervalTreeGuide INTERVAL_TREE_GUIDE_INSTANCE = new IntervalTreeGuide();

  @NotNull
  private static <T> WalkingState.TreeGuide<IntervalNode<T>> getGuide() {
    //noinspection unchecked
    return (WalkingState.TreeGuide)INTERVAL_TREE_GUIDE_INSTANCE;
  }

  public int maxHeight() {
    return maxHeight(root);
  }

  private int maxHeight(@Nullable Node<T> root) {
    return root == null ? 0 : 1 + Math.max(maxHeight(root.left), maxHeight(root.right));
  }

  // combines iterators for two trees in one using specified comparator
  @NotNull
  static <T> MarkupIterator<T> mergingOverlappingIterator(@NotNull IntervalTreeImpl<T> tree1,
                                                          @NotNull TextRangeInterval tree1Range,
                                                          @NotNull IntervalTreeImpl<T> tree2,
                                                          @NotNull TextRangeInterval tree2Range,
                                                          @NotNull Comparator<? super T> comparator) {
    MarkupIterator<T> exact = tree1.overlappingIterator(tree1Range);
    MarkupIterator<T> lines = tree2.overlappingIterator(tree2Range);
    return MarkupIterator.mergeIterators(exact, lines, comparator);
  }

  // Returns the live marker immediately after 'marker' in tree order, or null if there is none.
  T findRangeMarkerAfter(@NotNull T marker) {
    l.readLock().lock();
    try {
      IntervalNode<T> node = lookupNode(marker);

      boolean foundMarker = false;
      while (node != null) {
        List<Getter<T>> intervals = node.intervals;
        //noinspection ForLoopReplaceableByForEach
        for (int i = 0; i < intervals.size(); i++) {
          Getter<T> interval = intervals.get(i);
          T m = interval.get();
          if (m == null) continue;
          if (m == marker) {
            foundMarker = true;
          }
          else if (foundMarker) {
            // found next to marker
            return m;
          }
        }
        node = node.next();
        foundMarker = true; // protection against sudden removal of marker
      }
      return null;
    }
    finally {
      l.readLock().unlock();
    }
  }

  // Returns the live marker immediately before 'marker' in tree order, or null if there is none.
  T findRangeMarkerBefore(@NotNull T marker) {
    l.readLock().lock();
    try {
      IntervalNode<T> node = lookupNode(marker);

      boolean foundMarker = false;
      while (node != null) {
        List<Getter<T>> intervals = node.intervals;
        for (int i = intervals.size() - 1; i >= 0; i--) {
          Getter<T> interval = intervals.get(i);
          T m = interval.get();
          if (m == null) continue;
          if (m == marker) {
            foundMarker = true;
          }
          else if (foundMarker) {
            // found next to marker
            return m;
          }
        }
        node = node.previous();
        foundMarker = true; // protection against sudden removal of marker
      }
      return null;
    }
    finally {
      l.readLock().unlock();
    }
  }
}
/*
 * Autopsy
 *
 * Copyright 2019-2020 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.discovery.search;

import com.google.common.eventbus.EventBus;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.sleuthkit.autopsy.discovery.search.DiscoveryKeyUtils.GroupKey;
import org.sleuthkit.autopsy.discovery.search.SearchData.Type;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;

/**
 * Class to handle event bus and events for discovery tool.
 */
public final class DiscoveryEventUtils {

    private final static EventBus discoveryEventBus = new EventBus();

    /**
     * Get the discovery event bus.
     *
     * @return The discovery event bus.
     */
    public static EventBus getDiscoveryEventBus() {
        return discoveryEventBus;
    }

    /**
     * Private no arg constructor for Utility class.
     */
    private DiscoveryEventUtils() {
        //Utility class private constructor intentionally left blank.
    }

    /**
     * Event to signal the start of a search being performed.
     */
    public static final class SearchStartedEvent {

        private final Type type;

        /**
         * Construct a new SearchStartedEvent.
         *
         * @param type The type of result the search event is for.
         */
        public SearchStartedEvent(Type type) {
            this.type = type;
        }

        /**
         * Get the type of result the search is being performed for.
         *
         * @return The type of results being searched for.
         */
        public Type getType() {
            return type;
        }
    }

    /**
     * Event to signal that the Instances list should have selection cleared.
     */
    public static final class ClearInstanceSelectionEvent {

        /**
         * Construct a new ClearInstanceSelectionEvent.
         */
        public ClearInstanceSelectionEvent() {
            //no arg constructor
        }
    }

    /**
     * Event to signal that any background tasks currently running should be
     * cancelled.
     */
    public static final class CancelBackgroundTasksEvent {

        /**
         * Construct a new CancelBackgroundTasksEvent.
         */
        public CancelBackgroundTasksEvent() {
            //no-arg constructor
        }
    }

    /**
     * Event to signal that the Instances list should be populated.
     */
    public static final class PopulateInstancesListEvent {

        // Defensive copy: the event must not alias a caller-owned list.
        private final List<AbstractFile> instances = new ArrayList<>();

        /**
         * Construct a new PopulateInstancesListEvent.
         *
         * @param files The list of AbstractFiles for the instances list; may
         *              be null, which is treated as an empty list.
         */
        public PopulateInstancesListEvent(List<AbstractFile> files) {
            if (files != null) {
                instances.addAll(files);
            }
        }

        /**
         * Get the list of AbstractFiles for the instances list.
         *
         * @return The list of AbstractFiles for the instances list.
         */
        public List<AbstractFile> getInstances() {
            return Collections.unmodifiableList(instances);
        }
    }

    /**
     * Event to signal that the list should be populated.
     */
    public static final class PopulateDomainTabsEvent {

        private final String domain;

        /**
         * Construct a new PopulateDomainTabsEvent.
         *
         * @param domain The domain for the details area.
         */
        public PopulateDomainTabsEvent(String domain) {
            this.domain = domain;
        }

        /**
         * Get the domain for the details area.
         *
         * @return The the domain for the details area.
         */
        public String getDomain() {
            return domain;
        }
    }

    /**
     * Event to signal the completion of a search being performed.
     */
    public static final class SearchCompleteEvent {

        // Defensive copies: the event must not alias caller-owned collections.
        private final Map<GroupKey, Integer> groupMap;
        private final List<AbstractFilter> searchFilters;
        private final DiscoveryAttributes.AttributeType groupingAttribute;
        private final Group.GroupSortingAlgorithm groupSort;
        private final ResultsSorter.SortingMethod sortMethod;

        /**
         * Construct a new SearchCompleteEvent.
         *
         * @param groupMap          The map of groups which were found by the
         *                          search.
         * @param searchfilters     The search filters which were used by the
         *                          search.
         * @param groupingAttribute The grouping attribute used by the search.
         * @param groupSort         The sorting algorithm used for groups.
         * @param sortMethod        The sorting method used for results.
         */
        public SearchCompleteEvent(Map<GroupKey, Integer> groupMap, List<AbstractFilter> searchfilters,
                DiscoveryAttributes.AttributeType groupingAttribute, Group.GroupSortingAlgorithm groupSort,
                ResultsSorter.SortingMethod sortMethod) {
            // LinkedHashMap preserves the iteration order chosen by the search.
            this.groupMap = groupMap == null ? new LinkedHashMap<>() : new LinkedHashMap<>(groupMap);
            this.searchFilters = searchfilters == null ? new ArrayList<>() : new ArrayList<>(searchfilters);
            this.groupingAttribute = groupingAttribute;
            this.groupSort = groupSort;
            this.sortMethod = sortMethod;
        }

        /**
         * Get the map of groups found by the search.
         *
         * @return The map of groups which were found by the search.
         */
        public Map<GroupKey, Integer> getGroupMap() {
            return Collections.unmodifiableMap(groupMap);
        }

        /**
         * Get the filters used by the search.
         *
         * @return The search filters which were used by the search.
         */
        public List<AbstractFilter> getFilters() {
            return Collections.unmodifiableList(searchFilters);
        }

        /**
         * Get the grouping attribute used by the search.
         *
         * @return The grouping attribute used by the search.
         */
        public DiscoveryAttributes.AttributeType getGroupingAttr() {
            return groupingAttribute;
        }

        /**
         * Get the sorting algorithm used for groups.
         *
         * @return The sorting algorithm used for groups.
         */
        public Group.GroupSortingAlgorithm getGroupSort() {
            return groupSort;
        }

        /**
         * Get the sorting method used for results.
         *
         * @return The sorting method used for results.
         */
        public ResultsSorter.SortingMethod getResultSort() {
            return sortMethod;
        }
    }

    /**
     * Event to signal the completion of a search being performed.
     */
    public static final class ArtifactSearchResultEvent {

        private final List<BlackboardArtifact> listOfArtifacts = new ArrayList<>();
        private final BlackboardArtifact.ARTIFACT_TYPE artifactType;

        /**
         * Construct a new ArtifactSearchResultEvent with a list of specified
         * results and an artifact type.
         *
         * @param artifactType    The type of results in the list.
         * @param listOfArtifacts The list of results retrieved.
         */
        public ArtifactSearchResultEvent(BlackboardArtifact.ARTIFACT_TYPE artifactType, List<BlackboardArtifact> listOfArtifacts) {
            if (listOfArtifacts != null) {
                this.listOfArtifacts.addAll(listOfArtifacts);
            }
            this.artifactType = artifactType;
        }

        /**
         * Get the list of results included in the event.
         *
         * @return The list of results retrieved.
         */
        public List<BlackboardArtifact> getListOfArtifacts() {
            return Collections.unmodifiableList(listOfArtifacts);
        }

        /**
         * Get the type of BlackboardArtifact type of which exist in the list.
         *
         * @return The BlackboardArtifact type of which exist in the list.
         */
        public BlackboardArtifact.ARTIFACT_TYPE getArtifactType() {
            return artifactType;
        }
    }

    /**
     * Event to signal the completion of a search for mini timeline results
     * being performed.
     */
    public static final class MiniTimelineResultEvent {

        private final List<MiniTimelineResult> results = new ArrayList<>();

        /**
         * Construct a new MiniTimelineResultEvent.
         *
         * @param results The list of MiniTimelineResults contained in this
         *                event.
         */
        public MiniTimelineResultEvent(List<MiniTimelineResult> results) {
            if (results != null) {
                this.results.addAll(results);
            }
        }

        /**
         * Get the list of results included in the event.
         *
         * @return The list of results found.
         */
        public List<MiniTimelineResult> getResultList() {
            return Collections.unmodifiableList(results);
        }
    }

    /**
     * Event to signal the completion of page retrieval and include the page
     * contents.
     */
    public static final class PageRetrievedEvent {

        // Defensive copy: the event must not alias a caller-owned list.
        private final List<Result> results = new ArrayList<>();
        private final int page;
        private final Type resultType;

        /**
         * Construct a new PageRetrievedEvent.
         *
         * @param resultType The type of results which exist in the page.
         * @param page       The number of the page which was retrieved.
         * @param results    The list of results in the page retrieved; may be
         *                   null, which is treated as an empty list.
         */
        public PageRetrievedEvent(Type resultType, int page, List<Result> results) {
            if (results != null) {
                this.results.addAll(results);
            }
            this.page = page;
            this.resultType = resultType;
        }

        /**
         * Get the list of results in the page retrieved.
         *
         * @return The list of results in the page retrieved.
         */
        public List<Result> getSearchResults() {
            return Collections.unmodifiableList(results);
        }

        /**
         * Get the page number which was retrieved.
         *
         * @return The number of the page which was retrieved.
         */
        public int getPageNumber() {
            return page;
        }

        /**
         * Get the type of results which exist in the page.
         *
         * @return The type of results which exist in the page.
         */
        public Type getType() {
            return resultType;
        }
    }

    /**
     * Event to signal that there were no results for the search.
     */
    public static final class NoResultsEvent {

        /**
         * Construct a new NoResultsEvent.
         */
        public NoResultsEvent() {
            //no arg constructor
        }
    }

    /**
     * Event to signal that a search has been cancelled.
     */
    public static final class SearchCancelledEvent {

        /**
         * Construct a new SearchCancelledEvent.
         */
        public SearchCancelledEvent() {
            //no arg constructor
        }
    }

    /**
     * Event to signal that a group has been selected.
     */
    public static final class GroupSelectedEvent {

        private final Type resultType;
        private final GroupKey groupKey;
        private final int groupSize;
        // Defensive copy: the event must not alias a caller-owned list.
        private final List<AbstractFilter> searchfilters;
        private final DiscoveryAttributes.AttributeType groupingAttribute;
        private final Group.GroupSortingAlgorithm groupSort;
        private final ResultsSorter.SortingMethod sortMethod;

        /**
         * Construct a new GroupSelectedEvent.
         *
         * @param searchfilters     The search filters which were used by the
         *                          search.
         * @param groupingAttribute The grouping attribute used by the search.
         * @param groupSort         The sorting algorithm used for groups.
         * @param sortMethod        The sorting method used for results.
         * @param groupKey          The key associated with the group which was
         *                          selected.
         * @param groupSize         The number of results in the group which was
         *                          selected.
         * @param resultType        The type of results which exist in the
         *                          group.
         */
        public GroupSelectedEvent(List<AbstractFilter> searchfilters,
                DiscoveryAttributes.AttributeType groupingAttribute, Group.GroupSortingAlgorithm groupSort,
                ResultsSorter.SortingMethod sortMethod, GroupKey groupKey, int groupSize, Type resultType) {
            this.searchfilters = searchfilters == null ? new ArrayList<>() : new ArrayList<>(searchfilters);
            this.groupingAttribute = groupingAttribute;
            this.groupSort = groupSort;
            this.sortMethod = sortMethod;
            this.groupKey = groupKey;
            this.groupSize = groupSize;
            this.resultType = resultType;
        }

        /**
         * Get the type of results which exist in the group.
         *
         * @return The type of results which exist in the group.
         */
        public Type getResultType() {
            return resultType;
        }

        /**
         * Get the group key which is used to uniquely identify the group
         * selected.
         *
         * @return The group key which is used to uniquely identify the group
         *         selected.
         */
        public GroupKey getGroupKey() {
            return groupKey;
        }

        /**
         * Get the number of results in the group which was selected.
         *
         * @return The number of results in the group which was selected.
         */
        public int getGroupSize() {
            return groupSize;
        }

        /**
         * Get the sorting algorithm used in the group which was selected.
         *
         * @return The sorting algorithm used for groups.
         */
        public Group.GroupSortingAlgorithm getGroupSort() {
            return groupSort;
        }

        /**
         * Get the sorting method used for results in the group.
         *
         * @return The sorting method used for results.
         */
        public ResultsSorter.SortingMethod getResultSort() {
            return sortMethod;
        }

        /**
         * Get the result filters which were used by the search.
         *
         * @return The search filters which were used by the search.
         */
        public List<AbstractFilter> getFilters() {
            return Collections.unmodifiableList(searchfilters);
        }

        /**
         * Get the grouping attribute used to create the groups.
         *
         * @return The grouping attribute used by the search.
         */
        public DiscoveryAttributes.AttributeType getGroupingAttr() {
            return groupingAttribute;
        }
    }

    /**
     * Event to signal that the visibility of the Details area should change.
     */
    public static final class DetailsVisibleEvent {

        private final boolean showDetailsArea;

        /**
         * Construct a new DetailsVisibleEvent.
         *
         * @param isVisible True if the details area should be visible, false
         *                  otherwise.
         */
        public DetailsVisibleEvent(boolean isVisible) {
            showDetailsArea = isVisible;
        }

        /**
         * Get the visibility of the Details area.
         *
         * @return True if the details area should be visible, false otherwise.
         */
        public boolean isShowDetailsArea() {
            return showDetailsArea;
        }
    }
}
/* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.jetbrains.python.codeInsight.imports; import com.intellij.lang.injection.InjectedLanguageManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleUtilCore; import com.intellij.openapi.projectRoots.Sdk; import com.intellij.openapi.roots.ProjectRootManager; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.*; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.util.QualifiedName; import com.intellij.util.IncorrectOperationException; import com.jetbrains.python.codeInsight.PyCodeInsightSettings; import com.jetbrains.python.documentation.DocStringUtil; import com.jetbrains.python.psi.*; import com.jetbrains.python.psi.resolve.QualifiedNameFinder; import com.jetbrains.python.sdk.PythonSdkType; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.List; import static com.jetbrains.python.psi.PyUtil.sure; /** * Does the actual job of adding an import statement into a file. 
 * User: dcheryasov
 * Date: Apr 24, 2009 3:17:59 AM
 */
public class AddImportHelper {
  private static final Logger LOG = Logger.getInstance("#" + AddImportHelper.class.getName());

  // Utility class: all entry points are static.
  private AddImportHelper() {
  }

  /**
   * Inserts a plain {@code import <name>} statement immediately before the statement enclosing
   * {@code element}.
   */
  public static void addLocalImportStatement(@NotNull PsiElement element, @NotNull String name) {
    final PyElementGenerator generator = PyElementGenerator.getInstance(element.getProject());
    final LanguageLevel languageLevel = LanguageLevel.forElement(element);

    final PsiElement anchor = getLocalInsertPosition(element);
    final PsiElement parentElement = sure(anchor).getParent();
    if (parentElement != null) {
      parentElement.addBefore(generator.createImportStatement(languageLevel, name, null), anchor);
    }
  }

  /**
   * Inserts a {@code from <qualifier> import <name>} statement immediately before the statement
   * enclosing {@code element}.
   */
  public static void addLocalFromImportStatement(@NotNull PsiElement element, @NotNull String qualifier, @NotNull String name) {
    final PyElementGenerator generator = PyElementGenerator.getInstance(element.getProject());
    final LanguageLevel languageLevel = LanguageLevel.forElement(element);

    final PsiElement anchor = getLocalInsertPosition(element);
    final PsiElement parentElement = sure(anchor).getParent();
    if (parentElement != null) {
      parentElement.addBefore(generator.createFromImportStatement(languageLevel, qualifier, name, null), anchor);
    }
  }

  // The nearest enclosing statement of the anchor; new local imports go right before it.
  @Nullable
  public static PsiElement getLocalInsertPosition(@NotNull PsiElement anchor) {
    return PsiTreeUtil.getParentOfType(anchor, PyStatement.class, false);
  }

  // Import-group ordering used when choosing where to insert a new import (PEP 8 style grouping).
  public enum ImportPriority {
    BUILTIN, THIRD_PARTY, PROJECT
  }

  @Nullable
  public static PsiElement getFileInsertPosition(final PsiFile file) {
    return getInsertPosition(file, null, null);
  }

  /**
   * Finds the element before which a new import should be inserted in {@code insertParent}:
   * skips leading comments/whitespace and the module docstring, then walks past existing import
   * statements until one is found that should come after the new import (by priority and name).
   */
  @Nullable
  private static PsiElement getInsertPosition(final PsiElement insertParent,
                                             @Nullable String nameToImport,
                                             @Nullable ImportPriority priority) {
    PsiElement feeler = insertParent.getFirstChild();
    if (feeler == null) return null;
    // skip initial comments and whitespace and try to get just below the last import stmt
    boolean skippedOverImports = false;
    boolean skippedOverDoc = false;
    PsiElement seeker = feeler;
    final boolean isInjected = InjectedLanguageManager.getInstance(feeler.getProject()).isInjectedFragment(feeler.getContainingFile());
    do {
      if (feeler instanceof PyImportStatementBase && !isInjected) {
        if (nameToImport != null && priority != null &&
            shouldInsertBefore(insertParent, (PyImportStatementBase)feeler, nameToImport, priority)) {
          break; // insert before this import: it sorts after the new one
        }
        seeker = feeler;
        feeler = feeler.getNextSibling();
        skippedOverImports = true;
      }
      else if (PyUtil.instanceOf(feeler, PsiWhiteSpace.class, PsiComment.class)) {
        seeker = feeler;
        feeler = feeler.getNextSibling();
      }
      // maybe we arrived at the doc comment stmt; skip over it, too
      else if (!skippedOverImports && !skippedOverDoc && insertParent instanceof PyFile) {
        PsiElement doc_elt = DocStringUtil.findDocStringExpression((PyElement)insertParent); // this gives the literal; its parent is the expr seeker may have encountered
        if (doc_elt != null && doc_elt.getParent() == feeler) {
          feeler = feeler.getNextSibling();
          seeker = feeler; // skip over doc even if there's nothing below it
          skippedOverDoc = true;
        }
        else {
          break; // not a doc comment, stop on it
        }
      }
      else {
        break; // some other statement, stop
      }
    }
    while (feeler != null);
    return seeker;
  }

  /**
   * Decides whether the new import ({@code nameToImport} with the given {@code priority}) should be
   * placed before the existing import statement {@code relativeTo}: first by import-group priority,
   * then alphabetically within the same group. {@code from __future__} imports always stay first.
   */
  private static boolean shouldInsertBefore(PsiElement element, PyImportStatementBase relativeTo,
                                            String nameToImport, ImportPriority priority) {
    QualifiedName relativeToName;
    PsiElement source;
    if (relativeTo instanceof PyFromImportStatement) {
      final PyFromImportStatement fromImportStatement = (PyFromImportStatement)relativeTo;
      if (fromImportStatement.isFromFuture()) {
        return false;
      }
      relativeToName = fromImportStatement.getImportSourceQName();
      source = fromImportStatement.resolveImportSource();
    }
    else {
      final PyImportElement[] importElements = relativeTo.getImportElements();
      if (importElements.length == 0) {
        return false;
      }
      relativeToName = importElements[0].getImportedQName();
      source = importElements[0].resolve();
    }
    if (relativeToName == null) {
      return false;
    }
    final PsiFileSystemItem containingFile;
    if (source instanceof PsiDirectory) {
      containingFile = (PsiDirectory)source; // package import: the directory itself is the "file"
    }
    else {
      containingFile = source != null ? source.getContainingFile() : null;
    }
    // unresolved imports are treated as builtin, i.e. sorted into the first group
    ImportPriority relativeToPriority = source == null || containingFile == null ? ImportPriority.BUILTIN
                                                                                 : getImportPriority(element, containingFile);
    final int rc = priority.compareTo(relativeToPriority);
    if (rc < 0) {
      return true;
    }
    if (rc == 0) {
      return nameToImport.compareTo(relativeToName.toString()) < 0;
    }
    return false;
  }

  /**
   * Adds an import statement, if it doesn't exist yet, presumably below all other initial imports in the file.
   *
   * @param file   where to operate
   * @param name   which to import (qualified is OK)
   * @param asName optional name for 'as' clause
   * @param anchor place where the imported name was used. It will be used to determine proper block where new import should be inserted,
   *               e.g. inside conditional block or try/except statement. Also if anchor is another import statement, new import statement
   *               will be inserted right after it.
* @return whether import statement was actually added */ public static boolean addImportStatement(@NotNull PsiFile file, @NotNull String name, @Nullable String asName, @Nullable ImportPriority priority, @Nullable PsiElement anchor) { if (!(file instanceof PyFile)) { return false; } final List<PyImportElement> existingImports = ((PyFile)file).getImportTargets(); for (PyImportElement element : existingImports) { final QualifiedName qName = element.getImportedQName(); if (qName != null && name.equals(qName.toString())) { if ((asName != null && asName.equals(element.getAsName())) || asName == null) { return false; } } } final PyElementGenerator generator = PyElementGenerator.getInstance(file.getProject()); final LanguageLevel languageLevel = LanguageLevel.forElement(file); final PyImportStatement importNodeToInsert = generator.createImportStatement(languageLevel, name, asName); final PyImportStatementBase importStatement = PsiTreeUtil.getParentOfType(anchor, PyImportStatementBase.class, false); final PsiElement insertParent = importStatement != null && importStatement.getContainingFile() == file ? importStatement.getParent() : file; try { if (anchor instanceof PyImportStatementBase) { insertParent.addAfter(importNodeToInsert, anchor); } else { insertParent.addBefore(importNodeToInsert, getInsertPosition(insertParent, name, priority)); } } catch (IncorrectOperationException e) { LOG.error(e); } return true; } /** * Adds a new {@link PyFromImportStatement} statement below other top-level imports or as specified by anchor. * * @param file where to operate * @param from import source (reference after {@code from} keyword) * @param name imported name (identifier after {@code import} keyword) * @param asName optional alias (identifier after {@code as} keyword) * @param anchor place where the imported name was used. It will be used to determine proper block where new import should be inserted, * e.g. inside conditional block or try/except statement. 
Also if anchor is another import statement, new import statement * will be inserted right after it. * @see #addOrUpdateFromImportStatement */ public static void addFromImportStatement(@NotNull PsiFile file, @NotNull String from, @NotNull String name, @Nullable String asName, @Nullable ImportPriority priority, @Nullable PsiElement anchor) { final PyElementGenerator generator = PyElementGenerator.getInstance(file.getProject()); final LanguageLevel languageLevel = LanguageLevel.forElement(file); final PyFromImportStatement nodeToInsert = generator.createFromImportStatement(languageLevel, from, name, asName); try { final PyImportStatementBase importStatement = PsiTreeUtil.getParentOfType(anchor, PyImportStatementBase.class, false); final PsiElement insertParent; if (importStatement != null && importStatement.getContainingFile() == file) { insertParent = importStatement.getParent(); } else { insertParent = file; } if (InjectedLanguageManager.getInstance(file.getProject()).isInjectedFragment(file)) { final PsiElement element = insertParent.addBefore(nodeToInsert, getInsertPosition(insertParent, from, priority)); PsiElement whitespace = element.getNextSibling(); if (!(whitespace instanceof PsiWhiteSpace)) { whitespace = PsiParserFacade.SERVICE.getInstance(file.getProject()).createWhiteSpaceFromText(" >>> "); } insertParent.addBefore(whitespace, element); } else { if (anchor instanceof PyImportStatementBase) { insertParent.addAfter(nodeToInsert, anchor); } else { insertParent.addBefore(nodeToInsert, getInsertPosition(insertParent, from, priority)); } } } catch (IncorrectOperationException e) { LOG.error(e); } } /** * Adds new {@link PyFromImportStatement} in file or append {@link PyImportElement} to * existing from import statement. 
* * @param file module where import will be added * @param from import source (reference after {@code from} keyword) * @param name imported name (identifier after {@code import} keyword) * @param asName optional alias (identifier after {@code as} keyword) * @param priority optional import priority used to sort imports * @param anchor place where the imported name was used. It will be used to determine proper block where new import should be inserted, * e.g. inside conditional block or try/except statement. Also if anchor is another import statement, new import statement * will be inserted right after it. * @return whether import was actually added * @see #addFromImportStatement */ public static boolean addOrUpdateFromImportStatement(@NotNull PsiFile file, @NotNull String from, @NotNull String name, @Nullable String asName, @Nullable ImportPriority priority, @Nullable PsiElement anchor) { final List<PyFromImportStatement> existingImports = ((PyFile)file).getFromImports(); for (PyFromImportStatement existingImport : existingImports) { if (existingImport.isStarImport()) { continue; } final QualifiedName qName = existingImport.getImportSourceQName(); if (qName != null && qName.toString().equals(from) && existingImport.getRelativeLevel() == 0) { for (PyImportElement el : existingImport.getImportElements()) { final QualifiedName importedQName = el.getImportedQName(); if (importedQName != null && StringUtil.equals(name, importedQName.toString()) && StringUtil.equals(asName, el.getAsName())) { return false; } } final PyElementGenerator generator = PyElementGenerator.getInstance(file.getProject()); final PyImportElement importElement = generator.createImportElement(LanguageLevel.forElement(file), name); existingImport.add(importElement); return false; } } addFromImportStatement(file, from, name, asName, priority, anchor); return true; } /** * Adds either {@link PyFromImportStatement} or {@link PyImportStatement} * to specified target depending on user preferences and 
whether it's possible to import element via "from" form of import * (e.g. consider top level module). * * @param target element import is pointing to * @param file file where import will be inserted * @param element used to determine where to insert import * @see PyCodeInsightSettings#PREFER_FROM_IMPORT * @see #addImportStatement * @see #addOrUpdateFromImportStatement */ public static void addImport(final PsiNamedElement target, final PsiFile file, final PyElement element) { final boolean useQualified = !PyCodeInsightSettings.getInstance().PREFER_FROM_IMPORT; final PsiFileSystemItem toImport = target instanceof PsiFileSystemItem ? ((PsiFileSystemItem)target).getParent() : target.getContainingFile(); if (toImport == null) return; final ImportPriority priority = getImportPriority(file, toImport); final QualifiedName qName = QualifiedNameFinder.findCanonicalImportPath(target, element); if (qName == null) return; String path = qName.toString(); if (target instanceof PsiFileSystemItem && qName.getComponentCount() == 1) { addImportStatement(file, path, null, priority, element); } else { final QualifiedName toImportQName = QualifiedNameFinder.findCanonicalImportPath(toImport, element); if (toImportQName == null) return; if (useQualified) { addImportStatement(file, path, null, priority, element); final PyElementGenerator elementGenerator = PyElementGenerator.getInstance(file.getProject()); final String targetName = PyUtil.getElementNameWithoutExtension(target); element.replace(elementGenerator.createExpressionFromText(LanguageLevel.forElement(target), toImportQName + "." 
+ targetName)); } else { final String name = target.getName(); if (name != null) addOrUpdateFromImportStatement(file, toImportQName.toString(), name, null, priority, element); } } } public static ImportPriority getImportPriority(PsiElement importLocation, @NotNull PsiFileSystemItem toImport) { final VirtualFile vFile = toImport.getVirtualFile(); if (vFile == null) { return ImportPriority.PROJECT; } final ProjectRootManager projectRootManager = ProjectRootManager.getInstance(toImport.getProject()); if (projectRootManager.getFileIndex().isInContent(vFile)) { return ImportPriority.PROJECT; } Module module = ModuleUtilCore.findModuleForPsiElement(importLocation); Sdk pythonSdk = module != null ? PythonSdkType.findPythonSdk(module) : projectRootManager.getProjectSdk(); return PythonSdkType.isStdLib(vFile, pythonSdk) ? ImportPriority.BUILTIN : ImportPriority.THIRD_PARTY; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.cache.snapshot;

import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Properties;

import com.examples.snapshot.MyPdxSerializer;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.snapshot.RegionGenerator.RegionType;
import org.apache.geode.cache.snapshot.SnapshotOptions.SnapshotFormat;
import org.apache.geode.distributed.ConfigurationProperties;
import org.apache.geode.internal.cache.snapshot.SnapshotOptionsImpl;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.SerializableCallable;
import org.apache.geode.test.dunit.cache.internal.JUnit4CacheTestCase;
import org.apache.geode.test.junit.categories.SnapshotTest;
import org.apache.geode.test.junit.rules.serializable.SerializableTemporaryFolder;

/**
 * Distributed tests for parallel region snapshot export/import: data written before an
 * export is all-0xf, data written before an import is all-0xe, so a successful import is
 * observable as every entry reading back 0xf again.
 */
@Category({SnapshotTest.class})
public class ParallelSnapshotDUnitTest extends JUnit4CacheTestCase {
  // Marker payload present in the exported snapshot.
  private static final byte[] ffff = new byte[] {0xf, 0xf, 0xf, 0xf};
  // Marker payload that an import must overwrite.
  private static final byte[] eeee = new byte[] {0xe, 0xe, 0xe, 0xe};
  private static final int DATA_POINTS = 100;

  // Fresh per-test directory holding the snapshot files.
  private File directory;

  @Rule
  public SerializableTemporaryFolder temporaryFolder = new SerializableTemporaryFolder();

  @Before
  public void setup() throws IOException {
    directory = temporaryFolder.newFolder();
  }

  @Override
  public Properties getDistributedSystemProperties() {
    Properties properties = super.getDistributedSystemProperties();
    // Allow the custom file mapper to cross the wire during the test.
    properties.put(ConfigurationProperties.SERIALIZABLE_OBJECT_FILTER,
        TestSnapshotFileMapper.class.getName());
    return properties;
  }

  @Test
  public void testExportImport() throws Exception {
    loadCache();
    doExport(false);
    doImport(false);
  }

  @Test
  public void testExportWithSequentialImport() throws Exception {
    loadCache();
    doExport(false);
    doSequentialImport();
  }

  @Test
  public void testExportImportErrors() throws Exception {
    loadCache();
    // "explode" mode makes the mapper produce failing paths; both phases must surface it.
    try {
      doExport(true);
      fail("Expected exception not thrown");
    } catch (Exception e) {
      // do nothing on expected exception from test
    }
    doExport(false);
    try {
      doImport(true);
      fail("Expected exception not thrown");
    } catch (Exception e) {
      // do nothing on expected exception from test
    }
  }

  /**
   * This test ensures that parallel import succeeds even when each node does not have a file to
   * import (import cluster larger than export one)
   *
   */
  @Test
  public void testImportOnLargerCluster() throws Exception {
    loadCache(2);
    doExport(false, 2);
    getCache().getRegion("test").destroyRegion();
    loadCache();
    doImport(false);
  }

  // Exports on every VM in the host.
  private void doExport(boolean explode) throws Exception {
    doExport(explode, Host.getHost(0).getVMCount());
  }

  // Fills the region with 0xf markers, performs a parallel export through the custom
  // file mapper, then verifies each participating VM produced its snapshot file.
  private void doExport(boolean explode, int nodes) throws Exception {
    Region region = getCache().getRegion("test");
    for (int i = 0; i < DATA_POINTS; i++) {
      region.put(i, ffff);
    }

    RegionSnapshotService rss = region.getSnapshotService();
    final TestSnapshotFileMapper mapper = new TestSnapshotFileMapper();
    mapper.setShouldExplode(explode);

    SnapshotOptionsImpl opt = (SnapshotOptionsImpl) rss.createOptions();
    opt.setParallelMode(true);
    opt.setMapper(mapper);

    File f = new File(directory, "mysnap.gfd").getAbsoluteFile();
    rss.save(f, SnapshotFormat.GEMFIRE, opt);

    // Disable explode so the per-VM check below maps to the real export path.
    mapper.setShouldExplode(false);
    SerializableCallable check = new SerializableCallable() {
      @Override
      public Object call() throws Exception {
        // result unused; presumably left over — the member is re-fetched below
        getCache().getDistributedSystem().getDistributedMember();
        File snap =
            mapper.mapExportPath(getCache().getDistributedSystem().getDistributedMember(), f);
        assertTrue("Could not find snapshot: " + snap, snap.exists());
        return null;
      }
    };
    forEachVm(check, true, nodes);
  }

  // Overwrites the region with 0xe markers, runs a parallel import of the whole snapshot
  // directory, and checks every entry was restored to 0xf.
  private void doImport(boolean explode) throws ClassNotFoundException, IOException {
    Region region = getCache().getRegion("test");
    RegionSnapshotService rss = region.getSnapshotService();
    final TestSnapshotFileMapper mapper = new TestSnapshotFileMapper();
    mapper.setShouldExplode(explode);

    SnapshotOptionsImpl opt = (SnapshotOptionsImpl) rss.createOptions();
    opt.setParallelMode(true);
    opt.setMapper(mapper);

    for (int i = 0; i < DATA_POINTS; i++) {
      region.put(i, eeee);
    }

    rss.load(directory, SnapshotFormat.GEMFIRE, opt);
    for (int i = 0; i < DATA_POINTS; i++) {
      assertTrue(Arrays.equals(ffff, (byte[]) region.get(i)));
    }
  }

  // Imports the per-VM snapshot subdirectories one at a time (sequential mode) and
  // checks every entry was restored to 0xf.
  private void doSequentialImport() throws IOException, ClassNotFoundException {
    Region region = getCache().getRegion("test");
    RegionSnapshotService rss = region.getSnapshotService();
    SnapshotOptionsImpl opt = (SnapshotOptionsImpl) rss.createOptions();

    for (int i = 0; i < DATA_POINTS; i++) {
      region.put(i, eeee);
    }

    // <= covers the remote VMs plus the local (controller) member's directory.
    int vmCount = Host.getHost(0).getVMCount();
    for (int i = 0; i <= vmCount; i++) {
      rss.load(new File(directory, Integer.toString(i)), SnapshotFormat.GEMFIRE, opt);
    }

    for (int i = 0; i < DATA_POINTS; i++) {
      assertTrue(Arrays.equals(ffff, (byte[]) region.get(i)));
    }
  }

  // Invokes the callable on up to maxNodes remote VMs, and locally too when requested.
  private void forEachVm(SerializableCallable call, boolean local, int maxNodes) throws Exception {
    Host host = Host.getHost(0);
    int vms = Math.min(host.getVMCount(), maxNodes);

    for (int i = 0; i < vms; ++i) {
      host.getVM(i).invoke(call);
    }

    if (local) {
      call.call();
    }
  }

  @Override
  public final void postSetUp() throws Exception {}

  private void loadCache() throws Exception {
    this.loadCache(Integer.MAX_VALUE);
  }

  // Creates the partitioned "test" region (with PDX serialization) on up to maxNodes VMs
  // plus the local member.
  private void loadCache(int maxNodes) throws Exception {
    SerializableCallable setup = new SerializableCallable() {
      @Override
      public Object call() throws Exception {
        CacheFactory cf = new CacheFactory().setPdxSerializer(new MyPdxSerializer());
        Cache cache = getCache(cf);

        RegionGenerator rgen = new RegionGenerator();
        rgen.createRegion(cache, null, RegionType.PARTITION, "test");
        return null;
      }
    };
    forEachVm(setup, true, maxNodes);
  }
}
/* * Copyright (C) 2014 Dell, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.dell.doradus.service.schema; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import com.dell.doradus.common.ApplicationDefinition; import com.dell.doradus.common.CommonDefs; import com.dell.doradus.common.ContentType; import com.dell.doradus.common.UNode; import com.dell.doradus.common.Utils; import com.dell.doradus.core.DoradusServer; import com.dell.doradus.core.ServerConfig; import com.dell.doradus.service.Service; import com.dell.doradus.service.StorageService; import com.dell.doradus.service.db.DBService; import com.dell.doradus.service.db.DBTransaction; import com.dell.doradus.service.db.DColumn; import com.dell.doradus.service.db.DRow; import com.dell.doradus.service.db.Tenant; import com.dell.doradus.service.rest.RESTCallback; import com.dell.doradus.service.rest.RESTService; import com.dell.doradus.service.taskmanager.TaskManagerService; import com.dell.doradus.service.tenant.TenantService; /** * Provides common schema services for the Doradus server. The SchemaService parses new * and modified application schemas, add them to the Applications table, and notify the * appropriate storage service of the change. 
*/ public class SchemaService extends Service { // Application ColumnFamily name: public static final String APPS_STORE_NAME = "Applications"; // Application definition row column names: private static final String COLNAME_APP_SCHEMA = "_application"; private static final String COLNAME_APP_SCHEMA_FORMAT = "_format"; private static final String COLNAME_APP_SCHEMA_VERSION = "_version"; // Singleton instance: private static final SchemaService INSTANCE = new SchemaService(); // Current format version with which we store schema definitions: private static final int CURRENT_SCHEMA_LEVEL = 2; // REST commands supported by the SchemaService: private static final List<Class<? extends RESTCallback>> CMD_CLASSES = Arrays.asList( ListApplicationsCmd.class, ListApplicationCmd.class, DefineApplicationCmd.class, ModifyApplicationCmd.class, DeleteApplicationCmd.class, DeleteApplicationKeyCmd.class ); //----- Service methods /** * Get the singleton instance of the StorageService. The object may or may not have * been initialized yet. * * @return The singleton instance of the StorageService. */ public static SchemaService instance() { return INSTANCE; } // instance // Called once before startService. @Override public void initService() { RESTService.instance().registerCommands(CMD_CLASSES); } // initService // Wait for the DB service to be up and check application schemas. @Override public void startService() { TenantService.instance().waitForFullService(); TenantService.instance().createDefaultTenant(); checkAppStores(); } // startService // Currently, we have nothing special to do to "stop". @Override public void stopService() { } // stopService //----- Public SchemaService methods /** * Create the application with the given name in the default tenant. If the given * application already exists, the request is treated as an application update. 
If the * update is successfully validated, its schema is stored in the database, and the * appropriate storage service is notified to implement required physical database * changes, if any. * * @param appDef {@link ApplicationDefinition} of application to create or update. * Note that appDef is updated with the "Tenant" option. */ public void defineApplication(ApplicationDefinition appDef) { checkServiceState(); Tenant tenant = TenantService.instance().getDefaultTenant(); defineApplication(tenant, appDef); } // defineApplication /** * Create the application with the given name in the given Tenant. If the given * application already exists, the request is treated as an application update. If the * update is successfully validated, its schema is stored in the database, and the * appropriate storage service is notified to implement required physical database * changes, if any. * * @param tenant {@link Tenant} in which application is being created or updated. * @param appDef {@link ApplicationDefinition} of application to create or update. * Note that appDef is updated with the "Tenant" option. */ public void defineApplication(Tenant tenant, ApplicationDefinition appDef) { checkServiceState(); setTenant(appDef, tenant); ApplicationDefinition currAppDef = checkApplicationKey(appDef); StorageService storageService = verifyStorageServiceOption(currAppDef, appDef); storageService.validateSchema(appDef); initializeApplication(currAppDef, appDef); } // defineApplication /** * Return the {@link ApplicationDefinition} for all applications in the given Tenant. * * @param tenant Tenant in which to query all applications. * @return A collection of application definitions. */ public Collection<ApplicationDefinition> getAllApplications(Tenant tenant) { checkServiceState(); return findAllApplications(tenant); } // getAllApplications /** * Return the {@link ApplicationDefinition} for the application in the default tenant. 
* Null is returned if no application is found with the given name in the default * tenant. * * @return The {@link ApplicationDefinition} for the given application or null if no * no application such application is defined in the default tenant. * * @deprecated This method only works for the default tenant and hence only in * single-tenant mode. {@link SchemaService#getApplication(Tenant, String)} * should be used instead. */ public ApplicationDefinition getApplication(String appName) { checkServiceState(); Tenant tenant = TenantService.instance().getDefaultTenant(); return getApplicationDefinition(tenant, appName); } // getApplication /** * Return the {@link ApplicationDefinition} for the application in the given tenant. * Null is returned if no application is found with the given name and tenant. * * @return The {@link ApplicationDefinition} for the given application or null if no * no such application is defined in the default tenant. */ public ApplicationDefinition getApplication(Tenant tenant, String appName) { checkServiceState(); return getApplicationDefinition(tenant, appName); } // getApplication /** * Examine the given application's StorageService option and return the corresponding * {@link StorageService}. An error is thrown if the storage service is unknown or has * not been initialized. * * @param appDef {@link ApplicationDefinition} of an application. * @return The application's assigned {@link StorageService}. */ public StorageService getStorageService(ApplicationDefinition appDef) { checkServiceState(); String ssName = getStorageServiceOption(appDef); StorageService storageService = DoradusServer.instance().findStorageService(ssName); Utils.require(storageService != null, "StorageService is unknown or hasn't been initialized: " + ssName); return storageService; } // getStorageService /** * Get the given application's StorageService option. If none is found, assign and * return the default. 
Unlike {@link #getStorageService(ApplicationDefinition)}, this * method will not throw an exception if the storage service is unknown or has not * been initialized. * * @param appDef {@link ApplicationDefinition} of an application. * @return The application's declared or assigned StorageService option. */ public String getStorageServiceOption(ApplicationDefinition appDef) { String ssName = appDef.getOption(CommonDefs.OPT_STORAGE_SERVICE); if (Utils.isEmpty(ssName)) { ssName = DoradusServer.instance().getDefaultStorageService(); appDef.setOption(CommonDefs.OPT_STORAGE_SERVICE, ssName); } return ssName; } // getStorageServiceOption /** * Delete the given application, including all of its data, from the default tenant. * If the given application doesn't exist, the call is a no-op. WARNING: This method * deletes an application regardless of whether it has a key defined. * * @param appName Name of application to delete in default tenant. */ public void deleteApplication(String appName) { checkServiceState(); ApplicationDefinition appDef = getApplication(appName); if (appDef == null) { return; } deleteApplication(appName, appDef.getKey()); } // deleteApplication /** * Delete the given application, including all of its data, from the default tenant. * If the given application doesn't exist, the call is a no-op. If the application * exists, the given key must match the current key, if one is defined, or be * null/empty if no key is defined. * * @param appName Name of application to delete in the default tenant. * @param key Application key of existing application, if any. */ public void deleteApplication(String appName, String key) { checkServiceState(); ApplicationDefinition appDef = getApplication(appName); if (appDef == null) { return; } deleteApplication(appDef, key); } // deleteApplication /** * Delete the application with the given definition, including all of its data. 
The * given {@link ApplicationDefinition} must define the tenant in which the application * resides. If the given application doesn't exist, the call is a no-op. If the * application exists, the given key must match the current key, if one is defined, or * be null/empty if no key is defined. * * @param appDef {@link ApplicationDefinition} of application to delete. * @param key Application key of existing application, if any. */ public void deleteApplication(ApplicationDefinition appDef, String key) { checkServiceState(); String appKey = appDef.getKey(); if (Utils.isEmpty(appKey)) { Utils.require(Utils.isEmpty(key), "Application key does not match: %s", key); } else { Utils.require(appKey.equals(key), "Application key does not match: %s", key); } assert Tenant.getTenant(appDef) != null; // Delete storage service-specific data first. m_logger.info("Deleting application: {}", appDef.getAppName()); StorageService storageService = getStorageService(appDef); storageService.deleteApplication(appDef); TaskManagerService.instance().deleteApplicationTasks(appDef); deleteAppProperties(appDef); } // deleteApplication //----- Private methods // Singleton construction only private SchemaService() {} // Check to see if the storage manager is active for each application. 
private void checkAppStores() { m_logger.info("The following tenants and applications are defined:"); Collection<Tenant> tenantList = TenantService.instance().getTenants(); for (Tenant tenant : tenantList) { m_logger.info(" Tenant: {}", tenant.getKeyspace()); Iterator<DRow> rowIter = DBService.instance().getAllRowsAllColumns(tenant, SchemaService.APPS_STORE_NAME); if (!rowIter.hasNext()) { m_logger.info(" <no applications>"); } while (rowIter.hasNext()) { DRow row = rowIter.next(); ApplicationDefinition appDef = loadAppRow(tenant, getColumnMap(row.getColumns())); if (appDef != null) { String appName = appDef.getAppName(); String ssName = getStorageServiceOption(appDef); m_logger.info(" Application '{}': StorageService={}; keyspace={}", new Object[]{appName, ssName, tenant.getKeyspace()}); if (DoradusServer.instance().findStorageService(ssName) == null) { m_logger.warn(" >>>Application '{}' uses storage service '{}' which has not been " + "initialized; application will not be accessible via this server", appDef.getAppName(), ssName); } } } } if (tenantList.size() == 0) { m_logger.info(" <no tenants>"); } } // Delete the given application's schema row from the Applications CF. private void deleteAppProperties(ApplicationDefinition appDef) { Tenant tenant = Tenant.getTenant(appDef); DBTransaction dbTran = DBService.instance().startTransaction(tenant); dbTran.deleteRow(SchemaService.APPS_STORE_NAME, appDef.getAppName()); DBService.instance().commit(dbTran); } // deleteAppProperties // Initialize storage and store the given schema for the given new or updated application. 
private void initializeApplication(ApplicationDefinition currAppDef, ApplicationDefinition appDef) { Tenant tenant = Tenant.getTenant(appDef); if (tenant.getKeyspace().equals(ServerConfig.getInstance().keyspace)) { TenantService.instance().createDefaultTenant(); } getStorageService(appDef).initializeApplication(currAppDef, appDef); storeApplicationSchema(appDef); } // initializeApplication // Store the application row with schema, version, and format. private void storeApplicationSchema(ApplicationDefinition appDef) { String appName = appDef.getAppName(); Tenant tenant = Tenant.getTenant(appDef); DBTransaction dbTran = DBService.instance().startTransaction(tenant); dbTran.addColumn(SchemaService.APPS_STORE_NAME, appName, COLNAME_APP_SCHEMA, appDef.toDoc().toJSON()); dbTran.addColumn(SchemaService.APPS_STORE_NAME, appName, COLNAME_APP_SCHEMA_FORMAT, ContentType.APPLICATION_JSON.toString()); dbTran.addColumn(SchemaService.APPS_STORE_NAME, appName, COLNAME_APP_SCHEMA_VERSION, Integer.toString(CURRENT_SCHEMA_LEVEL)); DBService.instance().commit(dbTran); } // storeApplicationSchema // Verify key match of an existing application, if any, and return it's definition. private ApplicationDefinition checkApplicationKey(ApplicationDefinition appDef) { Tenant tenant = Tenant.getTenant(appDef); ApplicationDefinition currAppDef = getApplication(tenant, appDef.getAppName()); if (currAppDef == null) { m_logger.info("Defining application: {}", appDef.getAppName()); } else { m_logger.info("Updating application: {}", appDef.getAppName()); String appKey = currAppDef.getKey(); Utils.require(Utils.isEmpty(appKey) || appKey.equals(appDef.getKey()), "Application key cannot be changed: %s", appDef.getKey()); } return currAppDef; } // checkApplicationKey // Set the given application's "Tenant" option to the given tenant's keyspace. 
// Record the owning tenant's keyspace on the application definition as the "Tenant" option.
private void setTenant(ApplicationDefinition appDef, Tenant tenant) {
    appDef.setOption(CommonDefs.OPT_TENANT, tenant.getKeyspace());
}

// Verify the given application's StorageService option and, if this is a schema
// change, ensure it hasn't changed. Returns the application's StorageService object.
private StorageService verifyStorageServiceOption(ApplicationDefinition currAppDef, ApplicationDefinition appDef) {
    // Verify or assign StorageService
    String ssName = getStorageServiceOption(appDef);
    StorageService storageService = getStorageService(appDef);
    Utils.require(storageService != null, "StorageService is unknown or hasn't been initialized: %s", ssName);

    // Currently, StorageService can't be changed once an application exists.
    if (currAppDef != null) {
        String currSSName = getStorageServiceOption(currAppDef);
        Utils.require(currSSName.equals(ssName), "'StorageService' cannot be changed for application: %s", appDef.getAppName());
    }
    return storageService;
}   // verifyStorageServiceOption

// Drain the given column iterator into a name-to-value map.
private Map<String, String> getColumnMap(Iterator<DColumn> colIter) {
    Map<String, String> colMap = new HashMap<>();
    while (colIter.hasNext()) {
        DColumn col = colIter.next();
        colMap.put(col.getName(), col.getValue());
    }
    return colMap;
}   // getColumnMap

// Parse the application schema from the given application row's columns.
// Returns null when the row holds no schema, when the schema was written by a
// newer (unsupported) schema level, or when parsing fails; parse errors are
// logged and the row is skipped rather than aborting the caller.
private ApplicationDefinition loadAppRow(Tenant tenant, Map<String, String> colMap) {
    ApplicationDefinition appDef = new ApplicationDefinition();
    String appSchema = colMap.get(COLNAME_APP_SCHEMA);
    if (appSchema == null) {
        return null;    // Not a real application definition row
    }
    // Missing format defaults to XML; missing version defaults to the current level.
    String format = colMap.get(COLNAME_APP_SCHEMA_FORMAT);
    ContentType contentType = Utils.isEmpty(format) ? ContentType.TEXT_XML : new ContentType(format);
    String versionStr = colMap.get(COLNAME_APP_SCHEMA_VERSION);
    int schemaVersion = Utils.isEmpty(versionStr) ? CURRENT_SCHEMA_LEVEL : Integer.parseInt(versionStr);
    if (schemaVersion > CURRENT_SCHEMA_LEVEL) {
        m_logger.warn("Skipping schema with advanced version: {}", schemaVersion);
        return null;
    }
    try {
        appDef.parse(UNode.parse(appSchema, contentType));
    } catch (Exception e) {
        m_logger.warn("Error parsing schema for application '" + appDef.getAppName() + "'; skipped", e);
        return null;
    }
    setTenant(appDef, tenant);
    return appDef;
}   // loadAppRow

// Load and parse the named application's definition directly from the tenant's
// Applications store. Returns null when no such row exists or it holds no schema.
private ApplicationDefinition getApplicationDefinition(Tenant tenant, String appName) {
    Iterator<DColumn> colIter =
        DBService.instance().getAllColumns(tenant, SchemaService.APPS_STORE_NAME, appName);
    if (!colIter.hasNext()) {
        return null;
    }
    return loadAppRow(tenant, getColumnMap(colIter));
}   // getApplicationDefinition

// Get all application definitions for the given Tenant. Rows that do not parse
// into a valid definition (see loadAppRow) are silently skipped.
private Collection<ApplicationDefinition> findAllApplications(Tenant tenant) {
    List<ApplicationDefinition> result = new ArrayList<>();
    Iterator<DRow> rowIter =
        DBService.instance().getAllRowsAllColumns(tenant, SchemaService.APPS_STORE_NAME);
    while (rowIter.hasNext()) {
        DRow row = rowIter.next();
        ApplicationDefinition appDef = loadAppRow(tenant, getColumnMap(row.getColumns()));
        if (appDef != null) {
            result.add(appDef);
        }
    }
    return result;
}   // findAllApplications

}   // class SchemaService
package com.jivesoftware.os.miru.plugin.index; import com.google.common.base.Charsets; import com.google.common.base.Splitter; import com.google.common.collect.Interner; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.jivesoftware.os.filer.io.api.StackBuffer; import com.jivesoftware.os.miru.api.activity.MiruActivity; import com.jivesoftware.os.miru.api.activity.schema.MiruFieldDefinition; import com.jivesoftware.os.miru.api.activity.schema.MiruSchema; import com.jivesoftware.os.miru.api.activity.schema.MiruSchema.CompositeFieldDefinition; import com.jivesoftware.os.miru.api.base.MiruIBA; import com.jivesoftware.os.miru.api.base.MiruTenantId; import com.jivesoftware.os.miru.api.base.MiruTermId; import com.jivesoftware.os.miru.plugin.MiruInterner; import com.jivesoftware.os.mlogger.core.MetricLogger; import com.jivesoftware.os.mlogger.core.MetricLoggerFactory; import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; /** * */ public class MiruActivityInternExtern { private static final MetricLogger log = MetricLoggerFactory.getLogger(); private static final int MAX_TERM_LENGTH = 4_096; //TODO add to schema private final MiruInterner<MiruIBA> ibaInterner; private final MiruInterner<MiruTenantId> tenantInterner; private final Interner<String> stringInterner; private final MiruTermComposer termComposer; public MiruActivityInternExtern(MiruInterner<MiruIBA> ibaInterner, MiruInterner<MiruTenantId> tenantInterner, Interner<String> stringInterner, MiruTermComposer termComposer) { this.ibaInterner = ibaInterner; this.tenantInterner = tenantInterner; this.stringInterner = stringInterner; this.termComposer = termComposer; } /** * It is expected that activityAndIds.size() == internedActivityAndIds.size(); * * @param activityAndIds * @param fromOffset * @param length * @param internedActivityAndIds * @param schema * @return */ public void 
intern(List<MiruActivityAndId<MiruActivity>> activityAndIds, int fromOffset, int length, List<MiruActivityAndId<MiruInternalActivity>> internedActivityAndIds, final MiruSchema schema, StackBuffer stackBuffer) throws Exception { for (int i = fromOffset; i < fromOffset + length && i < activityAndIds.size(); i++) { MiruActivityAndId<MiruActivity> activiyAndId = activityAndIds.get(i); MiruActivity activity = activiyAndId.activity; internedActivityAndIds.set(i, new MiruActivityAndId<>( new MiruInternalActivity.Builder(schema, tenantInterner.intern(activity.tenantId.getBytes()), activity.time, activity.version, activity.realtimeDelivery, internAuthz(activity.authz)) .putFieldsValues(internFields(activity.fieldsValues, schema, stackBuffer)) .putPropsValues(internProps(activity.propsValues, schema)) .build(), activiyAndId.id, activiyAndId.monoTimestamp)); } } private String[] internAuthz(String[] activityAuthz) { if (activityAuthz == null) { return null; } for (int i = 0; i < activityAuthz.length; i++) { activityAuthz[i] = stringInterner.intern(activityAuthz[i]); } return activityAuthz; } private MiruTermId[][] internFields(Map<String, List<String>> fields, MiruSchema schema, StackBuffer stackBuffer) throws Exception { MiruTermId[][] fieldsValues = new MiruTermId[schema.fieldCount()][]; for (MiruFieldDefinition fieldDefinition : schema.getFieldDefinitions()) { List<String[]> fieldValues; MiruSchema.CompositeFieldDefinition[] compositeFieldDefinitions = schema.getCompositeFieldDefinitions(fieldDefinition.fieldId); if (compositeFieldDefinitions != null) { List<String[]> accumFieldValues = Lists.newArrayList(); for (MiruSchema.CompositeFieldDefinition field : compositeFieldDefinitions) { List<String> compositeFieldValues = fields.get(field.definition.name); if (field.whitelist != null) { List<String> filteredCompositeFieldValues = Lists.newArrayListWithCapacity(compositeFieldValues.size()); for (String compositeFieldValue : compositeFieldValues) { if (field.definition.prefix 
!= MiruFieldDefinition.Prefix.NONE) { Iterator<String> iterator = Splitter.on((char) fieldDefinition.prefix.separator).split(compositeFieldValue).iterator(); if (iterator.hasNext() && field.whitelist.contains(iterator.next())) { filteredCompositeFieldValues.add(compositeFieldValue); } } else { if (field.whitelist.contains(compositeFieldValue)) { filteredCompositeFieldValues.add(compositeFieldValue); } } } compositeFieldValues = filteredCompositeFieldValues; } if (compositeFieldValues == null || compositeFieldValues.isEmpty()) { // missing terms for a composite field, we cannot construct a composite term for this activity accumFieldValues.clear(); break; } else if (accumFieldValues.isEmpty()) { for (String compositeFieldValue : compositeFieldValues) { accumFieldValues.add(new String[] { compositeFieldValue }); } } else { List<String[]> tmpFieldValues = Lists.newArrayList(); for (String[] accumFieldValue : accumFieldValues) { for (String compositeFieldValue : compositeFieldValues) { if (compositeFieldValue.length() <= MAX_TERM_LENGTH && compositeFieldValue.length() > 0) { String[] concat = new String[accumFieldValue.length + 1]; System.arraycopy(accumFieldValue, 0, concat, 0, accumFieldValue.length); concat[concat.length - 1] = compositeFieldValue; tmpFieldValues.add(concat); } } } accumFieldValues = tmpFieldValues; } } fieldValues = accumFieldValues; } else { List<String> values = fields.get(fieldDefinition.name); if (values != null) { int i = 0; while (i < values.size()) { String fieldValue = values.get(i); if (fieldValue.length() > MAX_TERM_LENGTH || fieldValue.length() == 0) { log.warn("Ignored term {} because its length is zero or greater than {}.", fieldValue.length(), MAX_TERM_LENGTH); // heavy-handed copy for removal from list, but the original list may be immutable, and this should be a rare occurrence List<String> snip = Lists.newArrayListWithCapacity(values.size() - 1); snip.addAll(values.subList(0, i)); snip.addAll(values.subList(i + 1, values.size())); 
values = snip; } else { i++; } } fieldValues = Lists.newArrayListWithCapacity(values.size()); for (String value : values) { fieldValues.add(new String[] { value }); } } else { fieldValues = null; } } if (fieldValues != null && !fieldValues.isEmpty()) { MiruTermId[] values = new MiruTermId[fieldValues.size()]; for (int i = 0; i < values.length; i++) { values[i] = termComposer.compose(schema, fieldDefinition, stackBuffer, fieldValues.get(i)); } fieldsValues[fieldDefinition.fieldId] = values; } } return fieldsValues; } private MiruIBA[][] internProps(Map<String, List<String>> properties, MiruSchema schema) { MiruIBA[][] propertyValues = new MiruIBA[schema.propertyCount()][]; for (String propertyName : properties.keySet()) { int propertyId = schema.getPropertyId(propertyName); List<String> propValues = properties.get(propertyName); MiruIBA[] values = new MiruIBA[propValues.size()]; for (int i = 0; i < values.length; i++) { values[i] = ibaInterner.intern(propValues.get(i).getBytes(Charsets.UTF_8)); } propertyValues[propertyId] = values; } return propertyValues; } public String internString(String string) { return stringInterner.intern(string); } public MiruActivity extern(MiruInternalActivity activity, MiruSchema schema, StackBuffer stackBuffer) throws IOException { return new MiruActivity(activity.tenantId, activity.time, activity.version, activity.realtimeDelivery, activity.authz, externFields(activity.fieldsValues, schema, stackBuffer), externProps(activity.propsValues, schema)); } private Map<String, List<String>> externFields(MiruTermId[][] fields, MiruSchema schema, StackBuffer stackBuffer) throws IOException { Map<String, List<String>> externFields = Maps.newHashMapWithExpectedSize(fields.length); for (int i = 0; i < fields.length; i++) { MiruTermId[] values = fields[i]; if (values != null) { CompositeFieldDefinition[] compositeFieldDefinitions = schema.getCompositeFieldDefinitions(i); if (compositeFieldDefinitions != null) { continue; } MiruFieldDefinition 
fieldDefinition = schema.getFieldDefinition(i); List<String> externValues = new ArrayList<>(); for (MiruTermId value : values) { externValues.add(termComposer.decompose(schema, fieldDefinition, stackBuffer, value)[0]); } externFields.put(fieldDefinition.name, externValues); } } return externFields; } private Map<String, List<String>> externProps(MiruIBA[][] properties, MiruSchema schema) { Map<String, List<String>> externProperties = Maps.newHashMapWithExpectedSize(properties.length); for (int i = 0; i < properties.length; i++) { MiruIBA[] values = properties[i]; if (values != null) { List<String> externValues = new ArrayList<>(); for (MiruIBA value : values) { externValues.add(new String(value.getBytes(), Charsets.UTF_8)); } externProperties.put(schema.getPropertyDefinition(i).name, externValues); } } return externProperties; } }
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.datasync.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * <p>
 * ListTasksResponse
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/ListTasks" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListTasksResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** A list of all the tasks that are returned. */
    private java.util.List<TaskListEntry> tasks;

    /** An opaque string that indicates the position at which to begin returning the next list of tasks. */
    private String nextToken;

    /**
     * <p>
     * A list of all the tasks that are returned.
     * </p>
     *
     * @return A list of all the tasks that are returned.
     */
    public java.util.List<TaskListEntry> getTasks() {
        return tasks;
    }

    /**
     * <p>
     * A list of all the tasks that are returned. A defensive copy of the supplied collection is stored;
     * passing {@code null} clears the list.
     * </p>
     *
     * @param tasks
     *        A list of all the tasks that are returned.
     */
    public void setTasks(java.util.Collection<TaskListEntry> tasks) {
        this.tasks = (tasks == null) ? null : new java.util.ArrayList<TaskListEntry>(tasks);
    }

    /**
     * <p>
     * A list of all the tasks that are returned.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setTasks(java.util.Collection)} or {@link #withTasks(java.util.Collection)} if you want to override the
     * existing values.
     * </p>
     *
     * @param tasks
     *        A list of all the tasks that are returned.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListTasksResult withTasks(TaskListEntry... tasks) {
        if (this.tasks == null) {
            setTasks(new java.util.ArrayList<TaskListEntry>(tasks.length));
        }
        java.util.Collections.addAll(this.tasks, tasks);
        return this;
    }

    /**
     * <p>
     * A list of all the tasks that are returned.
     * </p>
     *
     * @param tasks
     *        A list of all the tasks that are returned.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListTasksResult withTasks(java.util.Collection<TaskListEntry> tasks) {
        setTasks(tasks);
        return this;
    }

    /**
     * <p>
     * An opaque string that indicates the position at which to begin returning the next list of tasks.
     * </p>
     *
     * @param nextToken
     *        An opaque string that indicates the position at which to begin returning the next list of tasks.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * <p>
     * An opaque string that indicates the position at which to begin returning the next list of tasks.
     * </p>
     *
     * @return An opaque string that indicates the position at which to begin returning the next list of tasks.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * <p>
     * An opaque string that indicates the position at which to begin returning the next list of tasks.
     * </p>
     *
     * @param nextToken
     *        An opaque string that indicates the position at which to begin returning the next list of tasks.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListTasksResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder();
        buf.append("{");
        if (getTasks() != null) {
            buf.append("Tasks: ").append(getTasks()).append(",");
        }
        if (getNextToken() != null) {
            buf.append("NextToken: ").append(getNextToken());
        }
        buf.append("}");
        return buf.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ListTasksResult)) {
            return false;
        }
        ListTasksResult other = (ListTasksResult) obj;
        // null-safe member comparisons; equivalent to the generated XOR/equals pattern.
        return java.util.Objects.equals(other.getTasks(), this.getTasks())
            && java.util.Objects.equals(other.getNextToken(), this.getNextToken());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + java.util.Objects.hashCode(getTasks());
        hashCode = prime * hashCode + java.util.Objects.hashCode(getNextToken());
        return hashCode;
    }

    @Override
    public ListTasksResult clone() {
        try {
            return (ListTasksResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.tests.integration.cluster.failover;

import java.util.Collection;
import java.util.concurrent.TimeUnit;

import org.apache.activemq.artemis.api.core.Message;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.api.core.client.ClientSessionFactory;
import org.apache.activemq.artemis.api.core.client.ServerLocator;
import org.apache.activemq.artemis.core.client.impl.TopologyMemberImpl;
import org.apache.activemq.artemis.core.config.ha.ReplicaPolicyConfiguration;
import org.apache.activemq.artemis.core.config.ha.ReplicatedPolicyConfiguration;
import org.apache.activemq.artemis.core.config.ha.ReplicationBackupPolicyConfiguration;
import org.apache.activemq.artemis.core.config.ha.ReplicationPrimaryPolicyConfiguration;
import org.apache.activemq.artemis.core.server.cluster.impl.MessageLoadBalancingType;
import org.apache.activemq.artemis.core.server.group.impl.GroupingHandlerConfiguration;
import org.apache.activemq.artemis.core.server.impl.ReplicationBackupActivation;
import org.apache.activemq.artemis.core.server.impl.SharedNothingBackupActivation;
import org.apache.activemq.artemis.tests.integration.cluster.distribution.ClusterTestBase;
import org.apache.activemq.artemis.tests.util.ActiveMQTestBase;
import org.apache.activemq.artemis.utils.Wait;
import org.junit.Test;

/**
 * Integration-test base verifying that message grouping survives failover of the
 * node hosting the LOCAL grouping handler. Topology in both tests: server 0 (live,
 * LOCAL handler), server 1 (live, REMOTE handler), server 2 (backup for server 0,
 * LOCAL handler). Subclasses supply the transport (isNetty) and HA type (haType).
 *
 * NOTE(review): call order and the explicit waits in these tests are significant;
 * do not reorder the setup/verify steps.
 */
public abstract class GroupingFailoverTestBase extends ClusterTestBase {

   // Single group ("id1"): all messages must stick to one consumer, before and after failover.
   @Test
   public void testGroupingLocalHandlerFails() throws Exception {
      // Server 2 backs up server 0; 0 and 1 are lives in a two-way cluster.
      setupBackupServer(2, 0, isFileStorage(), haType(), isNetty());
      setupLiveServer(0, isFileStorage(), haType(), isNetty(), false);
      setupLiveServer(1, isFileStorage(), haType(), isNetty(), false);
      setupClusterConnection("cluster0", "queues", MessageLoadBalancingType.ON_DEMAND, 1, isNetty(), 0, 1);
      setupClusterConnection("cluster1", "queues", MessageLoadBalancingType.ON_DEMAND, 1, isNetty(), 1, 0);
      setupClusterConnection("cluster0", "queues", MessageLoadBalancingType.ON_DEMAND, 1, isNetty(), 2, 1);
      setUpGroupHandler(GroupingHandlerConfiguration.TYPE.LOCAL, 0);
      setUpGroupHandler(GroupingHandlerConfiguration.TYPE.REMOTE, 1);
      setUpGroupHandler(GroupingHandlerConfiguration.TYPE.LOCAL, 2);
      // Pair live 0 with backup 2 via matching replication group names.
      switch (haType()) {
         case SharedNothingReplication:
            ((ReplicatedPolicyConfiguration) servers[0].getConfiguration().getHAPolicyConfiguration()).setGroupName("group1");
            ((ReplicatedPolicyConfiguration) servers[1].getConfiguration().getHAPolicyConfiguration()).setGroupName("group2");
            ((ReplicaPolicyConfiguration) servers[2].getConfiguration().getHAPolicyConfiguration()).setGroupName("group1");
            break;
         case PluggableQuorumReplication:
            ((ReplicationPrimaryPolicyConfiguration) servers[0].getConfiguration().getHAPolicyConfiguration()).setGroupName("group1");
            ((ReplicationPrimaryPolicyConfiguration) servers[1].getConfiguration().getHAPolicyConfiguration()).setGroupName("group2");
            ((ReplicationBackupPolicyConfiguration) servers[2].getConfiguration().getHAPolicyConfiguration()).setGroupName("group1");
            break;
      }
      startServers(0, 1, 2);
      setupSessionFactory(0, isNetty());
      setupSessionFactory(1, isNetty());
      createQueue(0, "queues.testaddress", "queue0", null, true);
      createQueue(1, "queues.testaddress", "queue0", null, true);
      waitForBindings(0, "queues.testaddress", 1, 0, true);
      waitForBindings(1, "queues.testaddress", 1, 0, true);
      addConsumer(0, 0, "queue0", null);
      addConsumer(1, 1, "queue0", null);
      waitForBindings(0, "queues.testaddress", 1, 1, false);
      waitForBindings(1, "queues.testaddress", 1, 1, false);
      waitForBindings(0, "queues.testaddress", 1, 1, true);
      waitForBindings(1, "queues.testaddress", 1, 1, true);
      waitForTopology(servers[1], 2, 1);
      // Grouped messages for "id1" must all land on the consumer bound by the grouping handler.
      sendWithProperty(0, "queues.testaddress", 10, false, Message.HDR_GROUP_ID, new SimpleString("id1"));
      verifyReceiveAll(10, 0);
      if (!isSharedStore()) {
         waitForBackupTopologyAnnouncement(sfs[0]);
      }
      // NOTE(review): fixed sleep — presumably lets the group binding replicate to the
      // backup before the live is killed; confirm whether a Wait condition could replace it.
      Thread.sleep(1000);
      closeSessionFactory(0);
      // Kill live 0; backup 2 should take over and preserve the group routing.
      servers[0].fail(true);
      waitForServerRestart(2);
      setupSessionFactory(2, isNetty());
      addConsumer(2, 2, "queue0", null);
      waitForBindings(2, "queues.testaddress", 1, 1, true);
      sendWithProperty(2, "queues.testaddress", 10, false, Message.HDR_GROUP_ID, new SimpleString("id1"));
      verifyReceiveAll(10, 2);
   }

   /**
    * Polls the session factory's topology until some member reports a backup,
    * or throws IllegalStateException after ActiveMQTestBase.WAIT_TIMEOUT.
    */
   public void waitForBackupTopologyAnnouncement(ClientSessionFactory sf) throws Exception {
      long start = System.currentTimeMillis();
      ServerLocator locator = sf.getServerLocator();
      do {
         Collection<TopologyMemberImpl> members = locator.getTopology().getMembers();
         for (TopologyMemberImpl member : members) {
            if (member.getBackup() != null) {
               return;
            }
         }
         Thread.sleep(10);
      }
      while (System.currentTimeMillis() - start < ActiveMQTestBase.WAIT_TIMEOUT);
      throw new IllegalStateException("Timed out waiting for backup announce");
   }

   // Six groups ("id1".."id6") round-robined over two consumers; grouping must be
   // preserved across failover of the LOCAL-handler node.
   @Test
   public void testGroupingLocalHandlerFailsMultipleGroups() throws Exception {
      setupBackupServer(2, 0, isFileStorage(), haType(), isNetty());
      setupLiveServer(0, isFileStorage(), haType(), isNetty(), false);
      setupLiveServer(1, isFileStorage(), haType(), isNetty(), false);
      setupClusterConnection("cluster0", "queues", MessageLoadBalancingType.ON_DEMAND, 1, isNetty(), 0, 1);
      setupClusterConnection("cluster1", "queues", MessageLoadBalancingType.ON_DEMAND, 1, isNetty(), 1, 0);
      setupClusterConnection("cluster0", "queues", MessageLoadBalancingType.ON_DEMAND, 1, isNetty(), 2, 1);
      setUpGroupHandler(GroupingHandlerConfiguration.TYPE.LOCAL, 0);
      setUpGroupHandler(GroupingHandlerConfiguration.TYPE.REMOTE, 1);
      setUpGroupHandler(GroupingHandlerConfiguration.TYPE.LOCAL, 2);
      switch (haType()) {
         case SharedNothingReplication:
            ((ReplicatedPolicyConfiguration) servers[0].getConfiguration().getHAPolicyConfiguration()).setGroupName("group1");
            ((ReplicatedPolicyConfiguration) servers[1].getConfiguration().getHAPolicyConfiguration()).setGroupName("group2");
            ((ReplicaPolicyConfiguration) servers[2].getConfiguration().getHAPolicyConfiguration()).setGroupName("group1");
            break;
         case PluggableQuorumReplication:
            ((ReplicationPrimaryPolicyConfiguration) servers[0].getConfiguration().getHAPolicyConfiguration()).setGroupName("group1");
            ((ReplicationPrimaryPolicyConfiguration) servers[1].getConfiguration().getHAPolicyConfiguration()).setGroupName("group2");
            ((ReplicationBackupPolicyConfiguration) servers[2].getConfiguration().getHAPolicyConfiguration()).setGroupName("group1");
            break;
      }
      startServers(0, 1, 2);
      setupSessionFactory(0, isNetty());
      setupSessionFactory(1, isNetty());
      createQueue(0, "queues.testaddress", "queue0", null, true);
      waitForBindings(0, "queues.testaddress", 1, 0, true);
      createQueue(1, "queues.testaddress", "queue0", null, true);
      waitForBindings(1, "queues.testaddress", 1, 0, true);
      addConsumer(0, 0, "queue0", null);
      addConsumer(1, 1, "queue0", null);
      waitForBindings(0, "queues.testaddress", 1, 1, false);
      waitForBindings(1, "queues.testaddress", 1, 1, false);
      waitForBindings(0, "queues.testaddress", 1, 1, true);
      waitForBindings(1, "queues.testaddress", 1, 1, true);
      waitForTopology(servers[1], 2);
      // Six distinct group ids; 10 messages each, expected to round-robin over consumers 0 and 1.
      sendWithProperty(0, "queues.testaddress", 10, false, Message.HDR_GROUP_ID, new SimpleString("id1"));
      sendWithProperty(0, "queues.testaddress", 10, false, Message.HDR_GROUP_ID, new SimpleString("id2"));
      sendWithProperty(0, "queues.testaddress", 10, false, Message.HDR_GROUP_ID, new SimpleString("id3"));
      sendWithProperty(0, "queues.testaddress", 10, false, Message.HDR_GROUP_ID, new SimpleString("id4"));
      sendWithProperty(0, "queues.testaddress", 10, false, Message.HDR_GROUP_ID, new SimpleString("id5"));
      sendWithProperty(0, "queues.testaddress", 10, false, Message.HDR_GROUP_ID, new SimpleString("id6"));
      verifyReceiveAllWithGroupIDRoundRobin(0, 30, 0, 1);
      // Ensure the backup is fully synchronized before killing the live.
      switch (haType()) {
         case SharedNothingReplication: {
            SharedNothingBackupActivation backupActivation = (SharedNothingBackupActivation) servers[2].getActivation();
            assertTrue(backupActivation.waitForBackupSync(10, TimeUnit.SECONDS));
         }
         break;
         case PluggableQuorumReplication: {
            ReplicationBackupActivation backupActivation = (ReplicationBackupActivation) servers[2].getActivation();
            Wait.assertTrue(backupActivation::isReplicaSync, TimeUnit.SECONDS.toMillis(10));
         }
         break;
      }
      closeSessionFactory(0);
      servers[0].fail(true);
      waitForServerRestart(2);
      setupSessionFactory(2, isNetty());
      addConsumer(2, 2, "queue0", null);
      waitForBindings(2, "queues.testaddress", 1, 1, true);
      waitForBindings(2, "queues.testaddress", 1, 1, false);
      waitForBindings(1, "queues.testaddress", 1, 1, true);
      waitForBindings(1, "queues.testaddress", 1, 1, false);
      // Same six groups must continue to round-robin over the surviving consumers.
      sendWithProperty(2, "queues.testaddress", 10, false, Message.HDR_GROUP_ID, new SimpleString("id1"));
      sendWithProperty(2, "queues.testaddress", 10, false, Message.HDR_GROUP_ID, new SimpleString("id2"));
      sendWithProperty(2, "queues.testaddress", 10, false, Message.HDR_GROUP_ID, new SimpleString("id3"));
      sendWithProperty(2, "queues.testaddress", 10, false, Message.HDR_GROUP_ID, new SimpleString("id4"));
      sendWithProperty(2, "queues.testaddress", 10, false, Message.HDR_GROUP_ID, new SimpleString("id5"));
      sendWithProperty(2, "queues.testaddress", 10, false, Message.HDR_GROUP_ID, new SimpleString("id6"));
      verifyReceiveAllWithGroupIDRoundRobin(2, 30, 1, 2);
   }

   // Default transport for subclasses: Netty.
   public boolean isNetty() {
      return true;
   }
}
/*
 *
 * Copyright (c) 2013,2019 AT&T Knowledge Ventures
 * SPDX-License-Identifier: MIT
 */
package com.att.research.xacml.admin.jpa;

import java.io.Serializable;
import java.util.HashSet;
import java.util.Set;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.NamedQuery;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import javax.persistence.Transient;

import com.att.research.xacml.api.Identifier;
import com.att.research.xacml.api.XACML3;
import com.att.research.xacml.std.IdentifierImpl;

/**
 * The persistent class for the Datatype database table. A row represents one
 * XACML datatype, either standard ('S') or custom ('C'), identified by its
 * XACML URI ({@code xacmlId}) and referenced by attributes, function
 * definitions, and function arguments.
 */
@Entity
@Table(name="Datatype")
@NamedQuery(name="Datatype.findAll", query="SELECT d FROM Datatype d")
public class Datatype implements Serializable {
	private static final long serialVersionUID = 1L;

	// Discriminator values for the is_standard column.
	public static final char STANDARD = 'S';
	public static final char CUSTOM = 'C';

	@Id
	@GeneratedValue(strategy = GenerationType.AUTO)
	@Column(name="id")
	private int id;

	// 'S' for XACML-standard datatypes, 'C' for user-defined ones.
	@Column(name="is_standard", nullable=false)
	private char isStandard;

	// The datatype's XACML identifier URI; unique across all rows.
	@Column(name="xacml_id", nullable=false, unique=true, length=255)
	private String xacmlId;

	@Column(name="short_name", nullable=false, length=64)
	private String shortName;

	//bi-directional many-to-one association to Attribute
	@OneToMany(mappedBy="datatypeBean")
	private Set<Attribute> attributes = new HashSet<>();

	//bi-directional many-to-one association to FunctionDefinition
	@OneToMany(mappedBy="datatypeBean")
	private Set<FunctionDefinition> functions = new HashSet<>();

	//bi-directional many-to-one association to FunctionArgument
	@OneToMany(mappedBy="datatypeBean")
	private Set<FunctionArgument> arguments = new HashSet<>();

	// Default constructor: a standard string datatype.
	public Datatype() {
		this.xacmlId = XACML3.ID_DATATYPE_STRING.stringValue();
		this.isStandard = Datatype.STANDARD;
	}

	// Copy constructor with an explicit id. Associations are NOT copied (shallow).
	public Datatype(int id, Datatype dt) {
		this.id = id;
		this.isStandard = dt.isStandard;
		this.xacmlId = dt.xacmlId;
		this.shortName = dt.shortName;
		//
		// Make a copy?
		//
		this.attributes = new HashSet<>();
	}

	public Datatype(Identifier identifier, char standard) {
		if (identifier != null) {
			this.xacmlId = identifier.stringValue();
		}
		this.isStandard = standard;
	}

	public Datatype(Identifier identifier) {
		this(identifier, Datatype.STANDARD);
	}

	public int getId() {
		return this.id;
	}

	public void setId(int id) {
		this.id = id;
	}

	public char getIsStandard() {
		return this.isStandard;
	}

	public void setIsStandard(char isStandard) {
		this.isStandard = isStandard;
	}

	public String getXacmlId() {
		return this.xacmlId;
	}

	public void setXacmlId(String xacmlId) {
		this.xacmlId = xacmlId;
	}

	public String getShortName() {
		return shortName;
	}

	public void setShortName(String shortName) {
		this.shortName = shortName;
	}

	public Set<Attribute> getAttributes() {
		return this.attributes;
	}

	public void setAttributes(Set<Attribute> attributes) {
		this.attributes = attributes;
	}

	// Adds the attribute and sets this datatype as its owner (both sides of the association).
	public Attribute addAttribute(Attribute attribute) {
		getAttributes().add(attribute);
		attribute.setDatatypeBean(this);
		return attribute;
	}

	public Attribute removeAttribute(Attribute attribute) {
		getAttributes().remove(attribute);
		attribute.setDatatypeBean(null);
		return attribute;
	}

	public Set<FunctionDefinition> getFunctions() {
		return this.functions;
	}

	public void setFunctions(Set<FunctionDefinition> functions) {
		this.functions = functions;
	}

	public FunctionDefinition addFunction(FunctionDefinition function) {
		getFunctions().add(function);
		function.setDatatypeBean(this);
		return function;
	}

	// NOTE(review): misnamed overload — this removes a FunctionDefinition and should be
	// called removeFunction (mirroring addFunction). Renaming would break existing
	// callers, so it is only flagged here.
	public FunctionDefinition removeAttribute(FunctionDefinition function) {
		getFunctions().remove(function);
		function.setDatatypeBean(null);
		return function;
	}

	public Set<FunctionArgument> getArguments() {
		return this.arguments;
	}

	public void setArguments(Set<FunctionArgument> argument) {
		this.arguments = argument;
	}

	public FunctionArgument addArgument(FunctionArgument argument) {
		getArguments().add(argument);
		argument.setDatatypeBean(this);
		return argument;
	}

	public FunctionArgument removeArgument(FunctionArgument argument) {
		getArguments().remove(argument);
		argument.setDatatypeBean(null);
		return argument;
	}

	// NOTE(review): method name is misspelled ("Identifer"); part of the public API,
	// so it is left as-is and only flagged here.
	@Transient
	public Identifier getIdentifer() {
		return new IdentifierImpl(this.xacmlId);
	}

	@Transient
	public boolean isStandard() {
		return (this.isStandard == Datatype.STANDARD);
	}

	@Transient
	public boolean isCustom() {
		return (this.isStandard == Datatype.CUSTOM);
	}

	@Transient
	@Override
	public String toString() {
		return "Datatype [id=" + id + ", isStandard=" + isStandard + ", xacmlId=" + xacmlId + ", shortName="
				+ shortName + ", attributes=" + attributes + ", functions=" + functions + ", arguments=" + arguments + "]";
	}
}
package umm3601.mongotest; import com.mongodb.MongoClient; import com.mongodb.client.*; import com.mongodb.client.model.Accumulators; import com.mongodb.client.model.Aggregates; import com.mongodb.client.model.Sorts; import org.bson.Document; import org.junit.Before; import org.junit.Test; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import static com.mongodb.client.model.Filters.*; import static com.mongodb.client.model.Projections.*; import static org.junit.Assert.*; /** * Some simple "tests" that demonstrate our ability to * connect to a Mongo database and run some basic queries * against it. * * Note that none of these are actually tests of any of our * code; they are mostly demonstrations of the behavior of * the MongoDB Java libraries. Thus if they test anything, * they test that code, and perhaps our understanding of it. * * To test "our" code we'd want the tests to confirm that * the behavior of methods in things like the UserController * do the "right" thing. * * Created by mcphee on 20/2/17. 
*/ public class MongoSpec { private MongoCollection<Document> userDocuments; @Before public void clearAndPopulateDB() { MongoClient mongoClient = new MongoClient(); MongoDatabase db = mongoClient.getDatabase("testingdb"); userDocuments = db.getCollection("users"); userDocuments.drop(); List<Document> testUsers = new ArrayList<>(); testUsers.add(Document.parse("{\n" + " name: \"Chris\",\n" + " age: 25,\n" + " company: \"UMM\",\n" + " email: \"chris@this.that\"\n" + " }")); testUsers.add(Document.parse("{\n" + " name: \"Pat\",\n" + " age: 37,\n" + " company: \"IBM\",\n" + " email: \"pat@something.com\"\n" + " }")); testUsers.add(Document.parse("{\n" + " name: \"Jamie\",\n" + " age: 37,\n" + " company: \"Frogs, Inc.\",\n" + " email: \"jamie@frogs.com\"\n" + " }")); userDocuments.insertMany(testUsers); } private List<Document> intoList(MongoIterable<Document> documents) { List<Document> users = new ArrayList<>(); documents.into(users); return users; } private int countUsers(FindIterable<Document> documents) { List<Document> users = intoList(documents); return users.size(); } @Test public void shouldBeThreeUsers() { FindIterable<Document> documents = userDocuments.find(); int numberOfUsers = countUsers(documents); assertEquals("Should be 3 total users", 3, numberOfUsers); } @Test public void shouldBeOneChris() { FindIterable<Document> documents = userDocuments.find(eq("name", "Chris")); int numberOfUsers = countUsers(documents); assertEquals("Should be 1 Chris", 1, numberOfUsers); } @Test public void shouldBeTwoOver25() { FindIterable<Document> documents = userDocuments.find(gt("age", 25)); int numberOfUsers = countUsers(documents); assertEquals("Should be 2 over 25", 2, numberOfUsers); } @Test public void over25SortedByName() { FindIterable<Document> documents = userDocuments.find(gt("age", 25)) .sort(Sorts.ascending("name")); List<Document> docs = intoList(documents); assertEquals("Should be 2", 2, docs.size()); assertEquals("First should be Jamie", "Jamie", 
docs.get(0).get("name")); assertEquals("Second should be Pat", "Pat", docs.get(1).get("name")); } @Test public void over25AndIbmers() { FindIterable<Document> documents = userDocuments.find(and(gt("age", 25), eq("company", "IBM"))); List<Document> docs = intoList(documents); assertEquals("Should be 1", 1, docs.size()); assertEquals("First should be Pat", "Pat", docs.get(0).get("name")); } @Test public void justNameAndEmail() { FindIterable<Document> documents = userDocuments.find().projection(fields(include("name", "email"))); List<Document> docs = intoList(documents); assertEquals("Should be 3", 3, docs.size()); assertEquals("First should be Chris", "Chris", docs.get(0).get("name")); assertNotNull("First should have email", docs.get(0).get("email")); assertNull("First shouldn't have 'company'", docs.get(0).get("company")); assertNotNull("First should have '_id'", docs.get(0).get("_id")); } @Test public void justNameAndEmailNoId() { FindIterable<Document> documents = userDocuments.find() .projection(fields(include("name", "email"), excludeId())); List<Document> docs = intoList(documents); assertEquals("Should be 3", 3, docs.size()); assertEquals("First should be Chris", "Chris", docs.get(0).get("name")); assertNotNull("First should have email", docs.get(0).get("email")); assertNull("First shouldn't have 'company'", docs.get(0).get("company")); assertNull("First should not have '_id'", docs.get(0).get("_id")); } @Test public void justNameAndEmailNoIdSortedByCompany() { FindIterable<Document> documents = userDocuments.find() .sort(Sorts.ascending("company")) .projection(fields(include("name", "email"), excludeId())); List<Document> docs = intoList(documents); assertEquals("Should be 3", 3, docs.size()); assertEquals("First should be Jamie", "Jamie", docs.get(0).get("name")); assertNotNull("First should have email", docs.get(0).get("email")); assertNull("First shouldn't have 'company'", docs.get(0).get("company")); assertNull("First should not have '_id'", 
docs.get(0).get("_id")); } @Test public void ageCounts() { AggregateIterable<Document> documents = userDocuments.aggregate( Arrays.asList( /* * Groups data by the "age" field, and then counts * the number of documents with each given age. * This creates a new "constructed document" that * has "age" as it's "_id", and the count as the * "ageCount" field. */ Aggregates.group("$age", Accumulators.sum("ageCount", 1)), Aggregates.sort(Sorts.ascending("_id")) ) ); List<Document> docs = intoList(documents); assertEquals("Should be two distinct ages", 2, docs.size()); assertEquals(docs.get(0).get("_id"), 25); assertEquals(docs.get(0).get("ageCount"), 1); assertEquals(docs.get(1).get("_id"), 37); assertEquals(docs.get(1).get("ageCount"), 2); } @Test public void averageAge() { AggregateIterable<Document> documents = userDocuments.aggregate( Arrays.asList( Aggregates.group("$company", Accumulators.avg("averageAge", "$age")), Aggregates.sort(Sorts.ascending("_id")) )); List<Document> docs = intoList(documents); assertEquals("Should be three companies", 3, docs.size()); assertEquals("Frogs, Inc.", docs.get(0).get("_id")); assertEquals(37.0, docs.get(0).get("averageAge")); assertEquals("IBM", docs.get(1).get("_id")); assertEquals(37.0, docs.get(1).get("averageAge")); assertEquals("UMM", docs.get(2).get("_id")); assertEquals(25.0, docs.get(2).get("averageAge")); } }
/*
 * Copyright 2000-2011 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.execution.junit;

import com.intellij.execution.CantRunException;
import com.intellij.execution.ExecutionBundle;
import com.intellij.execution.ExecutionException;
import com.intellij.execution.TestClassCollector;
import com.intellij.execution.configurations.RuntimeConfigurationError;
import com.intellij.execution.configurations.RuntimeConfigurationException;
import com.intellij.execution.runners.ExecutionEnvironment;
import com.intellij.execution.testframework.SearchForTestsTask;
import com.intellij.execution.testframework.SourceScope;
import com.intellij.execution.util.JavaParametersUtil;
import com.intellij.execution.util.ProgramParametersUtil;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleUtilCore;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ModuleFileIndex;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.GlobalSearchScopesCore;
import com.intellij.psi.util.ClassUtil;
import com.intellij.rt.execution.junit.JUnitStarter;
import gnu.trove.THashSet;
import org.jetbrains.annotations.Nullable;

import java.io.IOException;
import java.nio.file.Path;
import java.util.Collection;
import java.util.Collections;
import java.util.Set;

/**
 * JUnit run-configuration scope that runs all tests found under a single
 * directory (as opposed to a whole package). Specializes {@link TestPackage}
 * by restricting search scopes and class collection to the configured
 * directory.
 */
class TestDirectory extends TestPackage {
  TestDirectory(JUnitConfiguration configuration, ExecutionEnvironment environment) {
    super(configuration, environment);
  }

  /**
   * Builds a {@link SourceScope} limited to the configured directory
   * (recursively). If the directory path cannot be resolved in the VFS,
   * an empty global scope is returned instead of failing.
   */
  @Nullable
  @Override
  public SourceScope getSourceScope() {
    final String dirName = getConfiguration().getPersistentData().getDirName();
    final VirtualFile file = LocalFileSystem.getInstance().findFileByPath(FileUtil.toSystemIndependentName(dirName));
    final Project project = getConfiguration().getProject();
    // 'true' = include subdirectories in the directory scope.
    final GlobalSearchScope globalSearchScope =
      file == null ? GlobalSearchScope.EMPTY_SCOPE : GlobalSearchScopesCore.directoryScope(project, file, true);
    return new SourceScope() {
      @Override
      public GlobalSearchScope getGlobalSearchScope() {
        return globalSearchScope;
      }

      @Override
      public Project getProject() {
        return project;
      }

      @Override
      public GlobalSearchScope getLibrariesScope() {
        // Fall back to the whole-project scope when no module is configured.
        final Module module = getConfiguration().getConfigurationModule().getModule();
        return module != null ? GlobalSearchScope.moduleWithLibrariesScope(module)
                              : GlobalSearchScope.allScope(project);
      }

      @Override
      public Module[] getModulesToCompile() {
        final Collection<Module> validModules = getConfiguration().getValidModules();
        return validModules.toArray(Module.EMPTY_ARRAY);
      }
    };
  }

  /**
   * Resolves the filesystem root used by the JUnit 5 class collector:
   * the root of the module that owns the configured directory, or
   * {@code null} when the directory or its module cannot be found.
   */
  @Nullable
  @Override
  protected Path getRootPath() {
    final VirtualFile file = LocalFileSystem.getInstance().findFileByPath(FileUtil.toSystemIndependentName(getConfiguration().getPersistentData().getDirName()));
    if (file == null) return null;
    Module dirModule = ModuleUtilCore.findModuleForFile(file, getConfiguration().getProject());
    if (dirModule == null) return null;
    return TestClassCollector.getRootPath(dirModule, true);
  }

  @Override
  protected boolean configureByModule(Module module) {
    // A directory-based run always needs a module for its classpath.
    return module != null;
  }

  /**
   * Validates the run configuration: alternative JRE, working directory,
   * a non-empty directory path that resolves in the VFS, and a module to
   * take the classpath from.
   *
   * @throws RuntimeConfigurationException if any of the above is missing
   */
  @Override
  public void checkConfiguration() throws RuntimeConfigurationException {
    JavaParametersUtil.checkAlternativeJRE(getConfiguration());
    ProgramParametersUtil.checkWorkingDirectoryExist(
      getConfiguration(), getConfiguration().getProject(), getConfiguration().getConfigurationModule().getModule());
    final String dirName = getConfiguration().getPersistentData().getDirName();
    if (dirName == null || dirName.isEmpty()) {
      throw new RuntimeConfigurationError("Directory is not specified");
    }
    final VirtualFile file = LocalFileSystem.getInstance().findFileByPath(FileUtil.toSystemIndependentName(dirName));
    if (file == null) {
      throw new RuntimeConfigurationError("Directory \'" + dirName + "\' is not found");
    }
    final Module module = getConfiguration().getConfigurationModule().getModule();
    if (module == null) {
      throw new RuntimeConfigurationError("Module to choose classpath from is not specified");
    }
  }

  @Override
  protected GlobalSearchScope filterScope(JUnitConfiguration.Data data) throws CantRunException {
    // Unlike TestPackage, no package-based narrowing: the directory itself
    // defines the scope, so search everywhere in the project.
    return GlobalSearchScope.allScope(getConfiguration().getProject());
  }

  /**
   * For JUnit 5, returns a task that decides between directory-root-based
   * discovery and an explicit class list; otherwise delegates to the
   * package-based task from {@link TestPackage}.
   */
  @Override
  public SearchForTestsTask createSearchingForTestsTask() {
    if (JUnitStarter.JUNIT5_PARAMETER.equals(getRunner())) {
      return new SearchForTestsTask(getConfiguration().getProject(), myServerSocket) {
        private final THashSet<PsiClass> classes = new THashSet<>();

        @Override
        protected void search() throws ExecutionException {
          PsiDirectory directory = getDirectory(getConfiguration().getPersistentData());
          PsiPackage aPackage = JavaRuntimeConfigurationProducerBase.checkPackage(directory);
          if (aPackage != null) {
            final Module module = ModuleUtilCore.findModuleForFile(directory.getVirtualFile(), getProject());
            if (module != null) {
              ModuleFileIndex fileIndex = ModuleRootManager.getInstance(module).getFileIndex();
              PsiDirectory[] directories = aPackage.getDirectories(module.getModuleScope(true));
              boolean foundTestSources = false;
              for (PsiDirectory dir : directories) {
                if (fileIndex.isInTestSourceContent(dir.getVirtualFile())) {
                  // The package maps to MORE than one test-source directory:
                  // root-based discovery would be ambiguous, so collect the
                  // classes under the configured directory explicitly.
                  if (foundTestSources) {
                    collectClassesRecursively(directory, Condition.TRUE, classes);
                    break;
                  }
                  foundTestSources = true;
                }
              }
            }
          }
        }

        @Override
        protected void onFound() throws ExecutionException {
          String packageName = TestDirectory.super.getPackageName(getConfiguration().getPersistentData());
          try {
            Path rootPath = getRootPath();
            LOG.assertTrue(rootPath != null);
            // "\u002B" is '+': marks the entry as a root path for the
            // forked JUnitStarter. If no classes were collected, pass a
            // package-name pattern instead of an explicit class list.
            JUnitStarter
              .printClassesList(Collections.singletonList("\u002B" + rootPath.toFile().getAbsolutePath()),
                                packageName, "",
                                classes.isEmpty() ? packageName + "\\..*"
                                                  : StringUtil.join(classes, aClass -> ClassUtil.getJVMClassName(aClass), "||"),
                                myTempFile);
          }
          catch (IOException e) {
            LOG.error(e);
          }
        }
      };
    }
    return super.createSearchingForTestsTask();
  }

  @Override
  protected String getPackageName(JUnitConfiguration.Data data) throws CantRunException {
    // A directory has no single package; callers get the empty string.
    return "";
  }

  @Override
  protected void collectClassesRecursively(TestClassFilter classFilter,
                                           Condition<? super PsiClass> acceptClassCondition,
                                           Set<? super PsiClass> classes) throws CantRunException {
    // The filter argument is intentionally ignored: the directory bounds
    // the search instead of a package-based class filter.
    collectClassesRecursively(getDirectory(getConfiguration().getPersistentData()), acceptClassCondition, classes);
  }

  /**
   * Depth-first walk of {@code directory}, feeding every top-level class in
   * every {@link PsiClassOwner} file to {@code collectInnerClasses}. PSI
   * access is wrapped in {@link ReadAction}s because this runs off the EDT.
   */
  private static void collectClassesRecursively(PsiDirectory directory,
                                                Condition<? super PsiClass> acceptAsTest,
                                                Set<? super PsiClass> classes) {
    PsiDirectory[] subDirectories = ReadAction.compute(() -> directory.getSubdirectories());
    for (PsiDirectory subDirectory : subDirectories) {
      collectClassesRecursively(subDirectory, acceptAsTest, classes);
    }
    PsiFile[] files = ReadAction.compute(() -> directory.getFiles());
    for (PsiFile file : files) {
      if (file instanceof PsiClassOwner) {
        for (PsiClass aClass : ReadAction.compute(() -> ((PsiClassOwner)file).getClasses())) {
          collectInnerClasses(aClass, acceptAsTest, classes);
        }
      }
    }
  }

  @Override
  protected PsiPackage getPackage(JUnitConfiguration.Data data) throws CantRunException {
    final PsiDirectory directory = getDirectory(data);
    return ReadAction.compute(() -> JavaDirectoryService.getInstance().getPackageInSources(directory));
  }

  /**
   * Resolves the configured directory name to a {@link PsiDirectory}.
   *
   * @throws CantRunException if the path does not exist in the VFS or has
   *                          no PSI counterpart
   */
  private PsiDirectory getDirectory(JUnitConfiguration.Data data) throws CantRunException {
    final String dirName = data.getDirName();
    final VirtualFile file = LocalFileSystem.getInstance().findFileByPath(FileUtil.toSystemIndependentName(dirName));
    if (file == null) {
      throw new CantRunException("Directory \'" + dirName + "\' is not found");
    }
    final PsiDirectory directory = ReadAction.compute(() -> PsiManager.getInstance(getConfiguration().getProject()).findDirectory(file));
    if (directory == null) {
      throw new CantRunException("Directory \'" + dirName + "\' is not found");
    }
    return directory;
  }

  @Override
  public String suggestActionName() {
    final JUnitConfiguration.Data data = getConfiguration().getPersistentData();
    final String dirName = data.getDirName();
    // Empty directory name means "all tests"; otherwise show the last path
    // segment of the (normalized) directory.
    return dirName.isEmpty() ? ExecutionBundle.message("all.tests.scope.presentable.text")
                             : ExecutionBundle.message("test.in.scope.presentable.text",
                                                       StringUtil.getShortName(FileUtil.toSystemIndependentName(dirName), '/'));
  }

  /**
   * Matches only when the configuration is directory-based and its stored
   * (system-independent) path equals the candidate directory's path.
   */
  @Override
  public boolean isConfiguredByElement(JUnitConfiguration configuration,
                                       PsiClass testClass,
                                       PsiMethod testMethod,
                                       PsiPackage testPackage,
                                       PsiDirectory testDir) {
    if (JUnitConfiguration.TEST_DIRECTORY.equals(configuration.getPersistentData().TEST_OBJECT) && testDir != null) {
      if (Comparing.strEqual(FileUtil.toSystemIndependentName(configuration.getPersistentData().getDirName()),
                             testDir.getVirtualFile().getPath())) {
        return true;
      }
    }
    return false;
  }
}
/**
 * Copyright (c) 2015 deniskir@gmail.com. All rights reserved.
 *
 * @author Denis Kiriusin
 */

package com.dkiriusin.combinatorius;

import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.Collection;
import java.util.Properties;

import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.hamcrest.Matchers;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.runners.MockitoJUnitRunner;

import com.dkiriusin.combinatorius.CombinatoriusServlet;
import com.dkiriusin.combinatorius.MimeType;
import com.dkiriusin.combinatorius.Property;
import com.dkiriusin.combinatorius.RequestDetails;

/**
 * Unit tests for {@link CombinatoriusServlet}: file collection from CSS and
 * theme directories, error handling in doGet, and response-header setting.
 * All collaborators (request, response, properties, request details) are
 * Mockito mocks injected into a real servlet instance.
 */
@RunWith(MockitoJUnitRunner.class)
public class ComboServletTest {

  @Rule
  public ExpectedException thrown = ExpectedException.none();

  @Rule
  public TemporaryFolder tp = new TemporaryFolder();

  // NOTE(review): @InjectMocks wraps a plain `new CombinatoriusServlet()`,
  // yet setUp() stubs servlet.getServletContext() with Mockito.when(...) on
  // this non-mock object. That only works if getServletContext() happens to
  // be overridable/intercepted here — verify this wiring is intentional.
  @InjectMocks
  private CombinatoriusServlet servlet = new CombinatoriusServlet();
  @Mock
  private HttpServletRequest request;
  @Mock
  private HttpServletResponse response;
  @Mock
  private ServletConfig servletConfig;
  @Mock
  private ServletContext servletContext;
  @Mock
  private ThreadLocal<RequestDetails> requestDetails;
  @Mock
  private RequestDetails requestDetailsObject;
  @Mock
  private Cookie cookie;
  @Mock
  private Properties properties;

  /**
   * Wires default stub behavior: a CSS request against the test resource
   * directories with compression on and the YUI compressor configured but
   * disabled. Individual tests override specific property stubs.
   */
  @Before
  public void setUp() throws ServletException, IOException, URISyntaxException {
    Mockito.when(servlet.getServletContext()).thenReturn(servletContext);
    Mockito.when(servletContext.getRealPath(Mockito.anyString())).thenReturn("src/test/resources/css/file1.css");
    Mockito.when(requestDetailsObject.getMimeType()).thenReturn(MimeType.css);
    Mockito.when(requestDetailsObject.getResources()).thenReturn(TestUtils.URL);
    Mockito.when(requestDetailsObject.getThemeName()).thenReturn("test-theme");
    Mockito.when(requestDetailsObject.getVersion()).thenReturn(0L);
    Mockito.when(requestDetailsObject.getExtension()).thenReturn("css");
    Mockito.when(requestDetails.get()).thenReturn(requestDetailsObject);
    Mockito.when(request.getRequestURL()).thenReturn(new StringBuffer(TestUtils.URL));
    // Two-argument getProperty stubs cover lookups that supply a default value.
    Mockito.when(properties.getProperty(Mockito.eq(Property.CSS_DIR.getName()), Mockito.anyString())).thenReturn("src/test/resources/css");
    Mockito.when(properties.getProperty(Mockito.eq(Property.THEMES_DIR.getName()), Mockito.anyString())).thenReturn("src/test/resources/themes");
    Mockito.when(properties.getProperty(Mockito.eq(Property.CSS_CACHE_DIR.getName()), Mockito.anyString())).thenReturn("css_cache");
    Mockito.when(properties.getProperty(Mockito.eq(Property.IS_COMPRESSION_ENABLED.getName()), Mockito.anyString())).thenReturn("true");
    Mockito.when(properties.getProperty(Mockito.eq(Property.IS_YUI_COMPRESSOR_ENABLED.getName()), Mockito.anyString())).thenReturn("false");
    Mockito.when(properties.getProperty(Mockito.eq(Property.YUI_OMIT_FILES_FROM_MINIFICATION_REGEX.getName()), Mockito.anyString())).thenReturn(".*\\.min\\.(js|css)$");
    Mockito.when(properties.getProperty(Mockito.eq(Property.YUI_CSSCOMPRESSOR_LINEBREAKPOS.getName()), Mockito.anyString())).thenReturn("-1");
    Mockito.when(properties.getProperty(Mockito.eq(Property.YUI_JAVASCRIPT_COMPRESSOR_DISABLEOPTIMISATIONS.getName()), Mockito.anyString())).thenReturn("true");
    Mockito.when(properties.getProperty(Mockito.eq(Property.YUI_JAVASCRIPT_COMPRESSOR_LINEBREAK.getName()), Mockito.anyString())).thenReturn("100");
    Mockito.when(properties.getProperty(Mockito.eq(Property.YUI_JAVASCRIPT_COMPRESSOR_NOMUNGE.getName()), Mockito.anyString())).thenReturn("false");
    Mockito.when(properties.getProperty(Mockito.eq(Property.YUI_JAVASCRIPT_COMPRESSOR_PRESERVEALLSEMICOLONS.getName()), Mockito.anyString())).thenReturn("true");
    Mockito.when(properties.getProperty(Mockito.eq(Property.YUI_JAVASCRIPT_COMPRESSOR_VERBOSE.getName()), Mockito.anyString())).thenReturn("false");
  }

  @After
  public void tearDown() {
    Mockito.when(servlet.getServletContext()).thenReturn(null);
    requestDetailsObject = null;
  }

  @Test
  public void testGetFilesWithDefaultDirectoriesOnly() {
    Mockito.when(properties.getProperty(Property.CSS_DIR.getName())).thenReturn("src/test/resources/css");
    // No theme and no explicit resources -> only the default CSS dir is scanned.
    Mockito.when(requestDetailsObject.getThemeName()).thenReturn(null);
    Mockito.when(requestDetailsObject.getResources()).thenReturn(null);
    Collection<File> files = servlet.getFiles(request, requestDetailsObject);
    Assert.assertTrue("Should be at least several CSS files in test folder", files.size() > 0);
  }

  @Test
  public void testGetFilesWithThemes() {
    Mockito.when(properties.getProperty(Property.CSS_DIR.getName())).thenReturn("src/test/resources/css");
    Mockito.when(properties.getProperty(Property.THEMES_DIR.getName())).thenReturn("src/test/resources/themes");
    Mockito.when(requestDetailsObject.getResources()).thenReturn(null);
    Collection<File> files = servlet.getFiles(request, requestDetailsObject);
    Assert.assertTrue("Should be at least several CSS files in test folders including test themes", files.size() > 3);
  }

  @Test
  public void testGetFilesWithWrongTheme() {
    Mockito.when(requestDetailsObject.getThemeName()).thenReturn("wrong-theme");
    Mockito.when(properties.getProperty(Property.CSS_DIR.getName())).thenReturn("src/test/resources/css");
    Mockito.when(properties.getProperty(Property.THEMES_DIR.getName())).thenReturn("src/test/resources/themes");
    Mockito.when(requestDetailsObject.getResources()).thenReturn(null);
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage(
        "Error getting 'wrong-theme' theme. Please make sure the theme name is correctly specified via 'theme' "
            + "URL parameter or as 'combinatorius.theme' cookie value.");
    servlet.getFiles(request, requestDetailsObject);
  }

  // NOTE(review): "Icorrect" in the two method names below is a typo for
  // "Incorrect"; left as-is since test names may be referenced externally.
  @Test
  public void testGetFilesWithIcorrectResourcesDir() {
    Mockito.when(properties.getProperty(Property.CSS_DIR.getName())).thenReturn("src/test/resources/wrong_dir");
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage(Matchers.containsString("Error getting files from"));
    servlet.getFiles(request, requestDetailsObject);
  }

  @Test
  public void testDoGetWithIcorrectResourcesURL() throws ServletException, IOException {
    Mockito.when(properties.getProperty(Property.CSS_DIR.getName())).thenReturn("src/test/resources/css");
    Mockito.when(properties.getProperty(Property.THEMES_DIR.getName())).thenReturn("src/test/resources/themes");
    servlet.doGet(request, response);
    Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_BAD_REQUEST),
        Mockito.contains("Error trying to get content:"));
  }

  @Test
  public void testDoGetWithNoCacheDir() throws ServletException, IOException {
    Mockito.when(properties.getProperty(Property.CSS_DIR.getName())).thenReturn("src/test/resources/css");
    Mockito.when(properties.getProperty(Property.THEMES_DIR.getName())).thenReturn("src/test/resources/themes");
    Mockito.when(properties.getProperty(Property.CSS_CACHE_DIR.getName())).thenReturn(null);
    servlet.doGet(request, response);
    Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_BAD_REQUEST),
        Mockito.contains("Error trying to get content:"));
  }

  @Test
  public void testDoGetWithNoCssDir() throws ServletException, IOException {
    Mockito.when(properties.getProperty(Property.CSS_DIR.getName())).thenReturn(null);
    servlet.doGet(request, response);
    Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_BAD_REQUEST),
        Mockito.contains("CSS directory not specified"));
  }

  @Test
  public void testGetFilesAlwaysReturnsCollection() throws IOException {
    // Null request details must yield an empty collection, never null.
    Collection<File> files = servlet.getFiles(request, null);
    Assert.assertEquals("Should always return collection, never null", files.size(), 0);
  }

  @Test
  public void testSetResponseHeaders() {
    // Non-HTTPS scheme -> "private" Cache-Control without s-maxage.
    Mockito.when(request.getScheme()).thenReturn("non-https");
    Mockito.when(properties.getProperty(Mockito.eq(Property.IS_COMPRESSION_ENABLED.getName()), Mockito.anyString())).thenReturn("true");
    Mockito.when(properties.getProperty(Mockito.eq(Property.S_MAXAGE.getName()), Mockito.anyString())).thenReturn("31536000");
    Mockito.when(properties.getProperty(Mockito.eq(Property.MAX_AGE.getName()), Mockito.anyString())).thenReturn("31536000");
    CombinatoriusServlet.setResponseHeaders(request, response, "test_etag", 127151112L, 128);
    Mockito.verify(response).setContentType(requestDetails.get().getMimeType().getName());
    Mockito.verify(response).setHeader("Etag", "test_etag");
    Mockito.verify(response).setCharacterEncoding("UTF-8");
    Mockito.verify(response).setDateHeader(Mockito.eq("Expires"), Mockito.anyLong());
    Mockito.verify(response).setHeader("Cache-Control", "private, max-age=31536000");
    Mockito.verify(response).setDateHeader(Mockito.eq("Last-Modified"), Mockito.anyLong());
    Mockito.verify(response).setContentLength(Mockito.anyInt());
  }

  @Test
  public void testSetResponseHeadersHTTPS() {
    // HTTPS scheme -> "public" Cache-Control including s-maxage.
    Mockito.when(request.getScheme()).thenReturn("https");
    Mockito.when(properties.getProperty(Mockito.eq(Property.IS_COMPRESSION_ENABLED.getName()), Mockito.anyString())).thenReturn("true");
    Mockito.when(properties.getProperty(Mockito.eq(Property.S_MAXAGE.getName()), Mockito.anyString())).thenReturn("31536000");
    Mockito.when(properties.getProperty(Mockito.eq(Property.MAX_AGE.getName()), Mockito.anyString())).thenReturn("31536000");
    CombinatoriusServlet.setResponseHeaders(request, response, "test_etag", 127151112L, 128);
    Mockito.verify(response).setContentType(requestDetails.get().getMimeType().getName());
    Mockito.verify(response).setHeader("Etag", "test_etag");
    Mockito.verify(response).setCharacterEncoding("UTF-8");
    Mockito.verify(response).setDateHeader(Mockito.eq("Expires"), Mockito.anyLong());
    Mockito.verify(response).setHeader("Cache-Control", "public, s-maxage=31536000, max-age=31536000");
    Mockito.verify(response).setDateHeader(Mockito.eq("Last-Modified"), Mockito.anyLong());
    Mockito.verify(response).setContentLength(Mockito.anyInt());
  }

  @Test
  public void testSetConditionalResponseHeaders() {
    // Conditional-GET path: 304 with no body, echoing If-Modified-Since.
    CombinatoriusServlet.setConditionalResponseHeaders(request, response);
    Mockito.verify(response).setStatus(304);
    Mockito.verify(response).setHeader("Content-Length", "0");
    Mockito.verify(response).setHeader("Last-Modified", request.getHeader("If-Modified-Since"));
  }

  @Test
  public void testGetThemeName() {
    // Theme resolved from the "combinatorius.theme" cookie when the cookie
    // domain matches the request's server name.
    Mockito.when(cookie.getName()).thenReturn(CombinatoriusServlet.combinatoriusTheme);
    Mockito.when(cookie.getValue()).thenReturn("green");
    Mockito.when(cookie.getDomain()).thenReturn("localhost");
    Mockito.when(request.getCookies()).thenReturn(new Cookie[] { cookie });
    Mockito.when(request.getServerName()).thenReturn("localhost");
    String themeName = servlet.getThemeName(request, requestDetails.get());
    Assert.assertEquals("green", themeName);
  }
}
package com.tuyang.test.perftest; import org.apache.commons.beanutils.BeanUtils; import org.apache.commons.beanutils.PropertyUtils; import org.junit.Test; import org.slf4j.impl.StaticLoggerBinder; import org.springframework.cglib.beans.BeanCopier; import com.tuyang.beanutils.BeanCopyUtils; import ch.qos.logback.classic.Level; import ch.qos.logback.classic.LoggerContext; import net.sf.ezmorph.MorpherRegistry; import net.sf.ezmorph.bean.BeanMorpher; public class TestPerformance { static { StaticLoggerBinder binder = StaticLoggerBinder.getSingleton(); LoggerContext context = (LoggerContext) binder.getLoggerFactory(); context.getLogger("ROOT").setLevel(Level.WARN); } private FromBean getFromBean() { FromBean fromBean = new FromBean(); fromBean.setBeanBool(true); fromBean.setBeanByte((byte)5); fromBean.setBeanChar((char)40); fromBean.setBeanShort((short)50); fromBean.setBeanInt(100); fromBean.setBeanFloat(100.50f); fromBean.setBeanLong(234323243243243234L); fromBean.setBeanDouble(2342332423.23432432523523); fromBean.setBeanString("Test test Test test."); return fromBean; } @Test public void testBanchmark() { FromBean fromBean = getFromBean(); BeanCopyInterface BeanCopyUtilsCopyBean1 = new BeanCopyInterface() { @Override public String getMethodName() { return "com.tuyang.beanutils.BeanCopyUtils.copyBean 1"; } @Override public ToBean callCopyBean(FromBean fromBean) throws Exception { ToBean toBean = new ToBean(); return (ToBean) BeanCopyUtils.copyBean(fromBean, toBean); } }; BeanCopyInterface BeanCopyUtilsCopyBean2 = new BeanCopyInterface() { com.tuyang.beanutils.BeanCopier copier = BeanCopyUtils.getBeanCopier(FromBean.class, ToBean.class); @Override public String getMethodName() { return "com.tuyang.beanutils.BeanCopyUtils.copyBean 2"; } @Override public ToBean callCopyBean(FromBean fromBean) throws Exception { ToBean toBean = new ToBean(); return (ToBean)copier.copyBean(fromBean, toBean); } }; BeanCopyInterface BeanUtilcopyProperties = new BeanCopyInterface() { 
@Override public String getMethodName() { return "org.apache.commons.beanutils.BeanUtil.copyProperties"; } @Override public ToBean callCopyBean(FromBean fromBean) throws Exception { ToBean toBean = new ToBean(); BeanUtils.copyProperties(toBean, fromBean); return toBean; } }; BeanCopyInterface PropertyUtilscopyProperties = new BeanCopyInterface() { @Override public String getMethodName() { return "org.apache.commons.beanutils.PropertyUtils.copyProperties"; } @Override public ToBean callCopyBean(FromBean fromBean) throws Exception { ToBean toBean = new ToBean(); PropertyUtils.copyProperties(toBean, fromBean); return toBean; } }; BeanCopyInterface springcopyProperties = new BeanCopyInterface() { @Override public String getMethodName() { return "org.springframework.beans.BeanUtils.copyProperties"; } @Override public ToBean callCopyBean(FromBean fromBean) throws Exception { ToBean toBean = new ToBean(); org.springframework.beans.BeanUtils.copyProperties(fromBean, toBean); return toBean; } }; BeanCopyInterface BeanCopiercreate1 = new BeanCopyInterface() { @Override public String getMethodName() { return "org.springframework.cglib.beans.BeanCopier.create 1"; } @Override public ToBean callCopyBean(FromBean fromBean) throws Exception { ToBean toBean = new ToBean(); BeanCopier bc = BeanCopier.create(FromBean.class, ToBean.class, false); bc.copy(fromBean, toBean, null); return toBean; } }; BeanCopyInterface BeanCopiercreate2 = new BeanCopyInterface() { BeanCopier bc = BeanCopier.create(FromBean.class, ToBean.class, false); @Override public String getMethodName() { return "org.springframework.cglib.beans.BeanCopier.create 2"; } @Override public ToBean callCopyBean(FromBean fromBean) throws Exception { ToBean toBean = new ToBean(); bc.copy(fromBean, toBean, null); return toBean; } }; final MorpherRegistry registry = new MorpherRegistry(); registry.registerMorpher(new BeanMorpher(ToBean.class, registry)); BeanCopyInterface ezmorphCopy = new BeanCopyInterface() { @Override public 
String getMethodName() { return "net.sf.ezmorph.bean.BeanMorpher"; } @Override public ToBean callCopyBean(FromBean fromBean) throws Exception { //ToBean toBean = new ToBean(); ToBean toBean = (ToBean) registry.morph(ToBean.class, fromBean); return toBean; } }; BeanCopyInterface NativeCopy = new BeanCopyInterface() { @Override public String getMethodName() { return "NativeCopy"; } @Override public ToBean callCopyBean(FromBean fromBean) throws Exception { ToBean toBean = new ToBean(); toBean.setBeanByte(fromBean.getBeanByte()); toBean.setBeanChar(fromBean.getBeanChar()); toBean.setBeanDouble(fromBean.getBeanDouble()); toBean.setBeanFloat(fromBean.getBeanFloat()); toBean.setBeanInt(fromBean.getBeanInt()); toBean.setBeanLong(fromBean.getBeanLong()); toBean.setBeanShort(fromBean.getBeanShort()); toBean.setBeanString(fromBean.getBeanString()); return toBean; } }; LoopTest loopTest = new LoopTest(1); for( int i =0; i< 3; i++ ) { loopTest = new LoopTest(1); loopTest.run(BeanUtilcopyProperties, fromBean); loopTest.run(PropertyUtilscopyProperties, fromBean); loopTest.run(springcopyProperties, fromBean); loopTest.run(ezmorphCopy, fromBean); loopTest.run(BeanCopiercreate1, fromBean); loopTest.run(BeanCopiercreate2, fromBean); loopTest.run(BeanCopyUtilsCopyBean1, fromBean); loopTest.run(BeanCopyUtilsCopyBean2, fromBean); loopTest.run(NativeCopy, fromBean); } for( int i =0; i< 3; i++ ) { loopTest = new LoopTest(100); loopTest.run(BeanUtilcopyProperties, fromBean); loopTest.run(PropertyUtilscopyProperties, fromBean); loopTest.run(springcopyProperties, fromBean); loopTest.run(ezmorphCopy, fromBean); loopTest.run(BeanCopiercreate1, fromBean); loopTest.run(BeanCopiercreate2, fromBean); loopTest.run(BeanCopyUtilsCopyBean1, fromBean); loopTest.run(BeanCopyUtilsCopyBean2, fromBean); loopTest.run(NativeCopy, fromBean); } for( int i =0; i< 3; i++ ) { loopTest = new LoopTest(10000); loopTest.run(BeanUtilcopyProperties, fromBean); loopTest.run(PropertyUtilscopyProperties, fromBean); 
loopTest.run(springcopyProperties, fromBean); loopTest.run(ezmorphCopy, fromBean); loopTest.run(BeanCopiercreate1, fromBean); loopTest.run(BeanCopiercreate2, fromBean); loopTest.run(BeanCopyUtilsCopyBean1, fromBean); loopTest.run(BeanCopyUtilsCopyBean2, fromBean); loopTest.run(NativeCopy, fromBean); } for( int i =0; i< 3; i++ ) { loopTest = new LoopTest(1000000); loopTest.run(BeanUtilcopyProperties, fromBean); loopTest.run(PropertyUtilscopyProperties, fromBean); loopTest.run(springcopyProperties, fromBean); loopTest.run(ezmorphCopy, fromBean); loopTest.run(BeanCopiercreate1, fromBean); loopTest.run(BeanCopiercreate2, fromBean); loopTest.run(BeanCopyUtilsCopyBean1, fromBean); loopTest.run(BeanCopyUtilsCopyBean2, fromBean); loopTest.run(NativeCopy, fromBean); } for( int i =0; i< 3; i++ ) { loopTest = new LoopTest(10000000); loopTest.run(BeanUtilcopyProperties, fromBean); loopTest.run(PropertyUtilscopyProperties, fromBean); loopTest.run(springcopyProperties, fromBean); loopTest.run(ezmorphCopy, fromBean); loopTest.run(BeanCopiercreate1, fromBean); loopTest.run(BeanCopiercreate2, fromBean); loopTest.run(BeanCopyUtilsCopyBean1, fromBean); loopTest.run(BeanCopyUtilsCopyBean2, fromBean); loopTest.run(NativeCopy, fromBean); } System.out.println("===================finish==============="); } }
// Part of SourceAFIS for Java: https://sourceafis.machinezoo.com/java
package com.machinezoo.sourceafis;

import java.util.*;
import javax.imageio.*;
import com.machinezoo.sourceafis.engine.configuration.*;
import com.machinezoo.sourceafis.engine.images.*;
import com.machinezoo.sourceafis.engine.primitives.*;

/**
 * Pixels and metadata of the fingerprint image.
 * This class captures all fingerprint information that is available prior to construction of {@link FingerprintTemplate}.
 * It consists of pixel data and additional information in {@link FingerprintImageOptions}.
 * Since SourceAFIS algorithm is not scale-invariant, all images should have DPI
 * configured explicitly by calling {@link FingerprintImageOptions#dpi(double)}.
 * <p>
 * Application should start fingerprint processing by constructing an instance of {@code FingerprintImage}
 * and then passing it to {@link FingerprintTemplate#FingerprintTemplate(FingerprintImage)}.
 * <p>
 * Fingerprint image can be either in one of the supported image formats (PNG, JPEG, ...),
 * in which case constructor {@link #FingerprintImage(byte[], FingerprintImageOptions)} is used,
 * or it can be a raw grayscale image, for which constructor
 * {@link #FingerprintImage(int, int, byte[], FingerprintImageOptions)} is used.
 *
 * @see FingerprintImageOptions
 * @see FingerprintTemplate
 */
public class FingerprintImage {
	/*
	 * API roadmap:
	 * + double dpi()
	 * + int width()
	 * + int height()
	 * + byte[] grayscale()
	 * + FingerprintPosition position()
	 */
	static {
		PlatformCheck.run();
	}
	// Image resolution in dots per inch. Defaults to 500, the common fingerprint scanner resolution.
	double dpi = 500;
	// Grayscale pixel matrix with values in range [0, 1] where 1 is black (ridge) and 0 is white (background).
	DoubleMatrix matrix;
	/**
	 * Decodes {@code image} via {@link ImageDecoder} and fills {@link #matrix}.
	 * Shared by {@link #FingerprintImage(byte[], FingerprintImageOptions)} and the deprecated {@link #decode(byte[])},
	 * which previously duplicated this loop verbatim.
	 *
	 * @param image
	 *            fingerprint image in one of the supported formats
	 * @throws NullPointerException
	 *             if {@code image} is {@code null}
	 * @throws IllegalArgumentException
	 *             if the image format is unsupported or the image is corrupted
	 */
	private void readEncoded(byte[] image) {
		Objects.requireNonNull(image);
		DecodedImage decoded = ImageDecoder.decodeAny(image);
		matrix = new DoubleMatrix(decoded.width, decoded.height);
		for (int y = 0; y < decoded.height; ++y) {
			for (int x = 0; x < decoded.width; ++x) {
				int pixel = decoded.pixels[y * decoded.width + x];
				// Sum the three 8-bit color channels, normalize to [0, 1], and invert,
				// so that black ridges map to values close to 1.
				int color = (pixel & 0xff) + ((pixel >> 8) & 0xff) + ((pixel >> 16) & 0xff);
				matrix.set(x, y, 1 - color * (1.0 / (3.0 * 255.0)));
			}
		}
	}
	/**
	 * Validates dimensions and fills {@link #matrix} from raw 8-bit grayscale pixels (0 = black, 255 = white).
	 * Shared by {@link #FingerprintImage(int, int, byte[], FingerprintImageOptions)} and the deprecated
	 * {@link #grayscale(int, int, byte[])}, which previously duplicated this loop verbatim.
	 *
	 * @param width
	 *            width of the image
	 * @param height
	 *            height of the image
	 * @param pixels
	 *            image pixels ordered from top-left to bottom-right in horizontal rows
	 * @throws NullPointerException
	 *             if {@code pixels} is {@code null}
	 * @throws IndexOutOfBoundsException
	 *             if {@code width} or {@code height} is not positive or if {@code pixels} length is not {@code width * height}
	 */
	private void readGrayscale(int width, int height, byte[] pixels) {
		Objects.requireNonNull(pixels);
		if (width <= 0 || height <= 0 || pixels.length != width * height)
			throw new IndexOutOfBoundsException();
		matrix = new DoubleMatrix(width, height);
		for (int y = 0; y < height; ++y)
			for (int x = 0; x < width; ++x)
				matrix.set(x, y, 1 - Byte.toUnsignedInt(pixels[y * width + x]) / 255.0);
	}
	/**
	 * Decodes fingerprint image in standard format.
	 * The image must contain black fingerprint on white background
	 * in resolution specified by calling {@link FingerprintImageOptions#dpi(double)}.
	 * <p>
	 * The image may be in any format commonly used to store fingerprint images, including PNG, JPEG, BMP, TIFF, or WSQ.
	 * SourceAFIS will try to decode the image using Java's {@link ImageIO} (PNG, JPEG, BMP, and on Java 9+ TIFF),
	 * <a href="https://github.com/kareez/jnbis">JNBIS</a> library (WSQ), and Android's
	 * <a href="https://developer.android.com/reference/android/graphics/Bitmap">Bitmap</a> class (PNG, JPEG, BMP) in this order.
	 * Note that these libraries might not support all versions and variations of the mentioned formats.
	 *
	 * @param image
	 *            fingerprint image in one of the supported formats
	 * @param options
	 *            additional information about the image or {@code null} for default options
	 * @throws NullPointerException
	 *             if {@code image} is {@code null}
	 * @throws IllegalArgumentException
	 *             if the image format is unsupported or the image is corrupted
	 *
	 * @see #FingerprintImage(int, int, byte[], FingerprintImageOptions)
	 * @see FingerprintCompatibility#convert(byte[])
	 * @see FingerprintTemplate#FingerprintTemplate(byte[])
	 */
	public FingerprintImage(byte[] image, FingerprintImageOptions options) {
		// Null check before touching options, matching the historical exception ordering.
		Objects.requireNonNull(image);
		if (options == null)
			options = new FingerprintImageOptions();
		dpi = options.dpi;
		readEncoded(image);
	}
	/**
	 * Decodes fingerprint image in standard format using default options.
	 * This constructor is equivalent to calling {@link #FingerprintImage(byte[], FingerprintImageOptions)}
	 * with default {@link FingerprintImageOptions}.
	 *
	 * @param image
	 *            fingerprint image in one of the supported formats
	 * @throws NullPointerException
	 *             if {@code image} is {@code null}
	 * @throws IllegalArgumentException
	 *             if the image format is unsupported or the image is corrupted
	 *
	 * @see #FingerprintImage(int, int, byte[])
	 * @see FingerprintCompatibility#convert(byte[])
	 * @see FingerprintTemplate#FingerprintTemplate(byte[])
	 */
	public FingerprintImage(byte[] image) {
		this(image, null);
	}
	/**
	 * Reads raw grayscale fingerprint image from byte array.
	 * The image must contain black fingerprint on white background
	 * in resolution specified by calling {@link FingerprintImageOptions#dpi(double)}.
	 * <p>
	 * Pixels are represented as 8-bit unsigned bytes with 0 meaning black and 255 meaning white.
	 * Java's byte is a signed 8-bit number, but this method interprets all 8 bits as an unsigned number
	 * as if by calling {@link Byte#toUnsignedInt(byte)}.
	 * Pixels in {@code pixels} array are ordered from top-left to bottom-right in horizontal rows.
	 * Size of {@code pixels} must be equal to {@code width * height}.
	 *
	 * @param width
	 *            width of the image
	 * @param height
	 *            height of the image
	 * @param pixels
	 *            image pixels ordered from top-left to bottom-right in horizontal rows
	 * @param options
	 *            additional information about the image or {@code null} for default options
	 * @throws NullPointerException
	 *             if {@code pixels} is {@code null}
	 * @throws IndexOutOfBoundsException
	 *             if {@code width} or {@code height} is not positive or if {@code pixels} length is not {@code width * height}
	 *
	 * @see #FingerprintImage(byte[], FingerprintImageOptions)
	 * @see FingerprintCompatibility#convert(byte[])
	 * @see FingerprintTemplate#FingerprintTemplate(byte[])
	 */
	public FingerprintImage(int width, int height, byte[] pixels, FingerprintImageOptions options) {
		if (options == null)
			options = new FingerprintImageOptions();
		dpi = options.dpi;
		readGrayscale(width, height, pixels);
	}
	/**
	 * Reads raw grayscale fingerprint image from byte array using default options.
	 *
	 * @param width
	 *            width of the image
	 * @param height
	 *            height of the image
	 * @param pixels
	 *            image pixels ordered from top-left to bottom-right in horizontal rows
	 * @throws NullPointerException
	 *             if {@code pixels} is {@code null}
	 * @throws IndexOutOfBoundsException
	 *             if {@code width} or {@code height} is not positive or if {@code pixels} length is not {@code width * height}
	 *
	 * @see #FingerprintImage(byte[])
	 * @see FingerprintCompatibility#convert(byte[])
	 * @see FingerprintTemplate#FingerprintTemplate(byte[])
	 */
	public FingerprintImage(int width, int height, byte[] pixels) {
		this(width, height, pixels, null);
	}
	/**
	 * @deprecated Use one of the constructors that fully initialize the object.
	 *
	 * @see #FingerprintImage(byte[], FingerprintImageOptions)
	 * @see #FingerprintImage(int, int, byte[], FingerprintImageOptions)
	 */
	@Deprecated
	public FingerprintImage() {
	}
	/**
	 * @deprecated Set DPI via {@link FingerprintImageOptions#dpi(double)} instead.
	 *
	 * @param dpi
	 *            DPI of the fingerprint image
	 * @return {@code this} (fluent method)
	 * @throws IllegalArgumentException
	 *             if {@code dpi} is non-positive, impossibly low, or impossibly high
	 *
	 * @see FingerprintImageOptions#dpi(double)
	 */
	@Deprecated
	public FingerprintImage dpi(double dpi) {
		// Sanity range check; values outside [20, 20000] cannot come from a real scanner.
		if (dpi < 20 || dpi > 20_000)
			throw new IllegalArgumentException();
		this.dpi = dpi;
		return this;
	}
	/**
	 * @deprecated Use {@link #FingerprintImage(byte[], FingerprintImageOptions)} constructor to decode image in standard format.
	 *
	 * @param image
	 *            fingerprint image in one of the supported formats
	 * @return {@code this} (fluent method)
	 * @throws NullPointerException
	 *             if {@code image} is {@code null}
	 * @throws IllegalArgumentException
	 *             if the image format is unsupported or the image is corrupted
	 *
	 * @see #FingerprintImage(byte[], FingerprintImageOptions)
	 */
	@Deprecated
	public FingerprintImage decode(byte[] image) {
		// Note: unlike the constructor, this legacy method intentionally does not touch dpi.
		readEncoded(image);
		return this;
	}
	/**
	 * @deprecated Use {@link #FingerprintImage(int, int, byte[], FingerprintImageOptions)} constructor to read raw image.
	 *
	 * @param width
	 *            width of the image
	 * @param height
	 *            height of the image
	 * @param pixels
	 *            image pixels ordered from top-left to bottom-right in horizontal rows
	 * @return {@code this} (fluent method)
	 * @throws NullPointerException
	 *             if {@code pixels} is {@code null}
	 * @throws IndexOutOfBoundsException
	 *             if {@code width} or {@code height} is not positive or if {@code pixels} length is not {@code width * height}
	 *
	 * @see #FingerprintImage(int, int, byte[], FingerprintImageOptions)
	 */
	@Deprecated
	public FingerprintImage grayscale(int width, int height, byte[] pixels) {
		// Note: unlike the constructor, this legacy method intentionally does not touch dpi.
		readGrayscale(width, height, pixels);
		return this;
	}
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.service;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.service.api.records.Artifact;
import org.apache.hadoop.yarn.service.api.records.ComponentState;
import org.apache.hadoop.yarn.service.api.records.ContainerState;
import org.apache.hadoop.yarn.service.api.records.Service;
import org.apache.hadoop.yarn.service.api.records.ServiceState;
import org.apache.hadoop.yarn.service.component.Component;
import org.apache.hadoop.yarn.service.component.instance.ComponentInstance;
import org.apache.hadoop.yarn.service.component.instance.ComponentInstanceEvent;
import org.apache.hadoop.yarn.service.component.instance.ComponentInstanceEventType;
import org.apache.hadoop.yarn.service.exceptions.SliderException;
import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;

import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.TimeoutException;

/**
 * Tests for {@link ServiceManager}.
 */
public class TestServiceManager {

  // Overall per-test timeout and polling interval for waitFor() loops.
  private static final int TIMEOUT = 10000;
  private static final int CHECK_EVERY_MILLIS = 100;

  @Rule
  public ServiceTestUtils.ServiceFSWatcher rule =
      new ServiceTestUtils.ServiceFSWatcher();

  /** Initiating an upgrade moves the service into UPGRADING state. */
  @Test (timeout = TIMEOUT)
  public void testUpgrade() throws Exception {
    ServiceContext context = createServiceContext("testUpgrade");
    initUpgrade(context, "v2", false, false, false);
    Assert.assertEquals("service not upgraded", ServiceState.UPGRADING,
        context.getServiceManager().getServiceSpec().getState());
  }

  /** Restarting after all instances are upgraded leaves the service STABLE. */
  @Test (timeout = TIMEOUT)
  public void testRestartNothingToUpgrade()
      throws Exception {
    ServiceContext context = createServiceContext(
        "testRestartNothingToUpgrade");
    initUpgrade(context, "v2", false, false, false);
    ServiceManager manager = context.getServiceManager();
    //make components stable by upgrading all instances
    upgradeAndReadyAllInstances(context);

    context.scheduler.getDispatcher().getEventHandler().handle(
        new ServiceEvent(ServiceEventType.START));
    GenericTestUtils.waitFor(()->
        context.service.getState().equals(ServiceState.STABLE),
        CHECK_EVERY_MILLIS, TIMEOUT);
    Assert.assertEquals("service not re-started", ServiceState.STABLE,
        manager.getServiceSpec().getState());
  }

  /** With auto-finalize, the service becomes STABLE without an explicit START event. */
  @Test(timeout = TIMEOUT)
  public void testAutoFinalizeNothingToUpgrade() throws Exception {
    ServiceContext context = createServiceContext(
        "testAutoFinalizeNothingToUpgrade");
    initUpgrade(context, "v2", false, true, false);
    ServiceManager manager = context.getServiceManager();
    //make components stable by upgrading all instances
    upgradeAndReadyAllInstances(context);

    GenericTestUtils.waitFor(()->
        context.service.getState().equals(ServiceState.STABLE),
        CHECK_EVERY_MILLIS, TIMEOUT);
    Assert.assertEquals("service stable", ServiceState.STABLE,
        manager.getServiceSpec().getState());
  }

  /** A restart while instances still need upgrade must not leave UPGRADING state. */
  @Test(timeout = TIMEOUT)
  public void testRestartWithPendingUpgrade()
      throws Exception {
    ServiceContext context = createServiceContext("testRestart");
    initUpgrade(context, "v2", true, false, false);
    ServiceManager manager = context.getServiceManager();

    context.scheduler.getDispatcher().getEventHandler().handle(
        new ServiceEvent(ServiceEventType.START));
    context.scheduler.getDispatcher().stop();
    Assert.assertEquals("service should still be upgrading",
        ServiceState.UPGRADING, manager.getServiceSpec().getState());
  }

  /** Explicit finalization (START event) after all instances upgrade re-writes the spec. */
  @Test(timeout = TIMEOUT)
  public void testFinalize() throws Exception {
    ServiceContext context = createServiceContext("testCheckState");
    initUpgrade(context, "v2", true, false, false);
    ServiceManager manager = context.getServiceManager();
    Assert.assertEquals("service not upgrading", ServiceState.UPGRADING,
        manager.getServiceSpec().getState());

    //make components stable by upgrading all instances
    upgradeAndReadyAllInstances(context);

    // finalize service
    context.scheduler.getDispatcher().getEventHandler().handle(
        new ServiceEvent(ServiceEventType.START));
    GenericTestUtils.waitFor(()->
        context.service.getState().equals(ServiceState.STABLE),
        CHECK_EVERY_MILLIS, TIMEOUT);
    Assert.assertEquals("service not re-started", ServiceState.STABLE,
        manager.getServiceSpec().getState());

    validateUpgradeFinalization(manager.getName(), "v2");
  }

  /** Auto-finalize completes the upgrade as soon as all instances become ready. */
  @Test(timeout = TIMEOUT)
  public void testAutoFinalize() throws Exception {
    ServiceContext context = createServiceContext("testCheckStateAutoFinalize");
    ServiceManager manager = context.getServiceManager();
    manager.getServiceSpec().setState(
        ServiceState.UPGRADING_AUTO_FINALIZE);
    initUpgrade(context, "v2", true, true, false);

    // make components stable
    upgradeAndReadyAllInstances(context);

    GenericTestUtils.waitFor(() ->
        context.service.getState().equals(ServiceState.STABLE),
        CHECK_EVERY_MILLIS, TIMEOUT);
    Assert.assertEquals("service not stable", ServiceState.STABLE,
        manager.getServiceSpec().getState());

    validateUpgradeFinalization(manager.getName(), "v2");
  }

  /** Upgrades that change the lifetime are unsupported and must be rejected. */
  @Test
  public void testInvalidUpgrade() throws Exception {
    ServiceContext serviceContext = createServiceContext("testInvalidUpgrade");
    ServiceManager manager = serviceContext.getServiceManager();
    manager.getServiceSpec().setState(
        ServiceState.UPGRADING_AUTO_FINALIZE);
    Service upgradedDef = ServiceTestUtils.createExampleApplication();
    upgradedDef.setName(manager.getName());
    upgradedDef.setVersion("v2");
    upgradedDef.setLifetime(2L);
    writeUpgradedDef(upgradedDef);

    try {
      manager.processUpgradeRequest("v2", true, false);
    } catch (Exception ex) {
      Assert.assertTrue(ex instanceof UnsupportedOperationException);
      return;
    }
    Assert.fail();
  }

  /** Express upgrade walks components in dependency order and then finalizes. */
  @Test(timeout = TIMEOUT)
  public void testExpressUpgrade() throws Exception {
    ServiceContext context = createServiceContext("testExpressUpgrade");
    ServiceManager manager = context.getServiceManager();
    manager.getServiceSpec().setState(ServiceState.EXPRESS_UPGRADING);
    initUpgrade(context, "v2", true, true, true);

    List<String> comps = ServiceApiUtil.resolveCompsDependency(context.service);
    // wait till instances of first component are upgraded and ready
    String compA = comps.get(0);
    makeInstancesReadyAfterUpgrade(context, compA);

    // wait till instances of second component are upgraded and ready
    String compB = comps.get(1);
    makeInstancesReadyAfterUpgrade(context, compB);

    GenericTestUtils.waitFor(() ->
        context.service.getState().equals(ServiceState.STABLE),
        CHECK_EVERY_MILLIS, TIMEOUT);
    Assert.assertEquals("service not stable", ServiceState.STABLE,
        manager.getServiceSpec().getState());

    validateUpgradeFinalization(manager.getName(), "v2");
  }

  /** Cancelling mid-upgrade rolls the service back to the initial version. */
  @Test(timeout = TIMEOUT)
  public void testCancelUpgrade() throws Exception {
    ServiceContext context = createServiceContext("testCancelUpgrade");
    writeInitialDef(context.service);
    initUpgrade(context, "v2", true, false, false);
    ServiceManager manager = context.getServiceManager();
    Assert.assertEquals("service not upgrading", ServiceState.UPGRADING,
        manager.getServiceSpec().getState());

    List<String> comps = ServiceApiUtil.resolveCompsDependency(context.service);
    // wait till instances of first component are upgraded and ready
    String compA = comps.get(0);
    // upgrade the instances
    upgradeInstances(context, compA);
    makeInstancesReadyAfterUpgrade(context, compA);

    // cancel upgrade
    context.scheduler.getDispatcher().getEventHandler().handle(
        new ServiceEvent(ServiceEventType.CANCEL_UPGRADE));

    makeInstancesReadyAfterUpgrade(context, compA);
    GenericTestUtils.waitFor(()->
        context.service.getState().equals(ServiceState.STABLE),
        CHECK_EVERY_MILLIS, TIMEOUT);
    Assert.assertEquals("service upgrade not cancelled", ServiceState.STABLE,
        manager.getServiceSpec().getState());

    validateUpgradeFinalization(manager.getName(), "v1");
  }

  /** Cancelling immediately after initiation rolls back without any instance upgrade. */
  @Test(timeout = TIMEOUT)
  public void testCancelUpgradeAfterInitiate() throws Exception {
    // Use a unique service name; the original copy-pasted "testCancelUpgrade",
    // colliding with the service created by testCancelUpgrade above.
    ServiceContext context =
        createServiceContext("testCancelUpgradeAfterInitiate");
    writeInitialDef(context.service);
    initUpgrade(context, "v2", true, false, false);
    ServiceManager manager = context.getServiceManager();
    Assert.assertEquals("service not upgrading", ServiceState.UPGRADING,
        manager.getServiceSpec().getState());

    // cancel upgrade
    context.scheduler.getDispatcher().getEventHandler().handle(
        new ServiceEvent(ServiceEventType.CANCEL_UPGRADE));
    GenericTestUtils.waitFor(()->
        context.service.getState().equals(ServiceState.STABLE),
        CHECK_EVERY_MILLIS, TIMEOUT);
    Assert.assertEquals("service upgrade not cancelled", ServiceState.STABLE,
        manager.getServiceSpec().getState());

    validateUpgradeFinalization(manager.getName(), "v1");
  }

  /**
   * Asserts that the persisted service definition reflects the finalized
   * upgrade: expected version, app id present, service and all components
   * STABLE.
   */
  private void validateUpgradeFinalization(String serviceName,
      String expectedVersion) throws IOException {
    Service savedSpec = ServiceApiUtil.loadService(rule.getFs(), serviceName);
    Assert.assertEquals("service def not re-written", expectedVersion,
        savedSpec.getVersion());
    Assert.assertNotNull("app id not present", savedSpec.getId());
    Assert.assertEquals("state not stable", ServiceState.STABLE,
        savedSpec.getState());
    savedSpec.getComponents().forEach(compSpec ->
        Assert.assertEquals("comp not stable", ComponentState.STABLE,
            compSpec.getState()));
  }

  /**
   * Writes a {@code version} definition of the service and submits an upgrade
   * request, then waits until every component reports NEEDS_UPGRADE.
   *
   * @param upgradeArtifact whether the new definition changes the artifact
   * @param autoFinalize    request auto-finalization of the upgrade
   * @param expressUpgrade  request an express (one-shot) upgrade
   */
  private void initUpgrade(ServiceContext context, String version,
      boolean upgradeArtifact, boolean autoFinalize, boolean expressUpgrade)
      throws IOException, SliderException, TimeoutException,
      InterruptedException {
    ServiceManager serviceManager = context.getServiceManager();
    Service upgradedDef = ServiceTestUtils.createExampleApplication();
    upgradedDef.setName(serviceManager.getName());
    upgradedDef.setVersion(version);
    if (upgradeArtifact) {
      Artifact upgradedArtifact = createTestArtifact("2");
      upgradedDef.getComponents().forEach(component -> {
        component.setArtifact(upgradedArtifact);
      });
    }
    writeUpgradedDef(upgradedDef);
    serviceManager.processUpgradeRequest(version, autoFinalize,
        expressUpgrade);
    GenericTestUtils.waitFor(() -> {
      for (Component comp : context.scheduler.getAllComponents().values()) {
        if (!comp.getComponentSpec().getState().equals(
            ComponentState.NEEDS_UPGRADE)) {
          return false;
        }
      }
      return true;
    }, CHECK_EVERY_MILLIS, TIMEOUT);
  }

  /** Upgrades every live instance and waits until all report READY. */
  private void upgradeAndReadyAllInstances(ServiceContext context)
      throws TimeoutException, InterruptedException {
    upgradeAllInstances(context);
    makeAllInstancesReady(context);
  }

  /** Fires an UPGRADE event at every live instance. */
  private void upgradeAllInstances(ServiceContext context)
      throws TimeoutException, InterruptedException {
    // upgrade the instances
    context.scheduler.getLiveInstances().forEach(((containerId, instance) -> {
      ComponentInstanceEvent event = new ComponentInstanceEvent(containerId,
          ComponentInstanceEventType.UPGRADE);
      context.scheduler.getDispatcher().getEventHandler().handle(event);
    }));
  }

  /** Fires BECOME_READY at every live instance and waits until all are READY. */
  private void makeAllInstancesReady(ServiceContext context)
      throws TimeoutException, InterruptedException {
    context.scheduler.getLiveInstances().forEach(((containerId, instance) -> {
      ComponentInstanceEvent event = new ComponentInstanceEvent(containerId,
          ComponentInstanceEventType.BECOME_READY);
      context.scheduler.getDispatcher().getEventHandler().handle(event);
    }));
    GenericTestUtils.waitFor(()-> {
      for (ComponentInstance instance:
          context.scheduler.getLiveInstances().values()) {
        if (!instance.getContainerState().equals(ContainerState.READY)) {
          return false;
        }
      }
      return true;
    }, CHECK_EVERY_MILLIS, TIMEOUT);
  }

  /** Fires an UPGRADE event at every instance of the named component. */
  private void upgradeInstances(ServiceContext context, String compName) {
    Collection<ComponentInstance> compInstances = context.scheduler
        .getAllComponents().get(compName).getAllComponentInstances();
    compInstances.forEach(instance -> {
      ComponentInstanceEvent event = new ComponentInstanceEvent(
          instance.getContainer().getId(),
          ComponentInstanceEventType.UPGRADE);
      context.scheduler.getDispatcher().getEventHandler().handle(event);
    });
  }

  /**
   * Waits for all instances of the named component to reach UPGRADING, then
   * fires BECOME_READY at each and waits until they are all READY.
   */
  private void makeInstancesReadyAfterUpgrade(ServiceContext context,
      String compName) throws TimeoutException, InterruptedException {
    Collection<ComponentInstance> compInstances = context.scheduler
        .getAllComponents().get(compName).getAllComponentInstances();
    GenericTestUtils.waitFor(() -> {
      for (ComponentInstance instance : compInstances) {
        if (!instance.getContainerState().equals(ContainerState.UPGRADING)) {
          return false;
        }
      }
      return true;
    }, CHECK_EVERY_MILLIS, TIMEOUT);

    // instances of comp1 get upgraded and become ready event is triggered
    // become ready
    compInstances.forEach(instance -> {
      ComponentInstanceEvent event = new ComponentInstanceEvent(
          instance.getContainer().getId(),
          ComponentInstanceEventType.BECOME_READY);
      context.scheduler.getDispatcher().getEventHandler().handle(event);
    });
    GenericTestUtils.waitFor(() -> {
      for (ComponentInstance instance : compInstances) {
        if (!instance.getContainerState().equals(ContainerState.READY)) {
          return false;
        }
      }
      return true;
    }, CHECK_EVERY_MILLIS, TIMEOUT);
  }

  /** Creates a running mock service context with a started, draining dispatcher. */
  private ServiceContext createServiceContext(String name)
      throws Exception {
    Service service = createBaseDef(name);
    ServiceContext context = new MockRunningServiceContext(rule, service);
    context.scheduler.getDispatcher().setDrainEventsOnStop();
    context.scheduler.getDispatcher().start();
    return context;
  }

  public static Service createBaseDef(String name) {
    return createDef(name, ServiceTestUtils.createExampleApplication());
  }

  /** Stamps {@code serviceDef} with an app id, name, STARTED state, and a v1 artifact. */
  public static Service createDef(String name, Service serviceDef) {
    ApplicationId applicationId = ApplicationId.newInstance(
        System.currentTimeMillis(), 1);
    serviceDef.setId(applicationId.toString());
    serviceDef.setName(name);
    serviceDef.setState(ServiceState.STARTED);
    Artifact artifact = createTestArtifact("1");
    serviceDef.getComponents().forEach(component ->
        component.setArtifact(artifact));
    return serviceDef;
  }

  static Artifact createTestArtifact(String artifactId) {
    Artifact artifact = new Artifact();
    artifact.setId(artifactId);
    artifact.setType(Artifact.TypeEnum.TARBALL);
    return artifact;
  }

  /** Persists the initial (v1) service definition into the cluster dir. */
  private void writeInitialDef(Service service)
      throws IOException, SliderException {
    Path servicePath = rule.getFs().buildClusterDirPath(
        service.getName());
    ServiceApiUtil.createDirAndPersistApp(rule.getFs(), servicePath,
        service);
  }

  /** Persists the upgraded service definition into the upgrade dir for its version. */
  private void writeUpgradedDef(Service upgradedDef)
      throws IOException, SliderException {
    Path upgradePath = rule.getFs().buildClusterUpgradeDirPath(
        upgradedDef.getName(), upgradedDef.getVersion());
    ServiceApiUtil.createDirAndPersistApp(rule.getFs(), upgradePath,
        upgradedDef);
  }
}
/*
 * $Id$
 */

/*
Copyright (c) 2000-2003 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.
*/

package org.lockss.test;

import java.io.*;
import java.util.*;
import java.security.MessageDigest;
import org.lockss.daemon.*;
import org.lockss.test.*;
import org.lockss.util.*;
import org.lockss.plugin.*;

/**
 * This is a mock version of <code>CachedUrlSet</code> used for testing
 *
 * @author Thomas S. Robertson
 * @version 0.0
 */
public class MockCachedUrlSet implements CachedUrlSet {
  private MockArchivalUnit au;
  private CachedUrlSetSpec spec;
  private String url;

  // Explicit overrides for isLeaf()/hasContent(); when the *IsSet flag is
  // false the value is computed from the iterators / owning AU instead.
  private boolean isLeafIsSet = false;
  private boolean isLeaf = false;
  private boolean hasContentIsSet = false;
  private boolean hasContent = false;

  private Set cachedUrls = new HashSet();
  private Set forceCachedUrls = new HashSet();

  private Iterator flatIterator = null;
  private Iterator hashIterator = null;

  private Collection flatSource = null;
  private Collection hashSource = null;

  private long actualHashDuration;

  // Per-url count of cache attempts signalled via signalCacheAttempt().
  private Map<String, Integer> cacheAttempts = new HashMap<>();

  private static final Logger logger = Logger.getLogger("MockCachedUrlSet");

  public MockCachedUrlSet() {
    this(new MockArchivalUnit(), (MockCachedUrlSetSpec)null);
  }

  public MockCachedUrlSet(MockArchivalUnit owner, CachedUrlSetSpec spec) {
    this(owner);
    this.spec = spec;
  }

  public MockCachedUrlSet(MockArchivalUnit owner) {
    this.au = owner;
  }

  public MockCachedUrlSet(String url) {
    this.url = url;
  }

  public MockCachedUrlSet(CachedUrlSetSpec spec) {
    this(new MockArchivalUnit(), spec);
  }

  public CachedUrlSetSpec getSpec() {
    return spec;
  }

  public void setSpec(CachedUrlSetSpec newSpec) {
    spec = newSpec;
  }

  public ArchivalUnit getArchivalUnit() {
    return au;
  }

  public void setArchivalUnit(MockArchivalUnit newAu) {
    au = newAu;
  }

  public boolean containsUrl(String url) {
    return spec.matches(url);
  }

  /**
   * Returns the explicitly-set value if {@link #setHasContent} was called;
   * otherwise asks the owning AU's cached url, defaulting to false.
   */
  public boolean hasContent() {
    if (hasContentIsSet) {
      return this.hasContent;
    }
    if (au != null) {
      CachedUrl cu = au.makeCachedUrl(getUrl());
      return cu != null && cu.hasContent();
    }
    return false;
  }

  public void setHasContent(boolean hasContent) {
    this.hasContentIsSet = true;
    this.hasContent = hasContent;
  }

  /**
   * Returns the explicitly-set value if {@link #setIsLeaf} was called;
   * otherwise a node is a leaf when no flat/hash iterator or source has
   * any elements.
   */
  public boolean isLeaf() {
    if (isLeafIsSet) {
      return isLeaf;
    }
    return (((flatIterator==null) || (!flatIterator.hasNext())) &&
            ((hashIterator==null) || (!hashIterator.hasNext())) &&
            ((flatSource==null) || (flatSource.size() == 0)) &&
            ((hashSource==null) || (hashSource.size() == 0)));
  }

  public void setIsLeaf(boolean isLeaf) {
    this.isLeafIsSet = true;
    this.isLeaf = isLeaf;
  }

  public int getType() {
    return CachedUrlSetNode.TYPE_CACHED_URL_SET;
  }

  public void setExcludeFilesUnchangedAfter(long date) {
  }

  // A set source collection, when present, takes precedence over the one-shot iterator.
  public Iterator flatSetIterator() {
    if (flatSource!=null) {
      return flatSource.iterator();
    }
    return flatIterator;
  }

  public void setFlatIterator(Iterator it) {
    flatIterator = it;
  }

  public void setFlatItSource(Collection col) {
    flatSource = col;
  }

  public Iterator contentHashIterator() {
    if (hashSource!=null) {
      return hashSource.iterator();
    }
    return hashIterator;
  }

  public CuIterator getCuIterator() {
    if (hashSource!=null) {
      return new MockCuIterator(hashSource);
    }
    if (hashIterator != null) {
      return new MockCuIterator(hashIterator);
    }
    return null;
  }

  public CuIterable getCuIterable() {
    return new CuIterable() {
      @Override
      protected CuIterator makeIterator() {
        return getCuIterator();
      }};
  }

  public void setHashIterator(Iterator it) {
    hashIterator = it;
  }

  public void setHashItSource(Collection col) {
    hashSource = col;
  }

  public CuIterator archiveMemberIterator() {
    return getCuIterator();
  }

  // Methods used by the poller

  CachedUrlSetHasher contentHasher = null;
  CachedUrlSetHasher nameHasher = null;
  byte[] contentToBeHashed = null;
  byte[] namesToBeHashed = null;

  public void setContentHasher(CachedUrlSetHasher hasher) {
    contentHasher = hasher;
  }

  public void setNameHasher(CachedUrlSetHasher hasher) {
    nameHasher = hasher;
  }

  public void setContentToBeHashed(byte[] content) {
    contentToBeHashed = content;
  }

  public void setNamesToBeHashed(byte[] names) {
    namesToBeHashed = names;
  }

  public CachedUrlSetHasher getContentHasher(MessageDigest digest) {
    if (contentToBeHashed != null) {
      digest.update(contentToBeHashed);
    }
    return contentHasher;
  }

  public CachedUrlSetHasher getNameHasher(MessageDigest digest) {
    if (namesToBeHashed != null) {
      digest.update(namesToBeHashed);
    }
    return nameHasher;
  }

  private long hashEstimate = 0;

  public long estimatedHashDuration() {
    return hashEstimate;
  }

  public void setEstimatedHashDuration(long n) {
    hashEstimate = n;
  }

  public void storeActualHashDuration(long elapsed, Exception err) {
    actualHashDuration = elapsed;
  }

  public long getActualHashDuration() {
    return actualHashDuration;
  }

  //methods used to generate proper mock objects

  /** Returns the explicit url if set, else the spec's url, else null. */
  public String getUrl() {
    if (url != null) {
      return url;
    }
    if (spec!=null) {
      return spec.getUrl();
    } else {
      return null;
    }
  }

  public void addCachedUrl(String url) {
    cachedUrls.add(url);
  }

  public void addForceCachedUrl(String url) {
    forceCachedUrls.add(url);
  }

  public Set getCachedUrls() {
    return cachedUrls;
  }

  public Set getForceCachedUrls() {
    return forceCachedUrls;
  }

  public int hashCode() {
    if (spec!=null) {
      return spec.hashCode();
    } else {
      return 0;
    }
  }

  /** Increments the cache-attempt counter for {@code url}. */
  public void signalCacheAttempt(String url) {
    // Map.merge replaces the original get/put dance that used the
    // deprecated new Integer(...) boxing constructors.
    cacheAttempts.merge(url, 1, Integer::sum);
  }

  /** Returns the number of cache attempts signalled for {@code url}, 0 if none. */
  public int getNumCacheAttempts(String url) {
    return cacheAttempts.getOrDefault(url, 0);
  }

  /** Equal iff the other object is a CachedUrlSet with an equal spec. */
  public boolean equals(Object obj) {
    if (obj instanceof CachedUrlSet) {
      CachedUrlSet cus = (CachedUrlSet)obj;
      if (spec==null) {
        return (cus.getSpec()==null);
      } else {
        return spec.equals(cus.getSpec());
      }
    } else {
      return false;
    }
  }

  /**
   * Compares this set's position in the url hierarchy to {@code cus2}:
   * ABOVE/BELOW for ancestor/descendant, SAME_LEVEL_* for siblings, and
   * NO_RELATION when in different AUs or otherwise unrelated.
   */
  public int cusCompare(CachedUrlSet cus2) {
    // check that they're in the same AU
    if (!this.getArchivalUnit().equals(cus2.getArchivalUnit())) {
      return NO_RELATION;
    }
    CachedUrlSetSpec spec1 = this.getSpec();
    CachedUrlSetSpec spec2 = cus2.getSpec();
    String url1 = this.getUrl();
    String url2 = cus2.getUrl();

    // check for top-level urls
    if (spec1.isAu() || spec2.isAu()) {
      if (spec1.equals(spec2)) {
        return SAME_LEVEL_OVERLAP;
      } else if (spec1.isAu()) {
        return ABOVE;
      } else {
        return BELOW;
      }
    }

    // Normalize both urls to end with the path separator so prefix tests
    // compare whole path components.
    if (!url1.endsWith(UrlUtil.URL_PATH_SEPARATOR)) {
      url1 += UrlUtil.URL_PATH_SEPARATOR;
    }
    if (!url2.endsWith(UrlUtil.URL_PATH_SEPARATOR)) {
      url2 += UrlUtil.URL_PATH_SEPARATOR;
    }
    if (url1.equals(url2)) {
      //the urls are on the same level; check for overlap
      if (spec1.isDisjoint(spec2)) {
        return SAME_LEVEL_NO_OVERLAP;
      } else {
        return SAME_LEVEL_OVERLAP;
      }
    } else if (spec1.subsumes(spec2)) {
      // parent
      return ABOVE;
    } else if (spec2.subsumes(spec1)) {
      // child
      return BELOW;
    } else if (spec2.isSingleNode()) {
      if (url1.startsWith(url2)) {
        return SAME_LEVEL_NO_OVERLAP;
      }
      // else, cus2 probably has a range which excludes url1
    } else if (spec1.isSingleNode()) {
      if (url2.startsWith(url1)) {
        return SAME_LEVEL_NO_OVERLAP;
      }
      // else, cus1 probably has a range which excludes url2
    }
    // no connection between the two urls
    return NO_RELATION;
  }
}
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ package com.facebook.react.uimanager; import android.os.SystemClock; import android.view.View; import androidx.annotation.GuardedBy; import androidx.annotation.Nullable; import androidx.annotation.UiThread; import com.facebook.common.logging.FLog; import com.facebook.react.bridge.Callback; import com.facebook.react.bridge.GuardedRunnable; import com.facebook.react.bridge.ReactApplicationContext; import com.facebook.react.bridge.ReactContext; import com.facebook.react.bridge.ReactNoCrashSoftException; import com.facebook.react.bridge.ReactSoftException; import com.facebook.react.bridge.ReadableArray; import com.facebook.react.bridge.ReadableMap; import com.facebook.react.bridge.RetryableMountingLayerException; import com.facebook.react.bridge.SoftAssertions; import com.facebook.react.bridge.UiThreadUtil; import com.facebook.react.common.ReactConstants; import com.facebook.react.config.ReactFeatureFlags; import com.facebook.react.modules.core.ReactChoreographer; import com.facebook.react.uimanager.debug.NotThreadSafeViewHierarchyUpdateDebugListener; import com.facebook.systrace.Systrace; import com.facebook.systrace.SystraceMessage; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; /** * This class acts as a buffer for command executed on {@link NativeViewHierarchyManager}. It expose * similar methods as mentioned classes but instead of executing commands immediately it enqueues * those operations in a queue that is then flushed from {@link UIManagerModule} once JS batch of ui * operations is finished. This is to make sure that we execute all the JS operation coming from a * single batch a single loop of the main (UI) android looper. 
 *
 * <p>TODO(7135923): Pooling of operation objects TODO(5694019): Consider a better data structure
 * for operations queue to save on allocations
 */
public class UIViewOperationQueue {

  public static final int DEFAULT_MIN_TIME_LEFT_IN_FRAME_FOR_NONBATCHED_OPERATION_MS = 8;

  private static final String TAG = UIViewOperationQueue.class.getSimpleName();

  // Scratch buffer shared by the measure-style operations below; filled with
  // x, y, width, height in pixels. Sharing is safe because all UIOperations
  // execute sequentially on the UI thread.
  private final int[] mMeasureBuffer = new int[4];

  /** A mutation or animation operation on the view hierarchy. */
  public interface UIOperation {

    void execute();
  }

  /** A spec for an operation on the native View hierarchy. */
  private abstract class ViewOperation implements UIOperation {

    // React tag of the view this operation targets.
    public int mTag;

    public ViewOperation(int tag) {
      mTag = tag;
    }
  }

  private final class RemoveRootViewOperation extends ViewOperation {

    public RemoveRootViewOperation(int tag) {
      super(tag);
    }

    @Override
    public void execute() {
      mNativeViewHierarchyManager.removeRootView(mTag);
    }
  }

  private final class UpdatePropertiesOperation extends ViewOperation {

    private final ReactStylesDiffMap mProps;

    private UpdatePropertiesOperation(int tag, ReactStylesDiffMap props) {
      super(tag);
      mProps = props;
    }

    @Override
    public void execute() {
      mNativeViewHierarchyManager.updateProperties(mTag, mProps);
    }
  }

  /** Dispatches an onLayout event to JS with the view's screen position and size. */
  private final class EmitOnLayoutEventOperation extends ViewOperation {

    private final int mScreenX;
    private final int mScreenY;
    private final int mScreenWidth;
    private final int mScreenHeight;

    public EmitOnLayoutEventOperation(
        int tag, int screenX, int screenY, int screenWidth, int screenHeight) {
      super(tag);
      mScreenX = screenX;
      mScreenY = screenY;
      mScreenWidth = screenWidth;
      mScreenHeight = screenHeight;
    }

    @Override
    public void execute() {
      mReactApplicationContext
          .getNativeModule(UIManagerModule.class)
          .getEventDispatcher()
          .dispatchEvent(
              OnLayoutEvent.obtain(mTag, mScreenX, mScreenY, mScreenWidth, mScreenHeight));
    }
  }

  private final class UpdateInstanceHandleOperation extends ViewOperation {

    private final long mInstanceHandle;

    private UpdateInstanceHandleOperation(int tag, long instanceHandle) {
      super(tag);
      mInstanceHandle = instanceHandle;
    }

    @Override
    public void execute() {
      mNativeViewHierarchyManager.updateInstanceHandle(mTag, mInstanceHandle);
    }
  }

  /**
   * Operation for updating native view's position and size. The operation is not created directly
   * by a {@link UIManagerModule} call from JS. Instead it gets inflated using computed position and
   * size values by CSSNodeDEPRECATED hierarchy.
   */
  private final class UpdateLayoutOperation extends ViewOperation {

    private final int mParentTag, mX, mY, mWidth, mHeight;

    public UpdateLayoutOperation(int parentTag, int tag, int x, int y, int width, int height) {
      super(tag);
      mParentTag = parentTag;
      mX = x;
      mY = y;
      mWidth = width;
      mHeight = height;
      // Async-flow trace: started at enqueue time, ended when the op executes.
      Systrace.startAsyncFlow(Systrace.TRACE_TAG_REACT_VIEW, "updateLayout", mTag);
    }

    @Override
    public void execute() {
      Systrace.endAsyncFlow(Systrace.TRACE_TAG_REACT_VIEW, "updateLayout", mTag);
      mNativeViewHierarchyManager.updateLayout(mParentTag, mTag, mX, mY, mWidth, mHeight);
    }
  }

  private final class CreateViewOperation extends ViewOperation {

    private final ThemedReactContext mThemedContext;
    private final String mClassName;
    private final @Nullable ReactStylesDiffMap mInitialProps;

    public CreateViewOperation(
        ThemedReactContext themedContext,
        int tag,
        String className,
        @Nullable ReactStylesDiffMap initialProps) {
      super(tag);
      mThemedContext = themedContext;
      mClassName = className;
      mInitialProps = initialProps;
      // Async-flow trace: started at enqueue time, ended when the op executes.
      Systrace.startAsyncFlow(Systrace.TRACE_TAG_REACT_VIEW, "createView", mTag);
    }

    @Override
    public void execute() {
      Systrace.endAsyncFlow(Systrace.TRACE_TAG_REACT_VIEW, "createView", mTag);
      mNativeViewHierarchyManager.createView(mThemedContext, mTag, mClassName, mInitialProps);
    }
  }

  private final class ManageChildrenOperation extends ViewOperation {

    private final @Nullable int[] mIndicesToRemove;
    private final @Nullable ViewAtIndex[] mViewsToAdd;
    private final @Nullable int[] mTagsToDelete;

    public ManageChildrenOperation(
        int tag,
        @Nullable int[] indicesToRemove,
        @Nullable ViewAtIndex[] viewsToAdd,
        @Nullable int[] tagsToDelete) {
      super(tag);
      mIndicesToRemove = indicesToRemove;
      mViewsToAdd = viewsToAdd;
      mTagsToDelete = tagsToDelete;
    }

    @Override
    public void execute() {
      mNativeViewHierarchyManager.manageChildren(
          mTag, mIndicesToRemove, mViewsToAdd, mTagsToDelete);
    }
  }

  private final class SetChildrenOperation extends ViewOperation {

    private final ReadableArray mChildrenTags;

    public SetChildrenOperation(int tag, ReadableArray childrenTags) {
      super(tag);
      mChildrenTags = childrenTags;
    }

    @Override
    public void execute() {
      mNativeViewHierarchyManager.setChildren(mTag, mChildrenTags);
    }
  }

  private final class UpdateViewExtraData extends ViewOperation {

    private final Object mExtraData;

    public UpdateViewExtraData(int tag, Object extraData) {
      super(tag);
      mExtraData = extraData;
    }

    @Override
    public void execute() {
      mNativeViewHierarchyManager.updateViewExtraData(mTag, mExtraData);
    }
  }

  /** Sets or clears the JS touch responder; one operation class handles both directions. */
  private final class ChangeJSResponderOperation extends ViewOperation {

    private final int mInitialTag;
    private final boolean mBlockNativeResponder;
    private final boolean mClearResponder;

    public ChangeJSResponderOperation(
        int tag, int initialTag, boolean clearResponder, boolean blockNativeResponder) {
      super(tag);
      mInitialTag = initialTag;
      mClearResponder = clearResponder;
      mBlockNativeResponder = blockNativeResponder;
    }

    @Override
    public void execute() {
      if (!mClearResponder) {
        mNativeViewHierarchyManager.setJSResponder(mTag, mInitialTag, mBlockNativeResponder);
      } else {
        mNativeViewHierarchyManager.clearJSResponder();
      }
    }
  }

  /**
   * This is a common interface for View Command operations. Once we delete the deprecated {@link
   * DispatchCommandOperation}, we can delete this interface too. It provides a set of common
   * operations to simplify generic operations on all types of ViewCommands.
   */
  private interface DispatchCommandViewOperation {

    /**
     * Like the execute function, but throws real exceptions instead of logging soft errors and
     * returning silently.
     */
    void executeWithExceptions();

    /** Increment retry counter. */
    void incrementRetries();

    /** Get retry counter. */
    int getRetries();
  }

  /** Legacy int-command dispatch; superseded by {@link DispatchStringCommandOperation}. */
  @Deprecated
  private final class DispatchCommandOperation extends ViewOperation
      implements DispatchCommandViewOperation {

    private final int mCommand;
    private final @Nullable ReadableArray mArgs;

    private int numRetries = 0;

    public DispatchCommandOperation(int tag, int command, @Nullable ReadableArray args) {
      super(tag);
      mCommand = command;
      mArgs = args;
    }

    @Override
    public void execute() {
      // execute() swallows all failures as soft exceptions; contrast with
      // executeWithExceptions() below, which lets the caller handle them.
      try {
        mNativeViewHierarchyManager.dispatchCommand(mTag, mCommand, mArgs);
      } catch (Throwable e) {
        ReactSoftException.logSoftException(
            TAG, new RuntimeException("Error dispatching View Command", e));
      }
    }

    @Override
    public void executeWithExceptions() {
      mNativeViewHierarchyManager.dispatchCommand(mTag, mCommand, mArgs);
    }

    @Override
    @UiThread
    public void incrementRetries() {
      numRetries++;
    }

    @Override
    @UiThread
    public int getRetries() {
      return numRetries;
    }
  }

  private final class DispatchStringCommandOperation extends ViewOperation
      implements DispatchCommandViewOperation {

    private final String mCommand;
    private final @Nullable ReadableArray mArgs;
    private int numRetries = 0;

    public DispatchStringCommandOperation(int tag, String command, @Nullable ReadableArray args) {
      super(tag);
      mCommand = command;
      mArgs = args;
    }

    @Override
    public void execute() {
      // execute() swallows all failures as soft exceptions; executeWithExceptions()
      // lets the dispatch loop apply its retry-once policy instead.
      try {
        mNativeViewHierarchyManager.dispatchCommand(mTag, mCommand, mArgs);
      } catch (Throwable e) {
        ReactSoftException.logSoftException(
            TAG, new RuntimeException("Error dispatching View Command", e));
      }
    }

    @Override
    @UiThread
    public void executeWithExceptions() {
      mNativeViewHierarchyManager.dispatchCommand(mTag, mCommand, mArgs);
    }

    @Override
    @UiThread
    public void incrementRetries() {
      numRetries++;
    }

    @Override
    public int getRetries() {
      return numRetries;
    }
  }

  private final class ShowPopupMenuOperation extends ViewOperation {

    private final ReadableArray mItems;
    private final Callback mError;
    private final Callback mSuccess;

    public ShowPopupMenuOperation(int tag, ReadableArray items, Callback error, Callback success) {
      super(tag);
      mItems = items;
      mError = error;
      mSuccess = success;
    }

    @Override
    public void execute() {
      // NOTE: success is passed before error, matching showPopupMenu's parameter order.
      mNativeViewHierarchyManager.showPopupMenu(mTag, mItems, mSuccess, mError);
    }
  }

  private final class DismissPopupMenuOperation implements UIOperation {

    @Override
    public void execute() {
      mNativeViewHierarchyManager.dismissPopupMenu();
    }
  }

  /** A spec for animation operations (add/remove) */
  private abstract static class AnimationOperation implements UIViewOperationQueue.UIOperation {

    protected final int mAnimationID;

    public AnimationOperation(int animationID) {
      mAnimationID = animationID;
    }
  }

  private class SetLayoutAnimationEnabledOperation implements UIOperation {

    private final boolean mEnabled;

    private SetLayoutAnimationEnabledOperation(final boolean enabled) {
      mEnabled = enabled;
    }

    @Override
    public void execute() {
      mNativeViewHierarchyManager.setLayoutAnimationEnabled(mEnabled);
    }
  }

  private class ConfigureLayoutAnimationOperation implements UIOperation {

    private final ReadableMap mConfig;
    private final Callback mAnimationComplete;

    private ConfigureLayoutAnimationOperation(
        final ReadableMap config, final Callback animationComplete) {
      mConfig = config;
      mAnimationComplete = animationComplete;
    }

    @Override
    public void execute() {
      mNativeViewHierarchyManager.configureLayoutAnimation(mConfig, mAnimationComplete);
    }
  }

  /** Measures a view relative to its parent and reports the result (in DIPs) to a JS callback. */
  private final class MeasureOperation implements UIOperation {

    private final int mReactTag;
    private final Callback mCallback;

    private MeasureOperation(final int reactTag, final Callback callback) {
      super();
      mReactTag = reactTag;
      mCallback = callback;
    }

    @Override
    public void execute() {
      try {
        mNativeViewHierarchyManager.measure(mReactTag, mMeasureBuffer);
      } catch (NoSuchNativeViewException e) {
        // Invoke with no args to signal failure and to allow JS to clean up the callback
        // handle.
        mCallback.invoke();
        return;
      }

      float x = PixelUtil.toDIPFromPixel(mMeasureBuffer[0]);
      float y = PixelUtil.toDIPFromPixel(mMeasureBuffer[1]);
      float width = PixelUtil.toDIPFromPixel(mMeasureBuffer[2]);
      float height = PixelUtil.toDIPFromPixel(mMeasureBuffer[3]);
      // Callback signature: (errX, errY, width, height, pageX, pageY) — the first two
      // slots are 0 here and x/y are reported last.
      mCallback.invoke(0, 0, width, height, x, y);
    }
  }

  /** Measures a view in window coordinates and reports (x, y, width, height) in DIPs to JS. */
  private final class MeasureInWindowOperation implements UIOperation {

    private final int mReactTag;
    private final Callback mCallback;

    private MeasureInWindowOperation(final int reactTag, final Callback callback) {
      super();
      mReactTag = reactTag;
      mCallback = callback;
    }

    @Override
    public void execute() {
      try {
        mNativeViewHierarchyManager.measureInWindow(mReactTag, mMeasureBuffer);
      } catch (NoSuchNativeViewException e) {
        // Invoke with no args to signal failure and to allow JS to clean up the callback
        // handle.
        mCallback.invoke();
        return;
      }

      float x = PixelUtil.toDIPFromPixel(mMeasureBuffer[0]);
      float y = PixelUtil.toDIPFromPixel(mMeasureBuffer[1]);
      float width = PixelUtil.toDIPFromPixel(mMeasureBuffer[2]);
      float height = PixelUtil.toDIPFromPixel(mMeasureBuffer[3]);
      mCallback.invoke(x, y, width, height);
    }
  }

  /** Hit-tests a touch point inside a root view and reports the target tag and its frame. */
  private final class FindTargetForTouchOperation implements UIOperation {

    private final int mReactTag;
    private final float mTargetX;
    private final float mTargetY;
    private final Callback mCallback;

    private FindTargetForTouchOperation(
        final int reactTag, final float targetX, final float targetY, final Callback callback) {
      super();
      mReactTag = reactTag;
      mTargetX = targetX;
      mTargetY = targetY;
      mCallback = callback;
    }

    @Override
    public void execute() {
      try {
        mNativeViewHierarchyManager.measure(mReactTag, mMeasureBuffer);
      } catch (IllegalViewOperationException e) {
        // No-arg invoke signals failure to JS.
        mCallback.invoke();
        return;
      }

      // Because React coordinates are relative to root container, and measure() operates
      // on screen coordinates, we need to offset values using root container location.
      final float containerX = (float) mMeasureBuffer[0];
      final float containerY = (float) mMeasureBuffer[1];

      final int touchTargetReactTag =
          mNativeViewHierarchyManager.findTargetTagForTouch(mReactTag, mTargetX, mTargetY);

      try {
        mNativeViewHierarchyManager.measure(touchTargetReactTag, mMeasureBuffer);
      } catch (IllegalViewOperationException e) {
        mCallback.invoke();
        return;
      }

      float x = PixelUtil.toDIPFromPixel(mMeasureBuffer[0] - containerX);
      float y = PixelUtil.toDIPFromPixel(mMeasureBuffer[1] - containerY);
      float width = PixelUtil.toDIPFromPixel(mMeasureBuffer[2]);
      float height = PixelUtil.toDIPFromPixel(mMeasureBuffer[3]);
      mCallback.invoke(touchTargetReactTag, x, y, width, height);
    }
  }

  private final class LayoutUpdateFinishedOperation implements UIOperation {

    private final ReactShadowNode mNode;
    private final UIImplementation.LayoutUpdateListener mListener;

    private LayoutUpdateFinishedOperation(
        ReactShadowNode node, UIImplementation.LayoutUpdateListener listener) {
      mNode = node;
      mListener = listener;
    }

    @Override
    public void execute() {
      mListener.onLayoutUpdated(mNode);
    }
  }

  /** Wraps a user-supplied {@link UIBlock} so it can ride the operations queue. */
  private class UIBlockOperation implements UIOperation {

    private final UIBlock mBlock;

    public UIBlockOperation(UIBlock block) {
      mBlock = block;
    }

    @Override
    public void execute() {
      mBlock.execute(mNativeViewHierarchyManager);
    }
  }

  private final class SendAccessibilityEvent extends ViewOperation {

    private final int mEventType;

    private SendAccessibilityEvent(int tag, int eventType) {
      super(tag);
      mEventType = eventType;
    }

    @Override
    public void execute() {
      mNativeViewHierarchyManager.sendAccessibilityEvent(mTag, mEventType);
    }
  }

  private final NativeViewHierarchyManager mNativeViewHierarchyManager;
  private final Object mDispatchRunnablesLock = new Object();
  private final Object mNonBatchedOperationsLock = new Object();
  private final DispatchUIFrameCallback mDispatchUIFrameCallback;
  private final ReactApplicationContext mReactApplicationContext;

  // Gated by ReactFeatureFlags.allowEarlyViewCommandExecution (set in the constructor);
  // when true, dispatch-command ops go to mViewCommandOperations instead of mOperations.
  private final boolean mAllowViewCommandsQueue;

  private ArrayList<DispatchCommandViewOperation> mViewCommandOperations = new ArrayList<>();

  // Only called from the UIManager queue?
  private ArrayList<UIOperation> mOperations = new ArrayList<>();

  @GuardedBy("mDispatchRunnablesLock")
  private ArrayList<Runnable> mDispatchUIRunnables = new ArrayList<>();

  @GuardedBy("mNonBatchedOperationsLock")
  private ArrayDeque<UIOperation> mNonBatchedOperations = new ArrayDeque<>();

  private @Nullable NotThreadSafeViewHierarchyUpdateDebugListener
      mViewHierarchyUpdateDebugListener;
  private boolean mIsDispatchUIFrameCallbackEnqueued = false;
  // Set to true after any batch throws; from then on all flushing is skipped (fail-stop).
  private boolean mIsInIllegalUIState = false;
  private boolean mIsProfilingNextBatch = false;
  private long mNonBatchedExecutionTotalTime;
  private long mProfiledBatchCommitStartTime;
  private long mProfiledBatchCommitEndTime;
  private long mProfiledBatchLayoutTime;
  private long mProfiledBatchDispatchViewUpdatesTime;
  private long mProfiledBatchRunStartTime;
  private long mProfiledBatchRunEndTime;
  private long mProfiledBatchBatchedExecutionTime;
  private long mProfiledBatchNonBatchedExecutionTime;
  private long mThreadCpuTime;
  private long mCreateViewCount;
  private long mUpdatePropertiesOperationCount;

  /**
   * @param reactContext application context used for UI-thread dispatch and event emission
   * @param nativeViewHierarchyManager target that actually mutates the native view tree
   * @param minTimeLeftInFrameForNonBatchedOperationMs frame-budget cutoff for non-batched ops;
   *     pass -1 to use the default of {@value
   *     #DEFAULT_MIN_TIME_LEFT_IN_FRAME_FOR_NONBATCHED_OPERATION_MS} ms
   */
  public UIViewOperationQueue(
      ReactApplicationContext reactContext,
      NativeViewHierarchyManager nativeViewHierarchyManager,
      int minTimeLeftInFrameForNonBatchedOperationMs) {
    mNativeViewHierarchyManager = nativeViewHierarchyManager;
    mDispatchUIFrameCallback =
        new DispatchUIFrameCallback(
            reactContext,
            minTimeLeftInFrameForNonBatchedOperationMs == -1
                ? DEFAULT_MIN_TIME_LEFT_IN_FRAME_FOR_NONBATCHED_OPERATION_MS
                : minTimeLeftInFrameForNonBatchedOperationMs);
    mReactApplicationContext = reactContext;
    mAllowViewCommandsQueue = ReactFeatureFlags.allowEarlyViewCommandExecution;
  }

  /*package*/ NativeViewHierarchyManager getNativeViewHierarchyManager() {
    return mNativeViewHierarchyManager;
  }

  public void setViewHierarchyUpdateDebugListener(
      @Nullable NotThreadSafeViewHierarchyUpdateDebugListener listener) {
    mViewHierarchyUpdateDebugListener = listener;
  }

  /** Arms profiling for the next dispatched batch and resets the relevant counters. */
  public void profileNextBatch() {
    mIsProfilingNextBatch = true;
    mProfiledBatchCommitStartTime = 0;
    mCreateViewCount = 0;
    mUpdatePropertiesOperationCount = 0;
  }

  /** Returns a snapshot of the perf counters captured for the last profiled batch. */
  public Map<String, Long> getProfiledBatchPerfCounters() {
    Map<String, Long> perfMap = new HashMap<>();
    perfMap.put("CommitStartTime", mProfiledBatchCommitStartTime);
    perfMap.put("CommitEndTime", mProfiledBatchCommitEndTime);
    perfMap.put("LayoutTime", mProfiledBatchLayoutTime);
    perfMap.put("DispatchViewUpdatesTime", mProfiledBatchDispatchViewUpdatesTime);
    perfMap.put("RunStartTime", mProfiledBatchRunStartTime);
    perfMap.put("RunEndTime", mProfiledBatchRunEndTime);
    perfMap.put("BatchedExecutionTime", mProfiledBatchBatchedExecutionTime);
    perfMap.put("NonBatchedExecutionTime", mProfiledBatchNonBatchedExecutionTime);
    perfMap.put("NativeModulesThreadCpuTime", mThreadCpuTime);
    perfMap.put("CreateViewCount", mCreateViewCount);
    perfMap.put("UpdatePropsCount", mUpdatePropertiesOperationCount);
    return perfMap;
  }

  // NOTE(review): does not check mNonBatchedOperations — presumably intentional,
  // but worth confirming against callers.
  public boolean isEmpty() {
    return mOperations.isEmpty() && mViewCommandOperations.isEmpty();
  }

  /** Registers a root view immediately (not queued). */
  public void addRootView(final int tag, final View rootView) {
    mNativeViewHierarchyManager.addRootView(tag, rootView);
  }

  /**
   * Enqueues a UIOperation to be executed in UI thread. This method should only be used by a
   * subclass to support UIOperations not provided by UIViewOperationQueue.
   */
  protected void enqueueUIOperation(UIOperation operation) {
    SoftAssertions.assertNotNull(operation);
    mOperations.add(operation);
  }

  public void enqueueRemoveRootView(int rootViewTag) {
    mOperations.add(new RemoveRootViewOperation(rootViewTag));
  }

  public void enqueueSetJSResponder(int tag, int initialTag, boolean blockNativeResponder) {
    mOperations.add(
        new ChangeJSResponderOperation(
            tag, initialTag, false /*clearResponder*/, blockNativeResponder));
  }

  public void enqueueClearJSResponder() {
    // Tag is 0 because JSResponderHandler doesn't need one in order to clear the responder.
    mOperations.add(new ChangeJSResponderOperation(0, 0, true /*clearResponder*/, false));
  }

  /** @deprecated dispatch commands by String id via the overload below. */
  @Deprecated
  public void enqueueDispatchCommand(
      int reactTag, int commandId, @Nullable ReadableArray commandArgs) {
    final DispatchCommandOperation command =
        new DispatchCommandOperation(reactTag, commandId, commandArgs);

    if (mAllowViewCommandsQueue) {
      mViewCommandOperations.add(command);
    } else {
      mOperations.add(command);
    }
  }

  public void enqueueDispatchCommand(
      int reactTag, String commandId, @Nullable ReadableArray commandArgs) {
    final DispatchStringCommandOperation command =
        new DispatchStringCommandOperation(reactTag, commandId, commandArgs);

    if (mAllowViewCommandsQueue) {
      mViewCommandOperations.add(command);
    } else {
      mOperations.add(command);
    }
  }

  public void enqueueUpdateExtraData(int reactTag, Object extraData) {
    mOperations.add(new UpdateViewExtraData(reactTag, extraData));
  }

  public void enqueueShowPopupMenu(
      int reactTag, ReadableArray items, Callback error, Callback success) {
    mOperations.add(new ShowPopupMenuOperation(reactTag, items, error, success));
  }

  public void enqueueDismissPopupMenu() {
    mOperations.add(new DismissPopupMenuOperation());
  }

  // Create-view ops go on the non-batched queue so they can be executed eagerly
  // during idle frame time (see DispatchUIFrameCallback) rather than waiting for a batch.
  public void enqueueCreateView(
      ThemedReactContext themedContext,
      int viewReactTag,
      String viewClassName,
      @Nullable ReactStylesDiffMap initialProps) {
    synchronized (mNonBatchedOperationsLock) {
      mCreateViewCount++;
      mNonBatchedOperations.addLast(
          new CreateViewOperation(themedContext, viewReactTag, viewClassName, initialProps));
    }
  }

  public void enqueueUpdateInstanceHandle(int reactTag, long instanceHandle) {
    mOperations.add(new UpdateInstanceHandleOperation(reactTag, instanceHandle));
  }

  // className is accepted for caller-signature parity but is not used here.
  public void enqueueUpdateProperties(int reactTag, String className, ReactStylesDiffMap props) {
    mUpdatePropertiesOperationCount++;
    mOperations.add(new UpdatePropertiesOperation(reactTag, props));
  }

  public void enqueueOnLayoutEvent(
      int tag, int screenX, int screenY, int screenWidth, int screenHeight) {
    mOperations.add(
        new EmitOnLayoutEventOperation(tag, screenX, screenY, screenWidth, screenHeight));
  }

  public void enqueueUpdateLayout(
      int parentTag, int reactTag, int x, int y, int width, int height) {
    mOperations.add(new UpdateLayoutOperation(parentTag, reactTag, x, y, width, height));
  }

  public void enqueueManageChildren(
      int reactTag,
      @Nullable int[] indicesToRemove,
      @Nullable ViewAtIndex[] viewsToAdd,
      @Nullable int[] tagsToDelete) {
    mOperations.add(
        new ManageChildrenOperation(reactTag, indicesToRemove, viewsToAdd, tagsToDelete));
  }

  public void enqueueSetChildren(int reactTag, ReadableArray childrenTags) {
    mOperations.add(new SetChildrenOperation(reactTag, childrenTags));
  }

  public void enqueueSetLayoutAnimationEnabled(final boolean enabled) {
    mOperations.add(new SetLayoutAnimationEnabledOperation(enabled));
  }

  public void enqueueConfigureLayoutAnimation(
      final ReadableMap config, final Callback onAnimationComplete) {
    mOperations.add(new ConfigureLayoutAnimationOperation(config, onAnimationComplete));
  }

  public void enqueueMeasure(final int reactTag, final Callback callback) {
    mOperations.add(new MeasureOperation(reactTag, callback));
  }

  public void enqueueMeasureInWindow(final int reactTag, final Callback callback) {
    mOperations.add(new MeasureInWindowOperation(reactTag, callback));
  }

  public void enqueueFindTargetForTouch(
      final int reactTag, final float targetX, final float targetY, final Callback callback) {
    mOperations.add(new FindTargetForTouchOperation(reactTag, targetX, targetY, callback));
  }

  public void enqueueSendAccessibilityEvent(int tag, int eventType) {
    mOperations.add(new SendAccessibilityEvent(tag, eventType));
  }

  public void enqueueLayoutUpdateFinished(
      ReactShadowNode node, UIImplementation.LayoutUpdateListener listener) {
    mOperations.add(new LayoutUpdateFinishedOperation(node, listener));
  }

  public void enqueueUIBlock(UIBlock block) {
    mOperations.add(new UIBlockOperation(block));
  }

  public void prependUIBlock(UIBlock block) {
    mOperations.add(0, new UIBlockOperation(block));
  }

  /**
   * Snapshots the three pending queues (view commands, batched ops, non-batched ops), wraps them
   * in a Runnable that executes them in that order, and hands the Runnable to the UI thread —
   * either via the frame callback or, if it isn't enqueued, via a direct UI-thread post.
   *
   * @param batchId JS batch identifier (trace annotation only)
   * @param commitStartTime timestamp recorded when the JS commit started (profiling)
   * @param layoutTime layout duration for the batch (profiling)
   */
  public void dispatchViewUpdates(
      final int batchId, final long commitStartTime, final long layoutTime) {
    SystraceMessage.beginSection(
            Systrace.TRACE_TAG_REACT_JAVA_BRIDGE, "UIViewOperationQueue.dispatchViewUpdates")
        .arg("batchId", batchId)
        .flush();
    try {
      final long dispatchViewUpdatesTime = SystemClock.uptimeMillis();
      final long nativeModulesThreadCpuTime = SystemClock.currentThreadTimeMillis();

      // Store the current operation queues to dispatch and create new empty ones to continue
      // receiving new operations
      final ArrayList<DispatchCommandViewOperation> viewCommandOperations;
      if (!mViewCommandOperations.isEmpty()) {
        viewCommandOperations = mViewCommandOperations;
        mViewCommandOperations = new ArrayList<>();
      } else {
        viewCommandOperations = null;
      }

      final ArrayList<UIOperation> batchedOperations;
      if (!mOperations.isEmpty()) {
        batchedOperations = mOperations;
        mOperations = new ArrayList<>();
      } else {
        batchedOperations = null;
      }

      final ArrayDeque<UIOperation> nonBatchedOperations;
      synchronized (mNonBatchedOperationsLock) {
        if (!mNonBatchedOperations.isEmpty()) {
          nonBatchedOperations = mNonBatchedOperations;
          mNonBatchedOperations = new ArrayDeque<>();
        } else {
          nonBatchedOperations = null;
        }
      }

      if (mViewHierarchyUpdateDebugListener != null) {
        mViewHierarchyUpdateDebugListener.onViewHierarchyUpdateEnqueued();
      }

      Runnable runOperations =
          new Runnable() {
            @Override
            public void run() {
              SystraceMessage.beginSection(Systrace.TRACE_TAG_REACT_JAVA_BRIDGE, "DispatchUI")
                  .arg("BatchId", batchId)
                  .flush();
              try {
                long runStartTime = SystemClock.uptimeMillis();

                // All ViewCommands should be executed first as a perf optimization.
                // This entire block is only executed if there's a separate viewCommand queue,
                // which is currently gated by a ReactFeatureFlag.
                if (viewCommandOperations != null) {
                  for (DispatchCommandViewOperation op : viewCommandOperations) {
                    try {
                      op.executeWithExceptions();
                    } catch (RetryableMountingLayerException e) {
                      // Catch errors in DispatchCommands. We allow all commands to be retried
                      // exactly once, after the current batch of other mountitems. If the second
                      // attempt fails, then we log a soft error. This will still crash only in
                      // debug. We do this because it is a ~relatively common pattern to dispatch a
                      // command during render, for example, to scroll to the bottom of a ScrollView
                      // in render. This dispatches the command before that View is even mounted. By
                      // retrying once, we can still dispatch the vast majority of commands faster,
                      // avoid errors, and still operate correctly for most commands even when
                      // they're executed too soon.
                      if (op.getRetries() == 0) {
                        op.incrementRetries();
                        // Re-queue onto the *live* command queue so the op runs with the
                        // next dispatched batch.
                        mViewCommandOperations.add(op);
                      } else {
                        // Retryable exceptions should be logged, but never crash in debug.
                        ReactSoftException.logSoftException(TAG, new ReactNoCrashSoftException(e));
                      }
                    } catch (Throwable e) {
                      // Non-retryable exceptions should be logged in prod, and crash in Debug.
                      ReactSoftException.logSoftException(TAG, e);
                    }
                  }
                }

                // All nonBatchedOperations should be executed before regular operations as
                // regular operations may depend on them
                if (nonBatchedOperations != null) {
                  for (UIOperation op : nonBatchedOperations) {
                    op.execute();
                  }
                }

                if (batchedOperations != null) {
                  for (UIOperation op : batchedOperations) {
                    op.execute();
                  }
                }

                // Capture profiling data only for the first batch after profileNextBatch()
                // (mProfiledBatchCommitStartTime == 0 marks "not yet captured").
                if (mIsProfilingNextBatch && mProfiledBatchCommitStartTime == 0) {
                  mProfiledBatchCommitStartTime = commitStartTime;
                  mProfiledBatchCommitEndTime = SystemClock.uptimeMillis();
                  mProfiledBatchLayoutTime = layoutTime;
                  mProfiledBatchDispatchViewUpdatesTime = dispatchViewUpdatesTime;
                  mProfiledBatchRunStartTime = runStartTime;
                  mProfiledBatchRunEndTime = mProfiledBatchCommitEndTime;
                  mThreadCpuTime = nativeModulesThreadCpuTime;

                  Systrace.beginAsyncSection(
                      Systrace.TRACE_TAG_REACT_JAVA_BRIDGE,
                      "delayBeforeDispatchViewUpdates",
                      0,
                      mProfiledBatchCommitStartTime * 1000000);
                  Systrace.endAsyncSection(
                      Systrace.TRACE_TAG_REACT_JAVA_BRIDGE,
                      "delayBeforeDispatchViewUpdates",
                      0,
                      mProfiledBatchDispatchViewUpdatesTime * 1000000);
                  Systrace.beginAsyncSection(
                      Systrace.TRACE_TAG_REACT_JAVA_BRIDGE,
                      "delayBeforeBatchRunStart",
                      0,
                      mProfiledBatchDispatchViewUpdatesTime * 1000000);
                  Systrace.endAsyncSection(
                      Systrace.TRACE_TAG_REACT_JAVA_BRIDGE,
                      "delayBeforeBatchRunStart",
                      0,
                      mProfiledBatchRunStartTime * 1000000);
                }

                // Clear layout animation, as animation only apply to current UI operations batch.
                mNativeViewHierarchyManager.clearLayoutAnimation();

                if (mViewHierarchyUpdateDebugListener != null) {
                  mViewHierarchyUpdateDebugListener.onViewHierarchyUpdateFinished();
                }
              } catch (Exception e) {
                // Fail-stop: mark the queue poisoned so later flushes are skipped,
                // then rethrow so the failure surfaces.
                mIsInIllegalUIState = true;
                throw e;
              } finally {
                Systrace.endSection(Systrace.TRACE_TAG_REACT_JAVA_BRIDGE);
              }
            }
          };

      SystraceMessage.beginSection(
              Systrace.TRACE_TAG_REACT_JAVA_BRIDGE, "acquiring mDispatchRunnablesLock")
          .arg("batchId", batchId)
          .flush();
      synchronized (mDispatchRunnablesLock) {
        Systrace.endSection(Systrace.TRACE_TAG_REACT_JAVA_BRIDGE);
        mDispatchUIRunnables.add(runOperations);
      }

      // In the case where the frame callback isn't enqueued, the UI isn't being displayed or is
      // being destroyed. In this case it's no longer important to align to frames, but it is
      // important to make sure any late-arriving UI commands are executed.
      if (!mIsDispatchUIFrameCallbackEnqueued) {
        UiThreadUtil.runOnUiThread(
            new GuardedRunnable(mReactApplicationContext) {
              @Override
              public void runGuarded() {
                flushPendingBatches();
              }
            });
      }
    } finally {
      Systrace.endSection(Systrace.TRACE_TAG_REACT_JAVA_BRIDGE);
    }
  }

  /* package */ void resumeFrameCallback() {
    mIsDispatchUIFrameCallbackEnqueued = true;
    ReactChoreographer.getInstance()
        .postFrameCallback(ReactChoreographer.CallbackType.DISPATCH_UI, mDispatchUIFrameCallback);
  }

  /* package */ void pauseFrameCallback() {
    mIsDispatchUIFrameCallbackEnqueued = false;
    ReactChoreographer.getInstance()
        .removeFrameCallback(ReactChoreographer.CallbackType.DISPATCH_UI, mDispatchUIFrameCallback);
    // Flush whatever is already queued so nothing is stranded while paused.
    flushPendingBatches();
  }

  /** Runs every batch Runnable queued by dispatchViewUpdates; no-op once in the illegal state. */
  private void flushPendingBatches() {
    if (mIsInIllegalUIState) {
      FLog.w(
          ReactConstants.TAG,
          "Not flushing pending UI operations because of previously thrown Exception");
      return;
    }

    // Swap out the runnable list under the lock, then execute outside the lock.
    final ArrayList<Runnable> runnables;
    synchronized (mDispatchRunnablesLock) {
      if (!mDispatchUIRunnables.isEmpty()) {
        runnables = mDispatchUIRunnables;
        mDispatchUIRunnables = new ArrayList<>();
      } else {
        return;
      }
    }

    final long batchedExecutionStartTime = SystemClock.uptimeMillis();
    for (Runnable runnable : runnables) {
      runnable.run();
    }

    if (mIsProfilingNextBatch) {
      mProfiledBatchBatchedExecutionTime = SystemClock.uptimeMillis() - batchedExecutionStartTime;
      mProfiledBatchNonBatchedExecutionTime = mNonBatchedExecutionTotalTime;
      mIsProfilingNextBatch = false;

      Systrace.beginAsyncSection(
          Systrace.TRACE_TAG_REACT_JAVA_BRIDGE,
          "batchedExecutionTime",
          0,
          batchedExecutionStartTime * 1000000);
      Systrace.endAsyncSection(Systrace.TRACE_TAG_REACT_JAVA_BRIDGE, "batchedExecutionTime", 0);
    }
    mNonBatchedExecutionTotalTime = 0;
  }

  /**
   * Choreographer FrameCallback responsible for actually dispatching view updates on the UI thread
   * that were enqueued via {@link #dispatchViewUpdates(int)}. The reason we don't just enqueue
   * directly to the UI thread from that method is to make sure our Runnables actually run before
   * the next traversals happen:
   *
   * <p>ViewRootImpl#scheduleTraversals (which is called from invalidate, requestLayout, etc) calls
   * Looper#postSyncBarrier which keeps any UI thread looper messages from being processed until
   * that barrier is removed during the next traversal. That means, depending on when we get updates
   * from JS and what else is happening on the UI thread, we can sometimes try to post this runnable
   * after ViewRootImpl has posted a barrier.
   *
   * <p>Using a Choreographer callback (which runs immediately before traversals), we guarantee we
   * run before the next traversal.
   */
  private class DispatchUIFrameCallback extends GuardedFrameCallback {

    private static final int FRAME_TIME_MS = 16;
    private final int mMinTimeLeftInFrameForNonBatchedOperationMs;

    private DispatchUIFrameCallback(
        ReactContext reactContext, int minTimeLeftInFrameForNonBatchedOperationMs) {
      super(reactContext);
      mMinTimeLeftInFrameForNonBatchedOperationMs = minTimeLeftInFrameForNonBatchedOperationMs;
    }

    @Override
    public void doFrameGuarded(long frameTimeNanos) {
      if (mIsInIllegalUIState) {
        FLog.w(
            ReactConstants.TAG,
            "Not flushing pending UI operations because of previously thrown Exception");
        return;
      }

      Systrace.beginSection(Systrace.TRACE_TAG_REACT_JAVA_BRIDGE, "dispatchNonBatchedUIOperations");
      try {
        dispatchPendingNonBatchedOperations(frameTimeNanos);
      } finally {
        Systrace.endSection(Systrace.TRACE_TAG_REACT_JAVA_BRIDGE);
      }

      flushPendingBatches();

      // Self-repost so we run again next frame.
      ReactChoreographer.getInstance()
          .postFrameCallback(ReactChoreographer.CallbackType.DISPATCH_UI, this);
    }

    // Drains non-batched ops one at a time, stopping when less than the configured
    // minimum remains of the 16 ms frame budget (measured from frameTimeNanos).
    private void dispatchPendingNonBatchedOperations(long frameTimeNanos) {
      while (true) {
        long timeLeftInFrame = FRAME_TIME_MS - ((System.nanoTime() - frameTimeNanos) / 1000000);
        if (timeLeftInFrame < mMinTimeLeftInFrameForNonBatchedOperationMs) {
          break;
        }

        UIOperation nextOperation;
        synchronized (mNonBatchedOperationsLock) {
          if (mNonBatchedOperations.isEmpty()) {
            break;
          }
          nextOperation = mNonBatchedOperations.pollFirst();
        }

        try {
          long nonBatchedExecutionStartTime = SystemClock.uptimeMillis();
          nextOperation.execute();
          mNonBatchedExecutionTotalTime +=
              SystemClock.uptimeMillis() - nonBatchedExecutionStartTime;
        } catch (Exception e) {
          // Fail-stop: poison the queue and rethrow (same policy as the batch runnable).
          mIsInIllegalUIState = true;
          throw e;
        }
      }
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.cache.jta.websphere;

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import javax.cache.configuration.Factory;
import javax.transaction.HeuristicMixedException;
import javax.transaction.HeuristicRollbackException;
import javax.transaction.InvalidTransactionException;
import javax.transaction.NotSupportedException;
import javax.transaction.RollbackException;
import javax.transaction.Synchronization;
import javax.transaction.SystemException;
import javax.transaction.Transaction;
import javax.transaction.TransactionManager;
import javax.transaction.xa.XAResource;
import org.apache.ignite.IgniteException;

/**
 * Implementation of Transaction Manager factory that should used within
 * WebSphere Application Server ("full profile" / "traditional" WS AS).
 * <p>
 * Notes:
 * <ul>
 *     <li>
 *         {@link WebSphereLibertyTmFactory} should be used within WebSphere Liberty.
 *     </li>
 *     <li>
 *         The implementation has been tested with WebSphere Application Server 8.5.5.
 *     </li>
 * </ul>
 * <h2 class="header">Java Configuration</h2>
 * <pre name="code" class="java">
 * IgniteConfiguration cfg = new IgniteConfiguration();
 *
 * TransactionConfiguration txCfg = new TransactionConfiguration();
 *
 * txCfg.setTxManagerFactory(new WebSphereTmFactory());
 *
 * cfg.setTransactionConfiguration(txCfg);
 * </pre>
 * <h2 class="header">Spring Configuration</h2>
 * <pre name="code" class="xml">
 * &lt;bean id="ignite.cfg" class="org.apache.ignite.configuration.IgniteConfiguration"&gt;
 *     ...
 *     &lt;property name="transactionConfiguration"&gt;
 *         &lt;bean class="org.apache.ignite.cache.jta.websphere.WebSphereTmFactory"/&gt;
 *     &lt;/property&gt;
 *     ...
 * &lt;/bean&gt;
 * </pre>
 * <p>
 * <img src="http://ignite.apache.org/images/spring-small.png">
 * <br>
 * For information about Spring framework visit <a href="http://www.springframework.org/">www.springframework.org</a>
 */
public class WebSphereTmFactory implements Factory<TransactionManager> {
    /** Serial version UID (Factory is Serializable). */
    private static final long serialVersionUID = 0;

    /** IBM-internal marker interface; resources must implement it to be enlisted one-phase. */
    private static final Class<?> onePhaseXAResourceCls;

    static {
        try {
            // Resolved reflectively because the IBM classes are only present inside WebSphere.
            onePhaseXAResourceCls = Class.forName("com.ibm.tx.jta.OnePhaseXAResource");
        }
        catch (ClassNotFoundException e) {
            throw new IgniteException(e);
        }
    }

    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    @Override public TransactionManager create() {
        try {
            // Obtain WebSphere's singleton TM via its internal static factory method.
            Class clazz = Class.forName("com.ibm.tx.jta.impl.TranManagerSet");

            Method m = clazz.getMethod("instance", (Class[])null);

            TransactionManager tranMgr = (TransactionManager)m.invoke(null, (Object[])null);

            // Wrap so that enlisted XAResources can be adapted to IBM's one-phase interface.
            return new WebSphereTransactionManager(tranMgr);
        }
        catch (SecurityException | ClassNotFoundException | IllegalArgumentException | NoSuchMethodException
            | InvocationTargetException | IllegalAccessException e) {
            throw new IgniteException(e);
        }
    }

    /**
     * Delegating wrapper around WebSphere's {@link TransactionManager} that wraps
     * returned transactions in {@link WebSphereTransaction}.
     */
    private static class WebSphereTransactionManager implements TransactionManager {
        /** Underlying WebSphere transaction manager. */
        private TransactionManager mgr;

        /**
         * @param mgr Transaction Manager.
         */
        WebSphereTransactionManager(TransactionManager mgr) {
            this.mgr = mgr;
        }

        /** {@inheritDoc} */
        @Override public void begin() throws NotSupportedException, SystemException {
            mgr.begin();
        }

        /** {@inheritDoc} */
        @Override public void commit() throws RollbackException, HeuristicMixedException, HeuristicRollbackException,
            SecurityException, IllegalStateException, SystemException {
            mgr.commit();
        }

        /** {@inheritDoc} */
        @Override public int getStatus() throws SystemException {
            return mgr.getStatus();
        }

        /** {@inheritDoc} */
        @Override public Transaction getTransaction() throws SystemException {
            Transaction tx = mgr.getTransaction();

            if (tx == null)
                return null;

            // Wrap so enlistResource can adapt resources to the IBM one-phase interface.
            return new WebSphereTransaction(tx);
        }

        /** {@inheritDoc} */
        @Override public void resume(Transaction tobj) throws InvalidTransactionException, IllegalStateException,
            SystemException {
            mgr.resume(tobj);
        }

        /** {@inheritDoc} */
        @Override public void rollback() throws IllegalStateException, SecurityException, SystemException {
            mgr.rollback();
        }

        /** {@inheritDoc} */
        @Override public void setRollbackOnly() throws IllegalStateException, SystemException {
            mgr.setRollbackOnly();
        }

        /** {@inheritDoc} */
        @Override public void setTransactionTimeout(int seconds) throws SystemException {
            mgr.setTransactionTimeout(seconds);
        }

        /** {@inheritDoc} */
        @Override public Transaction suspend() throws SystemException {
            return mgr.suspend();
        }
    }

    /**
     * Delegating wrapper around a WebSphere {@link Transaction} whose sole behavioral
     * difference is that {@link #enlistResource(XAResource)} proxies the resource so it
     * also implements IBM's {@code OnePhaseXAResource} interface.
     */
    private static class WebSphereTransaction implements Transaction {
        /** Underlying WebSphere transaction. */
        private final Transaction tx;

        /**
         * @param tx Transaction.
         */
        WebSphereTransaction(Transaction tx) {
            assert tx != null;

            this.tx = tx;
        }

        /** {@inheritDoc} */
        @Override public void commit() throws RollbackException, HeuristicMixedException, HeuristicRollbackException,
            SecurityException, IllegalStateException, SystemException {
            tx.commit();
        }

        /** {@inheritDoc} */
        @Override public boolean delistResource(XAResource xaRes, int flag) throws IllegalStateException,
            SystemException {
            return tx.delistResource(xaRes, flag);
        }

        /** {@inheritDoc} */
        @Override public boolean enlistResource(final XAResource xaRes) throws RollbackException,
            IllegalStateException, SystemException {
            if (xaRes == null)
                return false;

            // Create a dynamic proxy implementing OnePhaseXAResource that forwards every
            // call to the given resource; WebSphere requires the marker interface for
            // one-phase enlistment.
            Object ibmProxy = Proxy.newProxyInstance(Thread.currentThread().getContextClassLoader(),
                new Class[] {onePhaseXAResourceCls},
                new InvocationHandler() {
                    @Override public Object invoke(Object proxy, Method mtd, Object[] args) throws Throwable {
                        return mtd.invoke(xaRes, args);
                    }
                });

            return tx.enlistResource((XAResource)ibmProxy);
        }

        /** {@inheritDoc} */
        @Override public int getStatus() throws SystemException {
            return tx.getStatus();
        }

        /** {@inheritDoc} */
        @Override public void registerSynchronization(Synchronization sync) throws RollbackException,
            IllegalStateException, SystemException {
            tx.registerSynchronization(sync);
        }

        /** {@inheritDoc} */
        @Override public void rollback() throws IllegalStateException, SystemException {
            tx.rollback();
        }

        /** {@inheritDoc} */
        @Override public void setRollbackOnly() throws IllegalStateException, SystemException {
            tx.setRollbackOnly();
        }
    }
}
package com.michaelhradek.aurkitu.plugin.core.output;

import com.michaelhradek.aurkitu.plugin.Config;
import com.michaelhradek.aurkitu.plugin.core.Comparators;
import com.michaelhradek.aurkitu.plugin.core.Validator;
import com.michaelhradek.aurkitu.plugin.core.output.components.Namespace;
import com.michaelhradek.aurkitu.plugin.core.parsing.ClasspathReference;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.Setter;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;

import java.util.*;

/**
 * In-memory model of a FlatBuffers IDL schema. Declarations, constants, includes and
 * attributes are accumulated via the add* methods and {@link #toString()} renders the
 * complete {@code .fbs} text.
 *
 * @author m.hradek
 */
@Getter
@Setter
@EqualsAndHashCode
public class Schema {

    // IDL values
    private String name;
    private String fileIdentifier;
    private String fileExtension;
    private Namespace namespace;
    private String rootType;
    private List<EnumDeclaration> enumDeclarations;
    private List<TypeDeclaration> typeDeclarations;
    private Set<String> includes;
    private List<String> attributes;
    private List<Constant<Integer>> integerConstants;
    private List<Constant<Float>> floatConstants;

    // Aurkitu values
    private boolean generateVersion;
    private Boolean isValid;
    private Validator validator;
    private boolean isDependency;
    private boolean isEmpty;

    // Classpath references used to create this schema
    private List<ClasspathReference> classpathReferenceList;

    public Schema() {
        enumDeclarations = new ArrayList<>();
        typeDeclarations = new ArrayList<>();
        includes = new HashSet<>();
        attributes = new ArrayList<>();
        integerConstants = new ArrayList<>();
        floatConstants = new ArrayList<>();
        classpathReferenceList = new ArrayList<>();
    }

    /**
     * @param isEmpty If the schema had no classes to review from the classpath. We can check this
     *         also be reviewing the various lists but that could be a false negative/positive.
     */
    public void isEmpty(boolean isEmpty) {
        this.isEmpty = isEmpty;
    }

    /**
     * @param input Add an enum declaration to the schema (ignored if already present)
     */
    public void addEnumDeclaration(EnumDeclaration input) {
        if (!enumDeclarations.contains(input)) {
            enumDeclarations.add(input);
        }
    }

    /**
     * @param input Add a type (i.e. class) declaration to the schema (ignored if already present)
     */
    public void addTypeDeclaration(TypeDeclaration input) {
        if (!typeDeclarations.contains(input)) {
            typeDeclarations.add(input);
        }
    }

    /**
     * @param input Add another schema to include within this schema
     */
    public void addInclude(String input) {
        includes.add(input);
    }

    /**
     * @param input Add an attribute to the schema
     */
    public void addAttribute(String input) {
        attributes.add(input);
    }

    /**
     * @param input Add an integer constant to the schema
     */
    public void addIntegerConstant(Constant<Integer> input) {
        integerConstants.add(input);
    }

    /**
     * @param input Add a float constant to the schema
     */
    public void addFloatConstant(Constant<Float> input) {
        floatConstants.add(input);
    }

    /**
     * @param input Set the 4 character file identifier. Cleared when blank; silently ignored
     *         when the length is not exactly 4. Stored upper-cased.
     */
    public void setFileIdentifier(String input) {
        if (StringUtils.isEmpty(input)) {
            fileIdentifier = null;
            return;
        }

        if (input.length() != 4) {
            return;
        }

        fileIdentifier = input.toUpperCase();
    }

    /**
     * @param input Set the file extension. Default is {@link Config#FILE_EXTENSION}. Cleared
     *         when blank. Stored lower-cased.
     */
    public void setFileExtension(String input) {
        if (StringUtils.isEmpty(input)) {
            fileExtension = null;
            return;
        }

        fileExtension = input.toLowerCase();
    }

    /**
     * @param input The namespace for the schema. Dashes are replaced with underscores - otherwise
     *         flatc compilation will fail.
     */
    public void setNamespace(String input) {
        this.namespace = Namespace.parse(input);
    }

    /**
     * @param namespace The namespace. See {@link Schema#setNamespace(String)}
     */
    public void setNamespace(Namespace namespace) {
        this.namespace = namespace;
    }

    /**
     * Renders the full FlatBuffers {@code .fbs} schema text: intro comment, includes,
     * attributes, constants, namespace, declarations, root type, file identifier/extension
     * and any validator error comments.
     */
    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder(Config.SCHEMA_INTRO_COMMENT);
        builder.append(System.lineSeparator());

        if (generateVersion) {
            builder.append(Config.SCHEMA_VERSION_COMMENT);
            builder.append(System.lineSeparator());
        }

        builder.append(System.lineSeparator());

        if (!CollectionUtils.isEmpty(includes)) {
            // Sort for deterministic output: `includes` is a HashSet with no defined
            // iteration order, while every other section below is sorted before rendering.
            List<String> sortedIncludes = new ArrayList<>(includes);
            sortedIncludes.sort(Comparators.STRING_LIST);

            for (String include : sortedIncludes) {
                builder.append("include \"");
                builder.append(include);
                builder.append(".");
                builder.append(Config.FILE_EXTENSION);
                // NOTE(review): the previous endsWith(";") conditional appended the exact
                // same "\";" text on both branches, so it was dead logic; one append suffices.
                builder.append("\";");
                builder.append(System.lineSeparator());
            }

            builder.append(System.lineSeparator());
        }

        if (!CollectionUtils.isEmpty(attributes)) {
            attributes.sort(Comparators.STRING_LIST);

            for (String attribute : attributes) {
                builder.append("attribute \"");
                builder.append(attribute);
                builder.append("\"");
                builder.append(";");
                builder.append(System.lineSeparator());
            }

            builder.append(System.lineSeparator());
        }

        if (!CollectionUtils.isEmpty(integerConstants)) {
            integerConstants.sort(Comparators.CONSTANT_DECLARATION);

            for (Constant<Integer> constant : integerConstants) {
                builder.append("int ");
                builder.append(constant.name);
                builder.append(" ");
                builder.append(constant.value);
                builder.append(";");
                builder.append(System.lineSeparator());
            }

            builder.append(System.lineSeparator());
        }

        if (!CollectionUtils.isEmpty(floatConstants)) {
            floatConstants.sort(Comparators.CONSTANT_DECLARATION);

            for (Constant<Float> constant : floatConstants) {
                builder.append("float ");
                builder.append(constant.name);
                builder.append(" ");
                builder.append(constant.value);
                builder.append(";");
                builder.append(System.lineSeparator());
            }

            builder.append(System.lineSeparator());
        }

        if (!Namespace.isEmpty(namespace)) {
            builder.append("namespace ");

            final String outputNamespace = namespace.toString();
            builder.append(outputNamespace);

            // Guard against a namespace that already carries its terminating semicolon.
            if (!outputNamespace.endsWith(";")) {
                builder.append(";");
            }

            builder.append(System.lineSeparator());
            builder.append(System.lineSeparator());
        }

        enumDeclarations.sort(Comparators.ENUM_DECLARATION);
        for (EnumDeclaration enumD : enumDeclarations) {
            builder.append(enumD.toString());
        }

        typeDeclarations.sort(Comparators.TYPE_DECLARATION);
        for (TypeDeclaration typeD : typeDeclarations) {
            builder.append(typeD.toString());
        }

        if (rootType != null) {
            builder.append("root_type ");
            builder.append(rootType);
            builder.append(";");
            builder.append(System.lineSeparator());
            builder.append(System.lineSeparator());
        }

        if (fileIdentifier != null) {
            builder.append("file_identifier ");
            builder.append("\"");
            builder.append(fileIdentifier);
            builder.append("\"");
            builder.append(";");
            builder.append(System.lineSeparator());
            builder.append(System.lineSeparator());
        }

        if (fileExtension != null) {
            builder.append("file_extension ");
            builder.append("\"");
            builder.append(fileExtension);
            builder.append("\"");
            builder.append(";");
            builder.append(System.lineSeparator());
            builder.append(System.lineSeparator());
        }

        if (isValid != null) {
            builder.append(validator.getErrorComments());
        }

        String result = builder.toString();

        if (generateVersion) {
            // The version placeholder is replaced with a hash of the rendered text so the
            // "version" changes whenever the schema content changes.
            return result.replace(Config.SCHEMA_VERSION_PLACEHOLDER,
                    Integer.toHexString(result.hashCode()));
        }

        return result;
    }

    /**
     * Named numeric constant declared at schema level.
     *
     * @param <T> A class which contains the name, value, and options used to define Numbers at
     *         the schema level
     */
    public static class Constant<T extends Number> {
        public String name;
        public T value;
        public Map<String, String> options = new HashMap<>();
    }
}
/*
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
 *
 * Copyright (c) 1997-2010 Oracle and/or its affiliates. All rights reserved.
 *
 * The contents of this file are subject to the terms of either the GNU
 * General Public License Version 2 only ("GPL") or the Common Development
 * and Distribution License("CDDL") (collectively, the "License").  You
 * may not use this file except in compliance with the License.  You can
 * obtain a copy of the License at
 * https://glassfish.dev.java.net/public/CDDL+GPL_1_1.html
 * or packager/legal/LICENSE.txt.  See the License for the specific
 * language governing permissions and limitations under the License.
 *
 * When distributing the software, include this License Header Notice in each
 * file and include the License file at packager/legal/LICENSE.txt.
 *
 * GPL Classpath Exception:
 * Oracle designates this particular file as subject to the "Classpath"
 * exception as provided by Oracle in the GPL Version 2 section of the License
 * file that accompanied this code.
 *
 * Modifications:
 * If applicable, add the following below the License Header, with the fields
 * enclosed by brackets [] replaced by your own identifying information:
 * "Portions Copyright [year] [name of copyright owner]"
 *
 * Contributor(s):
 * If you wish your version of this file to be governed by only the CDDL or
 * only the GPL Version 2, indicate your decision by adding "[Contributor]
 * elects to include this software in this distribution under the [CDDL or GPL
 * Version 2] license."  If you don't indicate a single choice of license, a
 * recipient has the option to distribute your version of this file under
 * either the CDDL, the GPL Version 2 or to extend the choice of license to
 * its licensees as provided above.  However, if you add GPL Version 2 code
 * and therefore, elected the GPL Version 2 license, then the option applies
 * only if the new code is made subject to such option by the copyright
 * holder.
 */

package com.sun.codemodel.fmt;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.net.URL;
import java.text.ParseException;
import java.util.Iterator;
import java.util.List;

import com.sun.codemodel.JClass;
import com.sun.codemodel.JPackage;
import com.sun.codemodel.JResourceFile;
import com.sun.codemodel.JTypeVar;

/**
 * Statically generated Java source file.
 *
 * <p>
 * This {@link JResourceFile} implementation will generate a Java source
 * file by copying the source code from a resource.
 * <p>
 * While copying a resource, we look for a package declaration and
 * replace it with the target package name. This allows the static Java
 * source code to have an arbitrary package declaration.
 * <p>
 * You can also use the getJClass method to obtain a {@link JClass}
 * object that represents the static file. This allows the client code
 * to refer to the class from other CodeModel generated code.
 * <p>
 * Note that because we don't parse the static Java source code,
 * the returned {@link JClass} object doesn't respond to methods like
 * "isInterface" or "_extends".
 *
 * @author
 *     Kohsuke Kawaguchi (kohsuke.kawaguchi@sun.com)
 */
public final class JStaticJavaFile extends JResourceFile {

    /** Package into which the copied source is generated. */
    private final JPackage pkg;
    /** Simple name of the generated class. */
    private final String className;
    /** Location of the source code to copy. */
    private final URL source;
    /** Lazily-shared {@link JClass} view of the generated class. */
    private final JStaticClass clazz;
    /** Optional user-supplied filter applied to every copied line (may be null). */
    private final LineFilter filter;

    public JStaticJavaFile(JPackage _pkg, String className, String _resourceName) {
        this( _pkg, className,
            SecureLoader.getClassClassLoader(JStaticJavaFile.class).getResource(_resourceName), null );
    }

    public JStaticJavaFile(JPackage _pkg, String _className, URL _source, LineFilter _filter ) {
        super(_className+".java");
        if(_source==null)   throw new NullPointerException();
        this.pkg = _pkg;
        this.clazz = new JStaticClass();
        this.className = _className;
        this.source = _source;
        this.filter = _filter;
    }

    /**
     * Returns a class object that represents a statically generated code.
     */
    public final JClass getJClass() {
        return clazz;
    }

    protected boolean isResource() {
        return false;
    }

    protected  void build(OutputStream os) throws IOException {
        InputStream is = source.openStream();

        // NOTE(review): no explicit charset — reads/writes use the platform default,
        // matching the original behavior; confirm UTF-8 is not required here.
        BufferedReader r = new BufferedReader(new InputStreamReader(is));
        PrintWriter w = new PrintWriter(new BufferedWriter(new OutputStreamWriter(os)));
        // Local renamed from "filter" to avoid shadowing the field of the same name.
        LineFilter lineFilter = createLineFilter();
        int lineNumber=1;
        try {
            String line;
            while((line=r.readLine())!=null) {
                line = lineFilter.process(line);
                if(line!=null)
                    w.println(line);
                lineNumber++;
            }
        } catch( ParseException e ) {
            IOException ioe = new IOException(
                "unable to process "+source+" line:"+lineNumber+"\n"+e.getMessage());
            // Preserve the original failure for diagnosis instead of discarding it.
            ioe.initCause(e);
            throw ioe;
        } finally {
            // Close in finally so the reader (and the underlying URL stream) is released
            // even when copying fails; closing the writer also flushes it.
            w.close();
            r.close();
        }
    }

    /**
     * Creates a {@link LineFilter}.
     * <p>
     * A derived class can override this method to process
     * the contents of the source file.
     */
    private LineFilter createLineFilter() {
        // this filter replaces the package declaration.
        LineFilter f = new LineFilter() {
            public String process(String line) {
                if(!line.startsWith("package ")) return line;

                // replace package decl
                if( pkg.isUnnamed() )
                    return null;
                else
                    return "package "+pkg.name()+";";
            }
        };
        // Chain the user-supplied filter (if any) before the package-rewriting filter.
        if( filter!=null )
            return new ChainFilter(filter,f);
        else
            return f;
    }

    /**
     * Filter that alters the Java source code.
     * <p>
     * By implementing this interface, derived classes
     * can modify the Java source file before it's written out.
     */
    public interface LineFilter {
        /**
         * @param line
         *      a non-null valid String that corresponds to one line.
         *      No '\n' included.
         * @return
         *      null to strip the line off. Otherwise the returned
         *      String will be written out. Do not add '\n' at the end
         *      of this string.
         *
         * @exception ParseException
         *      when for some reason there's an error in the line.
         */
        String process(String line) throws ParseException;
    }

    /**
     * A {@link LineFilter} that combines two {@link LineFilter}s.
     */
    public final static class ChainFilter implements LineFilter {
        private final LineFilter first,second;
        public ChainFilter( LineFilter first, LineFilter second ) {
            this.first=first;
            this.second=second;
        }
        public String process(String line) throws ParseException {
            // Apply the first filter; a null (stripped) line short-circuits the second.
            line = first.process(line);
            if(line==null)  return null;
            return second.process(line);
        }
    }

    /**
     * {@link JClass} facade over the statically copied class. Non-static because it
     * derives its name and package from the enclosing file.
     */
    private class JStaticClass extends JClass {

        private final JTypeVar[] typeParams;

        JStaticClass() {
            super(pkg.owner());
            // TODO: allow those to be specified
            typeParams = new JTypeVar[0];
        }

        public String name() {
            return className;
        }

        public String fullName() {
            if(pkg.isUnnamed())
                return className;
            else
                return pkg.name()+'.'+className;
        }

        public JPackage _package() {
            return pkg;
        }

        public JClass _extends() {
            throw new UnsupportedOperationException();
        }

        public Iterator<JClass> _implements() {
            throw new UnsupportedOperationException();
        }

        public boolean isInterface() {
            throw new UnsupportedOperationException();
        }

        public boolean isAbstract() {
            throw new UnsupportedOperationException();
        }

        public JTypeVar[] typeParams() {
            return typeParams;
        }

        protected JClass substituteParams(JTypeVar[] variables, List<JClass> bindings) {
            return this;
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.cassandra.service.reads.repair;

import java.net.UnknownHostException;
import java.util.Random;

import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

import org.apache.cassandra.SchemaLoader;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.db.ReadCommand;
import org.apache.cassandra.db.SinglePartitionReadCommand;
import org.apache.cassandra.db.Slices;
import org.apache.cassandra.db.filter.ClusteringIndexSliceFilter;
import org.apache.cassandra.db.filter.ColumnFilter;
import org.apache.cassandra.db.filter.DataLimits;
import org.apache.cassandra.db.filter.RowFilter;
import org.apache.cassandra.locator.InetAddressAndPort;
import org.apache.cassandra.metrics.TableMetrics;
import org.apache.cassandra.schema.Schema;
import org.apache.cassandra.schema.TableMetadata;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.FBUtilities;

import static org.junit.Assert.assertEquals;

/**
 * Tests for {@link RepairedDataVerifier.SimpleVerifier}: records per-replica repaired-data
 * digests in a {@link RepairedDataTracker} and asserts how verification moves the table's
 * confirmed/unconfirmed repaired-inconsistency metrics. Because those metrics are global
 * counters, each test snapshots them first and asserts on the delta.
 */
public class RepairedDataVerifierTest
{
    private static final String TEST_NAME = "read_command_vh_test_";
    private static final String KEYSPACE = TEST_NAME + "cql_keyspace";
    private static final String TABLE = "table1";

    private final Random random = new Random();
    private TableMetadata metadata;
    private TableMetrics metrics;

    // counter to generate the last byte of peer addresses
    private int addressSuffix = 10;

    @BeforeClass
    public static void init()
    {
        // Bootstrap the schema and enable reporting of unconfirmed mismatches so the
        // unconfirmed counter is actually incremented by verify().
        SchemaLoader.loadSchema();
        SchemaLoader.schemaDefinition(TEST_NAME);
        DatabaseDescriptor.reportUnconfirmedRepairedDataMismatches(true);
    }

    @Before
    public void setup()
    {
        metadata = Schema.instance.getTableMetadata(KEYSPACE, TABLE);
        metrics = ColumnFamilyStore.metricsFor(metadata.id);
    }

    // Mismatching digests where only some replicas are conclusive: counted as unconfirmed.
    @Test
    public void repairedDataMismatchWithSomeConclusive()
    {
        long confirmedCount = confirmedCount();
        long unconfirmedCount = unconfirmedCount();
        InetAddressAndPort peer1 = peer();
        InetAddressAndPort peer2 = peer();
        RepairedDataVerifier.SimpleVerifier verifier = new RepairedDataVerifier.SimpleVerifier(command(key()));
        RepairedDataTracker tracker = new RepairedDataTracker(verifier);
        tracker.recordDigest(peer1, ByteBufferUtil.bytes("digest1"), false);
        tracker.recordDigest(peer2, ByteBufferUtil.bytes("digest2"), true);

        tracker.verify();
        assertEquals(confirmedCount, confirmedCount());
        assertEquals(unconfirmedCount + 1 , unconfirmedCount());
    }

    // Mismatching digests with no conclusive replicas: still only unconfirmed.
    @Test
    public void repairedDataMismatchWithNoneConclusive()
    {
        long confirmedCount = confirmedCount();
        long unconfirmedCount = unconfirmedCount();
        InetAddressAndPort peer1 = peer();
        InetAddressAndPort peer2 = peer();
        RepairedDataVerifier.SimpleVerifier verifier = new RepairedDataVerifier.SimpleVerifier(command(key()));
        RepairedDataTracker tracker = new RepairedDataTracker(verifier);
        tracker.recordDigest(peer1, ByteBufferUtil.bytes("digest1"), false);
        tracker.recordDigest(peer2, ByteBufferUtil.bytes("digest2"), false);

        tracker.verify();
        assertEquals(confirmedCount, confirmedCount());
        assertEquals(unconfirmedCount + 1 , unconfirmedCount());
    }

    // Mismatching digests where all replicas are conclusive: counted as confirmed.
    @Test
    public void repairedDataMismatchWithAllConclusive()
    {
        long confirmedCount = confirmedCount();
        long unconfirmedCount = unconfirmedCount();
        InetAddressAndPort peer1 = peer();
        InetAddressAndPort peer2 = peer();
        RepairedDataVerifier.SimpleVerifier verifier = new RepairedDataVerifier.SimpleVerifier(command(key()));
        RepairedDataTracker tracker = new RepairedDataTracker(verifier);
        tracker.recordDigest(peer1, ByteBufferUtil.bytes("digest1"), true);
        tracker.recordDigest(peer2, ByteBufferUtil.bytes("digest2"), true);

        tracker.verify();
        assertEquals(confirmedCount + 1, confirmedCount());
        assertEquals(unconfirmedCount, unconfirmedCount());
    }

    // Matching digests never increment either counter, regardless of conclusiveness.
    @Test
    public void repairedDataMatchesWithAllConclusive()
    {
        long confirmedCount = confirmedCount();
        long unconfirmedCount = unconfirmedCount();
        InetAddressAndPort peer1 = peer();
        InetAddressAndPort peer2 = peer();
        RepairedDataVerifier.SimpleVerifier verifier = new RepairedDataVerifier.SimpleVerifier(command(key()));
        RepairedDataTracker tracker = new RepairedDataTracker(verifier);
        tracker.recordDigest(peer1, ByteBufferUtil.bytes("digest1"), true);
        tracker.recordDigest(peer2, ByteBufferUtil.bytes("digest1"), true);

        tracker.verify();
        assertEquals(confirmedCount, confirmedCount());
        assertEquals(unconfirmedCount, unconfirmedCount());
    }

    @Test
    public void repairedDataMatchesWithSomeConclusive()
    {
        long confirmedCount = confirmedCount();
        long unconfirmedCount = unconfirmedCount();
        InetAddressAndPort peer1 = peer();
        InetAddressAndPort peer2 = peer();
        RepairedDataVerifier.SimpleVerifier verifier = new RepairedDataVerifier.SimpleVerifier(command(key()));
        RepairedDataTracker tracker = new RepairedDataTracker(verifier);
        tracker.recordDigest(peer1, ByteBufferUtil.bytes("digest1"), true);
        tracker.recordDigest(peer2, ByteBufferUtil.bytes("digest1"), false);

        tracker.verify();
        assertEquals(confirmedCount, confirmedCount());
        assertEquals(unconfirmedCount, unconfirmedCount());
    }

    @Test
    public void repairedDataMatchesWithNoneConclusive()
    {
        long confirmedCount = confirmedCount();
        long unconfirmedCount = unconfirmedCount();
        InetAddressAndPort peer1 = peer();
        InetAddressAndPort peer2 = peer();
        RepairedDataVerifier.SimpleVerifier verifier = new RepairedDataVerifier.SimpleVerifier(command(key()));
        RepairedDataTracker tracker = new RepairedDataTracker(verifier);
        tracker.recordDigest(peer1, ByteBufferUtil.bytes("digest1"), false);
        tracker.recordDigest(peer2, ByteBufferUtil.bytes("digest1"), false);

        tracker.verify();
        assertEquals(confirmedCount, confirmedCount());
        assertEquals(unconfirmedCount, unconfirmedCount());
    }

    @Test
    public void allEmptyDigestWithAllConclusive()
    {
        // if a read didn't touch any repaired sstables, digests will be empty
        long confirmedCount = confirmedCount();
        long unconfirmedCount = unconfirmedCount();
        InetAddressAndPort peer1 = peer();
        InetAddressAndPort peer2 = peer();
        RepairedDataVerifier.SimpleVerifier verifier = new RepairedDataVerifier.SimpleVerifier(command(key()));
        RepairedDataTracker tracker = new RepairedDataTracker(verifier);
        tracker.recordDigest(peer1, ByteBufferUtil.EMPTY_BYTE_BUFFER, true);
        tracker.recordDigest(peer2, ByteBufferUtil.EMPTY_BYTE_BUFFER, true);

        tracker.verify();
        assertEquals(confirmedCount, confirmedCount());
        assertEquals(unconfirmedCount, unconfirmedCount());
    }

    @Test
    public void allEmptyDigestsWithSomeConclusive()
    {
        // if a read didn't touch any repaired sstables, digests will be empty
        long confirmedCount = confirmedCount();
        long unconfirmedCount = unconfirmedCount();
        InetAddressAndPort peer1 = peer();
        InetAddressAndPort peer2 = peer();
        RepairedDataVerifier.SimpleVerifier verifier = new RepairedDataVerifier.SimpleVerifier(command(key()));
        RepairedDataTracker tracker = new RepairedDataTracker(verifier);
        tracker.recordDigest(peer1, ByteBufferUtil.EMPTY_BYTE_BUFFER, true);
        tracker.recordDigest(peer2, ByteBufferUtil.EMPTY_BYTE_BUFFER, false);

        tracker.verify();
        assertEquals(confirmedCount, confirmedCount());
        assertEquals(unconfirmedCount, unconfirmedCount());
    }

    @Test
    public void allEmptyDigestsWithNoneConclusive()
    {
        // if a read didn't touch any repaired sstables, digests will be empty
        long confirmedCount = confirmedCount();
        long unconfirmedCount = unconfirmedCount();
        InetAddressAndPort peer1 = peer();
        InetAddressAndPort peer2 = peer();
        RepairedDataVerifier.SimpleVerifier verifier = new RepairedDataVerifier.SimpleVerifier(command(key()));
        RepairedDataTracker tracker = new RepairedDataTracker(verifier);
        tracker.recordDigest(peer1, ByteBufferUtil.EMPTY_BYTE_BUFFER, false);
        tracker.recordDigest(peer2, ByteBufferUtil.EMPTY_BYTE_BUFFER, false);

        tracker.verify();
        assertEquals(confirmedCount, confirmedCount());
        assertEquals(unconfirmedCount, unconfirmedCount());
    }

    @Test
    public void noTrackingDataRecorded()
    {
        // if a read didn't land on any replicas which support repaired data tracking, nothing will be recorded
        long confirmedCount = confirmedCount();
        long unconfirmedCount = unconfirmedCount();
        RepairedDataVerifier.SimpleVerifier verifier = new RepairedDataVerifier.SimpleVerifier(command(key()));
        RepairedDataTracker tracker = new RepairedDataTracker(verifier);

        tracker.verify();
        assertEquals(confirmedCount, confirmedCount());
        assertEquals(unconfirmedCount, unconfirmedCount());
    }

    // Current value of the confirmed repaired-inconsistency counter for the test table.
    private long confirmedCount()
    {
        return metrics.confirmedRepairedInconsistencies.table.getCount();
    }

    // Current value of the unconfirmed repaired-inconsistency counter for the test table.
    private long unconfirmedCount()
    {
        return metrics.unconfirmedRepairedInconsistencies.table.getCount();
    }

    // Fabricates a unique loopback peer address (127.0.0.N) per call.
    private InetAddressAndPort peer()
    {
        try
        {
            return InetAddressAndPort.getByAddress(new byte[]{ 127, 0, 0, (byte) addressSuffix++ });
        }
        catch (UnknownHostException e)
        {
            throw new RuntimeException(e);
        }
    }

    private int key()
    {
        return random.nextInt();
    }

    private ReadCommand command(int key)
    {
        return new StubReadCommand(key, metadata, false);
    }

    /**
     * Minimal single-partition read command; only its identity matters to the verifier,
     * so all filters/limits are the permissive defaults.
     */
    private static class StubReadCommand extends SinglePartitionReadCommand
    {
        StubReadCommand(int key, TableMetadata metadata, boolean isDigest)
        {
            super(isDigest,
                  0,
                  false,
                  metadata,
                  FBUtilities.nowInSeconds(),
                  ColumnFilter.all(metadata),
                  RowFilter.NONE,
                  DataLimits.NONE,
                  metadata.partitioner.decorateKey(ByteBufferUtil.bytes(key)),
                  new ClusteringIndexSliceFilter(Slices.ALL, false),
                  null);
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.accumulo.core.clientImpl; import static com.google.common.base.Preconditions.checkArgument; import static java.nio.charset.StandardCharsets.UTF_8; import static java.util.concurrent.TimeUnit.SECONDS; import static org.apache.accumulo.core.util.Validators.EXISTING_NAMESPACE_NAME; import static org.apache.accumulo.core.util.Validators.NEW_NAMESPACE_NAME; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; import java.util.stream.Collectors; import org.apache.accumulo.core.client.AccumuloException; import org.apache.accumulo.core.client.AccumuloSecurityException; import org.apache.accumulo.core.client.IteratorSetting; import org.apache.accumulo.core.client.NamespaceExistsException; import org.apache.accumulo.core.client.NamespaceNotEmptyException; import org.apache.accumulo.core.client.NamespaceNotFoundException; import org.apache.accumulo.core.client.TableExistsException; import org.apache.accumulo.core.client.TableNotFoundException; import 
org.apache.accumulo.core.client.admin.TableOperations; import org.apache.accumulo.core.clientImpl.thrift.SecurityErrorCode; import org.apache.accumulo.core.clientImpl.thrift.ThriftSecurityException; import org.apache.accumulo.core.clientImpl.thrift.ThriftTableOperationException; import org.apache.accumulo.core.data.NamespaceId; import org.apache.accumulo.core.data.constraints.Constraint; import org.apache.accumulo.core.iterators.IteratorUtil.IteratorScope; import org.apache.accumulo.core.iterators.SortedKeyValueIterator; import org.apache.accumulo.core.manager.thrift.FateOperation; import org.apache.accumulo.core.trace.TraceUtil; import org.apache.accumulo.core.util.LocalityGroupUtil; import org.apache.accumulo.core.util.LocalityGroupUtil.LocalityGroupConfigurationError; import org.apache.accumulo.core.util.OpTimer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class NamespaceOperationsImpl extends NamespaceOperationsHelper { private final ClientContext context; private TableOperationsImpl tableOps; private static final Logger log = LoggerFactory.getLogger(TableOperations.class); public NamespaceOperationsImpl(ClientContext context, TableOperationsImpl tableOps) { checkArgument(context != null, "context is null"); this.context = context; this.tableOps = tableOps; } @Override public SortedSet<String> list() { OpTimer timer = null; if (log.isTraceEnabled()) { log.trace("tid={} Fetching list of namespaces...", Thread.currentThread().getId()); timer = new OpTimer().start(); } TreeSet<String> namespaces = new TreeSet<>(Namespaces.getNameToIdMap(context).keySet()); if (timer != null) { timer.stop(); log.trace("tid={} Fetched {} namespaces in {}", Thread.currentThread().getId(), namespaces.size(), String.format("%.3f secs", timer.scale(SECONDS))); } return namespaces; } @Override public boolean exists(String namespace) { EXISTING_NAMESPACE_NAME.validate(namespace); OpTimer timer = null; if (log.isTraceEnabled()) { log.trace("tid={} Checking if namespace 
{} exists", Thread.currentThread().getId(), namespace); timer = new OpTimer().start(); } boolean exists = Namespaces.namespaceNameExists(context, namespace); if (timer != null) { timer.stop(); log.trace("tid={} Checked existence of {} in {}", Thread.currentThread().getId(), exists, String.format("%.3f secs", timer.scale(SECONDS))); } return exists; } @Override public void create(String namespace) throws AccumuloException, AccumuloSecurityException, NamespaceExistsException { NEW_NAMESPACE_NAME.validate(namespace); try { doNamespaceFateOperation(FateOperation.NAMESPACE_CREATE, Arrays.asList(ByteBuffer.wrap(namespace.getBytes(UTF_8))), Collections.emptyMap(), namespace); } catch (NamespaceNotFoundException e) { // should not happen throw new AssertionError(e); } } @Override public void delete(String namespace) throws AccumuloException, AccumuloSecurityException, NamespaceNotFoundException, NamespaceNotEmptyException { EXISTING_NAMESPACE_NAME.validate(namespace); NamespaceId namespaceId = Namespaces.getNamespaceId(context, namespace); if (namespaceId.equals(Namespace.ACCUMULO.id()) || namespaceId.equals(Namespace.DEFAULT.id())) { Credentials credentials = context.getCredentials(); log.debug("{} attempted to delete the {} namespace", credentials.getPrincipal(), namespaceId); throw new AccumuloSecurityException(credentials.getPrincipal(), SecurityErrorCode.UNSUPPORTED_OPERATION); } if (!Namespaces.getTableIds(context, namespaceId).isEmpty()) { throw new NamespaceNotEmptyException(namespaceId.canonical(), namespace, null); } List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(namespace.getBytes(UTF_8))); Map<String,String> opts = new HashMap<>(); try { doNamespaceFateOperation(FateOperation.NAMESPACE_DELETE, args, opts, namespace); } catch (NamespaceExistsException e) { // should not happen throw new AssertionError(e); } } @Override public void rename(String oldNamespaceName, String newNamespaceName) throws AccumuloSecurityException, NamespaceNotFoundException, 
AccumuloException, NamespaceExistsException { EXISTING_NAMESPACE_NAME.validate(oldNamespaceName); NEW_NAMESPACE_NAME.validate(newNamespaceName); List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(oldNamespaceName.getBytes(UTF_8)), ByteBuffer.wrap(newNamespaceName.getBytes(UTF_8))); Map<String,String> opts = new HashMap<>(); doNamespaceFateOperation(FateOperation.NAMESPACE_RENAME, args, opts, oldNamespaceName); } @Override public void setProperty(final String namespace, final String property, final String value) throws AccumuloException, AccumuloSecurityException, NamespaceNotFoundException { EXISTING_NAMESPACE_NAME.validate(namespace); checkArgument(property != null, "property is null"); checkArgument(value != null, "value is null"); ManagerClient.executeNamespace(context, client -> client.setNamespaceProperty(TraceUtil.traceInfo(), context.rpcCreds(), namespace, property, value)); checkLocalityGroups(namespace, property); } @Override public void removeProperty(final String namespace, final String property) throws AccumuloException, AccumuloSecurityException, NamespaceNotFoundException { EXISTING_NAMESPACE_NAME.validate(namespace); checkArgument(property != null, "property is null"); ManagerClient.executeNamespace(context, client -> client .removeNamespaceProperty(TraceUtil.traceInfo(), context.rpcCreds(), namespace, property)); checkLocalityGroups(namespace, property); } @Override public Map<String,String> getConfiguration(final String namespace) throws AccumuloException, NamespaceNotFoundException { EXISTING_NAMESPACE_NAME.validate(namespace); try { return ServerClient.executeRaw(context, client -> client .getNamespaceConfiguration(TraceUtil.traceInfo(), context.rpcCreds(), namespace)); } catch (ThriftTableOperationException e) { switch (e.getType()) { case NAMESPACE_NOTFOUND: throw new NamespaceNotFoundException(e); case OTHER: default: throw new AccumuloException(e.description, e); } } catch (AccumuloException e) { throw e; } catch (Exception e) { throw new 
AccumuloException(e); } } @Override public Map<String,String> namespaceIdMap() { return Namespaces.getNameToIdMap(context).entrySet().stream() .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().canonical(), (v1, v2) -> { throw new RuntimeException(String.format("Duplicate key for values %s and %s", v1, v2)); }, TreeMap::new)); } @Override public boolean testClassLoad(final String namespace, final String className, final String asTypeName) throws NamespaceNotFoundException, AccumuloException, AccumuloSecurityException { EXISTING_NAMESPACE_NAME.validate(namespace); checkArgument(className != null, "className is null"); checkArgument(asTypeName != null, "asTypeName is null"); try { return ServerClient.executeRaw(context, client -> client.checkNamespaceClass(TraceUtil.traceInfo(), context.rpcCreds(), namespace, className, asTypeName)); } catch (ThriftTableOperationException e) { switch (e.getType()) { case NAMESPACE_NOTFOUND: throw new NamespaceNotFoundException(e); default: throw new AccumuloException(e.description, e); } } catch (ThriftSecurityException e) { throw new AccumuloSecurityException(e.user, e.code, e); } catch (AccumuloException e) { throw e; } catch (Exception e) { throw new AccumuloException(e); } } @Override public void attachIterator(String namespace, IteratorSetting setting, EnumSet<IteratorScope> scopes) throws AccumuloSecurityException, AccumuloException, NamespaceNotFoundException { // testClassLoad validates the namespace name testClassLoad(namespace, setting.getIteratorClass(), SortedKeyValueIterator.class.getName()); super.attachIterator(namespace, setting, scopes); } @Override public int addConstraint(String namespace, String constraintClassName) throws AccumuloException, AccumuloSecurityException, NamespaceNotFoundException { // testClassLoad validates the namespace name testClassLoad(namespace, constraintClassName, Constraint.class.getName()); return super.addConstraint(namespace, constraintClassName); } private String 
doNamespaceFateOperation(FateOperation op, List<ByteBuffer> args, Map<String,String> opts, String namespace) throws AccumuloSecurityException, AccumuloException, NamespaceExistsException, NamespaceNotFoundException { // caller should validate the namespace name try { return tableOps.doFateOperation(op, args, opts, namespace); } catch (TableExistsException | TableNotFoundException e) { // should not happen throw new AssertionError(e); } } private void checkLocalityGroups(String namespace, String propChanged) throws AccumuloException, NamespaceNotFoundException { EXISTING_NAMESPACE_NAME.validate(namespace); if (LocalityGroupUtil.isLocalityGroupProperty(propChanged)) { Map<String,String> allProps = getConfiguration(namespace); try { LocalityGroupUtil.checkLocalityGroups(allProps); } catch (LocalityGroupConfigurationError | RuntimeException e) { LoggerFactory.getLogger(this.getClass()).warn("Changing '" + propChanged + "' for namespace '" + namespace + "'resulted in bad locality group config. This may be a transient situation since the" + " config spreads over multiple properties. Setting properties in a different order " + "may help. Even though this warning was displayed, the property was updated. Please " + "check your config to ensure consistency.", e); } } } }
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util.ui.table;

import com.intellij.ide.IdeBundle;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.JDOMUtil;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.ui.*;
import com.intellij.ui.table.JBTable;
import com.intellij.ui.table.TableView;
import com.intellij.util.Function;
import com.intellij.util.FunctionUtil;
import com.intellij.util.PlatformIcons;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.CollectionItemEditor;
import com.intellij.util.ui.CollectionModelEditor;
import com.intellij.util.ui.ColumnInfo;
import com.intellij.util.ui.ListTableModel;
import com.intellij.util.xmlb.SkipDefaultValuesSerializationFilters;
import com.intellij.util.xmlb.XmlSerializer;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;
import javax.swing.event.TableModelEvent;
import javax.swing.event.TableModelListener;
import javax.swing.table.TableModel;
import java.awt.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * A table-based editor for a list of items of type {@code T}, built around a {@link TableView}
 * wrapped in a {@link ToolbarDecorator} (add/remove/edit/copy toolbar buttons). Edits are made
 * against mutable working copies managed by the inherited {@code helper}
 * (see {@link CollectionModelEditor}); {@link #apply()} writes the edited values back onto the
 * original items, and {@link #reset(List)} discards pending edits.
 */
public class TableModelEditor<T> extends CollectionModelEditor<T, CollectionItemEditor<T>> {
  // The visual table component displaying `model`.
  private final TableView<T> table;
  // Decorator supplying the add/remove/edit toolbar around `table`; built in the constructor.
  private final ToolbarDecorator toolbarDecorator;
  // Backing table model; keeps its own reference to the item list (see MyListTableModel.items).
  private final MyListTableModel model;

  /** Convenience constructor starting from an empty item list. */
  public TableModelEditor(@NotNull ColumnInfo[] columns, @NotNull CollectionItemEditor<T> itemEditor, @NotNull String emptyText) {
    this(Collections.emptyList(), columns, itemEditor, emptyText);
  }

  /**
   * source will be copied, passed list will not be used directly
   *
   * Implement {@link DialogItemEditor} instead of {@link CollectionItemEditor} if you want provide dialog to edit.
   */
  public TableModelEditor(@NotNull List<T> items, @NotNull ColumnInfo[] columns, @NotNull CollectionItemEditor<T> itemEditor, @NotNull String emptyText) {
    super(itemEditor);

    // Defensive copy: the caller's list is never mutated by this editor.
    model = new MyListTableModel(columns, new ArrayList<>(items));
    table = new TableView<>(model);
    table.setDefaultEditor(Enum.class, ComboBoxTableCellEditor.INSTANCE);
    table.setStriped(true);
    table.setEnableAntialiasing(true);
    preferredScrollableViewportHeightInRows(JBTable.PREFERRED_SCROLLABLE_VIEWPORT_HEIGHT_IN_ROWS);
    // Value intentionally discarded — presumably the speed-search instance attaches itself to
    // the table in its constructor (NOTE(review): confirm against TableSpeedSearch).
    new TableSpeedSearch(table);
    // A nameless boolean first column is rendered as a compact checkbox column.
    ColumnInfo firstColumn = columns[0];
    if ((firstColumn.getColumnClass() == boolean.class || firstColumn.getColumnClass() == Boolean.class) && firstColumn.getName().isEmpty()) {
      TableUtil.setupCheckboxColumn(table.getColumnModel().getColumn(0));
    }

    // Hide the header entirely when no column has a visible name.
    boolean needTableHeader = false;
    for (ColumnInfo column : columns) {
      if (!StringUtil.isEmpty(column.getName())) {
        needTableHeader = true;
        break;
      }
    }
    if (!needTableHeader) {
      table.setTableHeader(null);
    }

    table.getEmptyText().setText(emptyText);
    // One object serves as both the remove action and its enabled-state updater.
    MyRemoveAction removeAction = new MyRemoveAction();
    toolbarDecorator = ToolbarDecorator.createDecorator(table, this).setRemoveAction(removeAction).setRemoveActionUpdater(removeAction);

    // Dialog-based add/edit buttons are only wired up when the editor supports dialogs.
    if (itemEditor instanceof DialogItemEditor) {
      addDialogActions();
    }
  }

  /** Sets the preferred viewport height to {@code rows} rows (fixed 200px width); chainable. */
  @NotNull
  public TableModelEditor<T> preferredScrollableViewportHeightInRows(int rows) {
    table.setPreferredScrollableViewportSize(new Dimension(200, table.getRowHeight() * rows));
    return this;
  }

  // Wires the edit (and optionally add) toolbar buttons to the DialogItemEditor's dialog.
  private void addDialogActions() {
    toolbarDecorator.setEditAction(button -> {
      T item = table.getSelectedObject();
      if (item != null) {
        Function<T, T> mutator;
        if (helper.isMutable(item)) {
          // Already a working copy — edit it in place.
          mutator = FunctionUtil.id();
        }
        else {
          // Selected row must be captured now: the lambda runs later, when the dialog saves.
          final int selectedRow = table.getSelectedRow();
          mutator = item12 -> helper.getMutable(item12, selectedRow);
        }
        ((DialogItemEditor<T>)itemEditor).edit(item, mutator, false);
        // Return focus to the table once the dialog is dismissed.
        IdeFocusManager.getGlobalInstance().doWhenFocusSettlesDown(() -> {
          IdeFocusManager.getGlobalInstance().requestFocus(table, true);
        });
      }
    }).setEditActionUpdater(e -> {
      // Edit button enabled only for a selected, editable item.
      T item = table.getSelectedObject();
      return item != null && ((DialogItemEditor<T>)itemEditor).isEditable(item);
    });

    if (((DialogItemEditor)itemEditor).isUseDialogToAdd()) {
      toolbarDecorator.setAddAction(button -> {
        T item = createElement();
        // The row is only added to the model when the dialog applies the new item.
        ((DialogItemEditor<T>)itemEditor).edit(item, item1 -> {
          model.addRow(item1);
          return item1;
        }, true);
      });
    }
  }

  /** Hides the up/down reorder buttons; chainable. */
  @NotNull
  public TableModelEditor<T> disableUpDownActions() {
    toolbarDecorator.disableUpDownActions();
    return this;
  }

  /** Enables/disables the table component; chainable. */
  @NotNull
  public TableModelEditor<T> enabled(boolean value) {
    table.setEnabled(value);
    return this;
  }

  /**
   * Listener notified (with the column and row) whenever a cell value is actually changed
   * through {@link MyListTableModel#setValueAt}. The inherited {@code tableChanged} is a no-op
   * by default; subclasses implement {@code dataChanged}.
   */
  public static abstract class DataChangedListener<T> implements TableModelListener {
    public abstract void dataChanged(@NotNull ColumnInfo<T, ?> columnInfo, int rowIndex);

    @Override
    public void tableChanged(@NotNull TableModelEvent e) {
    }
  }

  /** Registers the listener both for dataChanged callbacks and as a plain model listener. */
  public TableModelEditor<T> modelListener(@NotNull DataChangedListener<T> listener) {
    model.dataChangedListener = listener;
    model.addTableModelListener(listener);
    return this;
  }

  /** Exposes the underlying list table model. */
  @NotNull
  public ListTableModel<T> getModel() {
    return model;
  }

  /** Item editor that edits items through a modal dialog instead of inline cell editing. */
  public interface DialogItemEditor<T> extends CollectionItemEditor<T> {
    // `mutator` maps the displayed item to the mutable instance the dialog should write into.
    void edit(@NotNull T item, @NotNull Function<T, T> mutator, boolean isAdd);

    // Copies edited state from the working copy (newItem) back onto the original (oldItem).
    void applyEdited(@NotNull T oldItem, @NotNull T newItem);

    default boolean isEditable(@NotNull T item) {
      return true;
    }

    // When true, the add button also goes through the dialog (see addDialogActions).
    default boolean isUseDialogToAdd() {
      return false;
    }
  }

  /**
   * Copies {@code oldItem}'s non-default XML-serializable state onto {@code newItem} and
   * returns {@code newItem}. Helper for implementing clone/applyEdited via XML serialization.
   */
  @NotNull
  public static <T> T cloneUsingXmlSerialization(@NotNull T oldItem, @NotNull T newItem) {
    Element serialized = XmlSerializer.serialize(oldItem, new SkipDefaultValuesSerializationFilters());
    if (!JDOMUtil.isEmpty(serialized)) {
      XmlSerializer.deserializeInto(newItem, serialized);
    }
    return newItem;
  }

  private final class MyListTableModel extends ListTableModel<T> {
    // Shadow copy of the superclass's item list, kept so apply()/getItems() can access and
    // mutate the exact List instance in use (kept in sync by the setItems override below).
    private List<T> items;
    private DataChangedListener<T> dataChangedListener;

    MyListTableModel(@NotNull ColumnInfo[] columns, @NotNull List<T> items) {
      super(columns, items);
      this.items = items;
    }

    @Override
    public void setItems(@NotNull List<T> items) {
      // Keep the local reference in sync with the superclass's list.
      this.items = items;
      super.setItems(items);
    }

    @Override
    public void removeRow(int index) {
      // Unregister the item from the modification-tracking helper before removing the row.
      helper.remove(getItem(index));
      super.removeRow(index);
    }

    @Override
    public void setValueAt(Object newValue, int rowIndex, int columnIndex) {
      if (rowIndex < getRowCount()) {
        @SuppressWarnings("unchecked")
        ColumnInfo<T, Object> column = (ColumnInfo<T, Object>)getColumnInfos()[columnIndex];
        T item = getItem(rowIndex);
        Object oldValue = column.valueOf(item);

        // Only write when the value actually changed; strings get a null-tolerant compare.
        if (column.getColumnClass() == String.class
            ? !Comparing.strEqual(((String)oldValue), ((String)newValue))
            : !Comparing.equal(oldValue, newValue)) {
          // Write into the mutable working copy, not the original item.
          column.setValue(helper.getMutable(item, rowIndex), newValue);
          if (dataChangedListener != null) {
            dataChangedListener.dataChanged(column, rowIndex);
          }
        }
      }
    }
  }

  /** Convenience ColumnInfo base whose cells are always editable. */
  public abstract static class EditableColumnInfo<Item, Aspect> extends ColumnInfo<Item, Aspect> {
    public EditableColumnInfo(@NotNull String name) {
      super(name);
    }

    /** Creates a nameless column (renders without a header title). */
    public EditableColumnInfo() {
      super("");
    }

    @Override
    public boolean isCellEditable(Item item) {
      return true;
    }
  }

  /**
   * Builds the final UI panel: the decorated table plus an extra "Copy" toolbar button that
   * clones each selected item (via the item editor) and appends the clones as new rows.
   */
  @NotNull
  public JComponent createComponent() {
    return toolbarDecorator.addExtraAction(
      new ToolbarDecorator.ElementActionButton(IdeBundle.message("button.copy"), PlatformIcons.COPY_ICON) {
        @Override
        public void actionPerformed(@NotNull AnActionEvent e) {
          // Commit any in-progress cell edit before reading the selection.
          TableUtil.stopEditing(table);
          List<T> selectedItems = table.getSelectedObjects();
          if (selectedItems.isEmpty()) {
            return;
          }

          for (T item : selectedItems) {
            model.addRow(itemEditor.clone(item, false));
          }

          IdeFocusManager.getGlobalInstance().doWhenFocusSettlesDown(() -> {
            IdeFocusManager.getGlobalInstance().requestFocus(table, true);
          });
          TableUtil.updateScroller(table);
        }
      }
    ).createPanel();
  }

  @NotNull
  @Override
  protected List<T> getItems() {
    return model.items;
  }

  /**
   * Selects {@code item} in the table. If the item has a pending modified working copy (tracked
   * by {@code helper}), the copy — which is what the model actually displays — is selected
   * instead of the original.
   */
  public void selectItem(@NotNull final T item) {
    table.clearSelection();

    final Ref<T> ref;
    if (helper.hasModifiedItems()) {
      ref = Ref.create();
      // Identity comparison: find the modified counterpart of this exact original instance.
      helper.process((modified, original) -> {
        if (item == original) {
          ref.set(modified);
        }
        // Keep iterating only while nothing has been found yet.
        return ref.isNull();
      });
    }
    else {
      ref = null;
    }

    table.addSelection(ref == null || ref.isNull() ? item : ref.get());
  }

  /**
   * Commits all pending edits: copies each editable column value (and, for dialog editors, the
   * dialog-edited state) from the working copy back onto the original item, swaps the original
   * back into the model's list by identity, and resets the modification tracker. Returns the
   * model's (now committed) item list.
   */
  @NotNull
  public List<T> apply() {
    if (helper.hasModifiedItems()) {
      @SuppressWarnings("unchecked")
      final ColumnInfo<T, Object>[] columns = model.getColumnInfos();
      helper.process((newItem, oldItem) -> {
        for (ColumnInfo<T, Object> column : columns) {
          if (column.isCellEditable(newItem)) {
            column.setValue(oldItem, column.valueOf(newItem));
          }
        }

        if (itemEditor instanceof DialogItemEditor) {
          ((DialogItemEditor<T>)itemEditor).applyEdited(oldItem, newItem);
        }

        // Replace the working copy with the updated original, matched by identity.
        model.items.set(ContainerUtil.indexOfIdentity(model.items, newItem), oldItem);
        return true;
      });
    }

    helper.reset(model.items);
    return model.items;
  }

  /** Discards pending edits and repopulates the model with a copy of {@code items}. */
  @Override
  public void reset(@NotNull List<T> items) {
    super.reset(items);
    model.setItems(new ArrayList<>(items));
  }

  /**
   * Remove toolbar action. Triple role: performs the removal, reports per-row removability
   * (ItemChecker), and reports whether the button is enabled (updater).
   */
  private class MyRemoveAction implements AnActionButtonRunnable, AnActionButtonUpdater, TableUtil.ItemChecker {
    @Override
    public void run(AnActionButton button) {
      if (TableUtil.doRemoveSelectedItems(table, model, this)) {
        IdeFocusManager.getGlobalInstance().doWhenFocusSettlesDown(() -> {
          IdeFocusManager.getGlobalInstance().requestFocus(table, true);
        });
        TableUtil.updateScroller(table);
      }
    }

    @Override
    public boolean isOperationApplyable(@NotNull TableModel ignored, int row) {
      T item = model.getItem(row);
      return item != null && itemEditor.isRemovable(item);
    }

    @Override
    public boolean isEnabled(@NotNull AnActionEvent e) {
      return areSelectedItemsRemovable(table.getSelectionModel());
    }
  }
}