gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */

package com.google.api.services.youtube.model;

/**
 * Model definition for PlaylistItemListResponse.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the YouTube Data API v3. For a detailed explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class PlaylistItemListResponse extends com.google.api.client.json.GenericJson {

  /**
   * Etag of this resource.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String etag;

  /**
   * Serialized EventId of the request which produced this response.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String eventId;

  /**
   * A list of playlist items that match the request criteria.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<PlaylistItem> items;

  static {
    // hack to force ProGuard to consider PlaylistItem used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(PlaylistItem.class);
  }

  /**
   * Identifies what kind of resource this is. Value: the fixed string
   * "youtube#playlistItemListResponse". Etag of this resource.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String kind;

  /**
   * The token that can be used as the value of the pageToken parameter to retrieve the next page in
   * the result set.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String nextPageToken;

  /**
   * General pagination information.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private PageInfo pageInfo;

  /**
   * The token that can be used as the value of the pageToken parameter to retrieve the previous
   * page in the result set.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String prevPageToken;

  /**
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private TokenPagination tokenPagination;

  /**
   * The visitorId identifies the visitor.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String visitorId;

  /**
   * @return value or {@code null} for none
   */
  public java.lang.String getEtag() {
    return etag;
  }

  /**
   * @param etag etag or {@code null} for none
   */
  public PlaylistItemListResponse setEtag(java.lang.String etag) {
    this.etag = etag;
    return this;
  }

  /**
   * Serialized EventId of the request which produced this response.
   * @return value or {@code null} for none
   */
  public java.lang.String getEventId() {
    return eventId;
  }

  /**
   * Serialized EventId of the request which produced this response.
   * @param eventId eventId or {@code null} for none
   */
  public PlaylistItemListResponse setEventId(java.lang.String eventId) {
    this.eventId = eventId;
    return this;
  }

  /**
   * A list of playlist items that match the request criteria.
   * @return value or {@code null} for none
   */
  public java.util.List<PlaylistItem> getItems() {
    return items;
  }

  /**
   * A list of playlist items that match the request criteria.
   * @param items items or {@code null} for none
   */
  public PlaylistItemListResponse setItems(java.util.List<PlaylistItem> items) {
    this.items = items;
    return this;
  }

  /**
   * Identifies what kind of resource this is. Value: the fixed string
   * "youtube#playlistItemListResponse". Etag of this resource.
   * @return value or {@code null} for none
   */
  public java.lang.String getKind() {
    return kind;
  }

  /**
   * Identifies what kind of resource this is. Value: the fixed string
   * "youtube#playlistItemListResponse". Etag of this resource.
   * @param kind kind or {@code null} for none
   */
  public PlaylistItemListResponse setKind(java.lang.String kind) {
    this.kind = kind;
    return this;
  }

  /**
   * The token that can be used as the value of the pageToken parameter to retrieve the next page in
   * the result set.
   * @return value or {@code null} for none
   */
  public java.lang.String getNextPageToken() {
    return nextPageToken;
  }

  /**
   * The token that can be used as the value of the pageToken parameter to retrieve the next page in
   * the result set.
   * @param nextPageToken nextPageToken or {@code null} for none
   */
  public PlaylistItemListResponse setNextPageToken(java.lang.String nextPageToken) {
    this.nextPageToken = nextPageToken;
    return this;
  }

  /**
   * General pagination information.
   * @return value or {@code null} for none
   */
  public PageInfo getPageInfo() {
    return pageInfo;
  }

  /**
   * General pagination information.
   * @param pageInfo pageInfo or {@code null} for none
   */
  public PlaylistItemListResponse setPageInfo(PageInfo pageInfo) {
    this.pageInfo = pageInfo;
    return this;
  }

  /**
   * The token that can be used as the value of the pageToken parameter to retrieve the previous
   * page in the result set.
   * @return value or {@code null} for none
   */
  public java.lang.String getPrevPageToken() {
    return prevPageToken;
  }

  /**
   * The token that can be used as the value of the pageToken parameter to retrieve the previous
   * page in the result set.
   * @param prevPageToken prevPageToken or {@code null} for none
   */
  public PlaylistItemListResponse setPrevPageToken(java.lang.String prevPageToken) {
    this.prevPageToken = prevPageToken;
    return this;
  }

  /**
   * @return value or {@code null} for none
   */
  public TokenPagination getTokenPagination() {
    return tokenPagination;
  }

  /**
   * @param tokenPagination tokenPagination or {@code null} for none
   */
  public PlaylistItemListResponse setTokenPagination(TokenPagination tokenPagination) {
    this.tokenPagination = tokenPagination;
    return this;
  }

  /**
   * The visitorId identifies the visitor.
   * @return value or {@code null} for none
   */
  public java.lang.String getVisitorId() {
    return visitorId;
  }

  /**
   * The visitorId identifies the visitor.
   * @param visitorId visitorId or {@code null} for none
   */
  public PlaylistItemListResponse setVisitorId(java.lang.String visitorId) {
    this.visitorId = visitorId;
    return this;
  }

  @Override
  public PlaylistItemListResponse set(String fieldName, Object value) {
    return (PlaylistItemListResponse) super.set(fieldName, value);
  }

  @Override
  public PlaylistItemListResponse clone() {
    return (PlaylistItemListResponse) super.clone();
  }

}
/*-
 * See the file LICENSE for redistribution information.
 *
 * Copyright (c) 2002, 2010 Oracle and/or its affiliates.  All rights reserved.
 *
 * $Id$
 */

package com.sleepycat.persist.model;

import java.io.Serializable;
import java.util.Collection;
import java.util.List;
import java.util.Map;

/**
 * The metadata for a persistent class.  A persistent class may be specified
 * with the {@link Entity} or {@link Persistent} annotation.
 *
 * <p>{@code ClassMetadata} objects are thread-safe.  Multiple threads may
 * safely call the methods of a shared {@code ClassMetadata} object.</p>
 *
 * <p>This and other metadata classes are classes rather than interfaces to
 * allow adding properties to the model at a future date without causing
 * incompatibilities.  Any such property will be given a default value and
 * its use will be optional.</p>
 *
 * @author Mark Hayes
 */
public class ClassMetadata implements Serializable {

    private static final long serialVersionUID = -2520207423701776679L;

    private String className;
    private int version;
    private String proxiedClassName;
    private boolean entityClass;
    private PrimaryKeyMetadata primaryKey;
    private Map<String,SecondaryKeyMetadata> secondaryKeys;
    private List<FieldMetadata> compositeKeyFields;
    private Collection<FieldMetadata> persistentFields;

    /**
     * Used by an {@code EntityModel} to construct persistent class metadata.
     * The optional {@link #getPersistentFields} property will be set to null.
     */
    public ClassMetadata(String className,
                         int version,
                         String proxiedClassName,
                         boolean entityClass,
                         PrimaryKeyMetadata primaryKey,
                         Map<String,SecondaryKeyMetadata> secondaryKeys,
                         List<FieldMetadata> compositeKeyFields) {

        /* Delegates to the full constructor with no explicit field list. */
        this(className, version, proxiedClassName, entityClass, primaryKey,
             secondaryKeys, compositeKeyFields, null /*persistentFields*/);
    }

    /**
     * Used by an {@code EntityModel} to construct persistent class metadata.
     */
    public ClassMetadata(String className,
                         int version,
                         String proxiedClassName,
                         boolean entityClass,
                         PrimaryKeyMetadata primaryKey,
                         Map<String,SecondaryKeyMetadata> secondaryKeys,
                         List<FieldMetadata> compositeKeyFields,
                         Collection<FieldMetadata> persistentFields) {

        this.className = className;
        this.version = version;
        this.proxiedClassName = proxiedClassName;
        this.entityClass = entityClass;
        this.primaryKey = primaryKey;
        this.secondaryKeys = secondaryKeys;
        this.compositeKeyFields = compositeKeyFields;
        this.persistentFields = persistentFields;
    }

    /**
     * Returns the name of the persistent class.
     */
    public String getClassName() {
        return className;
    }

    /**
     * Returns the version of this persistent class.  This may be specified
     * using the {@link Entity#version} or {@link Persistent#version}
     * annotation.
     */
    public int getVersion() {
        return version;
    }

    /**
     * Returns the class name of the proxied class if this class is a {@link
     * PersistentProxy}, or null otherwise.
     */
    public String getProxiedClassName() {
        return proxiedClassName;
    }

    /**
     * Returns whether this class is an entity class.
     */
    public boolean isEntityClass() {
        return entityClass;
    }

    /**
     * Returns the primary key metadata for a key declared in this class, or
     * null if none is declared.  This may be specified using the {@link
     * PrimaryKey} annotation.
     */
    public PrimaryKeyMetadata getPrimaryKey() {
        return primaryKey;
    }

    /**
     * Returns an unmodifiable map of key name (which may be different from
     * field name) to secondary key metadata for all secondary keys declared in
     * this class, or null if no secondary keys are declared in this class.
     * This metadata may be specified using {@link SecondaryKey} annotations.
     */
    public Map<String,SecondaryKeyMetadata> getSecondaryKeys() {
        return secondaryKeys;
    }

    /**
     * Returns an unmodifiable list of metadata for the fields making up a
     * composite key, or null if this is not a composite key class.  The
     * order of the fields in the returned list determines their stored order
     * and may be specified using the {@link KeyField} annotation.  When the
     * composite key class does not implement {@link Comparable}, the order of
     * the fields is the relative sort order.
     */
    public List<FieldMetadata> getCompositeKeyFields() {
        return compositeKeyFields;
    }

    /**
     * Returns an unmodifiable list of metadata for the persistent fields in
     * this class, or null if the default rules for persistent fields should be
     * used.  All fields returned must be declared in this class and must be
     * non-static.
     *
     * <p>By default (if null is returned) the persistent fields of a class
     * will be all declared instance fields that are non-transient (are not
     * declared with the <code>transient</code> keyword).  The default rules
     * may be overridden by an {@link EntityModel}.  For example, the {@link
     * AnnotationModel} overrides the default rules when the {@link
     * NotPersistent} or {@link NotTransient} annotation is specified.</p>
     */
    public Collection<FieldMetadata> getPersistentFields() {
        return persistentFields;
    }

    /*
     * NOTE(review): equals and hashCode deliberately(?) exclude the
     * persistentFields property -- the two methods are consistent with each
     * other, but confirm the exclusion is intentional before relying on it.
     */
    @Override
    public boolean equals(Object other) {
        if (other instanceof ClassMetadata) {
            ClassMetadata o = (ClassMetadata) other;
            return version == o.version &&
                   entityClass == o.entityClass &&
                   nullOrEqual(className, o.className) &&
                   nullOrEqual(proxiedClassName, o.proxiedClassName) &&
                   nullOrEqual(primaryKey, o.primaryKey) &&
                   nullOrEqual(secondaryKeys, o.secondaryKeys) &&
                   nullOrEqual(compositeKeyFields, o.compositeKeyFields);
        } else {
            return false;
        }
    }

    @Override
    public int hashCode() {
        return version +
               (entityClass ? 1 : 0) +
               hashCode(className) +
               hashCode(proxiedClassName) +
               hashCode(primaryKey) +
               hashCode(secondaryKeys) +
               hashCode(compositeKeyFields);
    }

    /* Null-safe equality helper shared by the metadata classes. */
    static boolean nullOrEqual(Object o1, Object o2) {
        if (o1 == null) {
            return o2 == null;
        } else {
            return o1.equals(o2);
        }
    }

    /* Null-safe hash helper: null hashes to 0. */
    static int hashCode(Object o) {
        if (o != null) {
            return o.hashCode();
        } else {
            return 0;
        }
    }
}
/**
 * Copyright Pravega Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.pravega.test.integration;

import com.google.common.collect.ImmutableMap;
import io.pravega.client.ClientConfig;
import io.pravega.client.EventStreamClientFactory;
import io.pravega.client.admin.ReaderGroupManager;
import io.pravega.client.control.impl.Controller;
import io.pravega.client.segment.impl.Segment;
import io.pravega.client.stream.Checkpoint;
import io.pravega.client.stream.EventRead;
import io.pravega.client.stream.EventStreamReader;
import io.pravega.client.stream.EventStreamWriter;
import io.pravega.client.stream.EventWriterConfig;
import io.pravega.client.stream.ReaderConfig;
import io.pravega.client.stream.ReaderGroup;
import io.pravega.client.stream.ReaderGroupConfig;
import io.pravega.client.stream.ScalingPolicy;
import io.pravega.client.stream.Stream;
import io.pravega.client.stream.StreamConfiguration;
import io.pravega.client.stream.StreamCut;
import io.pravega.client.stream.impl.JavaSerializer;
import io.pravega.client.stream.impl.StreamCutImpl;
import io.pravega.test.common.ThreadPooledTestSuite;
import java.util.Arrays;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import lombok.Cleanup;
import org.junit.ClassRule;
import org.junit.Test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

/**
 * Integration tests for the {@code unreadBytes} reader-group metric: verifies the
 * metric resets after a checkpoint, respects bounded (end stream cut) reader group
 * configurations, and is anchored on checkpoints rather than generated stream cuts.
 */
public class UnreadBytesTest extends ThreadPooledTestSuite {

    @ClassRule
    public static final PravegaResource PRAVEGA = new PravegaResource();

    @Override
    protected int getThreadPoolSize() {
        return 1;
    }

    /**
     * Writes two 30-byte events, checkpoints, and verifies unreadBytes drops to 0,
     * then rises to 30 after one more write.
     */
    @Test(timeout = 50000)
    public void testUnreadBytes() throws Exception {
        StreamConfiguration config = StreamConfiguration.builder()
                .scalingPolicy(ScalingPolicy.byEventRate(10, 2, 1))
                .build();
        String streamName = "testUnreadBytes";
        Controller controller = PRAVEGA.getLocalController();
        controller.createScope("unreadbytes").get();
        controller.createStream("unreadbytes", streamName, config).get();

        @Cleanup
        EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope("unreadbytes",
                ClientConfig.builder().controllerURI(PRAVEGA.getControllerURI()).build());
        @Cleanup
        EventStreamWriter<String> writer = clientFactory.createEventWriter(streamName, new JavaSerializer<>(),
                EventWriterConfig.builder().build());

        String group = "testUnreadBytes-group";
        @Cleanup
        ReaderGroupManager groupManager = ReaderGroupManager.withScope("unreadbytes",
                ClientConfig.builder().controllerURI(PRAVEGA.getControllerURI()).build());
        groupManager.createReaderGroup(group,
                ReaderGroupConfig.builder().disableAutomaticCheckpoints().stream("unreadbytes/" + streamName).build());
        @Cleanup
        ReaderGroup readerGroup = groupManager.getReaderGroup(group);
        @Cleanup
        EventStreamReader<String> reader = clientFactory.createReader("readerId", group, new JavaSerializer<>(),
                ReaderConfig.builder().build());

        // Nothing written yet, so nothing can be unread.
        long unreadBytes = readerGroup.getMetrics().unreadBytes();
        assertTrue("Unread bytes: " + unreadBytes, unreadBytes == 0);

        writer.writeEvent("0", "data of size 30").get();
        writer.writeEvent("0", "data of size 30").get();

        EventRead<String> firstEvent = reader.readNextEvent(15000);
        EventRead<String> secondEvent = reader.readNextEvent(15000);
        assertNotNull(firstEvent);
        assertEquals("data of size 30", firstEvent.getEvent());
        assertNotNull(secondEvent);
        assertEquals("data of size 30", secondEvent.getEvent());

        // trigger a checkpoint.
        CompletableFuture<Checkpoint> chkPointResult = readerGroup.initiateCheckpoint("test", executorService());
        EventRead<String> chkpointEvent = reader.readNextEvent(15000);
        assertEquals("test", chkpointEvent.getCheckpointName());

        // The extra read lets the reader report its checkpointed position.
        EventRead<String> emptyEvent = reader.readNextEvent(100);
        assertEquals(false, emptyEvent.isCheckpoint());
        assertEquals(null, emptyEvent.getEvent());
        chkPointResult.join();

        // Everything written so far has been read and checkpointed.
        unreadBytes = readerGroup.getMetrics().unreadBytes();
        assertTrue("Unread bytes: " + unreadBytes, unreadBytes == 0);

        // One more 30-byte event past the checkpoint => 30 unread bytes.
        writer.writeEvent("0", "data of size 30").get();
        unreadBytes = readerGroup.getMetrics().unreadBytes();
        assertTrue("Unread bytes: " + unreadBytes, unreadBytes == 30);
    }

    /**
     * Bounds the reader group at offset 90 and verifies unreadBytes is computed
     * against the end stream cut rather than the stream tail.
     */
    @Test(timeout = 50000)
    public void testUnreadBytesWithEndStreamCuts() throws Exception {
        StreamConfiguration config = StreamConfiguration.builder()
                .scalingPolicy(ScalingPolicy.byEventRate(10, 2, 1))
                .build();
        String streamName = "testUnreadBytesWithEndStreamCuts";
        Controller controller = PRAVEGA.getLocalController();
        controller.createScope("unreadbytes").get();
        controller.createStream("unreadbytes", streamName, config).get();

        @Cleanup
        EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope("unreadbytes",
                ClientConfig.builder().controllerURI(PRAVEGA.getControllerURI()).build());
        @Cleanup
        EventStreamWriter<String> writer = clientFactory.createEventWriter(streamName, new JavaSerializer<>(),
                EventWriterConfig.builder().build());
        //Write just 2 events to simplify simulating a checkpoint.
        writer.writeEvent("0", "data of size 30").get();
        writer.writeEvent("0", "data of size 30").get();

        String group = "testUnreadBytesWithEndStreamCuts-group";
        @Cleanup
        ReaderGroupManager groupManager = ReaderGroupManager.withScope("unreadbytes",
                ClientConfig.builder().controllerURI(PRAVEGA.getControllerURI()).build());
        //create a bounded reader group.
        groupManager.createReaderGroup(group, ReaderGroupConfig
                .builder().disableAutomaticCheckpoints().stream("unreadbytes/" + streamName,
                        StreamCut.UNBOUNDED, getStreamCut(streamName, 90L, 0)).build());

        // FIX: readerGroup previously leaked -- sibling tests close it via @Cleanup.
        @Cleanup
        ReaderGroup readerGroup = groupManager.getReaderGroup(group);
        @Cleanup
        EventStreamReader<String> reader = clientFactory.createReader("readerId", group, new JavaSerializer<>(),
                ReaderConfig.builder().build());

        EventRead<String> firstEvent = reader.readNextEvent(15000);
        EventRead<String> secondEvent = reader.readNextEvent(15000);
        assertNotNull(firstEvent);
        assertEquals("data of size 30", firstEvent.getEvent());
        assertNotNull(secondEvent);
        assertEquals("data of size 30", secondEvent.getEvent());

        // trigger a checkpoint.
        CompletableFuture<Checkpoint> chkPointResult = readerGroup.initiateCheckpoint("test", executorService());
        EventRead<String> chkpointEvent = reader.readNextEvent(15000);
        assertEquals("test", chkpointEvent.getCheckpointName());

        EventRead<String> emptyEvent = reader.readNextEvent(100);
        assertEquals(false, emptyEvent.isCheckpoint());
        assertEquals(null, emptyEvent.getEvent());
        chkPointResult.join();

        //Write events, to ensure 120 bytes are written in total.
        writer.writeEvent("0", "data of size 30").get();
        writer.writeEvent("0", "data of size 30").get();

        long unreadBytes = readerGroup.getMetrics().unreadBytes();
        //Ensure the end offset of 90 bytes is taken into consideration when computing unread bytes.
        assertTrue("Unread bytes: " + unreadBytes, unreadBytes == 30);
    }

    /**
     * Verifies that generating a stream cut does NOT move the position that
     * unreadBytes is measured from -- only checkpoints do.
     */
    @Test(timeout = 50000)
    public void testUnreadBytesWithCheckpointsAndStreamCuts() throws Exception {
        StreamConfiguration config = StreamConfiguration.builder()
                .scalingPolicy(ScalingPolicy.byEventRate(10, 2, 1))
                .build();
        String streamName = "testUnreadBytesWithCheckpointsAndStreamCuts";
        Controller controller = PRAVEGA.getLocalController();
        controller.createScope("unreadbytes").get();
        controller.createStream("unreadbytes", streamName, config).get();

        @Cleanup
        EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope("unreadbytes",
                ClientConfig.builder().controllerURI(PRAVEGA.getControllerURI()).build());
        @Cleanup
        EventStreamWriter<String> writer = clientFactory.createEventWriter(streamName, new JavaSerializer<>(),
                EventWriterConfig.builder().build());

        String group = "testUnreadBytesWithCheckpointsAndStreamCuts-group";
        @Cleanup
        ReaderGroupManager groupManager = ReaderGroupManager.withScope("unreadbytes",
                ClientConfig.builder().controllerURI(PRAVEGA.getControllerURI()).build());
        groupManager.createReaderGroup(group,
                ReaderGroupConfig.builder().disableAutomaticCheckpoints().stream("unreadbytes/" + streamName).build());
        @Cleanup
        ReaderGroup readerGroup = groupManager.getReaderGroup(group);
        @Cleanup
        EventStreamReader<String> reader = clientFactory.createReader("readerId", group, new JavaSerializer<>(),
                ReaderConfig.builder().build());

        long unreadBytes = readerGroup.getMetrics().unreadBytes();
        assertTrue("Unread bytes: " + unreadBytes, unreadBytes == 0);

        writer.writeEvent("0", "data of size 30").get();
        writer.writeEvent("0", "data of size 30").get();

        EventRead<String> firstEvent = reader.readNextEvent(15000);
        EventRead<String> secondEvent = reader.readNextEvent(15000);
        assertNotNull(firstEvent);
        assertEquals("data of size 30", firstEvent.getEvent());
        assertNotNull(secondEvent);
        assertEquals("data of size 30", secondEvent.getEvent());

        // trigger a checkpoint.
        CompletableFuture<Checkpoint> chkPointResult = readerGroup.initiateCheckpoint("test", executorService());
        EventRead<String> chkpointEvent = reader.readNextEvent(15000);
        assertEquals("test", chkpointEvent.getCheckpointName());

        EventRead<String> emptyEvent = reader.readNextEvent(100);
        assertEquals(false, emptyEvent.isCheckpoint());
        assertEquals(null, emptyEvent.getEvent());
        chkPointResult.join();

        unreadBytes = readerGroup.getMetrics().unreadBytes();
        assertTrue("Unread bytes: " + unreadBytes, unreadBytes == 0);

        // starting from checkpoint "test", data of size 30 is read
        writer.writeEvent("0", "data of size 30").get();
        unreadBytes = readerGroup.getMetrics().unreadBytes();
        assertTrue("Unread bytes: " + unreadBytes, unreadBytes == 30);

        // trigger a stream-cut; the reads drive the stream-cut generation.
        readerGroup.generateStreamCuts(executorService());
        reader.readNextEvent(15000);
        reader.readNextEvent(100);

        unreadBytes = readerGroup.getMetrics().unreadBytes();
        assertTrue("Unread bytes: " + unreadBytes, unreadBytes == 30);

        // starting from checkpoint "test", data of size 60 is written => stream-cut does not change last checkpointed position
        writer.writeEvent("0", "data of size 30").get();
        unreadBytes = readerGroup.getMetrics().unreadBytes();
        assertTrue("Unread bytes: " + unreadBytes, unreadBytes == 60);
    }

    /*
     * Test method to create StreamCuts. In the real world StreamCuts are obtained via the Pravega client apis.
     */
    private StreamCut getStreamCut(String streamName, long offset, int... segmentNumbers) {
        ImmutableMap.Builder<Segment, Long> builder = ImmutableMap.<Segment, Long>builder();
        Arrays.stream(segmentNumbers).forEach(seg -> {
            builder.put(new Segment("unreadbytes", streamName, seg), offset);
        });

        return new StreamCutImpl(Stream.of("unreadbytes", streamName), builder.build());
    }
}
/*
 * Copyright 2010 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.core.reteoo;

import org.drools.core.beliefsystem.ModedAssertion;
import org.drools.core.beliefsystem.simple.SimpleMode;
import org.kie.api.runtime.rule.FactHandle;
import org.drools.core.common.ActivationGroupNode;
import org.drools.core.common.ActivationNode;
import org.drools.core.common.AgendaItem;
import org.drools.core.common.InternalAgenda;
import org.drools.core.common.InternalAgendaGroup;
import org.drools.core.common.InternalFactHandle;
import org.drools.core.common.LogicalDependency;
import org.drools.core.common.QueryElementFactHandle;
import org.drools.core.definitions.rule.impl.RuleImpl;
import org.drools.core.phreak.RuleAgendaItem;
import org.drools.core.rule.Declaration;
import org.drools.core.rule.GroupElement;
import org.drools.core.spi.Consequence;
import org.drools.core.spi.PropagationContext;
import org.drools.core.util.LinkedList;
import org.drools.core.util.LinkedListEntry;
import org.kie.internal.event.rule.ActivationUnMatchListener;
import org.kie.internal.runtime.beliefs.Mode;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * A left tuple that reached a rule terminal node; it doubles as the agenda
 * item (activation) for that rule match.  Tracks agenda bookkeeping (salience,
 * queue position, agenda group) and truth-maintenance links (justified /
 * blocked / blockers logical-dependency lists).
 */
public class RuleTerminalNodeLeftTuple<T extends ModedAssertion<T>> extends BaseLeftTuple implements AgendaItem<T> {
    private static final long serialVersionUID = 540l;
    /**
     * The salience
     */
    private int salience;
    /**
     * The activation number
     */
    private long activationNumber;
    // position in the agenda group's priority queue; -1 when not queued
    private volatile int queueIndex;
    private volatile boolean queued;
    // logical dependencies this activation justifies (TMS)
    private LinkedList<LogicalDependency<T>> justified;
    // dependencies for activations this activation blocks
    private LinkedList<LogicalDependency<SimpleMode>> blocked;
    // modes of the dependencies blocking THIS activation
    private LinkedList<SimpleMode> blockers;
    private InternalAgendaGroup agendaGroup;
    private ActivationGroupNode activationGroupNode;
    private ActivationNode activationNode;
    private InternalFactHandle factHandle;
    private transient boolean canceled;
    private boolean matched;
    private boolean active;
    private ActivationUnMatchListener activationUnMatchListener;
    private RuleAgendaItem ruleAgendaItem;

    public RuleTerminalNodeLeftTuple() {
        // constructor needed for serialisation
    }

    // ------------------------------------------------------------
    // Constructors
    // ------------------------------------------------------------
    public RuleTerminalNodeLeftTuple(final InternalFactHandle factHandle,
                                     final LeftTupleSink sink,
                                     final boolean leftTupleMemoryEnabled) {
        super(factHandle, sink, leftTupleMemoryEnabled);
    }

    public RuleTerminalNodeLeftTuple(final InternalFactHandle factHandle,
                                     final LeftTuple leftTuple,
                                     final LeftTupleSink sink) {
        super(factHandle, leftTuple, sink);
    }

    public RuleTerminalNodeLeftTuple(final LeftTuple leftTuple,
                                     final LeftTupleSink sink,
                                     final PropagationContext pctx,
                                     final boolean leftTupleMemoryEnabled) {
        super(leftTuple, sink, pctx, leftTupleMemoryEnabled);
    }

    public RuleTerminalNodeLeftTuple(final LeftTuple leftTuple,
                                     RightTuple rightTuple,
                                     LeftTupleSink sink) {
        super(leftTuple, rightTuple, sink);
    }

    public RuleTerminalNodeLeftTuple(final LeftTuple leftTuple,
                                     final RightTuple rightTuple,
                                     final LeftTupleSink sink,
                                     final boolean leftTupleMemoryEnabled) {
        this(leftTuple, rightTuple, null, null, sink, leftTupleMemoryEnabled);
    }

    public RuleTerminalNodeLeftTuple(final LeftTuple leftTuple,
                                     final RightTuple rightTuple,
                                     final LeftTuple currentLeftChild,
                                     final LeftTuple currentRightChild,
                                     final LeftTupleSink sink,
                                     final boolean leftTupleMemoryEnabled) {
        super(leftTuple, rightTuple, currentLeftChild, currentRightChild, sink, leftTupleMemoryEnabled);
    }

    /**
     * (Re)initialises the agenda-item state for a fresh match; marks the
     * activation as matched and not yet queued (queueIndex = -1).
     */
    public void init(final long activationNumber,
                     final int salience,
                     final PropagationContext pctx,
                     final RuleAgendaItem ruleAgendaItem,
                     InternalAgendaGroup agendaGroup) {
        setPropagationContext(pctx);
        this.salience = salience;
        this.activationNumber = activationNumber;
        this.queueIndex = -1;
        this.matched = true;
        this.ruleAgendaItem = ruleAgendaItem;
        this.agendaGroup = agendaGroup;
    }

    /** Refreshes salience/context on a re-match of an existing tuple. */
    public void update(final int salience,
                       final PropagationContext pctx) {
        setPropagationContext(pctx);
        this.salience = salience;
        this.matched = true;
    }

    /**
     * Retrieve the rule.
     *
     * @return The rule.
     */
    public RuleImpl getRule() {
        return getTerminalNode().getRule();
    }

    /**
     * Resolves the consequence to fire: the rule's default consequence, or a
     * named consequence when the terminal node designates one.
     */
    public Consequence getConsequence() {
        String consequenceName = ((RuleTerminalNode) getTerminalNode()).getConsequenceName();
        return consequenceName.equals(RuleImpl.DEFAULT_CONSEQUENCE_NAME) ?
               getTerminalNode().getRule().getConsequence() :
               getTerminalNode().getRule().getNamedConsequence(consequenceName);
    }

    /**
     * Retrieve the tuple.
     *
     * @return The tuple.
     */
    public LeftTuple getTuple() {
        return this;
    }

    public int getSalience() {
        return this.salience;
    }

    public void setSalience(int salience) {
        this.salience = salience;
    }

    public InternalFactHandle getFactHandle() {
        return factHandle;
    }

    public void setFactHandle(InternalFactHandle factHandle) {
        this.factHandle = factHandle;
    }

    public RuleAgendaItem getRuleAgendaItem() {
        return ruleAgendaItem;
    }

    /*
     * (non-Javadoc)
     *
     * @see org.kie.spi.Activation#getActivationNumber()
     */
    public long getActivationNumber() {
        return this.activationNumber;
    }

    /**
     * Records that this activation blocks the activation justified by
     * {@code dep}: adds dep to this tuple's blocked list and dep's mode to the
     * blocked activation's blockers list (avoiding duplicate insertion).
     */
    public void addBlocked(final LogicalDependency<SimpleMode> dep) {
        // Adds the blocked to the blockers list
        if (this.blocked == null) {
            this.blocked = new LinkedList<LogicalDependency<SimpleMode>>();
        }
        this.blocked.add(dep);

        // now ad the blocker to the blocked's list - we need to check that references are null first
        RuleTerminalNodeLeftTuple blocked = (RuleTerminalNodeLeftTuple) dep.getJustified();
        if (blocked.blockers == null) {
            blocked.blockers = new LinkedList<SimpleMode>();
            blocked.blockers.add(dep.getMode());
        } else if (dep.getMode().getNext() == null && dep.getMode().getPrevious() == null && blocked.getBlockers().getFirst() != dep.getMode()) {
            // mode is not linked anywhere and is not already the head => safe to add
            // NOTE(review): this relies on the mode's next/prev links to detect
            // membership in the intrusive list -- confirm before refactoring.
            blocked.blockers.add(dep.getMode());
        }
    }

    /**
     * Severs every blocker/blocked relationship involving this activation.
     * Any activation that becomes unblocked and is still active is re-staged
     * on the agenda.
     */
    public void removeAllBlockersAndBlocked(InternalAgenda agenda) {
        if (this.blockers != null) {
            // Iterate and remove this node's logical dependency list from each of it's blockers
            for (SimpleMode node = blockers.getFirst(); node != null; node = node.getNext()) {
                LogicalDependency dep = node.getObject();
                dep.getJustifier().getBlocked().remove(dep);
            }
        }
        this.blockers = null;

        if (this.blocked != null) {
            // Iterate and remove this node's logical dependency list from each of it's blocked
            for (LogicalDependency<SimpleMode> dep = blocked.getFirst(); dep != null; ) {
                // capture next before removeBlocked unlinks dep from the list
                LogicalDependency<SimpleMode> tmp = dep.getNext();
                removeBlocked(dep);
                RuleTerminalNodeLeftTuple justified = (RuleTerminalNodeLeftTuple) dep.getJustified();
                if (justified.getBlockers().isEmpty() && justified.isActive()) {
                    // no blockers left: put the unblocked activation back on the agenda
                    agenda.stageLeftTuple(ruleAgendaItem, justified);
                }
                dep = tmp;
            }
        }
        this.blocked = null;
    }

    /** Removes a single blocked relationship from both sides of the link. */
    public void removeBlocked(final LogicalDependency<SimpleMode> dep) {
        this.blocked.remove(dep);
        RuleTerminalNodeLeftTuple blocked = (RuleTerminalNodeLeftTuple) dep.getJustified();
        blocked.blockers.remove(dep.getMode());
    }

    public LinkedList<LogicalDependency<SimpleMode>> getBlocked() {
        return this.blocked;
    }

    public void setBlocked(LinkedList<LogicalDependency<SimpleMode>> justified) {
        this.blocked = justified;
    }

    public LinkedList<SimpleMode> getBlockers() {
        return this.blockers;
    }

    /** Adds a logical dependency this activation justifies (lazy list init). */
    public void addLogicalDependency(final LogicalDependency<T> node) {
        if (this.justified == null) {
            this.justified = new LinkedList<LogicalDependency<T>>();
        }
        this.justified.add(node);
    }

    public LinkedList<LogicalDependency<T>> getLogicalDependencies() {
        return this.justified;
    }

    public void setLogicalDependencies(LinkedList<LogicalDependency<T>> justified) {
        this.justified = justified;
    }

    public boolean isQueued() {
        return this.queued;
    }

    public void setQueued(final boolean queued) {
        this.queued = queued;
        if (queued) {
            // queuing implies the activation is active
            setActive(true);
        }
    }

    public void setQueueIndex(final int queueIndex) {
        this.queueIndex = queueIndex;
    }

    /** Removes this activation from its agenda group's queue, if any. */
    public void dequeue() {
        if (this.agendaGroup != null) {
            this.agendaGroup.remove(this);
        }
        setQueued(false);
    }

    public int getQueueIndex() {
        return this.queueIndex;
    }

    public void remove() {
        dequeue();
    }

    public ActivationGroupNode getActivationGroupNode() {
        return this.activationGroupNode;
    }

    public void setActivationGroupNode(final ActivationGroupNode activationNode) {
        this.activationGroupNode = activationNode;
    }

    public InternalAgendaGroup getAgendaGroup() {
        return this.agendaGroup;
    }

    public ActivationNode getActivationNode() {
        return this.activationNode;
    }

    public void setActivationNode(final ActivationNode activationNode) {
        this.activationNode = activationNode;
    }

    public GroupElement getSubRule() {
        return getTerminalNode().getSubRule();
    }

    public TerminalNode getTerminalNode() {
        return (TerminalNode) getLeftTupleSink();
    }

    public ActivationUnMatchListener getActivationUnMatchListener() {
        return activationUnMatchListener;
    }

    public void setActivationUnMatchListener(ActivationUnMatchListener activationUnMatchListener) {
        this.activationUnMatchListener = activationUnMatchListener;
    }

    /**
     * Returns the tuple's fact handles, excluding internal query-element
     * handles, as an unmodifiable list.
     */
    public List<FactHandle> getFactHandles() {
        FactHandle[] factHandles = toFactHandles();
        List<FactHandle> list = new ArrayList<FactHandle>(factHandles.length);
        for (FactHandle factHandle : factHandles) {
            Object o = ((InternalFactHandle) factHandle).getObject();
            if (!(o instanceof QueryElementFactHandle)) {
                list.add(factHandle);
            }
        }
        return Collections.unmodifiableList(list);
    }

    public String toExternalForm() {
        return "[ " + this.getRule().getName() + " active=" + this.queued + " ]";
    }

    /**
     * Returns the objects of the tuple's fact handles, excluding internal
     * query-element handles, as an unmodifiable list.
     */
    public List<Object> getObjects() {
        FactHandle[] factHandles = toFactHandles();
        List<Object> list = new ArrayList<Object>(factHandles.length);
        for (FactHandle factHandle : factHandles) {
            Object o = ((InternalFactHandle) factHandle).getObject();
            if (!(o instanceof QueryElementFactHandle)) {
                list.add(o);
            }
        }
        return Collections.unmodifiableList(list);
    }

    /** Resolves the value bound to a rule declaration (variable) by name. */
    public Object getDeclarationValue(String variableName) {
        Declaration decl = getTerminalNode().getSubRule().getOuterDeclarations().get(variableName);
        InternalFactHandle handle = get(decl);
        // need to double check, but the working memory reference is only used for resolving globals, right?
        return decl.getValue(null, handle.getObject());
    }

    /** Returns the identifiers of all declarations of the terminal node's rule. */
    public List<String> getDeclarationIds() {
        Declaration[] declArray = ((org.drools.core.reteoo.RuleTerminalNode) getLeftTupleSink()).getDeclarations();
        List<String> declarations = new ArrayList<String>();
        for (Declaration decl : declArray) {
            declarations.add(decl.getIdentifier());
        }
        return Collections.unmodifiableList(declarations);
    }

    public boolean isCanceled() {
        return canceled;
    }

    public void cancel() {
        this.canceled = true;
    }

    public boolean isMatched() {
        return matched;
    }

    public void setMatched(boolean matched) {
        this.matched = matched;
    }

    public boolean isActive() {
        return active;
    }

    public void setActive(boolean active) {
        this.active = active;
    }

    public boolean isRuleAgendaItem() {
        return false;
    }

    @Override
    public String toString() {
        return "[" + toExternalForm() + " [ " + super.toString() + " ] ]";
    }
}
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.remote;

import com.intellij.execution.CommandLineUtil;
import com.intellij.execution.TaskExecutor;
import com.intellij.execution.process.*;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.util.Consumer;
import com.intellij.util.io.BaseOutputReader;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.util.concurrent.Future;

/**
 * Process handler for a {@link RemoteProcess}: pumps stdout/stderr through
 * {@link BaseOutputReader}s, notifies listeners of output and termination, and
 * supports destroy/detach semantics for processes that may live on another machine.
 *
 * @author traff
 */
public class BaseRemoteProcessHandler<T extends RemoteProcess> extends AbstractRemoteProcessHandler<T> implements TaskExecutor {
  private static final Logger LOG = Logger.getInstance(BaseRemoteProcessHandler.class);

  @NotNull protected final String myCommandLine;
  protected final ProcessWaitFor myWaitFor;
  @Nullable protected final Charset myCharset; // null means readers use their default charset
  protected T myProcess;

  public BaseRemoteProcessHandler(@NotNull T process, @NotNull String commandLine, @Nullable Charset charset) {
    myProcess = process;
    myCommandLine = commandLine;
    myWaitFor = new ProcessWaitFor(process, this, CommandLineUtil.extractPresentableName(commandLine));
    myCharset = charset;
  }

  @Override
  public T getProcess() {
    return myProcess;
  }

  @Override
  protected void destroyProcessImpl() {
    // Prefer killing the whole remote process tree; fall back to closing streams
    // and plain destroy() when the remote side cannot do that.
    if (!myProcess.killProcessTree()) {
      baseDestroyProcessImpl();
    }
  }

  @Override
  public void startNotify() {
    // Echo the command line to listeners first, mimicking local process handlers.
    notifyTextAvailable(myCommandLine + '\n', ProcessOutputTypes.SYSTEM);

    // Defer reader creation until startNotified so listeners registered before
    // startNotify() do not miss any output.
    addProcessListener(new ProcessAdapter() {
      @Override
      public void startNotified(final ProcessEvent event) {
        try {
          final RemoteOutputReader stdoutReader =
            new RemoteOutputReader(myProcess.getInputStream(), getCharset(), myProcess, myCommandLine) {
              @Override
              protected void onTextAvailable(@NotNull String text) {
                notifyTextAvailable(text, ProcessOutputTypes.STDOUT);
              }

              @NotNull
              @Override
              protected Future<?> executeOnPooledThread(@NotNull Runnable runnable) {
                return BaseRemoteProcessHandler.executeOnPooledThread(runnable);
              }
            };

          final RemoteOutputReader stderrReader =
            new RemoteOutputReader(myProcess.getErrorStream(), getCharset(), myProcess, myCommandLine) {
              @Override
              protected void onTextAvailable(@NotNull String text) {
                notifyTextAvailable(text, ProcessOutputTypes.STDERR);
              }

              @NotNull
              @Override
              protected Future<?> executeOnPooledThread(@NotNull Runnable runnable) {
                return BaseRemoteProcessHandler.executeOnPooledThread(runnable);
              }
            };

          myWaitFor.setTerminationCallback(new Consumer<Integer>() {
            @Override
            public void consume(Integer exitCode) {
              try {
                // Drain both readers before reporting termination so no trailing
                // output is lost.
                try {
                  stderrReader.waitFor();
                  stdoutReader.waitFor();
                }
                catch (InterruptedException ignore) {
                  // best effort: report termination even if interrupted while draining
                }
              }
              finally {
                onOSProcessTerminated(exitCode);
              }
            }
          });
        }
        finally {
          // One-shot hook: remove ourselves once the readers are wired up.
          removeProcessListener(this);
        }
      }
    });
    super.startNotify();
  }

  protected void onOSProcessTerminated(final int exitCode) {
    notifyProcessTerminated(exitCode);
  }

  protected void baseDestroyProcessImpl() {
    try {
      closeStreams();
    }
    finally {
      doDestroyProcess();
    }
  }

  protected void doDestroyProcess() {
    getProcess().destroy();
  }

  @Override
  protected void detachProcessImpl() {
    // Detach asynchronously: closing remote streams may block.
    final Runnable runnable = new Runnable() {
      @Override
      public void run() {
        closeStreams();
        myWaitFor.detach();
        notifyProcessDetached();
      }
    };
    executeOnPooledThread(runnable);
  }

  protected void closeStreams() {
    // Only the process input (our output stream) is closed here; stdout/stderr
    // are owned by the RemoteOutputReaders.
    try {
      myProcess.getOutputStream().close();
    }
    catch (IOException e) {
      LOG.error(e);
    }
  }

  @Override
  public boolean detachIsDefault() {
    return false;
  }

  @Override
  public OutputStream getProcessInput() {
    return myProcess.getOutputStream();
  }

  @Nullable
  public Charset getCharset() {
    return myCharset;
  }

  protected static Future<?> executeOnPooledThread(Runnable task) {
    final Application application = ApplicationManager.getApplication();
    if (application != null) {
      return application.executeOnPooledThread(task);
    }
    // No IDE application (e.g. tests/headless) - fall back to the shared executor.
    return BaseOSProcessHandler.submit(task);
  }

  @NotNull
  @Override
  public Future<?> executeTask(@NotNull Runnable task) {
    return executeOnPooledThread(task);
  }

  /**
   * Output reader that additionally polls {@link RemoteProcess#isDisconnected()} so the
   * read loop terminates when the remote connection drops, and exposes a blocking
   * {@link #waitFor()} based on a closed flag rather than thread join.
   */
  private abstract static class RemoteOutputReader extends BaseOutputReader {
    @NotNull private final RemoteProcess myRemoteProcess;
    private boolean myClosed; // guarded by synchronized accessors

    RemoteOutputReader(@NotNull InputStream inputStream,
                       Charset charset,
                       @NotNull RemoteProcess remoteProcess,
                       @NotNull String commandLine) {
      super(inputStream, charset);

      myRemoteProcess = remoteProcess;

      start(CommandLineUtil.extractPresentableName(commandLine));
    }

    @Override
    protected void doRun() {
      try {
        setClosed(false);
        while (true) {
          final boolean read = readAvailable();

          if (myRemoteProcess.isDisconnected()) {
            myReader.close();
            break;
          }

          if (isStopped) {
            break;
          }

          Thread.sleep(mySleepingPolicy.getTimeToSleep(read)); // give other threads a chance
        }
      }
      catch (InterruptedException ignore) {
        // reader thread interrupted - treat as shutdown
      }
      catch (IOException e) {
        LOG.warn(e);
      }
      catch (Exception e) {
        // defensive: any unexpected failure must still mark the reader closed
        LOG.warn(e);
      }
      finally {
        setClosed(true);
      }
    }

    protected synchronized void setClosed(boolean closed) {
      myClosed = closed;
    }

    @Override
    public void waitFor() throws InterruptedException {
      // Busy-wait with sleep; the reader signals completion via the closed flag.
      while (!isClosed()) {
        Thread.sleep(100);
      }
    }

    private synchronized boolean isClosed() {
      return myClosed;
    }
  }

  @Nullable
  public String getCommandLine() {
    // NOTE(review): annotated @Nullable although the backing field is @NotNull -
    // presumably kept for interface compatibility; confirm against the supertype.
    return myCommandLine;
  }
}
package org.apache.lucene.index;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;
import java.util.Arrays;

import org.apache.lucene.analysis.tokenattributes.TermAttribute;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.util.UnicodeUtil;

/**
 * Per-field term hash used during indexing: interns term text into shared char
 * blocks, keyed by an open-addressed hash of {@link RawPostingList}s, and manages
 * the per-term int/byte slice streams that consumers write postings into.
 *
 * Fix applied in this revision: removed a stray empty statement (";;") in
 * {@link #add()}; no behavioral change.
 */
final class TermsHashPerField extends InvertedDocConsumerPerField {

  final TermsHashConsumerPerField consumer;
  final TermsHashPerField nextPerField;  // secondary TermsHash chained after this one, or null
  final TermsHashPerThread perThread;
  final DocumentsWriter.DocState docState;
  final FieldInvertState fieldState;
  TermAttribute termAtt;

  // Copied from our perThread
  final CharBlockPool charPool;
  final IntBlockPool intPool;
  final ByteBlockPool bytePool;

  final int streamCount;
  final int numPostingInt; // 2 ints per stream reserved per new term

  final FieldInfo fieldInfo;

  boolean postingsCompacted;
  int numPostings;
  // Open-addressing hash table; size is always a power of two so the mask works.
  private int postingsHashSize = 4;
  private int postingsHashHalfSize = postingsHashSize/2;
  private int postingsHashMask = postingsHashSize-1;
  private RawPostingList[] postingsHash = new RawPostingList[postingsHashSize];
  private RawPostingList p; // posting located/created by the most recent add()

  public TermsHashPerField(DocInverterPerField docInverterPerField, final TermsHashPerThread perThread, final TermsHashPerThread nextPerThread, final FieldInfo fieldInfo) {
    this.perThread = perThread;
    intPool = perThread.intPool;
    charPool = perThread.charPool;
    bytePool = perThread.bytePool;
    docState = perThread.docState;
    fieldState = docInverterPerField.fieldState;
    this.consumer = perThread.consumer.addField(this, fieldInfo);
    streamCount = consumer.getStreamCount();
    numPostingInt = 2*streamCount;
    this.fieldInfo = fieldInfo;
    if (nextPerThread != null)
      nextPerField = (TermsHashPerField) nextPerThread.addField(docInverterPerField, fieldInfo);
    else
      nextPerField = null;
  }

  /**
   * Shrinks the hash back to its minimum size and clears it.  The postings must
   * already be compacted (or empty).  {@code targetSize} is currently unused; the
   * table always resets to the minimum size of 4.
   */
  void shrinkHash(int targetSize) {
    assert postingsCompacted || numPostings == 0;

    final int newSize = 4;
    if (newSize != postingsHash.length) {
      postingsHash = new RawPostingList[newSize];
      postingsHashSize = newSize;
      postingsHashHalfSize = newSize/2;
      postingsHashMask = newSize-1;
    }
    Arrays.fill(postingsHash, null);
  }

  /** Recycles all postings back to the shared pool and resets the hash for reuse. */
  public void reset() {
    if (!postingsCompacted)
      compactPostings();
    assert numPostings <= postingsHash.length;
    if (numPostings > 0) {
      perThread.termsHash.recyclePostings(postingsHash, numPostings);
      Arrays.fill(postingsHash, 0, numPostings, null);
      numPostings = 0;
    }
    postingsCompacted = false;
    if (nextPerField != null)
      nextPerField.reset();
  }

  @Override
  synchronized public void abort() {
    reset();
    if (nextPerField != null)
      nextPerField.abort();
  }

  /** Positions {@code reader} at the start of the given stream of posting {@code p}. */
  public void initReader(ByteSliceReader reader, RawPostingList p, int stream) {
    assert stream < streamCount;
    final int[] ints = intPool.buffers[p.intStart >> DocumentsWriter.INT_BLOCK_SHIFT];
    final int upto = p.intStart & DocumentsWriter.INT_BLOCK_MASK;
    reader.init(bytePool,
                p.byteStart+stream*ByteBlockPool.FIRST_LEVEL_SIZE,
                ints[upto+stream]);
  }

  /** Moves all non-null entries to the front of the hash array (destroys hash order). */
  private synchronized void compactPostings() {
    int upto = 0;
    for(int i=0;i<postingsHashSize;i++) {
      if (postingsHash[i] != null) {
        if (upto < i) {
          postingsHash[upto] = postingsHash[i];
          postingsHash[i] = null;
        }
        upto++;
      }
    }

    assert upto == numPostings;
    postingsCompacted = true;
  }

  /** Collapse the hash table & sort in-place. */
  public RawPostingList[] sortPostings() {
    compactPostings();
    quickSort(postingsHash, 0, numPostings-1);
    return postingsHash;
  }

  /** In-place quicksort of postings by term text (median-of-three pivot). */
  void quickSort(RawPostingList[] postings, int lo, int hi) {
    if (lo >= hi)
      return;
    else if (hi == 1+lo) {
      if (comparePostings(postings[lo], postings[hi]) > 0) {
        final RawPostingList tmp = postings[lo];
        postings[lo] = postings[hi];
        postings[hi] = tmp;
      }
      return;
    }

    int mid = (lo + hi) >>> 1; // unsigned shift avoids (lo+hi)/2 overflow

    if (comparePostings(postings[lo], postings[mid]) > 0) {
      RawPostingList tmp = postings[lo];
      postings[lo] = postings[mid];
      postings[mid] = tmp;
    }

    if (comparePostings(postings[mid], postings[hi]) > 0) {
      RawPostingList tmp = postings[mid];
      postings[mid] = postings[hi];
      postings[hi] = tmp;

      if (comparePostings(postings[lo], postings[mid]) > 0) {
        RawPostingList tmp2 = postings[lo];
        postings[lo] = postings[mid];
        postings[mid] = tmp2;
      }
    }

    int left = lo + 1;
    int right = hi - 1;

    if (left >= right)
      return;

    RawPostingList partition = postings[mid];

    for (; ;) {
      while (comparePostings(postings[right], partition) > 0)
        --right;

      while (left < right && comparePostings(postings[left], partition) <= 0)
        ++left;

      if (left < right) {
        RawPostingList tmp = postings[left];
        postings[left] = postings[right];
        postings[right] = tmp;
        --right;
      } else {
        break;
      }
    }

    quickSort(postings, lo, left);
    quickSort(postings, left + 1, hi);
  }

  /** Compares term text for two Posting instance and
   *  returns -1 if p1 &lt; p2; 1 if p1 &gt; p2; else 0.
   *  Term text is stored 0xffff-terminated in the shared char pool. */
  int comparePostings(RawPostingList p1, RawPostingList p2) {

    if (p1 == p2)
      return 0;

    final char[] text1 = charPool.buffers[p1.textStart >> DocumentsWriter.CHAR_BLOCK_SHIFT];
    int pos1 = p1.textStart & DocumentsWriter.CHAR_BLOCK_MASK;
    final char[] text2 = charPool.buffers[p2.textStart >> DocumentsWriter.CHAR_BLOCK_SHIFT];
    int pos2 = p2.textStart & DocumentsWriter.CHAR_BLOCK_MASK;

    assert text1 != text2 || pos1 != pos2;

    while(true) {
      final char c1 = text1[pos1++];
      final char c2 = text2[pos2++];
      if (c1 != c2) {
        if (0xffff == c2)
          return 1;
        else if (0xffff == c1)
          return -1;
        else
          return c1-c2;
      } else
        // This method should never compare equal postings
        // unless p1==p2
        assert c1 != 0xffff;
    }
  }

  /** Test whether the text for current RawPostingList p equals
   *  current tokenText. */
  private boolean postingEquals(final char[] tokenText, final int tokenTextLen) {

    final char[] text = perThread.charPool.buffers[p.textStart >> DocumentsWriter.CHAR_BLOCK_SHIFT];
    assert text != null;
    int pos = p.textStart & DocumentsWriter.CHAR_BLOCK_MASK;

    int tokenPos = 0;
    for(;tokenPos<tokenTextLen;pos++,tokenPos++)
      if (tokenText[tokenPos] != text[pos])
        return false;
    return 0xffff == text[pos]; // must also hit the terminator for an exact match
  }

  private boolean doCall;
  private boolean doNextCall;

  @Override
  void start(Fieldable f) {
    termAtt = fieldState.attributeSource.addAttribute(TermAttribute.class);
    consumer.start(f);
    if (nextPerField != null) {
      nextPerField.start(f);
    }
  }

  @Override
  boolean start(Fieldable[] fields, int count) throws IOException {
    doCall = consumer.start(fields, count);
    if (nextPerField != null)
      doNextCall = nextPerField.start(fields, count);
    return doCall || doNextCall;
  }

  // Secondary entry point (for 2nd & subsequent TermsHash),
  // because token text has already been "interned" into
  // textStart, so we hash by textStart
  public void add(int textStart) throws IOException {

    int code = textStart;

    int hashPos = code & postingsHashMask;

    assert !postingsCompacted;

    // Locate RawPostingList in hash
    p = postingsHash[hashPos];

    if (p != null && p.textStart != textStart) {
      // Conflict: keep searching different locations in
      // the hash table.
      final int inc = ((code>>8)+code)|1; // odd increment => full-cycle probe of power-of-2 table
      do {
        code += inc;
        hashPos = code & postingsHashMask;
        p = postingsHash[hashPos];
      } while (p != null && p.textStart != textStart);
    }

    if (p == null) {

      // First time we are seeing this token since we last
      // flushed the hash.

      // Refill?
      if (0 == perThread.freePostingsCount)
        perThread.morePostings();

      // Pull next free RawPostingList from free list
      p = perThread.freePostings[--perThread.freePostingsCount];
      assert p != null;

      p.textStart = textStart;

      assert postingsHash[hashPos] == null;
      postingsHash[hashPos] = p;
      numPostings++;

      if (numPostings == postingsHashHalfSize)
        rehashPostings(2*postingsHashSize);

      // Init stream slices
      if (numPostingInt + intPool.intUpto > DocumentsWriter.INT_BLOCK_SIZE)
        intPool.nextBuffer();

      if (DocumentsWriter.BYTE_BLOCK_SIZE - bytePool.byteUpto < numPostingInt*ByteBlockPool.FIRST_LEVEL_SIZE)
        bytePool.nextBuffer();

      intUptos = intPool.buffer;
      intUptoStart = intPool.intUpto;
      intPool.intUpto += streamCount;

      p.intStart = intUptoStart + intPool.intOffset;

      for(int i=0;i<streamCount;i++) {
        final int upto = bytePool.newSlice(ByteBlockPool.FIRST_LEVEL_SIZE);
        intUptos[intUptoStart+i] = upto + bytePool.byteOffset;
      }
      p.byteStart = intUptos[intUptoStart];

      consumer.newTerm(p);

    } else {
      intUptos = intPool.buffers[p.intStart >> DocumentsWriter.INT_BLOCK_SHIFT];
      intUptoStart = p.intStart & DocumentsWriter.INT_BLOCK_MASK;
      consumer.addTerm(p);
    }
  }

  // Primary entry point (for first TermsHash)
  @Override
  void add() throws IOException {

    assert !postingsCompacted;

    // We are first in the chain so we must "intern" the
    // term text into textStart address

    // Get the text of this term.
    final char[] tokenText = termAtt.termBuffer(); // was ";;": stray empty statement removed
    final int tokenTextLen = termAtt.termLength();

    // Compute hashcode & replace any invalid UTF16 sequences
    int downto = tokenTextLen;
    int code = 0;
    while (downto > 0) {
      char ch = tokenText[--downto];

      if (ch >= UnicodeUtil.UNI_SUR_LOW_START && ch <= UnicodeUtil.UNI_SUR_LOW_END) {
        if (0 == downto) {
          // Unpaired
          ch = tokenText[downto] = UnicodeUtil.UNI_REPLACEMENT_CHAR;
        } else {
          final char ch2 = tokenText[downto-1];
          if (ch2 >= UnicodeUtil.UNI_SUR_HIGH_START && ch2 <= UnicodeUtil.UNI_SUR_HIGH_END) {
            // OK: high followed by low.  This is a valid
            // surrogate pair.
            code = ((code*31) + ch)*31+ch2;
            downto--;
            continue;
          } else {
            // Unpaired
            ch = tokenText[downto] = UnicodeUtil.UNI_REPLACEMENT_CHAR;
          }
        }
      } else if (ch >= UnicodeUtil.UNI_SUR_HIGH_START && (ch <= UnicodeUtil.UNI_SUR_HIGH_END ||
                                                          ch == 0xffff)) {
        // Unpaired or 0xffff (0xffff is reserved as the in-pool terminator)
        ch = tokenText[downto] = UnicodeUtil.UNI_REPLACEMENT_CHAR;
      }

      code = (code*31) + ch;
    }

    int hashPos = code & postingsHashMask;

    // Locate RawPostingList in hash
    p = postingsHash[hashPos];

    if (p != null && !postingEquals(tokenText, tokenTextLen)) {
      // Conflict: keep searching different locations in
      // the hash table.
      final int inc = ((code>>8)+code)|1;
      do {
        code += inc;
        hashPos = code & postingsHashMask;
        p = postingsHash[hashPos];
      } while (p != null && !postingEquals(tokenText, tokenTextLen));
    }

    if (p == null) {

      // First time we are seeing this token since we last
      // flushed the hash.
      final int textLen1 = 1+tokenTextLen; // +1 for the 0xffff terminator
      if (textLen1 + charPool.charUpto > DocumentsWriter.CHAR_BLOCK_SIZE) {
        if (textLen1 > DocumentsWriter.CHAR_BLOCK_SIZE) {
          // Just skip this term, to remain as robust as
          // possible during indexing.  A TokenFilter
          // can be inserted into the analyzer chain if
          // other behavior is wanted (pruning the term
          // to a prefix, throwing an exception, etc).
          if (docState.maxTermPrefix == null)
            docState.maxTermPrefix = new String(tokenText, 0, 30);

          consumer.skippingLongTerm();
          return;
        }
        charPool.nextBuffer();
      }

      // Refill?
      if (0 == perThread.freePostingsCount)
        perThread.morePostings();

      // Pull next free RawPostingList from free list
      p = perThread.freePostings[--perThread.freePostingsCount];
      assert p != null;

      final char[] text = charPool.buffer;
      final int textUpto = charPool.charUpto;
      p.textStart = textUpto + charPool.charOffset;
      charPool.charUpto += textLen1;
      System.arraycopy(tokenText, 0, text, textUpto, tokenTextLen);
      text[textUpto+tokenTextLen] = 0xffff;

      assert postingsHash[hashPos] == null;
      postingsHash[hashPos] = p;
      numPostings++;

      if (numPostings == postingsHashHalfSize)
        rehashPostings(2*postingsHashSize);

      // Init stream slices
      if (numPostingInt + intPool.intUpto > DocumentsWriter.INT_BLOCK_SIZE)
        intPool.nextBuffer();

      if (DocumentsWriter.BYTE_BLOCK_SIZE - bytePool.byteUpto < numPostingInt*ByteBlockPool.FIRST_LEVEL_SIZE)
        bytePool.nextBuffer();

      intUptos = intPool.buffer;
      intUptoStart = intPool.intUpto;
      intPool.intUpto += streamCount;

      p.intStart = intUptoStart + intPool.intOffset;

      for(int i=0;i<streamCount;i++) {
        final int upto = bytePool.newSlice(ByteBlockPool.FIRST_LEVEL_SIZE);
        intUptos[intUptoStart+i] = upto + bytePool.byteOffset;
      }
      p.byteStart = intUptos[intUptoStart];

      consumer.newTerm(p);

    } else {
      intUptos = intPool.buffers[p.intStart >> DocumentsWriter.INT_BLOCK_SHIFT];
      intUptoStart = p.intStart & DocumentsWriter.INT_BLOCK_MASK;
      consumer.addTerm(p);
    }

    if (doNextCall)
      nextPerField.add(p.textStart);
  }

  int[] intUptos;        // int buffer holding the current term's stream write-heads
  int intUptoStart;      // offset of the current term's first stream head in intUptos

  /** Appends one byte to the given stream of the current term, growing the slice if full. */
  void writeByte(int stream, byte b) {
    int upto = intUptos[intUptoStart+stream];
    byte[] bytes = bytePool.buffers[upto >> DocumentsWriter.BYTE_BLOCK_SHIFT];
    assert bytes != null;
    int offset = upto & DocumentsWriter.BYTE_BLOCK_MASK;
    if (bytes[offset] != 0) {
      // End of slice; allocate a new one
      offset = bytePool.allocSlice(bytes, offset);
      bytes = bytePool.buffer;
      intUptos[intUptoStart+stream] = offset + bytePool.byteOffset;
    }
    bytes[offset] = b;
    (intUptos[intUptoStart+stream])++;
  }

  public void writeBytes(int stream, byte[] b, int offset, int len) {
    // TODO: optimize
    final int end = offset + len;
    for(int i=offset;i<end;i++)
      writeByte(stream, b[i]);
  }

  /** Writes a variable-length int (7 bits per byte, high bit = continuation). */
  void writeVInt(int stream, int i) {
    assert stream < streamCount;
    while ((i & ~0x7F) != 0) {
      writeByte(stream, (byte)((i & 0x7f) | 0x80));
      i >>>= 7;
    }
    writeByte(stream, (byte) i);
  }

  @Override
  void finish() throws IOException {
    consumer.finish();
    if (nextPerField != null)
      nextPerField.finish();
  }

  /** Called when postings hash is too small (> 50%
   *  occupied) or too large (< 20% occupied). */
  void rehashPostings(final int newSize) {

    final int newMask = newSize-1;

    RawPostingList[] newHash = new RawPostingList[newSize];
    for(int i=0;i<postingsHashSize;i++) {
      RawPostingList p0 = postingsHash[i];
      if (p0 != null) {
        int code;
        if (perThread.primary) {
          // Primary hash keys on term text: re-derive the hash code by scanning
          // backwards from the 0xffff terminator.
          final int start = p0.textStart & DocumentsWriter.CHAR_BLOCK_MASK;
          final char[] text = charPool.buffers[p0.textStart >> DocumentsWriter.CHAR_BLOCK_SHIFT];
          int pos = start;
          while(text[pos] != 0xffff)
            pos++;
          code = 0;
          while (pos > start)
            code = (code*31) + text[--pos];
        } else
          code = p0.textStart; // secondary hashes key directly on textStart

        int hashPos = code & newMask;
        assert hashPos >= 0;
        if (newHash[hashPos] != null) {
          final int inc = ((code>>8)+code)|1;
          do {
            code += inc;
            hashPos = code & newMask;
          } while (newHash[hashPos] != null);
        }
        newHash[hashPos] = p0;
      }
    }

    postingsHashMask = newMask;
    postingsHash = newHash;
    postingsHashSize = newSize;
    postingsHashHalfSize = newSize >> 1;
  }
}
/** */ package visualizacionMetricas3.visualizacion.presentation; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.MissingResourceException; import java.util.StringTokenizer; import org.eclipse.emf.common.CommonPlugin; import org.eclipse.emf.common.util.URI; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.EClassifier; import org.eclipse.emf.ecore.resource.Resource; import org.eclipse.emf.ecore.resource.ResourceSet; import org.eclipse.emf.ecore.resource.impl.ResourceSetImpl; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.ecore.xmi.XMLResource; import org.eclipse.emf.edit.ui.provider.ExtendedImageRegistry; import org.eclipse.core.resources.IContainer; import org.eclipse.core.resources.IFile; import org.eclipse.core.resources.IFolder; import org.eclipse.core.resources.IProject; import org.eclipse.core.resources.IResource; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.jface.dialogs.MessageDialog; import org.eclipse.jface.viewers.IStructuredSelection; import org.eclipse.jface.wizard.Wizard; import org.eclipse.jface.wizard.WizardPage; import org.eclipse.swt.SWT; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Combo; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Label; import org.eclipse.ui.INewWizard; import org.eclipse.ui.IWorkbench; import org.eclipse.ui.actions.WorkspaceModifyOperation; import org.eclipse.ui.dialogs.WizardNewFileCreationPage; import org.eclipse.ui.part.FileEditorInput; import org.eclipse.ui.part.ISetSelectionTarget; import visualizacionMetricas3.visualizacion.VisualizacionFactory; import 
visualizacionMetricas3.visualizacion.VisualizacionPackage; import visualizacionMetricas3.provider.VisualizacionMetricas3EditPlugin; import org.eclipse.core.runtime.Path; import org.eclipse.jface.viewers.ISelection; import org.eclipse.jface.viewers.StructuredSelection; import org.eclipse.ui.IWorkbenchPage; import org.eclipse.ui.IWorkbenchPart; import org.eclipse.ui.IWorkbenchWindow; import org.eclipse.ui.PartInitException; import visualizacionMetricas3.presentation.VisualizacionMetricas3EditorPlugin; /** * This is a simple wizard for creating a new model file. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public class VisualizacionModelWizard extends Wizard implements INewWizard { /** * The supported extensions for created files. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public static final List<String> FILE_EXTENSIONS = Collections.unmodifiableList(Arrays.asList(VisualizacionMetricas3EditorPlugin.INSTANCE.getString("_UI_VisualizacionEditorFilenameExtensions").split("\\s*,\\s*"))); /** * A formatted list of supported file extensions, suitable for display. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public static final String FORMATTED_FILE_EXTENSIONS = VisualizacionMetricas3EditorPlugin.INSTANCE.getString("_UI_VisualizacionEditorFilenameExtensions").replaceAll("\\s*,\\s*", ", "); /** * This caches an instance of the model package. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected VisualizacionPackage visualizacionPackage = VisualizacionPackage.eINSTANCE; /** * This caches an instance of the model factory. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected VisualizacionFactory visualizacionFactory = visualizacionPackage.getVisualizacionFactory(); /** * This is the file creation page. 
* <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
  protected VisualizacionModelWizardNewFileCreationPage newFileCreationPage;

  /**
   * This is the initial object creation page.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected VisualizacionModelWizardInitialObjectCreationPage initialObjectCreationPage;

  /**
   * Remember the selection during initialization for populating the default container.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected IStructuredSelection selection;

  /**
   * Remember the workbench during initialization.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected IWorkbench workbench;

  /**
   * Caches the names of the types that can be created as the root object.
   * Lazily populated by {@link #getInitialObjectNames()}.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected List<String> initialObjectNames;

  /**
   * This just records the information.
   * Stores the workbench and selection for later use and sets the wizard's
   * window title and banner image from the plugin's resource bundle.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void init(IWorkbench workbench, IStructuredSelection selection)
  {
    this.workbench = workbench;
    this.selection = selection;
    setWindowTitle(VisualizacionMetricas3EditorPlugin.INSTANCE.getString("_UI_Wizard_label"));
    setDefaultPageImageDescriptor(ExtendedImageRegistry.INSTANCE.getImageDescriptor(VisualizacionMetricas3EditorPlugin.INSTANCE.getImage("full/wizban/NewVisualizacion")));
  }

  /**
   * Returns the names of the types that can be created as the root object.
   * Collects every non-abstract EClass in the package, sorted with the
   * locale-aware EMF comparator. The result is computed once and cached.
   * NOTE(review): {@code visualizacionPackage} is declared earlier in this
   * class, outside this excerpt.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected Collection<String> getInitialObjectNames()
  {
    if (initialObjectNames == null)
    {
      initialObjectNames = new ArrayList<String>();
      for (EClassifier eClassifier : visualizacionPackage.getEClassifiers())
      {
        if (eClassifier instanceof EClass)
        {
          EClass eClass = (EClass)eClassifier;
          // Abstract classes cannot be instantiated as a model root.
          if (!eClass.isAbstract())
          {
            initialObjectNames.add(eClass.getName());
          }
        }
      }
      Collections.sort(initialObjectNames, CommonPlugin.INSTANCE.getComparator());
    }
    return initialObjectNames;
  }

  /**
   * Create a new model.
   * Instantiates the EClass the user picked on the initial-object page via
   * the package's factory. NOTE(review): {@code visualizacionFactory} is
   * declared outside this excerpt.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected EObject createInitialModel()
  {
    EClass eClass = (EClass)visualizacionPackage.getEClassifier(initialObjectCreationPage.getInitialObjectName());
    EObject rootObject = visualizacionFactory.create(eClass);
    return rootObject;
  }

  /**
   * Do the work after everything is specified.
   * Creates the model resource inside a {@link WorkspaceModifyOperation},
   * saves it with the encoding chosen by the user, reveals the new file in
   * the active view, and opens the default editor on it.
   * Returns false if the editor could not be opened or any step failed.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public boolean performFinish()
  {
    try
    {
      // Remember the file.
      //
      final IFile modelFile = getModelFile();

      // Do the work within an operation.
      //
      WorkspaceModifyOperation operation = new WorkspaceModifyOperation()
        {
          @Override
          protected void execute(IProgressMonitor progressMonitor)
          {
            try
            {
              // Create a resource set
              //
              ResourceSet resourceSet = new ResourceSetImpl();

              // Get the URI of the model file.
              //
              URI fileURI = URI.createPlatformResourceURI(modelFile.getFullPath().toString(), true);

              // Create a resource for this file.
              //
              Resource resource = resourceSet.createResource(fileURI);

              // Add the initial model object to the contents.
              //
              EObject rootObject = createInitialModel();
              if (rootObject != null)
              {
                resource.getContents().add(rootObject);
              }

              // Save the contents of the resource to the file system.
              //
              Map<Object, Object> options = new HashMap<Object, Object>();
              options.put(XMLResource.OPTION_ENCODING, initialObjectCreationPage.getEncoding());
              resource.save(options);
            }
            catch (Exception exception)
            {
              // Log and continue; performFinish still reports success unless
              // opening the editor fails below.
              VisualizacionMetricas3EditorPlugin.INSTANCE.log(exception);
            }
            finally
            {
              progressMonitor.done();
            }
          }
        };

      getContainer().run(false, false, operation);

      // Select the new file resource in the current view.
      //
      IWorkbenchWindow workbenchWindow = workbench.getActiveWorkbenchWindow();
      IWorkbenchPage page = workbenchWindow.getActivePage();
      final IWorkbenchPart activePart = page.getActivePart();
      if (activePart instanceof ISetSelectionTarget)
      {
        final ISelection targetSelection = new StructuredSelection(modelFile);
        // Reveal asynchronously on the UI thread.
        getShell().getDisplay().asyncExec
          (new Runnable()
           {
             public void run()
             {
               ((ISetSelectionTarget)activePart).selectReveal(targetSelection);
             }
           });
      }

      // Open an editor on the new file.
      //
      try
      {
        page.openEditor
          (new FileEditorInput(modelFile),
           workbench.getEditorRegistry().getDefaultEditor(modelFile.getFullPath().toString()).getId());
      }
      catch (PartInitException exception)
      {
        MessageDialog.openError(workbenchWindow.getShell(), VisualizacionMetricas3EditorPlugin.INSTANCE.getString("_UI_OpenEditorError_label"), exception.getMessage());
        return false;
      }

      return true;
    }
    catch (Exception exception)
    {
      VisualizacionMetricas3EditorPlugin.INSTANCE.log(exception);
      return false;
    }
  }

  /**
   * This is the one page of the wizard.
   * File-creation page that restricts the filename to the model's
   * registered extensions.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public class VisualizacionModelWizardNewFileCreationPage extends WizardNewFileCreationPage
  {
    /**
     * Pass in the selection.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public VisualizacionModelWizardNewFileCreationPage(String pageId, IStructuredSelection selection)
    {
      super(pageId, selection);
    }

    /**
     * The framework calls this to see if the file is correct.
     * Accepts only filenames whose extension is in FILE_EXTENSIONS.
     * NOTE(review): FILE_EXTENSIONS and FORMATTED_FILE_EXTENSIONS are
     * declared earlier in this class, outside this excerpt.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected boolean validatePage()
    {
      if (super.validatePage())
      {
        String extension = new Path(getFileName()).getFileExtension();
        if (extension == null || !FILE_EXTENSIONS.contains(extension))
        {
          // Pick the singular or plural warning key depending on how many
          // extensions are registered.
          String key = FILE_EXTENSIONS.size() > 1 ? "_WARN_FilenameExtensions" : "_WARN_FilenameExtension";
          setErrorMessage(VisualizacionMetricas3EditorPlugin.INSTANCE.getString(key, new Object [] { FORMATTED_FILE_EXTENSIONS }));
          return false;
        }
        return true;
      }
      return false;
    }

    /**
     * Returns the workspace file handle for the path entered on this page.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public IFile getModelFile()
    {
      return ResourcesPlugin.getWorkspace().getRoot().getFile(getContainerFullPath().append(getFileName()));
    }
  }

  /**
   * This is the page where the type of object to create is selected.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public class VisualizacionModelWizardInitialObjectCreationPage extends WizardPage
  {
    /**
     * Combo listing the concrete root-object types.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected Combo initialObjectField;

    /**
     * Cached XML encoding choices; see {@link #getEncodings()}.
     * @generated
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     */
    protected List<String> encodings;

    /**
     * Combo listing the available XML encodings.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected Combo encodingField;

    /**
     * Pass in the selection.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public VisualizacionModelWizardInitialObjectCreationPage(String pageId)
    {
      super(pageId);
    }

    /**
     * Builds the page: a labeled combo for the root-object type and a
     * labeled combo for the XML encoding, both revalidating on change.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void createControl(Composite parent)
    {
      Composite composite = new Composite(parent, SWT.NONE);
      {
        GridLayout layout = new GridLayout();
        layout.numColumns = 1;
        layout.verticalSpacing = 12;
        composite.setLayout(layout);

        GridData data = new GridData();
        data.verticalAlignment = GridData.FILL;
        data.grabExcessVerticalSpace = true;
        data.horizontalAlignment = GridData.FILL;
        composite.setLayoutData(data);
      }

      Label containerLabel = new Label(composite, SWT.LEFT);
      {
        containerLabel.setText(VisualizacionMetricas3EditorPlugin.INSTANCE.getString("_UI_ModelObject"));

        GridData data = new GridData();
        data.horizontalAlignment = GridData.FILL;
        containerLabel.setLayoutData(data);
      }

      initialObjectField = new Combo(composite, SWT.BORDER);
      {
        GridData data = new GridData();
        data.horizontalAlignment = GridData.FILL;
        data.grabExcessHorizontalSpace = true;
        initialObjectField.setLayoutData(data);
      }

      for (String objectName : getInitialObjectNames())
      {
        initialObjectField.add(getLabel(objectName));
      }

      // Preselect when there is only one possible root type.
      if (initialObjectField.getItemCount() == 1)
      {
        initialObjectField.select(0);
      }
      initialObjectField.addModifyListener(validator);

      Label encodingLabel = new Label(composite, SWT.LEFT);
      {
        encodingLabel.setText(VisualizacionMetricas3EditorPlugin.INSTANCE.getString("_UI_XMLEncoding"));

        GridData data = new GridData();
        data.horizontalAlignment = GridData.FILL;
        encodingLabel.setLayoutData(data);
      }
      encodingField = new Combo(composite, SWT.BORDER);
      {
        GridData data = new GridData();
        data.horizontalAlignment = GridData.FILL;
        data.grabExcessHorizontalSpace = true;
        encodingField.setLayoutData(data);
      }

      for (String encoding : getEncodings())
      {
        encodingField.add(encoding);
      }
      encodingField.select(0);
      encodingField.addModifyListener(validator);

      setPageComplete(validatePage());
      setControl(composite);
    }

    /**
     * Revalidates the page whenever either combo changes.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected ModifyListener validator =
      new ModifyListener()
      {
        public void modifyText(ModifyEvent e)
        {
          setPageComplete(validatePage());
        }
      };

    /**
     * The page is complete when a known object type and a known encoding
     * are selected.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected boolean validatePage()
    {
      return getInitialObjectName() != null && getEncodings().contains(encodingField.getText());
    }

    /**
     * Moves focus to whichever combo still needs user input.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void setVisible(boolean visible)
    {
      super.setVisible(visible);
      if (visible)
      {
        if (initialObjectField.getItemCount() == 1)
        {
          initialObjectField.clearSelection();
          encodingField.setFocus();
        }
        else
        {
          encodingField.clearSelection();
          initialObjectField.setFocus();
        }
      }
    }

    /**
     * Maps the displayed label back to the underlying type name, or null
     * if the text matches no known type.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public String getInitialObjectName()
    {
      String label = initialObjectField.getText();

      for (String name : getInitialObjectNames())
      {
        if (getLabel(name).equals(label))
        {
          return name;
        }
      }
      return null;
    }

    /**
     * Returns the currently selected XML encoding.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public String getEncoding()
    {
      return encodingField.getText();
    }

    /**
     * Returns the label for the specified type name.
     * Falls back to the raw type name when no localized label exists.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected String getLabel(String typeName)
    {
      try
      {
        return VisualizacionMetricas3EditPlugin.INSTANCE.getString("_UI_" + typeName + "_type");
      }
      catch(MissingResourceException mre)
      {
        VisualizacionMetricas3EditorPlugin.INSTANCE.log(mre);
      }
      return typeName;
    }

    /**
     * Lazily parses the whitespace-separated encoding choices from the
     * plugin's resource bundle.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected Collection<String> getEncodings()
    {
      if (encodings == null)
      {
        encodings = new ArrayList<String>();
        for (StringTokenizer stringTokenizer = new StringTokenizer(VisualizacionMetricas3EditorPlugin.INSTANCE.getString("_UI_XMLEncodingChoices")); stringTokenizer.hasMoreTokens(); )
        {
          encodings.add(stringTokenizer.nextToken());
        }
      }
      return encodings;
    }
  }

  /**
   * The framework calls this to create the contents of the wizard.
   * Adds the file-creation page (seeding its container and a unique default
   * filename from the current selection) and the initial-object page.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void addPages()
  {
    // Create a page, set the title, and the initial model file name.
    //
    newFileCreationPage = new VisualizacionModelWizardNewFileCreationPage("Whatever", selection);
    newFileCreationPage.setTitle(VisualizacionMetricas3EditorPlugin.INSTANCE.getString("_UI_VisualizacionModelWizard_label"));
    newFileCreationPage.setDescription(VisualizacionMetricas3EditorPlugin.INSTANCE.getString("_UI_VisualizacionModelWizard_description"));
    newFileCreationPage.setFileName(VisualizacionMetricas3EditorPlugin.INSTANCE.getString("_UI_VisualizacionEditorFilenameDefaultBase") + "." + FILE_EXTENSIONS.get(0));
    addPage(newFileCreationPage);

    // Try and get the resource selection to determine a current directory for the file dialog.
    //
    if (selection != null && !selection.isEmpty())
    {
      // Get the resource...
      //
      Object selectedElement = selection.iterator().next();
      if (selectedElement instanceof IResource)
      {
        // Get the resource parent, if its a file.
        //
        IResource selectedResource = (IResource)selectedElement;
        if (selectedResource.getType() == IResource.FILE)
        {
          selectedResource = selectedResource.getParent();
        }

        // This gives us a directory...
        //
        if (selectedResource instanceof IFolder || selectedResource instanceof IProject)
        {
          // Set this for the container.
          //
          newFileCreationPage.setContainerFullPath(selectedResource.getFullPath());

          // Make up a unique new name here.
          //
          String defaultModelBaseFilename = VisualizacionMetricas3EditorPlugin.INSTANCE.getString("_UI_VisualizacionEditorFilenameDefaultBase");
          String defaultModelFilenameExtension = FILE_EXTENSIONS.get(0);
          String modelFilename = defaultModelBaseFilename + "." + defaultModelFilenameExtension;
          // Append 1, 2, ... until the name is unused in the container.
          for (int i = 1; ((IContainer)selectedResource).findMember(modelFilename) != null; ++i)
          {
            modelFilename = defaultModelBaseFilename + i + "." + defaultModelFilenameExtension;
          }
          newFileCreationPage.setFileName(modelFilename);
        }
      }
    }
    initialObjectCreationPage = new VisualizacionModelWizardInitialObjectCreationPage("Whatever2");
    initialObjectCreationPage.setTitle(VisualizacionMetricas3EditorPlugin.INSTANCE.getString("_UI_VisualizacionModelWizard_label"));
    initialObjectCreationPage.setDescription(VisualizacionMetricas3EditorPlugin.INSTANCE.getString("_UI_Wizard_initial_object_description"));
    addPage(initialObjectCreationPage);
  }

  /**
   * Get the file from the page.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public IFile getModelFile()
  {
    return newFileCreationPage.getModelFile();
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache license, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the license for the specific language governing permissions and
 * limitations under the license.
 */
package org.apache.logging.log4j.core.pattern;

import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.plugins.Plugin;
import org.apache.logging.log4j.core.impl.ThrowableFormatOptions;
import org.apache.logging.log4j.core.layout.PatternLayout;
import org.apache.logging.log4j.util.Strings;

/**
 * Outputs the Throwable portion of the LoggingEvent as a full stack trace
 * unless this converter's option is 'short', where it just outputs the first line of the trace, or if
 * the number of lines to print is explicitly specified.
 */
@Plugin(name = "ThrowablePatternConverter", category = PatternConverter.CATEGORY)
@ConverterKeys({ "ex", "throwable", "exception" })
public class ThrowablePatternConverter extends LogEventPatternConverter {

    /** Formatters for the configured suffix pattern, excluding any that themselves render throwables. */
    protected final List<PatternFormatter> formatters;

    /** First raw option string, kept for recognizing the "short.*" sub-options. */
    private String rawOption;

    /**
     * The number of lines to write.
     */
    protected final ThrowableFormatOptions options;

    /**
     * Constructor.
     * @param name Name of converter.
     * @param style CSS style for output.
     * @param options options, may be null.
     * @param config the current configuration, used to parse any suffix pattern.
     */
    protected ThrowablePatternConverter(final String name, final String style, final String[] options,
                                        final Configuration config) {
        super(name, style);
        this.options = ThrowableFormatOptions.newInstance(options);
        if (options != null && options.length > 0) {
            rawOption = options[0];
        }
        final String suffixPattern = this.options.getSuffix();
        if (suffixPattern == null) {
            this.formatters = Collections.emptyList();
        } else {
            final PatternParser suffixParser = PatternLayout.createPatternParser(config);
            final List<PatternFormatter> parsed = suffixParser.parse(suffixPattern);
            // Drop any nested formatter that would render the throwable itself,
            // otherwise the suffix would recurse into exception output.
            final List<PatternFormatter> nonThrowable = new ArrayList<>(parsed.size());
            for (final PatternFormatter candidate : parsed) {
                if (!candidate.handlesThrowable()) {
                    nonThrowable.add(candidate);
                }
            }
            // Keep the original parsed list when nothing was filtered out.
            this.formatters = nonThrowable.size() == parsed.size() ? parsed : nonThrowable;
        }
    }

    /**
     * Gets an instance of the class.
     *
     * @param config the current configuration.
     * @param options pattern options, may be null. If first element is "short",
     *                only the first line of the throwable will be formatted.
     * @return instance of class.
     */
    public static ThrowablePatternConverter newInstance(final Configuration config, final String[] options) {
        return new ThrowablePatternConverter("Throwable", "throwable", options, config);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void format(final LogEvent event, final StringBuilder buffer) {
        final Throwable thrown = event.getThrown();
        if (isSubShortOption()) {
            formatSubShortOption(thrown, getSuffix(event), buffer);
        } else if (thrown != null && options.anyLines()) {
            formatOption(thrown, getSuffix(event), buffer);
        }
    }

    /** Returns true when the raw option names one of the "short.*" sub-fields. */
    private boolean isSubShortOption() {
        final String[] subShortKeys = {
            ThrowableFormatOptions.MESSAGE,
            ThrowableFormatOptions.LOCALIZED_MESSAGE,
            ThrowableFormatOptions.FILE_NAME,
            ThrowableFormatOptions.LINE_NUMBER,
            ThrowableFormatOptions.METHOD_NAME,
            ThrowableFormatOptions.CLASS_NAME,
        };
        for (final String key : subShortKeys) {
            if (key.equalsIgnoreCase(rawOption)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Appends a single field of the throwable's top stack frame (or its message),
     * preceded by a separating space when the buffer does not already end in whitespace.
     */
    private void formatSubShortOption(final Throwable t, final String suffix, final StringBuilder buffer) {
        if (t == null) {
            return;
        }
        final StackTraceElement[] trace = t.getStackTrace();
        if (trace == null || trace.length == 0) {
            // No frame to report on; emit nothing.
            return;
        }
        final StackTraceElement topFrame = trace[0];

        String fragment = Strings.EMPTY;
        if (ThrowableFormatOptions.CLASS_NAME.equalsIgnoreCase(rawOption)) {
            fragment = topFrame.getClassName();
        } else if (ThrowableFormatOptions.METHOD_NAME.equalsIgnoreCase(rawOption)) {
            fragment = topFrame.getMethodName();
        } else if (ThrowableFormatOptions.LINE_NUMBER.equalsIgnoreCase(rawOption)) {
            fragment = String.valueOf(topFrame.getLineNumber());
        } else if (ThrowableFormatOptions.MESSAGE.equalsIgnoreCase(rawOption)) {
            fragment = t.getMessage();
        } else if (ThrowableFormatOptions.LOCALIZED_MESSAGE.equalsIgnoreCase(rawOption)) {
            fragment = t.getLocalizedMessage();
        } else if (ThrowableFormatOptions.FILE_NAME.equalsIgnoreCase(rawOption)) {
            fragment = topFrame.getFileName();
        }

        final int end = buffer.length();
        if (end > 0 && !Character.isWhitespace(buffer.charAt(end - 1))) {
            buffer.append(' ');
        }
        buffer.append(fragment);
        if (Strings.isNotBlank(suffix)) {
            buffer.append(' ');
            buffer.append(suffix);
        }
    }

    /**
     * Appends the stack trace, honoring the configured line limit, separator,
     * and per-line suffix. The fast path writes the raw trace untouched.
     */
    private void formatOption(final Throwable throwable, final String suffix, final StringBuilder buffer) {
        final StringWriter traceWriter = new StringWriter();
        throwable.printStackTrace(new PrintWriter(traceWriter));

        if (buffer.length() > 0 && !Character.isWhitespace(buffer.charAt(buffer.length() - 1))) {
            buffer.append(' ');
        }

        final boolean suffixPresent = Strings.isNotBlank(suffix);
        if (options.allLines() && Strings.LINE_SEPARATOR.equals(options.getSeparator()) && !suffixPresent) {
            // Defaults everywhere: the printed trace can be used verbatim.
            buffer.append(traceWriter.toString());
            return;
        }

        final String[] traceLines = traceWriter.toString().split(Strings.LINE_SEPARATOR);
        final int lastIndex = options.minLines(traceLines.length) - 1;
        for (int i = 0; i <= lastIndex; i++) {
            buffer.append(traceLines[i]);
            if (suffixPresent) {
                buffer.append(' ');
                buffer.append(suffix);
            }
            if (i < lastIndex) {
                buffer.append(options.getSeparator());
            }
        }
    }

    /**
     * This converter obviously handles throwables.
     *
     * @return true.
     */
    @Override
    public boolean handlesThrowable() {
        return true;
    }

    /** Renders the configured suffix pattern against the event; empty string when none. */
    protected String getSuffix(final LogEvent event) {
        final StringBuilder rendered = new StringBuilder();
        for (final PatternFormatter formatter : formatters) {
            formatter.format(event, rendered);
        }
        return rendered.toString();
    }
}
/**
 * $RCSfile$
 * $Revision$
 * $Date$
 *
 * Copyright 2003-2007 Jive Software.
 *
 * All rights reserved. Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.jivesoftware.smack;

import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;

import org.jivesoftware.smack.filter.AndFilter;
import org.jivesoftware.smack.filter.PacketFilter;
import org.jivesoftware.smack.filter.PacketIDFilter;
import org.jivesoftware.smack.filter.PacketTypeFilter;
import org.jivesoftware.smack.packet.IQ;
import org.jivesoftware.smack.packet.Registration;
import org.jivesoftware.smack.util.StringUtils;

/**
 * Allows creation and management of accounts on an XMPP server.
 *
 * @see Connection#getAccountManager()
 * @author Matt Tucker
 */
public class AccountManager {

    private Connection connection;

    // Cached registration form returned by the server; lazily populated by
    // getRegistrationInfo() and reused by the attribute/instruction getters.
    private Registration info = null;

    /**
     * Flag that indicates whether the server supports In-Band Registration.
     * In-Band Registration may be advertised as a stream feature. If no stream feature
     * was advertised from the server then try sending an IQ packet to discover if In-Band
     * Registration is available.
     */
    private boolean accountCreationSupported = false;

    /**
     * Creates a new AccountManager instance.
     *
     * @param connection a connection to a XMPP server.
     */
    public AccountManager(Connection connection) {
        this.connection = connection;
    }

    /**
     * Sets whether the server supports In-Band Registration. In-Band Registration may be
     * advertised as a stream feature. If no stream feature was advertised from the server
     * then try sending an IQ packet to discover if In-Band Registration is available.
     *
     * @param accountCreationSupported true if the server supports In-Band Registration.
     */
    void setSupportsAccountCreation(boolean accountCreationSupported) {
        this.accountCreationSupported = accountCreationSupported;
    }

    /**
     * Returns true if the server supports creating new accounts. Many servers require
     * that you not be currently authenticated when creating new accounts, so the safest
     * behavior is to only create new accounts before having logged in to a server.
     *
     * @return true if the server support creating new accounts.
     */
    public boolean supportsAccountCreation() {
        // Check if we already know that the server supports creating new accounts
        if (accountCreationSupported) {
            return true;
        }
        // No information is known yet (e.g. no stream feature was received from the server
        // indicating that it supports creating new accounts) so send an IQ packet as a way
        // to discover if this feature is supported
        try {
            if (info == null) {
                // getRegistrationInfo() throws on an error response, so reaching
                // this line means the server answered the registration query.
                getRegistrationInfo();
                accountCreationSupported = info.getType() != IQ.Type.ERROR;
            }
            return accountCreationSupported;
        }
        catch (XMPPException xe) {
            return false;
        }
    }

    /**
     * Returns an unmodifiable collection of the names of the required account attributes.
     * All attributes must be set when creating new accounts. The standard set of possible
     * attributes are as follows: <ul>
     *      <li>name -- the user's name.
     *      <li>first -- the user's first name.
     *      <li>last -- the user's last name.
     *      <li>email -- the user's email address.
     *      <li>city -- the user's city.
     *      <li>state -- the user's state.
     *      <li>zip -- the user's ZIP code.
     *      <li>phone -- the user's phone number.
     *      <li>url -- the user's website.
     *      <li>date -- the date the registration took place.
     *      <li>misc -- other miscellaneous information to associate with the account.
     *      <li>text -- textual information to associate with the account.
     *      <li>remove -- empty flag to remove account.
     * </ul><p>
     *
     * Typically, servers require no attributes when creating new accounts, or just
     * the user's email address.
     *
     * @return the required account attributes.
     */
    public Collection<String> getAccountAttributes() {
        try {
            if (info == null) {
                getRegistrationInfo();
            }
            List<String> attributes = info.getRequiredFields();
            if (attributes.size() > 0) {
                HashSet<String> set = new HashSet<String>(attributes);
                return Collections.unmodifiableSet(set);
            }
        }
        catch (XMPPException xe) {
            // Best effort: fall through and report no required attributes.
            xe.printStackTrace();
        }
        return Collections.emptySet();
    }

    /**
     * Returns the value of a given account attribute or <tt>null</tt> if the account
     * attribute wasn't found.
     *
     * @param name the name of the account attribute to return its value.
     * @return the value of the account attribute or <tt>null</tt> if an account
     *      attribute wasn't found for the requested name.
     */
    public String getAccountAttribute(String name) {
        try {
            if (info == null) {
                getRegistrationInfo();
            }
            return info.getAttributes().get(name);
        }
        catch (XMPPException xe) {
            // Best effort: fall through and report no value.
            xe.printStackTrace();
        }
        return null;
    }

    /**
     * Returns the instructions for creating a new account, or <tt>null</tt> if there
     * are no instructions. If present, instructions should be displayed to the end-user
     * that will complete the registration process.
     *
     * @return the account creation instructions, or <tt>null</tt> if there are none.
     */
    public String getAccountInstructions() {
        try {
            if (info == null) {
                getRegistrationInfo();
            }
            return info.getInstructions();
        }
        catch (XMPPException xe) {
            return null;
        }
    }

    /**
     * Creates a new account using the specified username and password. The server may
     * require a number of extra account attributes such as an email address and phone
     * number. In that case, Smack will attempt to automatically set all required
     * attributes with blank values, which may or may not be accepted by the server.
     * Therefore, it's recommended to check the required account attributes and to let
     * the end-user populate them with real values instead.
     *
     * @param username the username.
     * @param password the password.
     * @throws XMPPException if an error occurs creating the account.
     */
    public void createAccount(String username, String password) throws XMPPException {
        if (!supportsAccountCreation()) {
            throw new XMPPException("Server does not support account creation.");
        }
        // Create a map for all the required attributes, but give them blank values.
        Map<String, String> attributes = new HashMap<String, String>();
        for (String attributeName : getAccountAttributes()) {
            attributes.put(attributeName, "");
        }
        createAccount(username, password, attributes);
    }

    /**
     * Creates a new account using the specified username, password and account attributes.
     * The attributes Map must contain only String name/value pairs and must also have values
     * for all required attributes.
     *
     * @param username the username.
     * @param password the password.
     * @param attributes the account attributes.
     * @throws XMPPException if an error occurs creating the account.
     * @see #getAccountAttributes()
     */
    public void createAccount(String username, String password, Map<String, String> attributes)
            throws XMPPException {
        if (!supportsAccountCreation()) {
            throw new XMPPException("Server does not support account creation.");
        }
        Registration reg = new Registration();
        reg.setType(IQ.Type.SET);
        reg.setTo(connection.getServiceName());
        reg.setUsername(username);
        reg.setPassword(password);
        // Iterate entries directly instead of keySet() + get() lookups.
        for (Map.Entry<String, String> attribute : attributes.entrySet()) {
            reg.addAttribute(attribute.getKey(), attribute.getValue());
        }
        executeRegistration(reg);
    }

    /**
     * Changes the password of the currently logged-in account. This operation can only
     * be performed after a successful login operation has been completed. Not all servers
     * support changing passwords; an XMPPException will be thrown when that is the case.
     *
     * @param newPassword the new password for the account.
     * @throws IllegalStateException if not currently logged-in to the server.
     * @throws XMPPException if an error occurs when changing the password.
     */
    public void changePassword(String newPassword) throws XMPPException {
        // Enforce the documented precondition (mirrors deleteAccount()); previously
        // this method silently sent the request while unauthenticated.
        if (!connection.isAuthenticated()) {
            throw new IllegalStateException("Must be logged in to change a password.");
        }
        Registration reg = new Registration();
        reg.setType(IQ.Type.SET);
        reg.setTo(connection.getServiceName());
        reg.setUsername(StringUtils.parseName(connection.getUser()));
        reg.setPassword(newPassword);
        executeRegistration(reg);
    }

    /**
     * Deletes the currently logged-in account from the server. This operation can only
     * be performed after a successful login operation has been completed. Not all servers
     * support deleting accounts; an XMPPException will be thrown when that is the case.
     *
     * @throws IllegalStateException if not currently logged-in to the server.
     * @throws XMPPException if an error occurs when deleting the account.
     */
    public void deleteAccount() throws XMPPException {
        if (!connection.isAuthenticated()) {
            throw new IllegalStateException("Must be logged in to delete an account.");
        }
        Registration reg = new Registration();
        reg.setType(IQ.Type.SET);
        reg.setTo(connection.getServiceName());
        // To delete an account, we set remove to true
        reg.setRemove(true);
        executeRegistration(reg);
    }

    /**
     * Gets the account registration info from the server and caches it in {@link #info}.
     *
     * @throws XMPPException if an error occurs.
     */
    private synchronized void getRegistrationInfo() throws XMPPException {
        Registration reg = new Registration();
        reg.setTo(connection.getServiceName());
        info = (Registration) executeRegistration(reg);
    }

    /**
     * Sends a Registration packet and waits for the matching IQ response,
     * translating timeouts and error responses into XMPPExceptions. This is
     * the shared transport logic for every operation in this class.
     *
     * @param reg the registration packet to send.
     * @return the non-error IQ result from the server.
     * @throws XMPPException if no response arrives within the configured
     *      reply timeout, or the server answers with an error.
     */
    private IQ executeRegistration(Registration reg) throws XMPPException {
        PacketFilter filter = new AndFilter(new PacketIDFilter(reg.getPacketID()),
                new PacketTypeFilter(IQ.class));
        PacketCollector collector = connection.createPacketCollector(filter);
        connection.sendPacket(reg);
        IQ result = (IQ) collector.nextResult(SmackConfiguration.getPacketReplyTimeout());
        // Stop queuing results
        collector.cancel();
        if (result == null) {
            throw new XMPPException("No response from server.");
        }
        if (result.getType() == IQ.Type.ERROR) {
            throw new XMPPException(result.getError());
        }
        return result;
    }
}
/* * Copyright 2019 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.compute.v1; import com.google.api.core.BetaApi; import com.google.api.gax.httpjson.ApiMessage; import java.util.List; import java.util.Objects; import javax.annotation.Generated; import javax.annotation.Nullable; @Generated("by GAPIC") @BetaApi /** * Request object for method compute.instanceGroupManagers.list. Retrieves a list of managed * instance groups that are contained within the specified project and zone. 
*/
public final class ListInstanceGroupManagersHttpRequest implements ApiMessage {

  // NOTE: field names intentionally mirror the HTTP query-parameter names, including the
  // non-camelCase "access_token". getFieldValue(String) resolves fields by these exact
  // strings, so renaming any field would silently break request serialization.
  private final String access_token;
  private final String callback;
  private final String fields;
  private final String filter;
  private final String key;
  private final Integer maxResults;
  private final String orderBy;
  private final String pageToken;
  private final String prettyPrint;
  private final String quotaUser;
  private final String userIp;
  private final String zone;

  /** All-null instance; used only to initialize the shared {@code DEFAULT_INSTANCE} sentinel. */
  private ListInstanceGroupManagersHttpRequest() {
    this.access_token = null;
    this.callback = null;
    this.fields = null;
    this.filter = null;
    this.key = null;
    this.maxResults = null;
    this.orderBy = null;
    this.pageToken = null;
    this.prettyPrint = null;
    this.quotaUser = null;
    this.userIp = null;
    this.zone = null;
  }

  /** Full constructor; instances are created only through {@link Builder#build()}. */
  private ListInstanceGroupManagersHttpRequest(
      String access_token,
      String callback,
      String fields,
      String filter,
      String key,
      Integer maxResults,
      String orderBy,
      String pageToken,
      String prettyPrint,
      String quotaUser,
      String userIp,
      String zone) {
    this.access_token = access_token;
    this.callback = callback;
    this.fields = fields;
    this.filter = filter;
    this.key = key;
    this.maxResults = maxResults;
    this.orderBy = orderBy;
    this.pageToken = pageToken;
    this.prettyPrint = prettyPrint;
    this.quotaUser = quotaUser;
    this.userIp = userIp;
    this.zone = zone;
  }

  /**
   * Reflective accessor used by the serialization layer: returns the value of the field whose
   * wire name equals {@code fieldName}, or {@code null} if the name is not recognized.
   */
  @Override
  public Object getFieldValue(String fieldName) {
    if ("access_token".equals(fieldName)) {
      return access_token;
    }
    if ("callback".equals(fieldName)) {
      return callback;
    }
    if ("fields".equals(fieldName)) {
      return fields;
    }
    if ("filter".equals(fieldName)) {
      return filter;
    }
    if ("key".equals(fieldName)) {
      return key;
    }
    if ("maxResults".equals(fieldName)) {
      return maxResults;
    }
    if ("orderBy".equals(fieldName)) {
      return orderBy;
    }
    if ("pageToken".equals(fieldName)) {
      return pageToken;
    }
    if ("prettyPrint".equals(fieldName)) {
      return prettyPrint;
    }
    if ("quotaUser".equals(fieldName)) {
      return quotaUser;
    }
    if ("userIp".equals(fieldName)) {
      return userIp;
    }
    if ("zone".equals(fieldName)) {
      return zone;
    }
    return null;
  }

  /** This list request carries no message body; every parameter travels in the URL. */
  @Nullable
  @Override
  public ApiMessage getApiMessageRequestBody() {
    return null;
  }

  /**
   * The fields that should be serialized (even if they have empty values). If the containing
   * message object has a non-null fieldmask, then all the fields in the field mask (and only those
   * fields in the field mask) will be serialized. If the containing object does not have a
   * fieldmask, then only non-empty fields will be serialized.
   */
  @Nullable
  @Override
  public List<String> getFieldMask() {
    return null;
  }

  /** OAuth 2.0 token for the current user. */
  public String getAccessToken() {
    return access_token;
  }

  /** Name of the JavaScript callback function that handles the response. */
  public String getCallback() {
    return callback;
  }

  /** Selector specifying a subset of fields to include in the response. */
  public String getFields() {
    return fields;
  }

  /**
   * A filter expression that filters resources listed in the response. The expression must specify
   * the field name, a comparison operator, and the value that you want to use for filtering. The
   * value must be a string, a number, or a boolean. The comparison operator must be either =, !=,
   * &gt;, or &lt;.
   *
   * <p>For example, if you are filtering Compute Engine instances, you can exclude instances named
   * example-instance by specifying name != example-instance.
   *
   * <p>You can also filter nested fields. For example, you could specify
   * scheduling.automaticRestart = false to include instances only if they are not scheduled for
   * automatic restarts. You can use filtering on nested fields to filter based on resource labels.
   *
   * <p>To filter on multiple expressions, provide each separate expression within parentheses. For
   * example, (scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake"). By default, each
   * expression is an AND expression. However, you can include AND and OR expressions explicitly.
   * For example, (cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND
   * (scheduling.automaticRestart = true).
   */
  public String getFilter() {
    return filter;
  }

  /** API key. Required unless you provide an OAuth 2.0 token. */
  public String getKey() {
    return key;
  }

  /**
   * The maximum number of results per page that should be returned. If the number of available
   * results is larger than maxResults, Compute Engine returns a nextPageToken that can be used to
   * get the next page of results in subsequent list requests. Acceptable values are 0 to 500,
   * inclusive. (Default: 500)
   */
  public Integer getMaxResults() {
    return maxResults;
  }

  /**
   * Sorts list results by a certain order. By default, results are returned in alphanumerical order
   * based on the resource name.
   *
   * <p>You can also sort results in descending order based on the creation timestamp using
   * orderBy="creationTimestamp desc". This sorts results based on the creationTimestamp field in
   * reverse chronological order (newest result first). Use this to sort resources like operations
   * so that the newest operation is returned first.
   *
   * <p>Currently, only sorting by name or creationTimestamp desc is supported.
   */
  public String getOrderBy() {
    return orderBy;
  }

  /**
   * Specifies a page token to use. Set pageToken to the nextPageToken returned by a previous list
   * request to get the next page of results.
   */
  public String getPageToken() {
    return pageToken;
  }

  /** Returns response with indentations and line breaks. */
  public String getPrettyPrint() {
    return prettyPrint;
  }

  /** Alternative to userIp. */
  public String getQuotaUser() {
    return quotaUser;
  }

  /** IP address of the end user for whom the API call is being made. */
  public String getUserIp() {
    return userIp;
  }

  /**
   * The name of the zone where the managed instance group is located. It must have the format
   * {@code {project}/zones/{zone}/instanceGroupManagers}. {@code {zone}} must start with a letter,
   * and contain only letters ({@code [A-Za-z]}), numbers ({@code [0-9]}), dashes ({@code -}),
   * underscores ({@code _}), periods ({@code .}), tildes ({@code ~}), plus ({@code +}) or percent
   * signs ({@code %}). It must be between 3 and 255 characters in length, and it must not start
   * with {@code "goog"}.
   */
  public String getZone() {
    return zone;
  }

  /** Returns a builder seeded from the (all-null) default instance. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  /** Returns a builder seeded with the non-null fields of {@code prototype}. */
  public static Builder newBuilder(ListInstanceGroupManagersHttpRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  // Avoid copying when this is already the default instance.
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  /** Shared immutable instance with every field null. */
  public static ListInstanceGroupManagersHttpRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final ListInstanceGroupManagersHttpRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new ListInstanceGroupManagersHttpRequest();
  }

  /** Mutable builder for {@link ListInstanceGroupManagersHttpRequest}. Not thread-safe. */
  public static class Builder {
    private String access_token;
    private String callback;
    private String fields;
    private String filter;
    private String key;
    private Integer maxResults;
    private String orderBy;
    private String pageToken;
    private String prettyPrint;
    private String quotaUser;
    private String userIp;
    private String zone;

    Builder() {}

    /**
     * Copies every non-null field of {@code other} into this builder; null fields in {@code other}
     * leave the current value untouched. Merging the default instance is a no-op.
     */
    public Builder mergeFrom(ListInstanceGroupManagersHttpRequest other) {
      if (other == ListInstanceGroupManagersHttpRequest.getDefaultInstance()) return this;
      if (other.getAccessToken() != null) {
        this.access_token = other.access_token;
      }
      if (other.getCallback() != null) {
        this.callback = other.callback;
      }
      if (other.getFields() != null) {
        this.fields = other.fields;
      }
      if (other.getFilter() != null) {
        this.filter = other.filter;
      }
      if (other.getKey() != null) {
        this.key = other.key;
      }
      if (other.getMaxResults() != null) {
        this.maxResults = other.maxResults;
      }
      if (other.getOrderBy() != null) {
        this.orderBy = other.orderBy;
      }
      if (other.getPageToken() != null) {
        this.pageToken = other.pageToken;
      }
      if (other.getPrettyPrint() != null) {
        this.prettyPrint = other.prettyPrint;
      }
      if (other.getQuotaUser() != null) {
        this.quotaUser = other.quotaUser;
      }
      if (other.getUserIp() != null) {
        this.userIp = other.userIp;
      }
      if (other.getZone() != null) {
        this.zone = other.zone;
      }
      return this;
    }

    /** Copy constructor: copies all fields (including nulls) from {@code source}. */
    Builder(ListInstanceGroupManagersHttpRequest source) {
      this.access_token = source.access_token;
      this.callback = source.callback;
      this.fields = source.fields;
      this.filter = source.filter;
      this.key = source.key;
      this.maxResults = source.maxResults;
      this.orderBy = source.orderBy;
      this.pageToken = source.pageToken;
      this.prettyPrint = source.prettyPrint;
      this.quotaUser = source.quotaUser;
      this.userIp = source.userIp;
      this.zone = source.zone;
    }

    /** OAuth 2.0 token for the current user. */
    public String getAccessToken() {
      return access_token;
    }

    /** OAuth 2.0 token for the current user. */
    public Builder setAccessToken(String access_token) {
      this.access_token = access_token;
      return this;
    }

    /** Name of the JavaScript callback function that handles the response. */
    public String getCallback() {
      return callback;
    }

    /** Name of the JavaScript callback function that handles the response. */
    public Builder setCallback(String callback) {
      this.callback = callback;
      return this;
    }

    /** Selector specifying a subset of fields to include in the response. */
    public String getFields() {
      return fields;
    }

    /** Selector specifying a subset of fields to include in the response. */
    public Builder setFields(String fields) {
      this.fields = fields;
      return this;
    }

    /**
     * A filter expression that filters resources listed in the response. See
     * {@link ListInstanceGroupManagersHttpRequest#getFilter()} for the full expression syntax.
     */
    public String getFilter() {
      return filter;
    }

    /**
     * A filter expression that filters resources listed in the response. See
     * {@link ListInstanceGroupManagersHttpRequest#getFilter()} for the full expression syntax.
     */
    public Builder setFilter(String filter) {
      this.filter = filter;
      return this;
    }

    /** API key. Required unless you provide an OAuth 2.0 token. */
    public String getKey() {
      return key;
    }

    /** API key. Required unless you provide an OAuth 2.0 token. */
    public Builder setKey(String key) {
      this.key = key;
      return this;
    }

    /**
     * The maximum number of results per page that should be returned. Acceptable values are 0 to
     * 500, inclusive. (Default: 500)
     */
    public Integer getMaxResults() {
      return maxResults;
    }

    /**
     * The maximum number of results per page that should be returned. Acceptable values are 0 to
     * 500, inclusive. (Default: 500)
     */
    public Builder setMaxResults(Integer maxResults) {
      this.maxResults = maxResults;
      return this;
    }

    /**
     * Sorts list results by a certain order. Currently, only sorting by name or
     * creationTimestamp desc is supported.
     */
    public String getOrderBy() {
      return orderBy;
    }

    /**
     * Sorts list results by a certain order. Currently, only sorting by name or
     * creationTimestamp desc is supported.
     */
    public Builder setOrderBy(String orderBy) {
      this.orderBy = orderBy;
      return this;
    }

    /**
     * Specifies a page token to use. Set pageToken to the nextPageToken returned by a previous list
     * request to get the next page of results.
     */
    public String getPageToken() {
      return pageToken;
    }

    /**
     * Specifies a page token to use. Set pageToken to the nextPageToken returned by a previous list
     * request to get the next page of results.
     */
    public Builder setPageToken(String pageToken) {
      this.pageToken = pageToken;
      return this;
    }

    /** Returns response with indentations and line breaks. */
    public String getPrettyPrint() {
      return prettyPrint;
    }

    /** Returns response with indentations and line breaks. */
    public Builder setPrettyPrint(String prettyPrint) {
      this.prettyPrint = prettyPrint;
      return this;
    }

    /** Alternative to userIp. */
    public String getQuotaUser() {
      return quotaUser;
    }

    /** Alternative to userIp. */
    public Builder setQuotaUser(String quotaUser) {
      this.quotaUser = quotaUser;
      return this;
    }

    /** IP address of the end user for whom the API call is being made. */
    public String getUserIp() {
      return userIp;
    }

    /** IP address of the end user for whom the API call is being made. */
    public Builder setUserIp(String userIp) {
      this.userIp = userIp;
      return this;
    }

    /**
     * The name of the zone where the managed instance group is located. It must have the format
     * {@code {project}/zones/{zone}/instanceGroupManagers}. Required.
     */
    public String getZone() {
      return zone;
    }

    /**
     * The name of the zone where the managed instance group is located. It must have the format
     * {@code {project}/zones/{zone}/instanceGroupManagers}. Required.
     */
    public Builder setZone(String zone) {
      this.zone = zone;
      return this;
    }

    /**
     * Builds the immutable request.
     *
     * @throws IllegalStateException if the required {@code zone} field has not been set
     */
    public ListInstanceGroupManagersHttpRequest build() {
      String missing = "";
      // zone is the only required field; all query parameters are optional.
      if (zone == null) {
        missing += " zone";
      }
      if (!missing.isEmpty()) {
        throw new IllegalStateException("Missing required properties:" + missing);
      }
      return new ListInstanceGroupManagersHttpRequest(
          access_token,
          callback,
          fields,
          filter,
          key,
          maxResults,
          orderBy,
          pageToken,
          prettyPrint,
          quotaUser,
          userIp,
          zone);
    }

    // Shallow copy of this builder (all fields are immutable Strings/Integers).
    public Builder clone() {
      Builder newBuilder = new Builder();
      newBuilder.setAccessToken(this.access_token);
      newBuilder.setCallback(this.callback);
      newBuilder.setFields(this.fields);
      newBuilder.setFilter(this.filter);
      newBuilder.setKey(this.key);
      newBuilder.setMaxResults(this.maxResults);
      newBuilder.setOrderBy(this.orderBy);
      newBuilder.setPageToken(this.pageToken);
      newBuilder.setPrettyPrint(this.prettyPrint);
      newBuilder.setQuotaUser(this.quotaUser);
      newBuilder.setUserIp(this.userIp);
      newBuilder.setZone(this.zone);
      return newBuilder;
    }
  }

  @Override
  public String toString() {
    return "ListInstanceGroupManagersHttpRequest{"
        + "access_token="
        + access_token
        + ", "
        + "callback="
        + callback
        + ", "
        + "fields="
        + fields
        + ", "
        + "filter="
        + filter
        + ", "
        + "key="
        + key
        + ", "
        + "maxResults="
        + maxResults
        + ", "
        + "orderBy="
        + orderBy
        + ", "
        + "pageToken="
        + pageToken
        + ", "
        + "prettyPrint="
        + prettyPrint
        + ", "
        + "quotaUser="
        + quotaUser
        + ", "
        + "userIp="
        + userIp
        + ", "
        + "zone="
        + zone
        + "}";
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o instanceof ListInstanceGroupManagersHttpRequest) {
      ListInstanceGroupManagersHttpRequest that = (ListInstanceGroupManagersHttpRequest) o;
      // Compare via getters so the contract matches what callers observe.
      return Objects.equals(this.access_token, that.getAccessToken())
          && Objects.equals(this.callback, that.getCallback())
          && Objects.equals(this.fields, that.getFields())
          && Objects.equals(this.filter, that.getFilter())
          && Objects.equals(this.key, that.getKey())
          && Objects.equals(this.maxResults, that.getMaxResults())
          && Objects.equals(this.orderBy, that.getOrderBy())
          && Objects.equals(this.pageToken, that.getPageToken())
          && Objects.equals(this.prettyPrint, that.getPrettyPrint())
          && Objects.equals(this.quotaUser, that.getQuotaUser())
          && Objects.equals(this.userIp, that.getUserIp())
          && Objects.equals(this.zone, that.getZone());
    }
    return false;
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        access_token,
        callback,
        fields,
        filter,
        key,
        maxResults,
        orderBy,
        pageToken,
        prettyPrint,
        quotaUser,
        userIp,
        zone);
  }
}
package com.example.android.sunshine; import android.content.Intent; import android.database.Cursor; import android.net.Uri; import android.os.Bundle; import android.support.v4.app.LoaderManager; import android.support.v4.content.CursorLoader; import android.support.v4.content.Loader; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.util.Log; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.widget.ProgressBar; import com.example.android.sunshine.data.SunshinePreferences; import com.example.android.sunshine.data.WeatherContract; import com.example.android.sunshine.sync.SunshineSyncUtils; public class MainActivity extends AppCompatActivity implements LoaderManager.LoaderCallbacks<Cursor>, ForecastAdapter.ForecastAdapterOnClickHandler { private final String TAG = MainActivity.class.getSimpleName(); /* * The columns of data that we are interested in displaying within our MainActivity's list of * weather data. */ public static final String[] MAIN_FORECAST_PROJECTION = { WeatherContract.WeatherEntry.COLUMN_DATE, WeatherContract.WeatherEntry.COLUMN_MAX_TEMP, WeatherContract.WeatherEntry.COLUMN_MIN_TEMP, WeatherContract.WeatherEntry.COLUMN_WEATHER_ID, }; /* * We store the indices of the values in the array of Strings above to more quickly be able to * access the data from our query. If the order of the Strings above changes, these indices * must be adjusted to match the order of the Strings. */ public static final int INDEX_WEATHER_DATE = 0; public static final int INDEX_WEATHER_MAX_TEMP = 1; public static final int INDEX_WEATHER_MIN_TEMP = 2; public static final int INDEX_WEATHER_CONDITION_ID = 3; /* * This ID will be used to identify the Loader responsible for loading our weather forecast. In * some cases, one Activity can deal with many Loaders. 
However, in our case, there is only one. * We will still use this ID to initialize the loader and create the loader for best practice. * Please note that 44 was chosen arbitrarily. You can use whatever number you like, so long as * it is unique and consistent. */ private static final int ID_FORECAST_LOADER = 44; private ForecastAdapter mForecastAdapter; private RecyclerView mRecyclerView; private int mPosition = RecyclerView.NO_POSITION; private ProgressBar mLoadingIndicator; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_forecast); getSupportActionBar().setElevation(0f); /* * Using findViewById, we get a reference to our RecyclerView from xml. This allows us to * do things like set the adapter of the RecyclerView and toggle the visibility. */ mRecyclerView = (RecyclerView) findViewById(R.id.recyclerview_forecast); /* * The ProgressBar that will indicate to the user that we are loading data. It will be * hidden when no data is loading. * * Please note: This so called "ProgressBar" isn't a bar by default. It is more of a * circle. We didn't make the rules (or the names of Views), we just follow them. */ mLoadingIndicator = (ProgressBar) findViewById(R.id.pb_loading_indicator); /* * A LinearLayoutManager is responsible for measuring and positioning item views within a * RecyclerView into a linear list. This means that it can produce either a horizontal or * vertical list depending on which parameter you pass in to the LinearLayoutManager * constructor. In our case, we want a vertical list, so we pass in the constant from the * LinearLayoutManager class for vertical lists, LinearLayoutManager.VERTICAL. * * There are other LayoutManagers available to display your data in uniform grids, * staggered grids, and more! See the developer documentation for more details. * * The third parameter (shouldReverseLayout) should be true if you want to reverse your * layout. 
Generally, this is only true with horizontal lists that need to support a * right-to-left layout. */ LinearLayoutManager layoutManager = new LinearLayoutManager(this, LinearLayoutManager.VERTICAL, false); /* setLayoutManager associates the LayoutManager we created above with our RecyclerView */ mRecyclerView.setLayoutManager(layoutManager); /* * Use this setting to improve performance if you know that changes in content do not * change the child layout size in the RecyclerView */ mRecyclerView.setHasFixedSize(true); /* * The ForecastAdapter is responsible for linking our weather data with the Views that * will end up displaying our weather data. * * Although passing in "this" twice may seem strange, it is actually a sign of separation * of concerns, which is best programming practice. The ForecastAdapter requires an * Android Context (which all Activities are) as well as an onClickHandler. Since our * MainActivity implements the ForecastAdapter ForecastOnClickHandler interface, "this" * is also an instance of that type of handler. */ mForecastAdapter = new ForecastAdapter(this, this); /* Setting the adapter attaches it to the RecyclerView in our layout. */ mRecyclerView.setAdapter(mForecastAdapter); showLoading(); /* * Ensures a loader is initialized and active. If the loader doesn't already exist, one is * created and (if the activity/fragment is currently started) starts the loader. Otherwise * the last created loader is re-used. */ getSupportLoaderManager().initLoader(ID_FORECAST_LOADER, null, this); SunshineSyncUtils.initialize(this); } /** * Uses the URI scheme for showing a location found on a map in conjunction with * an implicit Intent. 
This super-handy Intent is detailed in the "Common Intents" page of * Android's developer site: * * @see "http://developer.android.com/guide/components/intents-common.html#Maps" * <p> * Protip: Hold Command on Mac or Control on Windows and click that link to automagically * open the Common Intents page */ private void openPreferredLocationInMap() { double[] coords = SunshinePreferences.getLocationCoordinates(this); String posLat = Double.toString(coords[0]); String posLong = Double.toString(coords[1]); Uri geoLocation = Uri.parse("geo:" + posLat + "," + posLong); Intent intent = new Intent(Intent.ACTION_VIEW); intent.setData(geoLocation); if (intent.resolveActivity(getPackageManager()) != null) { startActivity(intent); } else { Log.d(TAG, "Couldn't call " + geoLocation.toString() + ", no receiving apps installed!"); } } /** * Called by the {@link android.support.v4.app.LoaderManagerImpl} when a new Loader needs to be * created. This Activity only uses one loader, so we don't necessarily NEED to check the * loaderId, but this is certainly best practice. * * @param loaderId The loader ID for which we need to create a loader * @param bundle Any arguments supplied by the caller * @return A new Loader instance that is ready to start loading. */ @Override public Loader<Cursor> onCreateLoader(int loaderId, Bundle bundle) { switch (loaderId) { case ID_FORECAST_LOADER: /* URI for all rows of weather data in our weather table */ Uri forecastQueryUri = WeatherContract.WeatherEntry.CONTENT_URI; /* Sort order: Ascending by date */ String sortOrder = WeatherContract.WeatherEntry.COLUMN_DATE + " ASC"; /* * A SELECTION in SQL declares which rows you'd like to return. In our case, we * want all weather data from today onwards that is stored in our weather table. * We created a handy method to do that in our WeatherEntry class. 
*/ String selection = WeatherContract.WeatherEntry.getSqlSelectForTodayOnwards(); return new CursorLoader(this, forecastQueryUri, MAIN_FORECAST_PROJECTION, selection, null, sortOrder); default: throw new RuntimeException("Loader Not Implemented: " + loaderId); } } /** * Called when a Loader has finished loading its data. * * NOTE: There is one small bug in this code. If no data is present in the cursor do to an * initial load being performed with no access to internet, the loading indicator will show * indefinitely, until data is present from the ContentProvider. This will be fixed in a * future version of the course. * * @param loader The Loader that has finished. * @param data The data generated by the Loader. */ @Override public void onLoadFinished(Loader<Cursor> loader, Cursor data) { mForecastAdapter.swapCursor(data); if (mPosition == RecyclerView.NO_POSITION) mPosition = 0; mRecyclerView.smoothScrollToPosition(mPosition); if (data.getCount() != 0) showWeatherDataView(); } /** * Called when a previously created loader is being reset, and thus making its data unavailable. * The application should at this point remove any references it has to the Loader's data. * * @param loader The Loader that is being reset. */ @Override public void onLoaderReset(Loader<Cursor> loader) { /* * Since this Loader's data is now invalid, we need to clear the Adapter that is * displaying the data. */ mForecastAdapter.swapCursor(null); } /** * This method is for responding to clicks from our list. * * @param date Normalized UTC time that represents the local date of the weather in GMT time. 
* @see WeatherContract.WeatherEntry#COLUMN_DATE */ @Override public void onClick(long date) { Intent weatherDetailIntent = new Intent(MainActivity.this, DetailActivity.class); Uri uriForDateClicked = WeatherContract.WeatherEntry.buildWeatherUriWithDate(date); weatherDetailIntent.setData(uriForDateClicked); startActivity(weatherDetailIntent); } /** * This method will make the View for the weather data visible and hide the error message and * loading indicator. * <p> * Since it is okay to redundantly set the visibility of a View, we don't need to check whether * each view is currently visible or invisible. */ private void showWeatherDataView() { /* First, hide the loading indicator */ mLoadingIndicator.setVisibility(View.INVISIBLE); /* Finally, make sure the weather data is visible */ mRecyclerView.setVisibility(View.VISIBLE); } /** * This method will make the loading indicator visible and hide the weather View and error * message. * <p> * Since it is okay to redundantly set the visibility of a View, we don't need to check whether * each view is currently visible or invisible. */ private void showLoading() { /* Then, hide the weather data */ mRecyclerView.setVisibility(View.INVISIBLE); /* Finally, show the loading indicator */ mLoadingIndicator.setVisibility(View.VISIBLE); } /** * This is where we inflate and set up the menu for this Activity. * * @param menu The options menu in which you place your items. * * @return You must return true for the menu to be displayed; * if you return false it will not be shown. 
* * @see #onPrepareOptionsMenu * @see #onOptionsItemSelected */ @Override public boolean onCreateOptionsMenu(Menu menu) { /* Use AppCompatActivity's method getMenuInflater to get a handle on the menu inflater */ MenuInflater inflater = getMenuInflater(); /* Use the inflater's inflate method to inflate our menu layout to this menu */ inflater.inflate(R.menu.forecast, menu); /* Return true so that the menu is displayed in the Toolbar */ return true; } /** * Callback invoked when a menu item was selected from this Activity's menu. * * @param item The menu item that was selected by the user * * @return true if you handle the menu click here, false otherwise */ @Override public boolean onOptionsItemSelected(MenuItem item) { int id = item.getItemId(); if (id == R.id.action_settings) { startActivity(new Intent(this, SettingsActivity.class)); return true; } if (id == R.id.action_map) { openPreferredLocationInMap(); return true; } return super.onOptionsItemSelected(item); } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.sql.planner; import com.facebook.presto.Session; import com.facebook.presto.execution.StageInfo; import com.facebook.presto.execution.StageStats; import com.facebook.presto.execution.TaskInfo; import com.facebook.presto.metadata.Metadata; import com.facebook.presto.metadata.OperatorNotFoundException; import com.facebook.presto.metadata.Signature; import com.facebook.presto.metadata.TableHandle; import com.facebook.presto.metadata.TableLayout; import com.facebook.presto.operator.OperatorStats; import com.facebook.presto.operator.PipelineStats; import com.facebook.presto.operator.TaskStats; import com.facebook.presto.spi.ColumnHandle; import com.facebook.presto.spi.ConnectorTableLayoutHandle; import com.facebook.presto.spi.predicate.Domain; import com.facebook.presto.spi.predicate.Marker; import com.facebook.presto.spi.predicate.NullableValue; import com.facebook.presto.spi.predicate.Range; import com.facebook.presto.spi.predicate.TupleDomain; import com.facebook.presto.spi.type.Type; import com.facebook.presto.sql.FunctionInvoker; import com.facebook.presto.sql.planner.plan.AggregationNode; import com.facebook.presto.sql.planner.plan.ApplyNode; import com.facebook.presto.sql.planner.plan.AssignUniqueId; import com.facebook.presto.sql.planner.plan.DeleteNode; import com.facebook.presto.sql.planner.plan.DistinctLimitNode; import com.facebook.presto.sql.planner.plan.EnforceSingleRowNode; import 
com.facebook.presto.sql.planner.plan.ExceptNode; import com.facebook.presto.sql.planner.plan.ExchangeNode; import com.facebook.presto.sql.planner.plan.ExchangeNode.Scope; import com.facebook.presto.sql.planner.plan.ExplainAnalyzeNode; import com.facebook.presto.sql.planner.plan.FilterNode; import com.facebook.presto.sql.planner.plan.GroupIdNode; import com.facebook.presto.sql.planner.plan.IndexJoinNode; import com.facebook.presto.sql.planner.plan.IndexSourceNode; import com.facebook.presto.sql.planner.plan.IntersectNode; import com.facebook.presto.sql.planner.plan.JoinNode; import com.facebook.presto.sql.planner.plan.LimitNode; import com.facebook.presto.sql.planner.plan.MarkDistinctNode; import com.facebook.presto.sql.planner.plan.MetadataDeleteNode; import com.facebook.presto.sql.planner.plan.OutputNode; import com.facebook.presto.sql.planner.plan.PlanFragmentId; import com.facebook.presto.sql.planner.plan.PlanNode; import com.facebook.presto.sql.planner.plan.PlanNodeId; import com.facebook.presto.sql.planner.plan.PlanVisitor; import com.facebook.presto.sql.planner.plan.ProjectNode; import com.facebook.presto.sql.planner.plan.RemoteSourceNode; import com.facebook.presto.sql.planner.plan.RowNumberNode; import com.facebook.presto.sql.planner.plan.SampleNode; import com.facebook.presto.sql.planner.plan.SemiJoinNode; import com.facebook.presto.sql.planner.plan.SortNode; import com.facebook.presto.sql.planner.plan.TableFinishNode; import com.facebook.presto.sql.planner.plan.TableScanNode; import com.facebook.presto.sql.planner.plan.TableWriterNode; import com.facebook.presto.sql.planner.plan.TopNNode; import com.facebook.presto.sql.planner.plan.TopNRowNumberNode; import com.facebook.presto.sql.planner.plan.UnionNode; import com.facebook.presto.sql.planner.plan.UnnestNode; import com.facebook.presto.sql.planner.plan.ValuesNode; import com.facebook.presto.sql.planner.plan.WindowNode; import com.facebook.presto.sql.tree.ComparisonExpression; import 
com.facebook.presto.sql.tree.Expression; import com.facebook.presto.sql.tree.FunctionCall; import com.facebook.presto.sql.tree.SymbolReference; import com.facebook.presto.util.GraphvizPrinter; import com.google.common.base.CaseFormat; import com.google.common.base.Functions; import com.google.common.base.Joiner; import com.google.common.base.Strings; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import io.airlift.slice.Slice; import io.airlift.units.DataSize; import io.airlift.units.Duration; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; import static com.facebook.presto.execution.StageInfo.getAllStages; import static com.facebook.presto.spi.type.VarcharType.VARCHAR; import static com.facebook.presto.sql.planner.DomainUtils.simplifyDomain; import static com.facebook.presto.sql.planner.SystemPartitioningHandle.SINGLE_DISTRIBUTION; import static com.facebook.presto.util.ImmutableCollectors.toImmutableList; import static com.google.common.base.CaseFormat.UPPER_UNDERSCORE; import static com.google.common.base.Preconditions.checkArgument; import static io.airlift.units.DataSize.Unit.BYTE; import static io.airlift.units.DataSize.succinctBytes; import static io.airlift.units.DataSize.succinctDataSize; import static java.lang.Double.isFinite; import static java.lang.String.format; import static java.util.Objects.requireNonNull; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.stream.Collectors.toList; public class PlanPrinter { private final StringBuilder output = new StringBuilder(); private final Metadata metadata; private final Optional<Map<PlanNodeId, PlanNodeStats>> stats; private 
PlanPrinter(PlanNode plan, Map<Symbol, Type> types, Metadata metadata, Session sesion) { this(plan, types, metadata, sesion, 0); } private PlanPrinter(PlanNode plan, Map<Symbol, Type> types, Metadata metadata, Session session, int indent) { requireNonNull(plan, "plan is null"); requireNonNull(types, "types is null"); requireNonNull(metadata, "metadata is null"); this.metadata = metadata; this.stats = Optional.empty(); Visitor visitor = new Visitor(types, session); plan.accept(visitor, indent); } private PlanPrinter(PlanNode plan, Map<Symbol, Type> types, Metadata metadata, Session session, Map<PlanNodeId, PlanNodeStats> stats, int indent) { requireNonNull(plan, "plan is null"); requireNonNull(types, "types is null"); requireNonNull(metadata, "metadata is null"); this.metadata = metadata; this.stats = Optional.of(stats); Visitor visitor = new Visitor(types, session); plan.accept(visitor, indent); } @Override public String toString() { return output.toString(); } public static String textLogicalPlan(PlanNode plan, Map<Symbol, Type> types, Metadata metadata, Session session) { return new PlanPrinter(plan, types, metadata, session).toString(); } public static String textLogicalPlan(PlanNode plan, Map<Symbol, Type> types, Metadata metadata, Session session, int indent) { return new PlanPrinter(plan, types, metadata, session, indent).toString(); } public static String textLogicalPlan(PlanNode plan, Map<Symbol, Type> types, Metadata metadata, Session session, Map<PlanNodeId, PlanNodeStats> stats, int indent) { return new PlanPrinter(plan, types, metadata, session, stats, indent).toString(); } public static String textDistributedPlan(List<StageInfo> stages, Metadata metadata, Session session) { StringBuilder builder = new StringBuilder(); List<StageInfo> allStages = stages.stream() .flatMap(stage -> getAllStages(Optional.of(stage)).stream()) .collect(toImmutableList()); for (StageInfo stageInfo : allStages) { Map<PlanNodeId, PlanNodeStats> aggregatedStats = new 
HashMap<>(); List<PlanNodeStats> planNodeStats = stageInfo.getTasks().stream() .map(TaskInfo::getStats) .flatMap(taskStats -> getPlanNodeStats(taskStats).stream()) .collect(toList()); for (PlanNodeStats stats : planNodeStats) { aggregatedStats.merge(stats.getPlanNodeId(), stats, PlanNodeStats::merge); } builder.append(formatFragment(metadata, session, stageInfo.getPlan(), Optional.of(stageInfo.getStageStats()), Optional.of(aggregatedStats))); } return builder.toString(); } private static List<PlanNodeStats> getPlanNodeStats(TaskStats taskStats) { // Best effort to reconstruct the plan nodes from operators. // Because stats are collected separately from query execution, // it's possible that some or all of them are missing or out of date. // For example, a LIMIT clause can cause a query to finish before stats // are collected from the leaf stages. Map<PlanNodeId, Long> outputPositions = new HashMap<>(); Map<PlanNodeId, Long> outputBytes = new HashMap<>(); Map<PlanNodeId, Long> wallMillis = new HashMap<>(); for (PipelineStats pipelineStats : taskStats.getPipelines()) { Map<PlanNodeId, Long> pipelineOutputPositions = new HashMap<>(); Map<PlanNodeId, Long> pipelineOutputBytes = new HashMap<>(); List<OperatorStats> operatorSummaries = pipelineStats.getOperatorSummaries(); for (int i = 0; i < operatorSummaries.size(); i++) { OperatorStats operatorStats = operatorSummaries.get(i); PlanNodeId planNodeId = operatorStats.getPlanNodeId(); long wall = operatorStats.getAddInputWall().toMillis() + operatorStats.getGetOutputWall().toMillis() + operatorStats.getFinishWall().toMillis(); wallMillis.merge(planNodeId, wall, Long::sum); // An "internal" pipeline like a hash build, links to another pipeline which is the actual output for this plan node if (i == operatorSummaries.size() - 1 && !pipelineStats.isOutputPipeline()) { pipelineOutputBytes.remove(planNodeId); pipelineOutputPositions.remove(planNodeId); } else { // Overwrite whatever we currently have, to get the last operator's 
stats for this plan node in this pipeline pipelineOutputPositions.put(planNodeId, operatorStats.getOutputPositions()); pipelineOutputBytes.put(planNodeId, operatorStats.getOutputDataSize().toBytes()); } } for (Map.Entry<PlanNodeId, Long> entry : pipelineOutputPositions.entrySet()) { outputBytes.merge(entry.getKey(), pipelineOutputBytes.get(entry.getKey()), Long::sum); outputPositions.merge(entry.getKey(), entry.getValue(), Long::sum); } } List<PlanNodeStats> stats = new ArrayList<>(); for (Map.Entry<PlanNodeId, Long> entry : wallMillis.entrySet()) { if (outputPositions.containsKey(entry.getKey())) { stats.add(new PlanNodeStats(entry.getKey(), new Duration(entry.getValue(), MILLISECONDS), outputPositions.get(entry.getKey()), succinctDataSize(outputBytes.get(entry.getKey()), BYTE))); } else { // It's possible there will be no output stats because all the pipelines that we observed were non-output. // For example in a query like SELECT * FROM a JOIN b ON c = d LIMIT 1 // It's possible to observe stats after the build starts, but before the probe does // and therefore only have wall time, but no output stats stats.add(new PlanNodeStats(entry.getKey(), new Duration(entry.getValue(), MILLISECONDS))); } } return stats; } public static String textDistributedPlan(SubPlan plan, Metadata metadata, Session session) { StringBuilder builder = new StringBuilder(); for (PlanFragment fragment : plan.getAllFragments()) { builder.append(formatFragment(metadata, session, fragment, Optional.empty(), Optional.empty())); } return builder.toString(); } private static String formatFragment(Metadata metadata, Session session, PlanFragment fragment, Optional<StageStats> stageStats, Optional<Map<PlanNodeId, PlanNodeStats>> planNodeStats) { StringBuilder builder = new StringBuilder(); builder.append(format("Fragment %s [%s]\n", fragment.getId(), fragment.getPartitioning())); if (stageStats.isPresent()) { builder.append(indentString(1)) .append(format("Cost: CPU %s, Input %d (%s), Output %d 
(%s)\n", stageStats.get().getTotalCpuTime(), stageStats.get().getProcessedInputPositions(), stageStats.get().getProcessedInputDataSize(), stageStats.get().getOutputPositions(), stageStats.get().getOutputDataSize())); } PartitioningScheme partitioningScheme = fragment.getPartitioningScheme(); builder.append(indentString(1)) .append(format("Output layout: [%s]\n", Joiner.on(", ").join(partitioningScheme.getOutputLayout()))); boolean replicateNulls = partitioningScheme.isReplicateNulls(); List<String> arguments = partitioningScheme.getPartitioning().getArguments().stream() .map(argument -> { if (argument.isConstant()) { NullableValue constant = argument.getConstant(); String printableValue = castToVarchar(constant.getType(), constant.getValue(), metadata, session); return constant.getType().getDisplayName() + "(" + printableValue + ")"; } return argument.getColumn().toString(); }) .collect(toImmutableList()); builder.append(indentString(1)); if (replicateNulls) { builder.append(format("Output partitioning: %s (replicate nulls) [%s]\n", partitioningScheme.getPartitioning().getHandle(), Joiner.on(", ").join(arguments))); } else { builder.append(format("Output partitioning: %s [%s]\n", partitioningScheme.getPartitioning().getHandle(), Joiner.on(", ").join(arguments))); } if (stageStats.isPresent()) { builder.append(textLogicalPlan(fragment.getRoot(), fragment.getSymbols(), metadata, session, planNodeStats.get(), 1)) .append("\n"); } else { builder.append(textLogicalPlan(fragment.getRoot(), fragment.getSymbols(), metadata, session, 1)) .append("\n"); } return builder.toString(); } public static String graphvizLogicalPlan(PlanNode plan, Map<Symbol, Type> types) { PlanFragment fragment = new PlanFragment( new PlanFragmentId("graphviz_plan"), plan, types, SINGLE_DISTRIBUTION, ImmutableList.of(plan.getId()), new PartitioningScheme(Partitioning.create(SINGLE_DISTRIBUTION, ImmutableList.of()), plan.getOutputSymbols())); return 
GraphvizPrinter.printLogical(ImmutableList.of(fragment));
}

/** Graphviz rendering of a distributed (fragmented) plan. */
public static String graphvizDistributedPlan(SubPlan plan)
{
    return GraphvizPrinter.printDistributed(plan);
}

// Appends one indented line to the output buffer; args are applied via String.format
// only when present, so a pre-formatted string containing '%' is passed through safely.
private void print(int indent, String format, Object... args)
{
    String value;

    if (args.length == 0) {
        value = format;
    }
    else {
        value = format(format, args);
    }

    output.append(indentString(indent)).append(value).append('\n');
}

// Appends a "Cost: ..., Output: ..." line for the given node, expressing its wall time
// as a fraction of the total wall time across all nodes. No-op when stats are absent.
private void printStats(int indent, PlanNodeId planNodeId)
{
    if (!stats.isPresent()) {
        return;
    }
    long totalMillis = stats.get().values().stream()
            .mapToLong(node -> node.getWallTime().toMillis())
            .sum();

    // NOTE: this local deliberately shadows the enclosing 'stats' field for the rest of the method.
    PlanNodeStats stats = this.stats.get().get(planNodeId);
    if (stats == null) {
        output.append(indentString(indent))
                .append("Cost: unknown, Output: unknown \n");
        return;
    }

    double fraction = (stats.getWallTime().toMillis()) / (double) totalMillis;
    String fractionString;
    // isFinite guards the totalMillis == 0 case (0/0 yields NaN, x/0 yields Infinity).
    if (isFinite(fraction)) {
        fractionString = format("%.2f%%", 100.0 * fraction);
    }
    else {
        fractionString = "unknown";
    }

    String outputString;
    if (stats.getOutputPositions().isPresent() && stats.getOutputDataSize().isPresent()) {
        outputString = format("%s rows (%s)", stats.getOutputPositions().get(), stats.getOutputDataSize().get());
    }
    else {
        outputString = "unknown";
    }

    output.append(indentString(indent))
            .append(format("Cost: %s, Output: %s\n", fractionString, outputString));
}

// Four spaces per indent level.
private static String indentString(int indent)
{
    return Strings.repeat("    ", indent);
}

/**
 * Walks the plan tree and prints one entry per node; the Integer context is the
 * current indent level.
 */
private class Visitor
        extends PlanVisitor<Integer, Void>
{
    private final Map<Symbol, Type> types;
    private final Session session;

    @SuppressWarnings("AssignmentToCollectionOrArrayFieldFromParameter")
    public Visitor(Map<Symbol, Type> types, Session session)
    {
        this.types = types;
        this.session = session;
    }

    @Override
    public Void visitExplainAnalyze(ExplainAnalyzeNode node, Integer indent)
    {
        print(indent, "- ExplainAnalyze => [%s]", formatOutputs(node.getOutputSymbols()));
        printStats(indent + 2, node.getId());
        return processChildren(node, indent + 1);
    }

    @Override
public Void visitJoin(JoinNode node, Integer indent)
{
    // Render equi-join criteria as "left = right" expressions, plus any extra filter.
    List<Expression> joinExpressions = new ArrayList<>();
    for (JoinNode.EquiJoinClause clause : node.getCriteria()) {
        joinExpressions.add(new ComparisonExpression(ComparisonExpression.Type.EQUAL,
                clause.getLeft().toSymbolReference(),
                clause.getRight().toSymbolReference()));
    }
    node.getFilter().ifPresent(expression -> joinExpressions.add(expression));

    print(indent, "- %s[%s] => [%s]", node.getType().getJoinLabel(), Joiner.on(" AND ").join(joinExpressions), formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());
    // Children are visited explicitly (not via processChildren) to fix left-then-right order.
    node.getLeft().accept(this, indent + 1);
    node.getRight().accept(this, indent + 1);

    return null;
}

@Override
public Void visitSemiJoin(SemiJoinNode node, Integer indent)
{
    print(indent, "- SemiJoin[%s = %s] => [%s]", node.getSourceJoinSymbol(), node.getFilteringSourceJoinSymbol(), formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());
    node.getSource().accept(this, indent + 1);
    node.getFilteringSource().accept(this, indent + 1);

    return null;
}

@Override
public Void visitIndexSource(IndexSourceNode node, Integer indent)
{
    print(indent, "- IndexSource[%s, lookup = %s] => [%s]", node.getIndexHandle(), node.getLookupSymbols(), formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());
    // Only assignments that contribute to the output are printed.
    for (Map.Entry<Symbol, ColumnHandle> entry : node.getAssignments().entrySet()) {
        if (node.getOutputSymbols().contains(entry.getKey())) {
            print(indent + 2, "%s := %s", entry.getKey(), entry.getValue());
        }
    }
    return null;
}

@Override
public Void visitIndexJoin(IndexJoinNode node, Integer indent)
{
    List<Expression> joinExpressions = new ArrayList<>();
    for (IndexJoinNode.EquiJoinClause clause : node.getCriteria()) {
        joinExpressions.add(new ComparisonExpression(ComparisonExpression.Type.EQUAL,
                clause.getProbe().toSymbolReference(),
                clause.getIndex().toSymbolReference()));
    }

    print(indent, "- %sIndexJoin[%s] => [%s]", node.getType().getJoinLabel(), Joiner.on(" AND ").join(joinExpressions), formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());
    node.getProbeSource().accept(this, indent + 1);
    node.getIndexSource().accept(this, indent + 1);

    return null;
}

@Override
public Void visitLimit(LimitNode node, Integer indent)
{
    print(indent, "- Limit%s[%s] => [%s]", node.isPartial() ? "Partial" : "", node.getCount(), formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());
    return processChildren(node, indent + 1);
}

@Override
public Void visitDistinctLimit(DistinctLimitNode node, Integer indent)
{
    print(indent, "- DistinctLimit%s[%s] => [%s]", node.isPartial() ? "Partial" : "", node.getLimit(), formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());
    return processChildren(node, indent + 1);
}

@Override
public Void visitAggregation(AggregationNode node, Integer indent)
{
    // Step suffix is printed only for non-SINGLE (partial/final/intermediate) aggregations.
    String type = "";
    if (node.getStep() != AggregationNode.Step.SINGLE) {
        type = format("(%s)", node.getStep().toString());
    }
    String key = "";
    if (!node.getGroupingKeys().isEmpty()) {
        key = node.getGroupingKeys().toString();
    }
    String sampleWeight = "";
    if (node.getSampleWeight().isPresent()) {
        sampleWeight = format("[sampleWeight = %s]", node.getSampleWeight().get());
    }

    print(indent, "- Aggregate%s%s%s => [%s]", type, key, sampleWeight, formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());

    // One line per aggregation, annotated with its mask symbol when one exists.
    for (Map.Entry<Symbol, FunctionCall> entry : node.getAggregations().entrySet()) {
        if (node.getMasks().containsKey(entry.getKey())) {
            print(indent + 2, "%s := %s (mask = %s)", entry.getKey(), entry.getValue(), node.getMasks().get(entry.getKey()));
        }
        else {
            print(indent + 2, "%s := %s", entry.getKey(), entry.getValue());
        }
    }

    return processChildren(node, indent + 1);
}

@Override
public Void visitGroupId(GroupIdNode node, Integer indent)
{
    print(indent, "- GroupId%s => [%s]", node.getGroupingSets(), formatOutputs(node.getOutputSymbols()));
    return processChildren(node, indent + 1);
}

@Override
public Void visitMarkDistinct(MarkDistinctNode node, Integer indent)
{
    print(indent, "- MarkDistinct[distinct=%s marker=%s] => [%s]", formatOutputs(node.getDistinctSymbols()), node.getMarkerSymbol(), formatOutputs(node.getOutputSymbols()));

    printStats(indent + 2, node.getId());
    return processChildren(node, indent + 1);
}

@Override
public Void visitWindow(WindowNode node, Integer indent)
{
    List<String> partitionBy = Lists.transform(node.getPartitionBy(), Functions.toStringFunction());

    List<String> orderBy = Lists.transform(node.getOrderBy(), input -> input + " " + node.getOrderings().get(input));

    List<String> args = new ArrayList<>();
    if (!partitionBy.isEmpty()) {
        // Pre-partitioned inputs are wrapped in <...> to distinguish them in the output.
        List<Symbol> prePartitioned = node.getPartitionBy().stream()
                .filter(node.getPrePartitionedInputs()::contains)
                .collect(toImmutableList());

        List<Symbol> notPrePartitioned = node.getPartitionBy().stream()
                .filter(column -> !node.getPrePartitionedInputs().contains(column))
                .collect(toImmutableList());

        StringBuilder builder = new StringBuilder();
        if (!prePartitioned.isEmpty()) {
            builder.append("<")
                    .append(Joiner.on(", ").join(prePartitioned))
                    .append(">");
            if (!notPrePartitioned.isEmpty()) {
                builder.append(", ");
            }
        }
        if (!notPrePartitioned.isEmpty()) {
            builder.append(Joiner.on(", ").join(notPrePartitioned));
        }
        args.add(format("partition by (%s)", builder));
    }
    if (!orderBy.isEmpty()) {
        // The first getPreSortedOrderPrefix() sort keys are already sorted; mark them with <...>.
        args.add(format("order by (%s)", Stream.concat(
                node.getOrderBy().stream()
                        .limit(node.getPreSortedOrderPrefix())
                        .map(symbol -> "<" + symbol + ">"),
                node.getOrderBy().stream()
                        .skip(node.getPreSortedOrderPrefix())
                        .map(Symbol::toString))
                .collect(Collectors.joining(", "))));
    }

    print(indent, "- Window[%s] => [%s]", Joiner.on(", ").join(args), formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());

    for (Map.Entry<Symbol, WindowNode.Function> entry : node.getWindowFunctions().entrySet()) {
        FunctionCall call = entry.getValue().getFunctionCall();
        print(indent + 2, "%s := %s(%s)", entry.getKey(), call.getName(), Joiner.on(", ").join(call.getArguments()));
    }
    return processChildren(node, indent + 1);
}

@Override
public Void visitTopNRowNumber(TopNRowNumberNode node, Integer indent)
{
    List<String> partitionBy = Lists.transform(node.getPartitionBy(), Functions.toStringFunction());

    List<String> orderBy = Lists.transform(node.getOrderBy(), input -> input + " " + node.getOrderings().get(input));

    List<String> args = new ArrayList<>();
    args.add(format("partition by (%s)", Joiner.on(", ").join(partitionBy)));
    args.add(format("order by (%s)", Joiner.on(", ").join(orderBy)));

    print(indent, "- TopNRowNumber[%s limit %s] => [%s]", Joiner.on(", ").join(args), node.getMaxRowCountPerPartition(), formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());

    print(indent + 2, "%s := %s", node.getRowNumberSymbol(), "row_number()");

    return processChildren(node, indent + 1);
}

@Override
public Void visitRowNumber(RowNumberNode node, Integer indent)
{
    List<String> partitionBy = Lists.transform(node.getPartitionBy(), Functions.toStringFunction());
    List<String> args = new ArrayList<>();
    if (!partitionBy.isEmpty()) {
        args.add(format("partition by (%s)", Joiner.on(", ").join(partitionBy)));
    }

    if (node.getMaxRowCountPerPartition().isPresent()) {
        args.add(format("limit = %s", node.getMaxRowCountPerPartition().get()));
    }

    print(indent, "- RowNumber[%s] => [%s]", Joiner.on(", ").join(args), formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());

    print(indent + 2, "%s := %s", node.getRowNumberSymbol(), "row_number()");

    return processChildren(node, indent + 1);
}

@Override
public Void visitTableScan(TableScanNode node, Integer indent)
{
    TableHandle table = node.getTable();
    print(indent, "- TableScan[%s, originalConstraint = %s] => [%s]", table, node.getOriginalConstraint(), formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());

    // Effective predicate comes from the chosen layout, if any; otherwise "all".
    TupleDomain<ColumnHandle> predicate = node.getLayout()
            .map(layoutHandle -> metadata.getLayout(session, layoutHandle))
            .map(TableLayout::getPredicate)
            .orElse(TupleDomain.<ColumnHandle>all());

    if (node.getLayout().isPresent()) {
        // TODO: find a better way to do this
        ConnectorTableLayoutHandle layout = node.getLayout().get().getConnectorHandle();
        if (!table.getConnectorHandle().toString().equals(layout.toString())) {
            print(indent + 2, "LAYOUT: %s", layout);
        }
    }

    if (predicate.isNone()) {
        print(indent + 2, ":: NONE");
    }
    else {
        // first, print output columns and their constraints
        for (Map.Entry<Symbol, ColumnHandle> assignment : node.getAssignments().entrySet()) {
            ColumnHandle column = assignment.getValue();
            print(indent + 2, "%s := %s", assignment.getKey(), column);
            printConstraint(indent + 3, column, predicate);
        }

        // then, print constraints for columns that are not in the output
        if (!predicate.isAll()) {
            Set<ColumnHandle> outputs = ImmutableSet.copyOf(node.getAssignments().values());

            predicate.getDomains().get()
                    .entrySet().stream()
                    .filter(entry -> !outputs.contains(entry.getKey()))
                    .forEach(entry -> {
                        ColumnHandle column = entry.getKey();
                        print(indent + 2, "%s", column);
                        printConstraint(indent + 3, column, predicate);
                    });
        }
    }

    return null;
}

@Override
public Void visitValues(ValuesNode node, Integer indent)
{
    print(indent, "- Values => [%s]", formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());
    for (List<Expression> row : node.getRows()) {
        print(indent + 2, "(" + Joiner.on(", ").join(row) + ")");
    }
    return null;
}

@Override
public Void visitFilter(FilterNode node, Integer indent)
{
    print(indent, "- Filter[%s] => [%s]", node.getPredicate(), formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());
    return processChildren(node, indent + 1);
}

@Override
public Void visitProject(ProjectNode node, Integer indent)
{
    print(indent, "- Project => [%s]", formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());
    for (Map.Entry<Symbol, Expression> entry :
node.getAssignments().entrySet()) {
    if (entry.getValue() instanceof SymbolReference && ((SymbolReference) entry.getValue()).getName().equals(entry.getKey().getName())) {
        // skip identity assignments
        continue;
    }
    print(indent + 2, "%s := %s", entry.getKey(), entry.getValue());
}

return processChildren(node, indent + 1);
}

@Override
public Void visitUnnest(UnnestNode node, Integer indent)
{
    print(indent, "- Unnest [replicate=%s, unnest=%s] => [%s]", formatOutputs(node.getReplicateSymbols()), formatOutputs(node.getUnnestSymbols().keySet()), formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());
    return processChildren(node, indent + 1);
}

@Override
public Void visitOutput(OutputNode node, Integer indent)
{
    print(indent, "- Output[%s] => [%s]", Joiner.on(", ").join(node.getColumnNames()), formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());
    // Column names and output symbols are parallel lists; only print aliases that differ.
    for (int i = 0; i < node.getColumnNames().size(); i++) {
        String name = node.getColumnNames().get(i);
        Symbol symbol = node.getOutputSymbols().get(i);
        if (!name.equals(symbol.toString())) {
            print(indent + 2, "%s := %s", name, symbol);
        }
    }

    return processChildren(node, indent + 1);
}

@Override
public Void visitTopN(TopNNode node, Integer indent)
{
    Iterable<String> keys = Iterables.transform(node.getOrderBy(), input -> input + " " + node.getOrderings().get(input));

    print(indent, "- TopN[%s by (%s)] => [%s]", node.getCount(), Joiner.on(", ").join(keys), formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());
    return processChildren(node, indent + 1);
}

@Override
public Void visitSort(SortNode node, Integer indent)
{
    Iterable<String> keys = Iterables.transform(node.getOrderBy(), input -> input + " " + node.getOrderings().get(input));

    print(indent, "- Sort[%s] => [%s]", Joiner.on(", ").join(keys), formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());
    return processChildren(node, indent + 1);
}

@Override
public Void visitRemoteSource(RemoteSourceNode node, Integer indent)
{
    print(indent, "- RemoteSource[%s] => [%s]", Joiner.on(',').join(node.getSourceFragmentIds()), formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());
    // Leaf node: its inputs live in other fragments, so there are no children to visit here.
    return null;
}

@Override
public Void visitUnion(UnionNode node, Integer indent)
{
    print(indent, "- Union => [%s]", formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());
    return processChildren(node, indent + 1);
}

@Override
public Void visitIntersect(IntersectNode node, Integer indent)
{
    print(indent, "- Intersect => [%s]", formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());
    return processChildren(node, indent + 1);
}

@Override
public Void visitExcept(ExceptNode node, Integer indent)
{
    print(indent, "- Except => [%s]", formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());
    return processChildren(node, indent + 1);
}

@Override
public Void visitTableWriter(TableWriterNode node, Integer indent)
{
    print(indent, "- TableWriter => [%s]", formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());
    for (int i = 0; i < node.getColumnNames().size(); i++) {
        String name = node.getColumnNames().get(i);
        Symbol symbol = node.getColumns().get(i);
        print(indent + 2, "%s := %s", name, symbol);
    }

    return processChildren(node, indent + 1);
}

@Override
public Void visitTableFinish(TableFinishNode node, Integer indent)
{
    print(indent, "- TableCommit[%s] => [%s]", node.getTarget(), formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());
    return processChildren(node, indent + 1);
}

@Override
public Void visitSample(SampleNode node, Integer indent)
{
    print(indent, "- Sample[%s: %s] => [%s]", node.getSampleType(), node.getSampleRatio(), formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());
    return processChildren(node, indent + 1);
}

@Override
public Void visitExchange(ExchangeNode node, Integer indent)
{
    // Local exchanges print their partitioning arguments; remote ones print the exchange type.
    if (node.getScope() == Scope.LOCAL) {
        print(indent, "- LocalExchange[%s%s] (%s) => %s",
                node.getPartitioningScheme().getPartitioning().getHandle(),
                node.getPartitioningScheme().isReplicateNulls() ? " - REPLICATE NULLS" : "",
                Joiner.on(", ").join(node.getPartitioningScheme().getPartitioning().getArguments()),
                formatOutputs(node.getOutputSymbols()));
    }
    else {
        print(indent, "- %sExchange[%s%s] => %s",
                UPPER_UNDERSCORE.to(CaseFormat.UPPER_CAMEL, node.getScope().toString()),
                node.getType(),
                node.getPartitioningScheme().isReplicateNulls() ? " - REPLICATE NULLS" : "",
                formatOutputs(node.getOutputSymbols()));
    }
    printStats(indent + 2, node.getId());

    return processChildren(node, indent + 1);
}

@Override
public Void visitDelete(DeleteNode node, Integer indent)
{
    print(indent, "- Delete[%s] => [%s]", node.getTarget(), formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());
    return processChildren(node, indent + 1);
}

@Override
public Void visitMetadataDelete(MetadataDeleteNode node, Integer indent)
{
    print(indent, "- MetadataDelete[%s] => [%s]", node.getTarget(), formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());
    return processChildren(node, indent + 1);
}

@Override
public Void visitEnforceSingleRow(EnforceSingleRowNode node, Integer indent)
{
    print(indent, "- Scalar => [%s]", formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());
    return processChildren(node, indent + 1);
}

@Override
public Void visitAssignUniqueId(AssignUniqueId node, Integer indent)
{
    print(indent, "- AssignUniqueId => [%s]", formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());
    return processChildren(node, indent + 1);
}

@Override
public Void visitApply(ApplyNode node, Integer indent)
{
    print(indent, "- Apply[%s] => [%s]", node.getCorrelation(), formatOutputs(node.getOutputSymbols()));
    printStats(indent + 2, node.getId());
    return processChildren(node, indent + 1);
}

// Fallback for node types without a dedicated visit method.
@Override
protected Void visitPlan(PlanNode node, Integer indent)
{
throw new UnsupportedOperationException("not yet implemented: " + node.getClass().getName());
}

// Visits every source of the node at the given indent; shared tail of most visit methods.
private Void processChildren(PlanNode node, int indent)
{
    for (PlanNode child : node.getSources()) {
        child.accept(this, indent);
    }

    return null;
}

// Renders output symbols as "name:type, name:type, ...".
private String formatOutputs(Iterable<Symbol> symbols)
{
    return Joiner.on(", ").join(Iterables.transform(symbols, input -> input + ":" + types.get(input).getDisplayName()));
}

// Prints the (simplified) domain constraint for the column, if the constraint mentions it.
private void printConstraint(int indent, ColumnHandle column, TupleDomain<ColumnHandle> constraint)
{
    checkArgument(!constraint.isNone());
    Map<ColumnHandle, Domain> domains = constraint.getDomains().get();
    if (!constraint.isAll() && domains.containsKey(column)) {
        print(indent, ":: %s", formatDomain(simplifyDomain(domains.get(column))));
    }
}

// Renders a Domain as "[part, part, ...]": ranges as "[lo, hi)"-style intervals,
// discrete values as sorted singletons, NULL when allowed, or "ALL VALUES".
private String formatDomain(Domain domain)
{
    ImmutableList.Builder<String> parts = ImmutableList.builder();

    if (domain.isNullAllowed()) {
        parts.add("NULL");
    }

    Type type = domain.getType();

    domain.getValues().getValuesProcessor().consume(
            ranges -> {
                for (Range range : ranges.getOrderedRanges()) {
                    StringBuilder builder = new StringBuilder();
                    if (range.isSingleValue()) {
                        String value = castToVarchar(type, range.getSingleValue(), PlanPrinter.this.metadata, session);
                        builder.append('[').append(value).append(']');
                    }
                    else {
                        // Bracket style encodes bound inclusivity: [ ] for EXACTLY, ( ) otherwise.
                        builder.append((range.getLow().getBound() == Marker.Bound.EXACTLY) ? '[' : '(');

                        if (range.getLow().isLowerUnbounded()) {
                            builder.append("<min>");
                        }
                        else {
                            builder.append(castToVarchar(type, range.getLow().getValue(), PlanPrinter.this.metadata, session));
                        }

                        builder.append(", ");

                        if (range.getHigh().isUpperUnbounded()) {
                            builder.append("<max>");
                        }
                        else {
                            builder.append(castToVarchar(type, range.getHigh().getValue(), PlanPrinter.this.metadata, session));
                        }

                        builder.append((range.getHigh().getBound() == Marker.Bound.EXACTLY) ? ']' : ')');
                    }
                    parts.add(builder.toString());
                }
            },
            discreteValues -> discreteValues.getValues().stream()
                    .map(value -> castToVarchar(type, value, PlanPrinter.this.metadata, session))
                    .sorted() // Sort so the values will be printed in predictable order
                    .forEach(parts::add),
            allOrNone -> {
                if (allOrNone.isAll()) {
                    parts.add("ALL VALUES");
                }
            });

    return "[" + Joiner.on(", ").join(parts.build()) + "]";
}
}

// Best-effort printable form of a value: coerce it to VARCHAR via the function registry.
// Falls back to a placeholder when no coercion exists.
private static String castToVarchar(Type type, Object value, Metadata metadata, Session session)
{
    if (value == null) {
        return "NULL";
    }

    Signature coercion = metadata.getFunctionRegistry().getCoercion(type, VARCHAR);

    try {
        Slice coerced = (Slice) new FunctionInvoker(metadata.getFunctionRegistry()).invoke(coercion, session.toConnectorSession(), value);
        return coerced.toStringUtf8();
    }
    catch (OperatorNotFoundException e) {
        return "<UNREPRESENTABLE VALUE>";
    }
    catch (Throwable throwable) {
        throw Throwables.propagate(throwable);
    }
}

/**
 * Aggregated statistics for one plan node: wall time plus optional output
 * row count and data size (absent when only non-output pipelines were observed).
 */
private static class PlanNodeStats
{
    private final PlanNodeId planNodeId;
    private final Duration wallTime;
    private final Optional<Long> outputPositions;
    private final Optional<DataSize> outputDataSize;

    public PlanNodeStats(PlanNodeId planNodeId, Duration wallTime)
    {
        this(planNodeId, wallTime, Optional.empty(), Optional.empty());
    }

    public PlanNodeStats(PlanNodeId planNodeId, Duration wallTime, long outputPositions, DataSize outputDataSize)
    {
        this(planNodeId, wallTime, Optional.of(outputPositions), Optional.of(outputDataSize));
    }

    private PlanNodeStats(PlanNodeId planNodeId, Duration wallTime, Optional<Long> outputPositions, Optional<DataSize> outputDataSize)
    {
        this.planNodeId = requireNonNull(planNodeId, "planNodeId is null");
        this.wallTime = requireNonNull(wallTime, "wallTime is null");
        this.outputPositions = outputPositions;
        this.outputDataSize = outputDataSize;
    }

    public PlanNodeId getPlanNodeId()
    {
        return planNodeId;
    }

    public Duration getWallTime()
    {
        return wallTime;
    }

    public Optional<Long> getOutputPositions()
    {
        return outputPositions;
    }

    public Optional<DataSize> getOutputDataSize()
    {
        return outputDataSize;
    }

    // Combines two stat records for the same plan node: wall times are summed;
    // output stats are summed when both sides have them, otherwise whichever is present wins.
    public static PlanNodeStats merge(PlanNodeStats planNodeStats1, PlanNodeStats planNodeStats2)
    {
        checkArgument(planNodeStats1.getPlanNodeId().equals(planNodeStats2.getPlanNodeId()), "planNodeIds do not match. %s != %s", planNodeStats1.getPlanNodeId(), planNodeStats2.getPlanNodeId());

        Optional<Long> outputPositions;
        if (planNodeStats1.getOutputPositions().isPresent() && planNodeStats2.getOutputPositions().isPresent()) {
            outputPositions = Optional.of(planNodeStats1.getOutputPositions().get() + planNodeStats2.getOutputPositions().get());
        }
        else if (planNodeStats1.getOutputPositions().isPresent()) {
            outputPositions = planNodeStats1.getOutputPositions();
        }
        else {
            outputPositions = planNodeStats2.getOutputPositions();
        }

        Optional<DataSize> outputDataSize;
        if (planNodeStats1.getOutputDataSize().isPresent() && planNodeStats2.getOutputDataSize().isPresent()) {
            outputDataSize = Optional.of(succinctBytes(planNodeStats1.getOutputDataSize().get().toBytes() + planNodeStats2.getOutputDataSize().get().toBytes()));
        }
        else if (planNodeStats1.getOutputDataSize().isPresent()) {
            outputDataSize = planNodeStats1.getOutputDataSize();
        }
        else {
            outputDataSize = planNodeStats2.getOutputDataSize();
        }

        return new PlanNodeStats(
                planNodeStats1.getPlanNodeId(),
                new Duration(planNodeStats1.getWallTime().toMillis() + planNodeStats2.getWallTime().toMillis(), MILLISECONDS),
                outputPositions,
                outputDataSize);
    }
}
}
/* * Copyright 2006 The Apache Software Foundation or its licensors, as applicable * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. */ /** * @author Olessia Salmina */ package org.apache.harmony.test.reliability.api.text; import org.apache.harmony.test.reliability.share.Test; import java.util.*; import java.text.*; /** * Goal: * 1) find resource leaks (or intermittent failures, or cache problems), * connected with use of following java.text.BreakIterator methods: * - BreakIterator.getSentenceInstance(Locale) * - BreakIterator.setText(String) * - BreakIterator.next() * * 2) check, that sentence-breaks are set correctly. * * The test does: * * 1. Reads parameters, which are: * param[0] - number of threads to be run in parallel * param[1] - number of iterations in each thread * * 2. Obtains array of sinrings with different sentances and different punctuation marks. * 3. Parse these strings and comapre the result of parsing with f standard. * 4. 
Runs System.gc()
 */
public class getSentenceInstance_check extends Test {

    // When non-zero, System.gc() is invoked at the end of every iteration
    // (the test hunts for leaks / cache problems around BreakIterator).
    public int callSystemGC = 1;

    // Iterations per thread; overridable via param[1].
    public int NUMBER_OF_ITERATIONS = 100;

    // Number of worker threads; overridable via param[0].
    public int numThreads = 10;

    // Per-thread result slot: each runner writes Status.PASS/Status.FAIL at
    // index == its id; read by the main thread only after join().
    public int[] statuses;

    public static void main(String[] args) {
        System.exit(new getSentenceInstance_check().test(args));
    }

    // Spawns the workers, waits for all of them, and passes only if every
    // worker reported Status.PASS.
    public int test(String[] params) {
        parseParams(params);

        // Start 'numThreads' threads, each repeatedly parsing the sample
        // sentences with a sentence BreakIterator.
        Thread[] t = new Thread[numThreads];
        statuses = new int[t.length];

        for (int i = 0; i < t.length; i++) {
            t[i] = new Thread(new SentenceInstanceChRunner(i, this));
            t[i].start();
            //log.add("Thread " + i + " started");
        }

        // Wait for all threads to finish before inspecting their statuses.
        for (int i = 0; i < t.length; ++i){
            try {
                t[i].join();
                //log.add("Thread " + i + ": joined() ");
            } catch (InterruptedException ie){
                return fail("interruptedException while join() of thread #" + i);
            }
        }

        // For each thread check whether operations/checks PASSed in the thread.
        for (int i = 0; i < statuses.length; ++i){
            if (statuses[i] != Status.PASS){
                return fail("thread #" + i + " returned not PASS status");
            }
            //log.add("Status of thread " + i + ": is PASS");
        }

        return pass("OK");
    }

    // param[0] = thread count, param[1] = iterations per thread (both optional).
    public void parseParams(String[] params) {
        if (params.length >= 1) {
            numThreads = Integer.parseInt(params[0]);
        }
        if (params.length >= 2) {
            NUMBER_OF_ITERATIONS = Integer.parseInt(params[1]);
        }
    }
}

/**
 * Worker: on each iteration builds the sample-sentence strings and the
 * expected break-position tables, runs a sentence BreakIterator over every
 * string, and compares the observed break positions against the table.
 * Writes its verdict into base.statuses[id].
 */
class SentenceInstanceChRunner implements Runnable {
    public int id;                            // index into base.statuses
    public getSentenceInstance_check base;    // owning test (config + status slots)

    public SentenceInstanceChRunner(int id, getSentenceInstance_check base) {
        this.id = id;
        this.base = base;
    }

    public void run() {
        int k = 0;
        while (k++ < base.NUMBER_OF_ITERATIONS) {
            Thread.yield();
            // Sample sentences exercising various punctuation: quotes,
            // ellipses, abbreviations, dates and decimal numbers.
            String[] toParse = new String[14];
            toParse[0] = "\"What\'s the matter?..\"";
            toParse[1] = "I was pretty shaken up!..";
            toParse[2] = "\"I was pretty shaken up!..\"";
            toParse[3] = "\"I was pretty shaken up,\" - he sad.";
            toParse[4] = "\"I was pretty shaken up.\" - he sad.";
            toParse[5] = "\"I was pretty shaken up...\" - he sad.";
            toParse[6] = "\"I was pretty shaken up!..\" - he sad.";
            toParse[7] = "W3C\'s easy-to-use HTML validation service, based on ... an SGML parser.";
            toParse[8] = "Published Wednesday 21st December 2006 11:58 GMT";
            toParse[9] = "I.e. is the abbreviation for a two-word Latin term, id est. " + "Translated word for word, id est means\"that is.\"";
            toParse[10] = "This specification defines the HyperText Markup Language (HTML)," + "version 4.0, the publishing language...";
            toParse[11] = "Published 2005-08-10";
            toParse[12] = "Number pi is approximately equal to 3.14.";
            toParse[13] = "Number pi is approximately equal to 3.14";

            // Expected break positions: CorrectMarksPosition[i][j] == 1 iff a
            // sentence boundary is expected at offset j of toParse[i].
            // Length is string length + 1 because the final boundary sits one
            // past the last character.
            byte[][] CorrectMarksPosition = new byte[toParse.length][];
            for(int i = 0; i < toParse.length; i++){
                CorrectMarksPosition[i] = new byte[toParse[i].length()+1];
            }
            for(int i = 0; i < CorrectMarksPosition.length; i++){
                for(int j = 0; j < CorrectMarksPosition[i].length; j++)
                    CorrectMarksPosition[i][j] = 0;
            }

            // Hand-computed expected boundaries for each sample string.
            CorrectMarksPosition[0][0] = 1;
            CorrectMarksPosition[0][22] = 1;
            CorrectMarksPosition[1][0] = 1;
            CorrectMarksPosition[1][25] = 1;
            CorrectMarksPosition[2][0] = 1;
            CorrectMarksPosition[2][27] = 1;
            CorrectMarksPosition[3][0] = 1;
            CorrectMarksPosition[3][35] = 1;
            CorrectMarksPosition[4][0] = 1;
            CorrectMarksPosition[4][35] = 1;
            CorrectMarksPosition[5][0] = 1;
            CorrectMarksPosition[5][37] = 1;
            CorrectMarksPosition[6][0] = 1;
            CorrectMarksPosition[6][28] = 1;
            CorrectMarksPosition[6][37] = 1;
            CorrectMarksPosition[7][0] = 1;
            CorrectMarksPosition[7][71] = 1;
            CorrectMarksPosition[8][0] = 1;
            CorrectMarksPosition[8][48] = 1;
            CorrectMarksPosition[9][0] = 1;
            CorrectMarksPosition[9][60] = 1;
            CorrectMarksPosition[9][108] = 1;
            CorrectMarksPosition[10][0] = 1;
            CorrectMarksPosition[10][103] = 1;
            CorrectMarksPosition[11][0] = 1;
            CorrectMarksPosition[11][20] = 1;
            CorrectMarksPosition[12][0] = 1;
            CorrectMarksPosition[12][41] = 1;
            CorrectMarksPosition[13][0] = 1;
            CorrectMarksPosition[13][40] = 1;

            for (int i = 0; i < toParse.length; i++) {
                SIterator_Marker_Ch SI = new SIterator_Marker_Ch(toParse[i]);
                Thread.yield();
                // Walk all sentence boundaries, recording each in markArr and
                // marking it visually with '^' in the markers buffer.
                int boundary = SI.sentenceIterator.first();
                SI.markArr[boundary] = 1;
                while (boundary != BreakIterator.DONE) {
                    SI.markers.setCharAt(boundary, '^');
                    boundary = SI.sentenceIterator.next();
                    if (boundary != BreakIterator.DONE){
                        SI.markArr[boundary] = 1;
                    }
                    Thread.yield();
                } //while
                //base.log.add(" " + SI.ScanStr + "\r\n"
                //        + "-----------------------" + SI.markers);
                Thread.yield();
                // Compare observed boundaries against the expected table.
                if(! Arrays.equals(SI.markArr,CorrectMarksPosition[i])){
                    base.statuses[id] = Status.FAIL;
                    base.log.add("Thread "+ id + ":" + SI.ScanStr + "\r\n"
                            + "------------------------------" + SI.markers + "\r\n"
                            + "This sentance break was made incorrect");
                }
            }//for

            if (base.callSystemGC != 0) {
                System.gc();
            }
        }//while

        // Only promote to PASS if no iteration recorded a failure.
        if(base.statuses[id] != Status.FAIL){
            base.statuses[id] = Status.PASS;
        }
    }
}

//class Status {
//    public static final int FAIL = -10;
//    public static final int PASS = 10;
//}

/**
 * Bundles a sentence BreakIterator for one string together with two parallel
 * records of break positions: markArr (byte flags, compared against the
 * expected table) and markers (a visual '^' map used in failure logs).
 */
class SIterator_Marker_Ch {
    public BreakIterator sentenceIterator;  // sentence iterator positioned on ScanStr
    public StringBuffer markers;            // same length as ScanStr + 1, '^' at breaks
    public byte[] markArr;                  // 1 at each observed break offset
    public String ScanStr;                  // the text being scanned

    public SIterator_Marker_Ch(String toScan) {
        Locale currentLocale = new Locale("en", "US");
        sentenceIterator = BreakIterator.getSentenceInstance(currentLocale);
        markers = new StringBuffer();
        sentenceIterator.setText(toScan);
        // +1: a boundary can fall one past the final character.
        markers.setLength(toScan.length() + 1);
        for (int j = 0; j < markers.length(); j++) {
            markers.setCharAt(j, ' ');
        }
        ScanStr = new String(toScan);
        markArr = new byte[ScanStr.length()+1];
        for (int j = 0; j < markArr.length; j++){
            markArr[j] = 0;
        }
    }
}
package gftgu; /* This work is licensed under the Creative Commons Attribution-NonCommercial-ShareAlike License. To view a copy of this license, visit http://creativecommons.org/licenses/by-nc-sa/1.0/ or send a letter to Creative Commons, 559 Nathan Abbott Way, Stanford, California 94305, USA. */ import org.trianacode.taskgraph.Task; import org.trianacode.taskgraph.Unit; import triana.types.Curve; /** * Computes a planetary atmosphere * * @author B F Schutz * @version $Revision: 1.5 $ * @created 19 May 2003 * @date $Date: 2003/11/19 20:22:19 $ modified by $Author: schutz $ * @todo */ public class Atmosphere extends Unit { // parameter data type definitions /* planetName is a String given by the user, which will be used as a name for the output graph. */ private String planetName; /* gAccel is the value of the acceleration of gravity at the surface of the planet (bottom of the atmosphere), in SI units (m/s^2). Given by the user in the user interface window. */ private double gAccel; /* mu is the mean molecular weight of the atmospheric gas, which is defined as the average mass, in units of the proton mass, of all the atoms, molecules, ions, electrons, etc that move freely in the atmosphere. It is given by the user in the user interface window. */ private double mu; /* p0 is the pressure of the atmosphere at its base (the surface of the planet), in pascals. Given by the user in the user interface window. */ private double p0; /* temperatureArray is a String that contains the temperature as a function of altitude. This is given by the user in the user interface window, in the form (h0,T0) (h1,T1) (h2,T2) ..., where h is in meters and T in kelvin. The program converts this into two double arrays h and T. */ private String temperatureArray; /* outputType is a String which governs what kind of data will be output. All data is output as a Curve with x-values being the altitude and y-values being one of three choices: pressure, density, or temperature. 
The user chooses one of these three in the user interface window. */ private String outputType; /* Define double arrays h, T that hold the numerical values of the altitude at which the temperature is given (h[]) and the values of the temperature at those altitudes (T[]). These arrays are re-defined and given values each time the parameter temperatureArray is set. Define the int measurements to hold the length of these arrays. */ private double[] h, T; private int measurements; /* Two constants: k is Boltzmann's constant and mp is the mass of the proton, both in SI units. */ private double k = 1.38e-23; private double mp = 1.67e-27; /* Three constants used for the computation of the temperature at the top of the atmosphere, above the highest altitude where the temperature has been measured. - reachedTop is a boolean that is set to false each time process() is called, and is then used by the method getTemp(). See the comments in getTemp() below for details. - power and beta are the constants in the formula for the temperature in the upper region, T = beta * p^(power), where p is the pressure at that height and where "^" indicates raising to a power. We fix the value of power here, but the value of beta must be computed during the calculation. See the comments in getTemp() below for details. */ private boolean reachedTop = false; private double power = 0.5; private double beta; /* * Called whenever there is data for the unit to process */ public void process() throws Exception { /* Define variables needed for the calculation: - q is a combination of constants in the ideal gas law, used often. - rho0 is the density at the bottom of the atmosphere. - scale is the scale-height of the atmosphere, roughly the distance over which the pressure will fall by a factor of 2. - dh is the size of the step in altitude that the program will make. 
- arrays alt (height), p (pressure), rho (density), and Temp (temperature) hold the values of the associated physical quantities at the successive altitude steps. The arrays are initially given 1000 elements. The choice of altitude step dh is designed to ensure that the top of the atmosphere (where p = 0) is reached in fewer than 1000 steps. Then give the values of the first elements of the arrays. - lastStep is an int that will hold the value of the array index associated with the top of the atmosphere. Set it to zero and use it as a test of whether the top has been reached (see below). - j is a loop counter. */ double q = mp * mu / k; double rho0 = p0 * q / T[0]; //use ideal gas law to get density double scale = p0 / gAccel / rho0; double dh = scale / 200.; double[] alt = new double[1000]; double[] p = new double[1000]; double[] rho = new double[1000]; double[] Temp = new double[1000]; alt[0] = 0; p[0] = p0; Temp[0] = T[0]; rho[0] = rho0; int lastStep = 0; int j; /* Set reachedTop to false at the beginning of the computation. */ reachedTop = false; /* Do the calculation as long as the top has not been reached. */ while (lastStep == 0) { /* Do calculation step by step, using the equation of hydrostatic equilibrium (in the second line of the loop). */ for (j = 1; j < 1000; j++) { alt[j] = alt[j - 1] + dh; p[j] = p[j - 1] - gAccel * rho[j - 1] * dh; if (p[j] < 0) { lastStep = j; //stop when the pressure goes negative break; } Temp[j] = getTemp(alt[j], p[j]); rho[j] = p[j] * q / Temp[j]; //ideal gas law } /* If we reach this point and lastStep is still zero, then we have used 1000 steps and not yet reached the top. We must start the loop again with a larger step dh so that we can reach the top in 1000 steps. The next line of the code resets the value of dh, and then when we reach the end-bracket of the "while"-loop the test in the loop will evaluate to true and the "for"-loop will be done again with this step-size. 
If we reach this point and lastStep is no longer zero, then we have finished the calculation. The next step (changing dh) will be executed but we will leave the "while"-loop and so the new value of dh will not be used. */ dh *= 2.; } /* Now prepare output arrays depending on what output data type has been selected by the user. The arrays are only long enough to contain the number of points to the top of the atmosphere. Since the value of the variable lastStep is the step where the pressure first went negative, if we create arrays of length lastStep then this value will be excluded, since such arrays start at index 0 and finish at index lastStep-1. We attach to each output Curve a title (which will appear on the graph legend), and we attach to the first output Curve the axis labels. */ double[] finalH = new double[lastStep]; Curve outData = null; String unitLabel = ""; if (outputType.equals("Pressure")) { double[] finalP = new double[lastStep]; for (j = 0; j < lastStep; j++) { finalH[j] = alt[j]; finalP[j] = p[j]; } outData = new Curve(finalH, finalP); unitLabel = " (Pa)"; } else if (outputType.equals("Density")) { double[] finalRho = new double[lastStep]; for (j = 0; j < lastStep; j++) { finalH[j] = alt[j]; finalRho[j] = rho[j]; } outData = new Curve(finalH, finalRho); unitLabel = " (kg/m^3)"; } else if (outputType.equals("Temperature")) { double[] finalT = new double[lastStep]; for (j = 0; j < lastStep; j++) { finalH[j] = alt[j]; finalT[j] = Temp[j]; } outData = new Curve(finalH, finalT); unitLabel = " (K)"; } outData.setTitle(outputType + " for the atmosphere of " + planetName); outData.setIndependentLabels(0, "altitude (m)"); outData.setDependentLabels(0, outputType + unitLabel); output(outData); } /* Method to compute the temperature at any height from the given measurements at specific altitudes. There are three cases: (1) The height is below the first measured altitude. In this case we simply use the temperature at the first measured altitude, i.e. 
we take the temperature to be constant from the ground up to the first measured value. (2) The height is between two measured altitudes. This is the normal case over most of the atmosphere. We find the temperature between two measurements by linear interpolation, which means that we draw a straight line on a graph of temperature versus altitude between the two measurements, and we use the temperature on the line at the actual required height. (3) The height is above the highest measured altitude. Here we cannot make the simple constant-temperature assumption of case (1), since an isothermal atmosphere goes on forever. Instead, we assume a temperature law of the form T = beta*pressure^(power), where beta and power are constants. We set the value of power = 0.5 in the initialization part of the code at the very beginning, but we do not know ahead of time what value of beta to use. This is determined by insisting that the temperature law join continuously onto the straight line between the last two measured points, and to do that we need to know the pressure at the height of the last measurement. We only know this as we are moving through the calculation, so the value of beta can only be computed at the step where we first reach the highest measured point. To keep track of when this happens, we use the boolean variable reachedTop, which is false at first. If the height is greater than or equal to that of the highest measurement, we test the value of reachedTop. If it is false, as it will be the first time we reach this altitude, then we compute beta from the local value of the pressure, and we then set reachedTop equal to true. This ensures that the next and subsequent times we reach past the altitude of the highest measurement, we will not re-compute beta. In both cases we then compute the temperature from our pressure law. 
*/ private double getTemp(double height, double pressure) { if (height <= h[0]) return T[0]; // return if height low if (height >= h[measurements - 1]) { // do if height high if (!reachedTop) { beta = T[measurements - 1] / Math.pow(pressure, power); reachedTop = true; } return beta * Math.pow(pressure, power); //return when high } int j = 1; // only reach this step if between measured heights while (height > h[j]) j++; return T[j - 1] + (T[j] - T[j - 1]) / (h[j] - h[j - 1]) * (height - h[j - 1]); } /** * Called when the unit is created. Initialises the unit's properties and * parameters. */ public void init() { super.init(); // Initialise node properties setDefaultInputNodes(0); setMinimumInputNodes(0); setMaximumInputNodes(0); setDefaultOutputNodes(1); setMinimumOutputNodes(1); setMaximumOutputNodes(Integer.MAX_VALUE); // Initialise parameter update policy setParameterUpdatePolicy(Task.IMMEDIATE_UPDATE); // Initialise pop-up description and help file location setPopUpDescription("Computes a planetary atmosphere"); setHelpFileLocation("Atmosphere.html"); // Define initial value and type of parameters defineParameter("planetName", "Earth", USER_ACCESSIBLE); defineParameter("gAccel", "9.8", USER_ACCESSIBLE); defineParameter("mu", "29.0", USER_ACCESSIBLE); defineParameter("p0", "1.01e5", USER_ACCESSIBLE); defineParameter("temperatureArray", "(0,288) (2E3,275) (4E3,262) (6E3,249) (8E3,236) (1E4,223) (2E4,217) (4E4,250) (6E4,256) (8E4,181) (1E5,210) (1.4E5,714) (1.8E5,1156)", USER_ACCESSIBLE); // createTemperatures(); defineParameter("outputType", "Pressure", USER_ACCESSIBLE); // Initialise GUI builder interface String guilines = ""; guilines += "Give the name of the planet $title planetName TextField Earth\n"; guilines += "Give the surface acceleration of gravity (meters per second per second) $title gAccel TextField 9.8\n"; guilines += "Give the mean molecular weight of the atmospheric gas $title mu TextField 29.0\n"; guilines += "Give the surface pressure in pascals 
$title p0 TextField 1.01e5\n"; guilines += "Give the temperature function in the form (h0,T0) (h1,T1), ... $title temperatureArray TextField (0,288) (2E3,275) (4E3,262) (6E3,249) (8E3,236) (1E4,223) (2E4,217) (4E4,250) (6E4,256) (8E4,181) (1E5,210) (1.4E5,714) (1.8E5,1156)\n"; guilines += "Select the type of output data $title outputType Choice [Pressure] [Density] [Temperature]\n"; setGUIBuilderV2Info(guilines); } /** * Called when the unit is reset. */ public void reset() { // Set unit parameters to the values specified by the task definition planetName = (String) getParameter("planetName"); gAccel = new Double((String) getParameter("gAccel")).doubleValue(); mu = new Double((String) getParameter("mu")).doubleValue(); p0 = new Double((String) getParameter("p0")).doubleValue(); temperatureArray = (String) getParameter("temperatureArray"); outputType = (String) getParameter("outputType"); } /** * Called when the unit is disposed of. */ public void dispose() { // Insert code to clean-up Atmosphere (e.g. close open files) } /** * Called a parameters is updated (e.g. by the GUI) */ public void parameterUpdate(String paramname, Object value) { // Code to update local variables if (paramname.equals("planetName")) planetName = (String) value; if (paramname.equals("gAccel")) gAccel = new Double((String) value).doubleValue(); if (paramname.equals("mu")) mu = new Double((String) value).doubleValue(); if (paramname.equals("p0")) p0 = new Double((String) value).doubleValue(); if (paramname.equals("temperatureArray")) { temperatureArray = (String) value; createTemperatures(); } if (paramname.equals("outputType")) outputType = (String) value; } /* In the following code we extract from the String temperatureArray the values of the altitude and temperature and place them in the arrays h[] and T[]. 
The job is simplified by using the Java String utility method split(), which divides a String into several Strings by splitting it at the given argument, and then puts the new Strings into a String array. Each of these pieces contains both h and T, which the remaining code separates from one another and converts their String representations into genuine double values using the Double utility method parseDouble(). The method split() uses a device called a "regular expression" for its argument, which is a powerful pattern-forming language. To explain how it works is beyond our scope here. */ private void createTemperatures() { temperatureArray = "(0,288) (2E3,275) (4E3,262) (6E3,249) (8E3,236) (1E4,223) (2E4,217) (4E4,250) (6E4,256) (8E4,181) (1E5,210) (1.4E5,714) (1.8E5,1156)"; String[] pairs = temperatureArray.split("\\).*?\\("); //divide at each ") (" measurements = pairs.length; pairs[0] = pairs[0].substring(1); // remove "(" from first substring int ln = pairs[measurements - 1].length(); pairs[measurements - 1] = pairs[measurements - 1].substring(0, ln - 1); //remove ")" from last substring h = new double[measurements]; T = new double[measurements]; String tmp; int commaLocation; for (int j = 0; j < measurements; j++) { commaLocation = pairs[j].indexOf(','); tmp = pairs[j].substring(0, commaLocation); h[j] = Double.parseDouble(tmp); tmp = pairs[j].substring(commaLocation + 1); T[j] = Double.parseDouble(tmp); } } /** * @return an array of the input types for Atmosphere */ public String[] getInputTypes() { return new String[]{}; } /** * @return an array of the output types for Atmosphere */ public String[] getOutputTypes() { return new String[]{"triana.types.Curve"}; } }
/* * Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.tools.lint.checks; import static com.android.SdkConstants.ANDROID_URI; import static com.android.SdkConstants.ATTR_CLASS; import static com.android.SdkConstants.ATTR_ID; import static com.android.SdkConstants.DOT_XML; import static com.android.SdkConstants.ID_PREFIX; import static com.android.SdkConstants.NEW_ID_PREFIX; import static com.android.SdkConstants.VIEW_TAG; import com.android.annotations.NonNull; import com.android.annotations.Nullable; import com.android.ide.common.res2.AbstractResourceRepository; import com.android.ide.common.res2.ResourceFile; import com.android.ide.common.res2.ResourceItem; import com.android.resources.ResourceFolderType; import com.android.resources.ResourceType; import com.android.tools.lint.client.api.LintClient; import com.android.tools.lint.detector.api.Category; import com.android.tools.lint.detector.api.Context; import com.android.tools.lint.detector.api.Detector; import com.android.tools.lint.detector.api.Implementation; import com.android.tools.lint.detector.api.Issue; import com.android.tools.lint.detector.api.JavaContext; import com.android.tools.lint.detector.api.LintUtils; import com.android.tools.lint.detector.api.ResourceXmlDetector; import com.android.tools.lint.detector.api.Scope; import com.android.tools.lint.detector.api.Severity; import com.android.tools.lint.detector.api.Speed; import 
com.android.tools.lint.detector.api.XmlContext; import com.android.utils.XmlUtils; import com.google.common.base.Joiner; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Multimap; import com.google.common.collect.Sets; import org.w3c.dom.Attr; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import java.io.File; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import lombok.ast.AstVisitor; import lombok.ast.Cast; import lombok.ast.Expression; import lombok.ast.MethodInvocation; import lombok.ast.Select; import lombok.ast.StrictListAccessor; /** Detector for finding inconsistent usage of views and casts * <p> * TODO: Check findFragmentById * <pre> * ((ItemListFragment) getSupportFragmentManager() * .findFragmentById(R.id.item_list)) * .setActivateOnItemClick(true); * </pre> * Here we should check the {@code <fragment>} tag pointed to by the id, and * check its name or class attributes to make sure the cast is compatible with * the named fragment class! 
 */
public class ViewTypeDetector extends ResourceXmlDetector implements Detector.JavaScanner {
    /** Mismatched view types */
    @SuppressWarnings("unchecked")
    public static final Issue ISSUE = Issue.create(
            "WrongViewCast", //$NON-NLS-1$
            "Mismatched view type",
            "Keeps track of the view types associated with ids and if it finds a usage of " +
            "the id in the Java code it ensures that it is treated as the same type.",
            Category.CORRECTNESS,
            9,
            Severity.FATAL,
            new Implementation(
                    ViewTypeDetector.class,
                    EnumSet.of(Scope.ALL_RESOURCE_FILES, Scope.ALL_JAVA_FILES),
                    Scope.JAVA_FILE_SCOPE));

    /** Flag used to do no work if we're running in incremental mode in a .java file without
     * a client supporting project resources */
    private Boolean mIgnore = null;

    // Maps layout id -> tag name(s) seen for that id. Value is a String when
    // only one tag has been seen, and upgraded to a List<String> when the same
    // id appears with multiple tags across layouts.
    private final Map<String, Object> mIdToViewTag = new HashMap<String, Object>(50);

    @NonNull
    @Override
    public Speed getSpeed() {
        return Speed.SLOW;
    }

    // Only layout resource files declare view ids relevant to this check.
    @Override
    public boolean appliesTo(@NonNull ResourceFolderType folderType) {
        return folderType == ResourceFolderType.LAYOUT;
    }

    @Override
    public Collection<String> getApplicableAttributes() {
        return Collections.singletonList(ATTR_ID);
    }

    // Records the view tag for each @id/@+id attribute encountered, merging
    // multiple tags for the same id into a list (see mIdToViewTag).
    @Override
    public void visitAttribute(@NonNull XmlContext context, @NonNull Attr attribute) {
        String view = attribute.getOwnerElement().getTagName();
        String value = attribute.getValue();
        String id = null;
        if (value.startsWith(ID_PREFIX)) {
            id = value.substring(ID_PREFIX.length());
        } else if (value.startsWith(NEW_ID_PREFIX)) {
            id = value.substring(NEW_ID_PREFIX.length());
        } // else: could be @android id

        if (id != null) {
            // <view class="..."> declares its real type in the class attribute.
            if (view.equals(VIEW_TAG)) {
                view = attribute.getOwnerElement().getAttribute(ATTR_CLASS);
            }

            Object existing = mIdToViewTag.get(id);
            if (existing == null) {
                mIdToViewTag.put(id, view);
            } else if (existing instanceof String) {
                String existingString = (String) existing;
                if (!existingString.equals(view)) {
                    // Convert to list
                    List<String> list = new ArrayList<String>(2);
                    list.add((String) existing);
                    list.add(view);
                    mIdToViewTag.put(id, list);
                }
            } else if (existing instanceof List<?>) {
                @SuppressWarnings("unchecked")
                List<String> list = (List<String>) existing;
                if (!list.contains(view)) {
                    list.add(view);
                }
            }
        }
    }

    // ---- Implements Detector.JavaScanner ----

    @Override
    public List<String> getApplicableMethodNames() {
        return Collections.singletonList("findViewById"); //$NON-NLS-1$
    }

    // For each `(CastType) findViewById(R.id.x)` call, looks up the tag(s)
    // declared for id x (via the project resource repository when available,
    // otherwise via mIdToViewTag collected during the XML pass) and reports
    // when the cast target is incompatible with every declared tag.
    @Override
    public void visitMethod(@NonNull JavaContext context, @Nullable AstVisitor visitor,
            @NonNull MethodInvocation node) {
        LintClient client = context.getClient();
        if (mIgnore == Boolean.TRUE) {
            return;
        } else if (mIgnore == null) {
            // In incremental (single-file) mode without project-resource
            // support we have no id->tag data, so skip all work.
            mIgnore = !context.getScope().contains(Scope.ALL_RESOURCE_FILES)
                    && !client.supportsProjectResources();
            if (mIgnore) {
                return;
            }
        }
        assert node.astName().astValue().equals("findViewById");
        if (node.getParent() instanceof Cast) {
            Cast cast = (Cast) node.getParent();
            String castType = cast.astTypeReference().getTypeName();
            StrictListAccessor<Expression, MethodInvocation> args = node.astArguments();
            if (args.size() == 1) {
                Expression first = args.first();
                // TODO: Do flow analysis as in the StringFormatDetector in order
                // to handle variable references too
                if (first instanceof Select) {
                    String resource = first.toString();
                    if (resource.startsWith("R.id.")) { //$NON-NLS-1$
                        String id = ((Select) first).astIdentifier().astValue();

                        if (client.supportsProjectResources()) {
                            AbstractResourceRepository resources = client
                                    .getProjectResources(context.getMainProject(), true);
                            if (resources == null) {
                                return;
                            }

                            List<ResourceItem> items = resources.getResourceItem(ResourceType.ID, id);
                            if (items != null && !items.isEmpty()) {
                                // Union of all tags declared for this id across layouts.
                                Set<String> compatible = Sets.newHashSet();
                                for (ResourceItem item : items) {
                                    Collection<String> tags = getViewTags(context, item);
                                    if (tags != null) {
                                        compatible.addAll(tags);
                                    }
                                }
                                if (!compatible.isEmpty()) {
                                    ArrayList<String> layoutTypes = Lists.newArrayList(compatible);
                                    checkCompatible(context, castType, null, layoutTypes, cast);
                                }
                            }
                        } else {
                            // Fall back to the id->tag map built in the XML pass.
                            Object types = mIdToViewTag.get(id);
                            if (types instanceof String) {
                                String layoutType = (String) types;
                                checkCompatible(context, castType, layoutType, null, cast);
                            } else if (types instanceof List<?>) {
                                @SuppressWarnings("unchecked")
                                List<String> layoutTypes = (List<String>) types;
                                checkCompatible(context, castType, null, layoutTypes, cast);
                            }
                        }
                    }
                }
            }
        }
    }

    // Returns the tag names declared for this resource item's id within its
    // source file, or null if the file can't be parsed.
    @Nullable
    protected Collection<String> getViewTags(
            @NonNull Context context,
            @NonNull ResourceItem item) {
        // Check view tag in this file. Can I do it cheaply? Try with
        // an XML pull parser. Or DOM if we have multiple resources looked
        // up?
        ResourceFile source = item.getSource();
        if (source != null) {
            File file = source.getFile();
            Multimap<String,String> map = getIdToTagsIn(context, file);
            if (map != null) {
                return map.get(item.getName());
            }
        }

        return null;
    }

    // Per-file cache of id -> tag multimaps, built lazily by getIdToTagsIn().
    private Map<File, Multimap<String, String>> mFileIdMap;

    // Parses (and caches) the id->tag multimap for a single layout XML file.
    // Returns null for non-XML paths; returns an empty map if parsing failed.
    @Nullable
    private Multimap<String, String> getIdToTagsIn(@NonNull Context context, @NonNull File file) {
        if (!file.getPath().endsWith(DOT_XML)) {
            return null;
        }
        if (mFileIdMap == null) {
            mFileIdMap = Maps.newHashMap();
        }
        Multimap<String, String> map = mFileIdMap.get(file);
        if (map == null) {
            map = ArrayListMultimap.create();
            mFileIdMap.put(file, map);

            String xml = context.getClient().readFile(file);
            // TODO: Use pull parser instead for better performance!
            Document document = XmlUtils.parseDocumentSilently(xml, true);
            if (document != null && document.getDocumentElement() != null) {
                addViewTags(map, document.getDocumentElement());
            }
        }

        return map;
    }

    // Recursively records (id, tag) pairs for every element in the subtree.
    private static void addViewTags(Multimap<String, String> map, Element element) {
        String id = element.getAttributeNS(ANDROID_URI, ATTR_ID);
        if (id != null && !id.isEmpty()) {
            id = LintUtils.stripIdPrefix(id);
            if (!map.containsEntry(id, element.getTagName())) {
                map.put(id, element.getTagName());
            }
        }

        NodeList children = element.getChildNodes();
        for (int i = 0, n = children.getLength(); i < n; i++) {
            Node child = children.item(i);
            if (child.getNodeType() == Node.ELEMENT_NODE) {
                addViewTags(map, (Element) child);
            }
        }
    }

    /** Check if the view and cast type are compatible.
     * Exactly one of layoutType (single tag) or layoutTypes (multiple tags)
     * is non-null; with multiple tags, any one compatible tag suffices. */
    private static void checkCompatible(JavaContext context, String castType, String layoutType,
            List<String> layoutTypes, Cast node) {
        assert layoutType == null || layoutTypes == null; // Should only specify one or the other
        boolean compatible = true;
        if (layoutType != null) {
            if (!layoutType.equals(castType)
                    && !context.getSdkInfo().isSubViewOf(castType, layoutType)) {
                compatible = false;
            }
        } else {
            compatible = false;
            assert layoutTypes != null;
            for (String type : layoutTypes) {
                if (type.equals(castType)
                        || context.getSdkInfo().isSubViewOf(castType, type)) {
                    compatible = true;
                    break;
                }
            }
        }

        if (!compatible) {
            if (layoutType == null) {
                layoutType = Joiner.on("|").join(layoutTypes);
            }
            String message = String.format(
                    "Unexpected cast to `%1$s`: layout tag was `%2$s`",
                    castType, layoutType);
            context.report(ISSUE, node, context.getLocation(node), message);
        }
    }
}
/** * Copyright (c) 2004-2011 Wang Jinbao(Julian Wong), http://www.ralasafe.com * Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php */ /* * This class was automatically generated with * <a href="http://www.castor.org">Castor 1.2</a>, using an XML * Schema. * $Id: QueryTypeSequenceDescriptor.java,v 1.1 2010/07/09 08:17:21 back Exp $ */ package org.ralasafe.db.sql.xml.descriptors; //---------------------------------/ //- Imported classes and packages -/ //---------------------------------/ import org.ralasafe.db.sql.xml.QueryTypeSequence; /** * Class QueryTypeSequenceDescriptor. * * @version $Revision: 1.1 $ $Date: 2010/07/09 08:17:21 $ */ public class QueryTypeSequenceDescriptor extends org.exolab.castor.xml.util.XMLClassDescriptorImpl { //--------------------------/ //- Class/Member Variables -/ //--------------------------/ /** * Field _elementDefinition. */ private boolean _elementDefinition; /** * Field _nsPrefix. */ private java.lang.String _nsPrefix; /** * Field _nsURI. */ private java.lang.String _nsURI; /** * Field _xmlName. */ private java.lang.String _xmlName; /** * Field _identity. 
*/ private org.exolab.castor.xml.XMLFieldDescriptor _identity; //----------------/ //- Constructors -/ //----------------/ public QueryTypeSequenceDescriptor() { super(); _elementDefinition = false; //-- set grouping compositor setCompositorAsSequence(); org.exolab.castor.xml.util.XMLFieldDescriptorImpl desc = null; org.exolab.castor.mapping.FieldHandler handler = null; org.exolab.castor.xml.FieldValidator fieldValidator = null; //-- initialize attribute descriptors //-- initialize element descriptors //-- _select desc = new org.exolab.castor.xml.util.XMLFieldDescriptorImpl(org.ralasafe.db.sql.xml.Select.class, "_select", "select", org.exolab.castor.xml.NodeType.Element); handler = new org.exolab.castor.xml.XMLFieldHandler() { public java.lang.Object getValue( java.lang.Object object ) throws IllegalStateException { QueryTypeSequence target = (QueryTypeSequence) object; return target.getSelect(); } public void setValue( java.lang.Object object, java.lang.Object value) throws IllegalStateException, IllegalArgumentException { try { QueryTypeSequence target = (QueryTypeSequence) object; target.setSelect( (org.ralasafe.db.sql.xml.Select) value); } catch (java.lang.Exception ex) { throw new IllegalStateException(ex.toString()); } } public java.lang.Object newInstance(java.lang.Object parent) { return new org.ralasafe.db.sql.xml.Select(); } }; desc.setSchemaType("org.ralasafe.db.sql.xml.Select"); desc.setHandler(handler); desc.setMultivalued(false); addFieldDescriptor(desc); addSequenceElement(desc); //-- validation code for: _select fieldValidator = new org.exolab.castor.xml.FieldValidator(); { //-- local scope } desc.setValidator(fieldValidator); //-- _from desc = new org.exolab.castor.xml.util.XMLFieldDescriptorImpl(org.ralasafe.db.sql.xml.From.class, "_from", "from", org.exolab.castor.xml.NodeType.Element); handler = new org.exolab.castor.xml.XMLFieldHandler() { public java.lang.Object getValue( java.lang.Object object ) throws IllegalStateException { 
QueryTypeSequence target = (QueryTypeSequence) object; return target.getFrom(); } public void setValue( java.lang.Object object, java.lang.Object value) throws IllegalStateException, IllegalArgumentException { try { QueryTypeSequence target = (QueryTypeSequence) object; target.setFrom( (org.ralasafe.db.sql.xml.From) value); } catch (java.lang.Exception ex) { throw new IllegalStateException(ex.toString()); } } public java.lang.Object newInstance(java.lang.Object parent) { return new org.ralasafe.db.sql.xml.From(); } }; desc.setSchemaType("org.ralasafe.db.sql.xml.From"); desc.setHandler(handler); desc.setMultivalued(false); addFieldDescriptor(desc); addSequenceElement(desc); //-- validation code for: _from fieldValidator = new org.exolab.castor.xml.FieldValidator(); { //-- local scope } desc.setValidator(fieldValidator); //-- _where desc = new org.exolab.castor.xml.util.XMLFieldDescriptorImpl(org.ralasafe.db.sql.xml.Where.class, "_where", "where", org.exolab.castor.xml.NodeType.Element); handler = new org.exolab.castor.xml.XMLFieldHandler() { public java.lang.Object getValue( java.lang.Object object ) throws IllegalStateException { QueryTypeSequence target = (QueryTypeSequence) object; return target.getWhere(); } public void setValue( java.lang.Object object, java.lang.Object value) throws IllegalStateException, IllegalArgumentException { try { QueryTypeSequence target = (QueryTypeSequence) object; target.setWhere( (org.ralasafe.db.sql.xml.Where) value); } catch (java.lang.Exception ex) { throw new IllegalStateException(ex.toString()); } } public java.lang.Object newInstance(java.lang.Object parent) { return new org.ralasafe.db.sql.xml.Where(); } }; desc.setSchemaType("org.ralasafe.db.sql.xml.Where"); desc.setHandler(handler); desc.setMultivalued(false); addFieldDescriptor(desc); addSequenceElement(desc); //-- validation code for: _where fieldValidator = new org.exolab.castor.xml.FieldValidator(); { //-- local scope } desc.setValidator(fieldValidator); //-- _groupBy 
desc = new org.exolab.castor.xml.util.XMLFieldDescriptorImpl(org.ralasafe.db.sql.xml.GroupBy.class, "_groupBy", "groupBy", org.exolab.castor.xml.NodeType.Element); handler = new org.exolab.castor.xml.XMLFieldHandler() { public java.lang.Object getValue( java.lang.Object object ) throws IllegalStateException { QueryTypeSequence target = (QueryTypeSequence) object; return target.getGroupBy(); } public void setValue( java.lang.Object object, java.lang.Object value) throws IllegalStateException, IllegalArgumentException { try { QueryTypeSequence target = (QueryTypeSequence) object; target.setGroupBy( (org.ralasafe.db.sql.xml.GroupBy) value); } catch (java.lang.Exception ex) { throw new IllegalStateException(ex.toString()); } } public java.lang.Object newInstance(java.lang.Object parent) { return new org.ralasafe.db.sql.xml.GroupBy(); } }; desc.setSchemaType("org.ralasafe.db.sql.xml.GroupBy"); desc.setHandler(handler); desc.setMultivalued(false); addFieldDescriptor(desc); addSequenceElement(desc); //-- validation code for: _groupBy fieldValidator = new org.exolab.castor.xml.FieldValidator(); { //-- local scope } desc.setValidator(fieldValidator); //-- _orderBy desc = new org.exolab.castor.xml.util.XMLFieldDescriptorImpl(org.ralasafe.db.sql.xml.OrderBy.class, "_orderBy", "orderBy", org.exolab.castor.xml.NodeType.Element); handler = new org.exolab.castor.xml.XMLFieldHandler() { public java.lang.Object getValue( java.lang.Object object ) throws IllegalStateException { QueryTypeSequence target = (QueryTypeSequence) object; return target.getOrderBy(); } public void setValue( java.lang.Object object, java.lang.Object value) throws IllegalStateException, IllegalArgumentException { try { QueryTypeSequence target = (QueryTypeSequence) object; target.setOrderBy( (org.ralasafe.db.sql.xml.OrderBy) value); } catch (java.lang.Exception ex) { throw new IllegalStateException(ex.toString()); } } public java.lang.Object newInstance(java.lang.Object parent) { return new 
org.ralasafe.db.sql.xml.OrderBy(); } }; desc.setSchemaType("org.ralasafe.db.sql.xml.OrderBy"); desc.setHandler(handler); desc.setMultivalued(false); addFieldDescriptor(desc); addSequenceElement(desc); //-- validation code for: _orderBy fieldValidator = new org.exolab.castor.xml.FieldValidator(); { //-- local scope } desc.setValidator(fieldValidator); } //-----------/ //- Methods -/ //-----------/ /** * Method getAccessMode. * * @return the access mode specified for this class. */ public org.exolab.castor.mapping.AccessMode getAccessMode( ) { return null; } /** * Method getIdentity. * * @return the identity field, null if this class has no * identity. */ public org.exolab.castor.mapping.FieldDescriptor getIdentity( ) { return _identity; } /** * Method getJavaClass. * * @return the Java class represented by this descriptor. */ public java.lang.Class getJavaClass( ) { return org.ralasafe.db.sql.xml.QueryTypeSequence.class; } /** * Method getNameSpacePrefix. * * @return the namespace prefix to use when marshaling as XML. */ public java.lang.String getNameSpacePrefix( ) { return _nsPrefix; } /** * Method getNameSpaceURI. * * @return the namespace URI used when marshaling and * unmarshaling as XML. */ public java.lang.String getNameSpaceURI( ) { return _nsURI; } /** * Method getValidator. * * @return a specific validator for the class described by this * ClassDescriptor. */ public org.exolab.castor.xml.TypeValidator getValidator( ) { return this; } /** * Method getXMLName. * * @return the XML Name for the Class being described. */ public java.lang.String getXMLName( ) { return _xmlName; } /** * Method isElementDefinition. * * @return true if XML schema definition of this Class is that * of a global * element or element with anonymous type definition. */ public boolean isElementDefinition( ) { return _elementDefinition; } }
package client.controllers;

import common.data.Budget;
import common.data.Payment;
import common.data.Settlement;
import common.data.User;
import client.BudgetExporter;
import client.view.Alerts;
import client.windows.AddParticipantsWindow;
import client.windows.AddPaymentWindow;
import client.windows.ParticipantDetailsWindow;
import client.windows.SettleWindow;
import client.windows.SettlementsHistoryWindow;
import client.windows.UpdatePaymentWindow;
import javafx.beans.property.ReadOnlyObjectWrapper;
import javafx.beans.value.ObservableValue;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.collections.transformation.FilteredList;
import javafx.fxml.FXML;
import javafx.fxml.Initializable;
import javafx.geometry.Pos;
import javafx.scene.control.Button;
import javafx.scene.control.ButtonType;
import javafx.scene.control.CheckBox;
import javafx.scene.control.Label;
import javafx.scene.control.TableCell;
import javafx.scene.control.TableColumn;
import javafx.scene.control.TableColumn.CellDataFeatures;
import javafx.scene.control.TableRow;
import javafx.scene.control.TableView;
import javafx.scene.control.ToggleButton;
import javafx.scene.control.cell.PropertyValueFactory;
import javafx.scene.input.MouseEvent;

import java.math.BigDecimal;
import java.net.URL;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.ResourceBundle;
import java.util.stream.Collectors;

/**
 * Controller of the single-budget window: shows the budget's participants and
 * unsettled payments and wires the export / delete / settle / history actions.
 *
 * FIX (review): CheckBoxTableCell previously rendered every confirm box as
 * checked regardless of the payment's accepted flag, and its action handler
 * toggled the model instead of writing the box's own state — view and model
 * could silently drift apart. The cell now mirrors {@code Payment.isAccepted()}
 * and writes {@code checkBox.isSelected()} back.
 */
public class BudgetController extends BasicController implements Initializable, SelfUpdating {
    @FXML
    private Label labelBudgetName, labelBudgetDescription;
    @FXML
    private Label labelSpentMoney, labelCurrentUserBalance;
    @FXML
    private Button buttonBudgetClose, buttonBudgetDelete, buttonBudgetExport;
    // NOTE(review): buttonBankTransfers is injected but no handler is attached
    // anywhere in this class — presumably wired elsewhere or unfinished; verify.
    @FXML
    private Button buttonAddPayment, buttonAddParticipant, buttonSettle, buttonHistory,
            buttonBankTransfers;
    @FXML
    private ToggleButton buttonPaymentsPaid, buttonPaymentsOwed, buttonPaymentsOther;

    @FXML
    private TableView<Payment> tableUnsettledPayments;
    @FXML
    private TableColumn<Payment, String> columnUnsettledPayer, columnUnsettledDescription;
    @FXML
    private TableColumn<Payment, Integer> columnUnsettledAmount;
    @FXML
    private TableColumn<Payment, Boolean> columnConfirm;

    @FXML
    private TableView<User> tableParticipants;
    @FXML
    private TableColumn<User, String> columnUserName, columnUserMail;
    @FXML
    private TableColumn<User, BigDecimal> columnUserBalance;

    // Backing lists for the two tables; refreshed from the server via dbHandler.
    private final ObservableList<User> participantsList = FXCollections.observableArrayList();
    private final ObservableList<Payment> unsettledPayments = FXCollections.observableArrayList();

    private Budget budget;
    // Sum of the payments currently visible in the filtered payments view.
    private double spentMoneySum = 0;

    /**
     * Binds this controller to a budget and disables owner-only actions for
     * non-owners. Must be called before the window is shown.
     */
    public void setBudget(Budget budget) {
        this.budget = budget;
        labelBudgetName.setText(budget.getName());
        labelBudgetDescription.setText(budget.getDescription());
        if (!isCurrentUserBudgetOwner())
            disableOnlyBudgetOwnerButtons();
    }

    /** Adding participants, settling and deleting are reserved to the budget owner. */
    private void disableOnlyBudgetOwnerButtons() {
        buttonAddParticipant.setDisable(true);
        buttonSettle.setDisable(true);
        buttonBudgetDelete.setDisable(true);
    }

    @Override
    public void initialize(URL location, ResourceBundle resources) {
        initButtons();
        initTables();
    }

    private void initButtons() {
        buttonBudgetClose.setOnAction(event -> currentStage.close());
        buttonBudgetExport.setOnAction(event -> exportBudget());
        buttonBudgetDelete.setOnAction(event -> deleteBudget());
        buttonAddParticipant.setOnAction(event -> displayAddParticipantsWindow());
        buttonAddPayment.setOnAction(event -> displayAddPaymentWindow());
        buttonSettle.setOnAction(event -> settlePayments());
        buttonHistory.setOnAction(event -> displaySettlementsHistoryWindow());
    }

    /** Fetches settled payments and settlements, then delegates to BudgetExporter. */
    private void exportBudget() {
        try {
            List<Payment> settledPayments = dbHandler.getAllPayments(budget.getId(), true);
            List<Settlement> settlements = dbHandler.getAllSettlementsOfBudget(budget.getId());
            BudgetExporter budgetExporter = new BudgetExporter(budget, participantsList,
                    settledPayments, unsettledPayments, settlements, currentStage);
            budgetExporter.export();
        } catch (RemoteException e) {
            e.printStackTrace();
            Alerts.serverConnectionError();
        }
    }

    /** Deletes the budget after an explicit confirmation, then closes the window. */
    private void deleteBudget() {
        Optional<ButtonType> result = Alerts.deleteBudgetConfirmation();
        if (!result.isPresent() || result.get() != ButtonType.OK)
            return;
        try {
            dbHandler.deleteBudget(budget);
            currentStage.close();
        } catch (RemoteException e) {
            e.printStackTrace();
            Alerts.serverConnectionError();
        } catch (Exception e) {
            e.printStackTrace();
            // TODO budget couldn't be deleted
        }
    }

    private void displayAddParticipantsWindow() {
        AddParticipantsWindow addParticipantsWindow = new AddParticipantsWindow(this);
        addParticipantsWindow.initOwner(currentStage);
        // Payments may reference new participants, so refresh on close.
        addParticipantsWindow.setOnHidden(event -> fillTableUnsettledPayments());
        addParticipantsWindow.show();
    }

    private void displayAddPaymentWindow() {
        AddPaymentWindow addPaymentWindow = new AddPaymentWindow(budget, participantsList);
        addPaymentWindow.initOwner(currentStage);
        addPaymentWindow.setOnHidden(event -> fillTableUnsettledPayments());
        addPaymentWindow.show();
    }

    /** Opens the settle dialog when there is at least one unsettled payment. */
    private void settlePayments() {
        try {
            if (!unsettledPayments.isEmpty())
                displaySettleWindow();
            else
                Alerts.noPaymentsToSettle();
        } catch (Exception e) {
            e.printStackTrace(); // TODO display proper message
        }
    }

    private void displaySettleWindow() {
        final List<Payment> paymentsToSettle = getPaymentsToSettle();
        final Map<Integer, Double> usersBalance = participantsList.stream()
                .collect(Collectors.toMap(User::getId,
                        user -> calculateUserBalance(user, paymentsToSettle)));
        SettleWindow settleWindow =
                new SettleWindow(this, budget, participantsList, paymentsToSettle, usersBalance);
        settleWindow.initOwner(currentStage);
        settleWindow.showAndWait();
    }

    /** Only payments the owner confirmed (checkbox in the table) get settled. */
    private List<Payment> getPaymentsToSettle() {
        return unsettledPayments.stream()
                .filter(Payment::isAccepted)
                .collect(Collectors.toList());
    }

    private void displaySettlementsHistoryWindow() {
        SettlementsHistoryWindow historyWindow = new SettlementsHistoryWindow(budget);
        historyWindow.initOwner(currentStage);
        historyWindow.showAndWait();
    }

    private void initTables() {
        initUnsettledPaymentsTable();
        initParticipantsTable();
    }

    private void initUnsettledPaymentsTable() {
        columnUnsettledPayer.setCellValueFactory(new PropertyValueFactory<>("payer"));
        columnUnsettledDescription.setCellValueFactory(new PropertyValueFactory<>("description"));
        columnUnsettledAmount.setCellValueFactory(new PropertyValueFactory<>("amount"));
        columnConfirm.setCellFactory(param -> new CheckBoxTableCell());
        tableUnsettledPayments.setItems(initFilteredPaymentsView());
        tableUnsettledPayments.setRowFactory(param -> {
            TableRow<Payment> row = new TableRow<>();
            row.setOnMouseClicked(mouseEvent -> handlePaymentRowClicked(row, mouseEvent));
            return row;
        });
    }

    /** Builds the filtered view driven by the paid/owed/other toggle buttons. */
    private FilteredList<Payment> initFilteredPaymentsView() {
        FilteredList<Payment> filteredPayments =
                new FilteredList<>(unsettledPayments, payment -> true);
        buttonPaymentsPaid.setOnAction(event -> updatePaymentsView(filteredPayments));
        buttonPaymentsOwed.setOnAction(event -> updatePaymentsView(filteredPayments));
        buttonPaymentsOther.setOnAction(event -> updatePaymentsView(filteredPayments));
        return filteredPayments;
    }

    private void updatePaymentsView(FilteredList<Payment> filteredPayments) {
        filteredPayments.setPredicate(this::isPaymentVisibleOnList);
        updateSpentMoney();
    }

    /** A payment is shown when the toggle matching the user's role in it is selected. */
    private boolean isPaymentVisibleOnList(Payment payment) {
        if (isCurrentUserPaymentOwner(payment))
            return buttonPaymentsPaid.isSelected();
        if (payment.isUserOwing(currentUser.getId()))
            return buttonPaymentsOwed.isSelected();
        return buttonPaymentsOther.isSelected();
    }

    /** Double-click opens the edit window, but only for the budget owner or the payer. */
    private void handlePaymentRowClicked(TableRow<Payment> row, MouseEvent mouseEvent) {
        if (row.isEmpty() || mouseEvent.getClickCount() != 2)
            return;
        Payment payment = row.getItem();
        if (isCurrentUserBudgetOwner() || isCurrentUserPaymentOwner(payment))
            displayPaymentWindow(payment);
        // TODO else: you have no rights to edit this payment (special color or information)
    }

    private boolean isCurrentUserBudgetOwner() {
        return Objects.equals(currentUser, budget.getOwner());
    }

    private boolean isCurrentUserPaymentOwner(Payment payment) {
        return payment.getPayerId() == currentUser.getId();
    }

    private void displayPaymentWindow(Payment payment) {
        UpdatePaymentWindow paymentWindow =
                new UpdatePaymentWindow(budget, payment, participantsList);
        paymentWindow.initOwner(currentStage);
        paymentWindow.setOnHidden(event -> fillTableUnsettledPayments());
        paymentWindow.show();
    }

    private void initParticipantsTable() {
        columnUserName.setCellValueFactory(new PropertyValueFactory<>("name"));
        columnUserMail.setCellValueFactory(new PropertyValueFactory<>("email"));
        columnUserBalance.setCellValueFactory(this::userBalanceCellFactory);
        tableParticipants.setItems(participantsList);
        tableParticipants.setRowFactory(param -> {
            TableRow<User> row = new TableRow<>();
            row.setOnMouseClicked(mouseEvent -> handleParticipantCellClicked(row, mouseEvent));
            return row;
        });
    }

    private void handleParticipantCellClicked(TableRow<User> row, MouseEvent mouseEvent) {
        if (mouseEvent.getClickCount() == 2 && !row.isEmpty()) {
            final User participant = row.getItem();
            boolean hasUnsettledPayments = unsettledPayments.stream()
                    .anyMatch(payment -> payment.getPayerId() == participant.getId());
            displayParticipantDetailsWindow(participant, hasUnsettledPayments);
        }
    }

    private void displayParticipantDetailsWindow(User participant, boolean hasUnsettledPayments) {
        ParticipantDetailsWindow participantWindow =
                new ParticipantDetailsWindow(budget, participant, hasUnsettledPayments);
        participantWindow.initOwner(currentStage);
        participantWindow.setOnHidden(event -> {
            fillTableParticipants();
            fillTableUnsettledPayments();
        });
        participantWindow.show();
    }

    /** Computes the balance cell value for a participant, rounded to 2 decimals. */
    private ObservableValue<BigDecimal> userBalanceCellFactory(
            CellDataFeatures<User, BigDecimal> cell) {
        final User participant = cell.getValue();
        final BigDecimal userBalance =
                BigDecimal.valueOf(calculateUserBalance(participant, unsettledPayments));
        return new ReadOnlyObjectWrapper<>(userBalance.setScale(2, BigDecimal.ROUND_HALF_DOWN));
    }

    /**
     * Adds the given users to the budget, skipping ones already participating.
     * Note: mutates the caller's list via removeAll (pre-existing behavior).
     */
    void addParticipants(List<User> users) {
        users.removeAll(participantsList);
        participantsList.addAll(users);
        try {
            List<User> usersSerializable = new ArrayList<>(users);
            dbHandler.addBudgetParticipants(budget.getId(), usersSerializable);
        } catch (RemoteException e) {
            e.printStackTrace();
            Alerts.serverConnectionError();
        }
    }

    /** Reloads both tables from the server. */
    public void update() {
        fillTableParticipants();
        fillTableUnsettledPayments();
    }

    private void fillTableUnsettledPayments() {
        fillTablePayments(unsettledPayments, false);
        updateSpentMoney();
    }

    private void fillTableParticipants() {
        participantsList.clear();
        try {
            participantsList.addAll(dbHandler.getBudgetParticipants(budget.getId()));
        } catch (RemoteException e) {
            e.printStackTrace();
            Alerts.serverConnectionError();
        }
    }

    private void fillTablePayments(ObservableList<Payment> payments, boolean settled) {
        payments.clear();
        try {
            payments.addAll(dbHandler.getAllPayments(budget.getId(), settled));
        } catch (RemoteException e) {
            e.printStackTrace();
            Alerts.serverConnectionError();
        }
    }

    /** Recomputes the "Sum" label over the payments currently visible in the filter. */
    private void updateSpentMoney() {
        refreshBalanceCells();
        spentMoneySum = unsettledPayments.stream()
                .filter(this::isPaymentVisibleOnList)
                .mapToDouble(Payment::getAmount)
                .sum();
        labelSpentMoney.setText(String.format("Sum: %.2f$", spentMoneySum));
    }

    /**
     * Net balance of {@code user} over {@code payments}: each payment is split
     * evenly among the owing users plus the payer; the payer is credited what
     * the others owe, owing users are debited their share.
     */
    private double calculateUserBalance(User user, List<Payment> payments) {
        double balance = 0.0;
        for (Payment payment : payments) {
            final double amount = payment.getAmount();
            final double perUser = amount / (payment.getOwingUsers().size() + 1 /* payer */);
            if (payment.getPayerId() == user.getId())
                balance += amount - perUser;
            else if (payment.isUserOwing(user.getId()))
                balance -= perUser;
        }
        return balance;
    }

    private void refreshBalanceCells() {
        // Toggling visibility forces JavaFX to re-run the balance cell factory.
        tableParticipants.getColumns().get(2).setVisible(false);
        tableParticipants.getColumns().get(2).setVisible(true);
        labelCurrentUserBalance.setText(String.format("Balance: %.2f$",
                calculateUserBalance(currentUser, unsettledPayments)));
    }

    @Override
    protected void clearErrorHighlights() {
    }

    /**
     * Confirm-checkbox cell for the unsettled-payments table. Only the budget
     * owner may change it. The box mirrors the row payment's accepted flag and
     * writes its own state back to the model (see class-level FIX note).
     */
    public class CheckBoxTableCell extends TableCell<Payment, Boolean> {
        private final CheckBox checkBox = new CheckBox();

        public CheckBoxTableCell() {
            setAlignment(Pos.CENTER);
            checkBox.setOnAction(event -> {
                Payment payment = rowPayment();
                if (payment != null)
                    payment.setAccept(checkBox.isSelected());
            });
        }

        /** Payment shown in this cell's row, or null while the row is being recycled. */
        private Payment rowPayment() {
            return getTableRow() == null ? null : (Payment) getTableRow().getItem();
        }

        @Override
        public void updateItem(Boolean item, boolean empty) {
            super.updateItem(item, empty);
            if (empty) {
                setGraphic(null);
                return;
            }
            Payment payment = rowPayment();
            // FIX: reflect the model instead of always rendering checked.
            checkBox.setSelected(payment != null && payment.isAccepted());
            checkBox.setDisable(!isCurrentUserBudgetOwner());
            setGraphic(checkBox);
        }
    }
}
package redis.clients.jedis.tests.commands;

import java.util.Arrays;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.junit.Test;

import redis.clients.jedis.ScanParams;
import redis.clients.jedis.ScanResult;

/**
 * Tests for the Redis set commands, each in a plain-string and a binary-key
 * variant.
 *
 * FIX (review): {@link #sdiffstore()} previously asserted on {@code "car"} — a
 * SOURCE key of the diff — with an expected set equal to car's own contents,
 * so it passed without checking the stored result at all. It now verifies the
 * actual destination key {@code "tar"} against the real diff {x, b}.
 */
public class SetCommandsTest extends JedisCommandTestBase {
    final byte[] bfoo = { 0x01, 0x02, 0x03, 0x04 };
    final byte[] bbar = { 0x05, 0x06, 0x07, 0x08 };
    final byte[] bcar = { 0x09, 0x0A, 0x0B, 0x0C };
    final byte[] ba = { 0x0A };
    final byte[] bb = { 0x0B };
    final byte[] bc = { 0x0C };
    final byte[] bd = { 0x0D };
    final byte[] bx = { 0x42 };

    @Test
    public void sadd() {
        // SADD returns the number of members actually added; duplicates add 0.
        long status = jedis.sadd("foo", "a");
        assertEquals(1, status);

        status = jedis.sadd("foo", "a");
        assertEquals(0, status);

        long bstatus = jedis.sadd(bfoo, ba);
        assertEquals(1, bstatus);

        bstatus = jedis.sadd(bfoo, ba);
        assertEquals(0, bstatus);
    }

    @Test
    public void smembers() {
        jedis.sadd("foo", "a");
        jedis.sadd("foo", "b");

        Set<String> expected = new HashSet<String>();
        expected.add("a");
        expected.add("b");

        Set<String> members = jedis.smembers("foo");
        assertEquals(expected, members);

        // Binary
        jedis.sadd(bfoo, ba);
        jedis.sadd(bfoo, bb);

        Set<byte[]> bexpected = new HashSet<byte[]>();
        bexpected.add(bb);
        bexpected.add(ba);

        Set<byte[]> bmembers = jedis.smembers(bfoo);
        assertEquals(bexpected, bmembers);
    }

    @Test
    public void srem() {
        jedis.sadd("foo", "a");
        jedis.sadd("foo", "b");

        long status = jedis.srem("foo", "a");

        Set<String> expected = new HashSet<String>();
        expected.add("b");

        assertEquals(1, status);
        assertEquals(expected, jedis.smembers("foo"));

        // Removing a non-member reports 0.
        status = jedis.srem("foo", "bar");
        assertEquals(0, status);

        // Binary
        jedis.sadd(bfoo, ba);
        jedis.sadd(bfoo, bb);

        long bstatus = jedis.srem(bfoo, ba);

        Set<byte[]> bexpected = new HashSet<byte[]>();
        bexpected.add(bb);

        assertEquals(1, bstatus);
        assertEquals(bexpected, jedis.smembers(bfoo));

        bstatus = jedis.srem(bfoo, bbar);
        assertEquals(0, bstatus);
    }

    @Test
    public void spop() {
        jedis.sadd("foo", "a");
        jedis.sadd("foo", "b");

        // SPOP removes an arbitrary member.
        String member = jedis.spop("foo");
        assertTrue("a".equals(member) || "b".equals(member));
        assertEquals(1, jedis.smembers("foo").size());

        member = jedis.spop("bar");
        assertNull(member);

        // Binary
        jedis.sadd(bfoo, ba);
        jedis.sadd(bfoo, bb);

        byte[] bmember = jedis.spop(bfoo);
        assertTrue(Arrays.equals(ba, bmember) || Arrays.equals(bb, bmember));
        assertEquals(1, jedis.smembers(bfoo).size());

        bmember = jedis.spop(bbar);
        assertNull(bmember);
    }

    @Test
    public void smove() {
        jedis.sadd("foo", "a");
        jedis.sadd("foo", "b");

        jedis.sadd("bar", "c");

        long status = jedis.smove("foo", "bar", "a");

        Set<String> expectedSrc = new HashSet<String>();
        expectedSrc.add("b");

        Set<String> expectedDst = new HashSet<String>();
        expectedDst.add("c");
        expectedDst.add("a");

        // FIX: expected value goes first in assertEquals.
        assertEquals(1, status);
        assertEquals(expectedSrc, jedis.smembers("foo"));
        assertEquals(expectedDst, jedis.smembers("bar"));

        status = jedis.smove("foo", "bar", "a");
        assertEquals(0, status);

        // Binary
        jedis.sadd(bfoo, ba);
        jedis.sadd(bfoo, bb);

        jedis.sadd(bbar, bc);

        long bstatus = jedis.smove(bfoo, bbar, ba);

        Set<byte[]> bexpectedSrc = new HashSet<byte[]>();
        bexpectedSrc.add(bb);

        Set<byte[]> bexpectedDst = new HashSet<byte[]>();
        bexpectedDst.add(bc);
        bexpectedDst.add(ba);

        assertEquals(1, bstatus);
        assertEquals(bexpectedSrc, jedis.smembers(bfoo));
        assertEquals(bexpectedDst, jedis.smembers(bbar));

        bstatus = jedis.smove(bfoo, bbar, ba);
        assertEquals(0, bstatus);
    }

    @Test
    public void scard() {
        jedis.sadd("foo", "a");
        jedis.sadd("foo", "b");

        long card = jedis.scard("foo");
        assertEquals(2, card);

        card = jedis.scard("bar");
        assertEquals(0, card);

        // Binary
        jedis.sadd(bfoo, ba);
        jedis.sadd(bfoo, bb);

        long bcard = jedis.scard(bfoo);
        assertEquals(2, bcard);

        bcard = jedis.scard(bbar);
        assertEquals(0, bcard);
    }

    @Test
    public void sismember() {
        jedis.sadd("foo", "a");
        jedis.sadd("foo", "b");

        assertTrue(jedis.sismember("foo", "a"));
        assertFalse(jedis.sismember("foo", "c"));

        // Binary
        jedis.sadd(bfoo, ba);
        jedis.sadd(bfoo, bb);

        assertTrue(jedis.sismember(bfoo, ba));
        assertFalse(jedis.sismember(bfoo, bc));
    }

    @Test
    public void sinter() {
        jedis.sadd("foo", "a");
        jedis.sadd("foo", "b");

        jedis.sadd("bar", "b");
        jedis.sadd("bar", "c");

        Set<String> expected = new HashSet<String>();
        expected.add("b");

        Set<String> intersection = jedis.sinter("foo", "bar");
        assertEquals(expected, intersection);

        // Binary
        jedis.sadd(bfoo, ba);
        jedis.sadd(bfoo, bb);

        jedis.sadd(bbar, bb);
        jedis.sadd(bbar, bc);

        Set<byte[]> bexpected = new HashSet<byte[]>();
        bexpected.add(bb);

        Set<byte[]> bintersection = jedis.sinter(bfoo, bbar);
        assertEquals(bexpected, bintersection);
    }

    @Test
    public void sinterstore() {
        jedis.sadd("foo", "a");
        jedis.sadd("foo", "b");

        jedis.sadd("bar", "b");
        jedis.sadd("bar", "c");

        Set<String> expected = new HashSet<String>();
        expected.add("b");

        // SINTERSTORE returns the cardinality of the stored intersection.
        long status = jedis.sinterstore("car", "foo", "bar");
        assertEquals(1, status);

        assertEquals(expected, jedis.smembers("car"));

        // Binary
        jedis.sadd(bfoo, ba);
        jedis.sadd(bfoo, bb);

        jedis.sadd(bbar, bb);
        jedis.sadd(bbar, bc);

        Set<byte[]> bexpected = new HashSet<byte[]>();
        bexpected.add(bb);

        long bstatus = jedis.sinterstore(bcar, bfoo, bbar);
        assertEquals(1, bstatus);

        assertEquals(bexpected, jedis.smembers(bcar));
    }

    @Test
    public void sunion() {
        jedis.sadd("foo", "a");
        jedis.sadd("foo", "b");

        jedis.sadd("bar", "b");
        jedis.sadd("bar", "c");

        Set<String> expected = new HashSet<String>();
        expected.add("a");
        expected.add("b");
        expected.add("c");

        Set<String> union = jedis.sunion("foo", "bar");
        assertEquals(expected, union);

        // Binary
        jedis.sadd(bfoo, ba);
        jedis.sadd(bfoo, bb);

        jedis.sadd(bbar, bb);
        jedis.sadd(bbar, bc);

        Set<byte[]> bexpected = new HashSet<byte[]>();
        bexpected.add(bb);
        bexpected.add(bc);
        bexpected.add(ba);

        Set<byte[]> bunion = jedis.sunion(bfoo, bbar);
        assertEquals(bexpected, bunion);
    }

    @Test
    public void sunionstore() {
        jedis.sadd("foo", "a");
        jedis.sadd("foo", "b");

        jedis.sadd("bar", "b");
        jedis.sadd("bar", "c");

        Set<String> expected = new HashSet<String>();
        expected.add("a");
        expected.add("b");
        expected.add("c");

        long status = jedis.sunionstore("car", "foo", "bar");
        assertEquals(3, status);

        assertEquals(expected, jedis.smembers("car"));

        // Binary
        jedis.sadd(bfoo, ba);
        jedis.sadd(bfoo, bb);

        jedis.sadd(bbar, bb);
        jedis.sadd(bbar, bc);

        Set<byte[]> bexpected = new HashSet<byte[]>();
        bexpected.add(bb);
        bexpected.add(bc);
        bexpected.add(ba);

        long bstatus = jedis.sunionstore(bcar, bfoo, bbar);
        assertEquals(3, bstatus);

        assertEquals(bexpected, jedis.smembers(bcar));
    }

    @Test
    public void sdiff() {
        jedis.sadd("foo", "x");
        jedis.sadd("foo", "a");
        jedis.sadd("foo", "b");
        jedis.sadd("foo", "c");

        jedis.sadd("bar", "c");

        jedis.sadd("car", "a");
        jedis.sadd("car", "d");

        // foo \ bar \ car = {x, b}
        Set<String> expected = new HashSet<String>();
        expected.add("x");
        expected.add("b");

        Set<String> diff = jedis.sdiff("foo", "bar", "car");
        assertEquals(expected, diff);

        // Binary
        jedis.sadd(bfoo, bx);
        jedis.sadd(bfoo, ba);
        jedis.sadd(bfoo, bb);
        jedis.sadd(bfoo, bc);

        jedis.sadd(bbar, bc);

        jedis.sadd(bcar, ba);
        jedis.sadd(bcar, bd);

        Set<byte[]> bexpected = new HashSet<byte[]>();
        bexpected.add(bb);
        bexpected.add(bx);

        Set<byte[]> bdiff = jedis.sdiff(bfoo, bbar, bcar);
        assertEquals(bexpected, bdiff);
    }

    @Test
    public void sdiffstore() {
        jedis.sadd("foo", "x");
        jedis.sadd("foo", "a");
        jedis.sadd("foo", "b");
        jedis.sadd("foo", "c");

        jedis.sadd("bar", "c");

        jedis.sadd("car", "a");
        jedis.sadd("car", "d");

        // FIX: the diff foo \ bar \ car = {x, b} is stored in "tar"; the old
        // assertion checked smembers("car") against car's own contents and
        // therefore verified nothing about SDIFFSTORE.
        Set<String> expected = new HashSet<String>();
        expected.add("x");
        expected.add("b");

        long status = jedis.sdiffstore("tar", "foo", "bar", "car");
        assertEquals(2, status);
        assertEquals(expected, jedis.smembers("tar"));

        // Binary
        jedis.sadd(bfoo, bx);
        jedis.sadd(bfoo, ba);
        jedis.sadd(bfoo, bb);
        jedis.sadd(bfoo, bc);

        jedis.sadd(bbar, bc);

        jedis.sadd(bcar, ba);
        jedis.sadd(bcar, bd);

        Set<byte[]> bexpected = new HashSet<byte[]>();
        bexpected.add(bb);
        bexpected.add(bx);

        long bstatus = jedis.sdiffstore("tar".getBytes(), bfoo, bbar, bcar);
        assertEquals(2, bstatus);
        assertEquals(bexpected, jedis.smembers("tar".getBytes()));
    }

    @Test
    public void srandmember() {
        jedis.sadd("foo", "a");
        jedis.sadd("foo", "b");

        // SRANDMEMBER does not remove the returned member.
        String member = jedis.srandmember("foo");
        assertTrue("a".equals(member) || "b".equals(member));
        assertEquals(2, jedis.smembers("foo").size());

        member = jedis.srandmember("bar");
        assertNull(member);

        // Binary
        jedis.sadd(bfoo, ba);
        jedis.sadd(bfoo, bb);

        byte[] bmember = jedis.srandmember(bfoo);
        assertTrue(Arrays.equals(ba, bmember) || Arrays.equals(bb, bmember));
        assertEquals(2, jedis.smembers(bfoo).size());

        bmember = jedis.srandmember(bbar);
        assertNull(bmember);
    }

    @Test
    public void sscan() {
        jedis.sadd("foo", "a", "b");

        ScanResult<String> result = jedis.sscan("foo", 0);

        // Cursor 0 on return means the scan completed in one pass.
        assertEquals(0, result.getCursor());
        assertFalse(result.getResult().isEmpty());
    }

    @Test
    public void sscanMatch() {
        ScanParams params = new ScanParams();
        params.match("a*");

        jedis.sadd("foo", "b", "a", "aa");
        ScanResult<String> result = jedis.sscan("foo", 0, params);

        assertEquals(0, result.getCursor());
        assertFalse(result.getResult().isEmpty());
    }

    @Test
    public void sscanCount() {
        ScanParams params = new ScanParams();
        params.count(2);

        jedis.sadd("foo", "a1", "a2", "a3", "a4", "a5");

        ScanResult<String> result = jedis.sscan("foo", 0, params);

        assertFalse(result.getResult().isEmpty());
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.cache; // DO NOT modify this class. It was generated from LeafRegionEntry.cpp import java.util.concurrent.atomic.AtomicIntegerFieldUpdater; import java.util.concurrent.atomic.AtomicLongFieldUpdater; import org.apache.geode.cache.EntryEvent; import org.apache.geode.internal.cache.lru.EnableLRU; import org.apache.geode.internal.cache.persistence.DiskRecoveryStore; import org.apache.geode.internal.InternalStatisticsDisabledException; import org.apache.geode.distributed.internal.membership.InternalDistributedMember; import org.apache.geode.internal.cache.versions.VersionSource; import org.apache.geode.internal.cache.versions.VersionStamp; import org.apache.geode.internal.cache.versions.VersionTag; import org.apache.geode.internal.offheap.OffHeapRegionEntryHelper; import org.apache.geode.internal.offheap.annotations.Released; import org.apache.geode.internal.offheap.annotations.Retained; import org.apache.geode.internal.offheap.annotations.Unretained; import org.apache.geode.internal.util.concurrent.CustomEntryConcurrentHashMap.HashEntry; // macros whose definition changes this class: // disk: 1 // lru: LRU // stats: 1 // versioned: 1 // offheap: 1 // One of the following 
// key macros must be defined:
// key object: 1
// key int: KEY_INT
// key long: KEY_LONG
// key uuid: KEY_UUID
// key string1: KEY_STRING1
// key string2: KEY_STRING2
/**
 * Do not modify this class. It was generated. Instead modify LeafRegionEntry.cpp and then run
 * ./dev-tools/generateRegionEntryClasses.sh (it must be run from the top level directory).
 */
public class VersionedStatsDiskRegionEntryOffHeapObjectKey
    extends VersionedStatsDiskRegionEntryOffHeap {
  public VersionedStatsDiskRegionEntryOffHeapObjectKey(RegionEntryContext context, Object key,
      @Retained Object value) {
    super(context, (value instanceof RecoveredEntry ? null : value));
    // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
    initialize(context, value);
    this.key = key;
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // common code
  // hash/next back the HashEntry contract (see getEntryHash()/getNextEntry() below), so this
  // region entry doubles as its own hash-bucket node in CustomEntryConcurrentHashMap.
  protected int hash;
  private HashEntry<Object, Object> next;
  @SuppressWarnings("unused")
  private volatile long lastModified;
  private static final AtomicLongFieldUpdater<VersionedStatsDiskRegionEntryOffHeapObjectKey> lastModifiedUpdater =
      AtomicLongFieldUpdater.newUpdater(VersionedStatsDiskRegionEntryOffHeapObjectKey.class,
          "lastModified");

  /**
   * All access done using ohAddrUpdater so it is used even though the compiler can not tell it is.
   */
  @SuppressWarnings("unused")
  @Retained
  @Released
  private volatile long ohAddress;

  /**
   * I needed to add this because I wanted clear to call setValue which normally can only be called
   * while the re is synced. But if I sync in that code it causes a lock ordering deadlock with the
   * disk regions because they also get a rw lock in clear. Some hardware platforms do not support
   * CAS on a long. If gemfire is run on one of those the AtomicLongFieldUpdater does a sync on the
   * re and we will once again be deadlocked. I don't know if we support any of the hardware
   * platforms that do not have a 64bit CAS. If we do then we can expect deadlocks on disk regions.
   */
  private final static AtomicLongFieldUpdater<VersionedStatsDiskRegionEntryOffHeapObjectKey> ohAddrUpdater =
      AtomicLongFieldUpdater.newUpdater(VersionedStatsDiskRegionEntryOffHeapObjectKey.class,
          "ohAddress");

  @Override
  public Token getValueAsToken() {
    return OffHeapRegionEntryHelper.getValueAsToken(this);
  }

  @Override
  protected Object getValueField() {
    return OffHeapRegionEntryHelper._getValue(this);
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  @Unretained
  protected void setValueField(@Unretained Object v) {
    OffHeapRegionEntryHelper.setValue(this, v);
  }

  @Override
  @Retained
  public Object _getValueRetain(RegionEntryContext context, boolean decompress) {
    return OffHeapRegionEntryHelper._getValueRetain(this, decompress, context);
  }

  @Override
  public long getAddress() {
    return ohAddrUpdater.get(this);
  }

  @Override
  public boolean setAddress(long expectedAddr, long newAddr) {
    return ohAddrUpdater.compareAndSet(this, expectedAddr, newAddr);
  }

  @Override
  @Released
  public void release() {
    OffHeapRegionEntryHelper.releaseEntry(this);
  }

  @Override
  public void returnToPool() {
    // Deadcoded for now; never was working
    // if (this instanceof VMThinRegionEntryLongKey) {
    // factory.returnToPool((VMThinRegionEntryLongKey)this);
    // }
  }

  protected long getLastModifiedField() {
    return lastModifiedUpdater.get(this);
  }

  protected boolean compareAndSetLastModifiedField(long expectedValue, long newValue) {
    return lastModifiedUpdater.compareAndSet(this, expectedValue, newValue);
  }

  /**
   * @see HashEntry#getEntryHash()
   */
  public int getEntryHash() {
    return this.hash;
  }

  protected void setEntryHash(int v) {
    this.hash = v;
  }

  /**
   * @see HashEntry#getNextEntry()
   */
  public HashEntry<Object, Object> getNextEntry() {
    return this.next;
  }

  /**
   * @see HashEntry#setNextEntry
   */
  public void setNextEntry(final HashEntry<Object, Object> n) {
    this.next = n;
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // disk code
  protected void initialize(RegionEntryContext context, Object value) {
    diskInitialize(context, value);
  }

  @Override
  public int updateAsyncEntrySize(EnableLRU capacityController) {
    throw new IllegalStateException("should never be called");
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  private void diskInitialize(RegionEntryContext context, Object value) {
    DiskRecoveryStore drs = (DiskRecoveryStore) context;
    DiskStoreImpl ds = drs.getDiskStore();
    long maxOplogSize = ds.getMaxOplogSize();
    // get appropriate instance of DiskId implementation based on maxOplogSize
    this.id = DiskId.createDiskId(maxOplogSize, true/* is persistence */, ds.needsLinkedList());
    Helper.initialize(this, drs, value);
  }

  /**
   * DiskId
   *
   * @since GemFire 5.1
   */
  protected DiskId id;// = new DiskId();

  public DiskId getDiskId() {
    return this.id;
  }

  @Override
  void setDiskId(RegionEntry old) {
    this.id = ((AbstractDiskRegionEntry) old).getDiskId();
  }

  // // inlining DiskId
  // // always have these fields
  // /**
  // * id consists of
  // * most significant
  // * 1 byte = users bits
  // * 2-8 bytes = oplog id
  // * least significant.
  // *
  // * The highest bit in the oplog id part is set to 1 if the oplog id
  // * is negative.
  // * @todo this field could be an int for an overflow only region
  // */
  // private long id;
  // /**
  // * Length of the bytes on disk.
  // * This is always set. If the value is invalid then it will be set to 0.
  // * The most significant bit is used by overflow to mark it as needing to be written.
  // */
  // protected int valueLength = 0;
  // // have intOffset or longOffset
  // // intOffset
  // /**
  // * The position in the oplog (the oplog offset) where this entry's value is
  // * stored
  // */
  // private volatile int offsetInOplog;
  // // longOffset
  // /**
  // * The position in the oplog (the oplog offset) where this entry's value is
  // * stored
  // */
  // private volatile long offsetInOplog;
  // // have overflowOnly or persistence
  // // overflowOnly
  // // no fields
  // // persistent
  // /** unique entry identifier * */
  // private long keyId;

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // stats code
  @Override
  public void updateStatsForGet(boolean hit, long time) {
    setLastAccessed(time);
    if (hit) {
      incrementHitCount();
    } else {
      incrementMissCount();
    }
  }

  @Override
  protected void setLastModifiedAndAccessedTimes(long lastModified, long lastAccessed) {
    _setLastModified(lastModified);
    if (!DISABLE_ACCESS_TIME_UPDATE_ON_PUT) {
      setLastAccessed(lastAccessed);
    }
  }

  private volatile long lastAccessed;
  private volatile int hitCount;
  private volatile int missCount;

  private static final AtomicIntegerFieldUpdater<VersionedStatsDiskRegionEntryOffHeapObjectKey> hitCountUpdater =
      AtomicIntegerFieldUpdater.newUpdater(VersionedStatsDiskRegionEntryOffHeapObjectKey.class,
          "hitCount");
  private static final AtomicIntegerFieldUpdater<VersionedStatsDiskRegionEntryOffHeapObjectKey> missCountUpdater =
      AtomicIntegerFieldUpdater.newUpdater(VersionedStatsDiskRegionEntryOffHeapObjectKey.class,
          "missCount");

  @Override
  public long getLastAccessed() throws InternalStatisticsDisabledException {
    return this.lastAccessed;
  }

  private void setLastAccessed(long lastAccessed) {
    this.lastAccessed = lastAccessed;
  }

  @Override
  public long getHitCount() throws InternalStatisticsDisabledException {
    // mask so the int counter is reported as an unsigned 32-bit value
    return this.hitCount & 0xFFFFFFFFL;
  }

  @Override
  public long getMissCount() throws InternalStatisticsDisabledException {
    return this.missCount & 0xFFFFFFFFL;
  }

  private void incrementHitCount() {
    hitCountUpdater.incrementAndGet(this);
  }

  private void incrementMissCount() {
    missCountUpdater.incrementAndGet(this);
  }

  @Override
  public void resetCounts() throws InternalStatisticsDisabledException {
    hitCountUpdater.set(this, 0);
    missCountUpdater.set(this, 0);
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  public void txDidDestroy(long currTime) {
    setLastModified(currTime);
    setLastAccessed(currTime);
    this.hitCount = 0;
    this.missCount = 0;
  }

  @Override
  public boolean hasStats() {
    return true;
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // versioned code
  // The entry version is packed into 3 bytes (high byte + low short); the region version into
  // 48 bits (high short + low int). See getEntryVersion()/getRegionVersion() for the packing.
  private VersionSource memberID;
  private short entryVersionLowBytes;
  private short regionVersionHighBytes;
  private int regionVersionLowBytes;
  private byte entryVersionHighByte;
  private byte distributedSystemId;

  public int getEntryVersion() {
    return ((entryVersionHighByte << 16) & 0xFF0000) | (entryVersionLowBytes & 0xFFFF);
  }

  public long getRegionVersion() {
    return (((long) regionVersionHighBytes) << 32) | (regionVersionLowBytes & 0x00000000FFFFFFFFL);
  }

  public long getVersionTimeStamp() {
    return getLastModified();
  }

  public void setVersionTimeStamp(long time) {
    setLastModified(time);
  }

  public VersionSource getMemberID() {
    return this.memberID;
  }

  public int getDistributedSystemId() {
    return this.distributedSystemId;
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  public void setVersions(VersionTag tag) {
    this.memberID = tag.getMemberID();
    int eVersion = tag.getEntryVersion();
    this.entryVersionLowBytes = (short) (eVersion & 0xffff);
    this.entryVersionHighByte = (byte) ((eVersion & 0xff0000) >> 16);
    this.regionVersionHighBytes = tag.getRegionVersionHighBytes();
    this.regionVersionLowBytes = tag.getRegionVersionLowBytes();
    if (!(tag.isGatewayTag()) && this.distributedSystemId == tag.getDistributedSystemId()) {
      if (getVersionTimeStamp() <= tag.getVersionTimeStamp()) {
        setVersionTimeStamp(tag.getVersionTimeStamp());
      } else {
        // keep the later local timestamp and propagate it back into the tag
        tag.setVersionTimeStamp(getVersionTimeStamp());
      }
    } else {
      setVersionTimeStamp(tag.getVersionTimeStamp());
    }
    this.distributedSystemId = (byte) (tag.getDistributedSystemId() & 0xff);
  }

  public void setMemberID(VersionSource memberID) {
    this.memberID = memberID;
  }

  @Override
  public VersionStamp getVersionStamp() {
    return this;
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  public VersionTag asVersionTag() {
    VersionTag tag = VersionTag.create(memberID);
    tag.setEntryVersion(getEntryVersion());
    tag.setRegionVersion(this.regionVersionHighBytes, this.regionVersionLowBytes);
    tag.setVersionTimeStamp(getVersionTimeStamp());
    tag.setDistributedSystemId(this.distributedSystemId);
    return tag;
  }

  public void processVersionTag(LocalRegion r, VersionTag tag, boolean isTombstoneFromGII,
      boolean hasDelta, VersionSource thisVM, InternalDistributedMember sender,
      boolean checkForConflicts) {
    basicProcessVersionTag(r, tag, isTombstoneFromGII, hasDelta, thisVM, sender, checkForConflicts);
  }

  @Override
  public void processVersionTag(EntryEvent cacheEvent) {
    // this keeps Eclipse happy. without it the sender chain becomes confused
    // while browsing this code
    super.processVersionTag(cacheEvent);
  }

  /** get rvv internal high byte. Used by region entries for transferring to storage */
  public short getRegionVersionHighBytes() {
    return this.regionVersionHighBytes;
  }

  /** get rvv internal low bytes. Used by region entries for transferring to storage */
  public int getRegionVersionLowBytes() {
    return this.regionVersionLowBytes;
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // key code
  private final Object key;

  @Override
  public Object getKey() {
    return this.key;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
}
/* * The MIT License * * Copyright (c) 2011 The Broad Institute * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
* */ /** * $Id$ */ package picard.sam; import com.google.common.annotations.VisibleForTesting; import htsjdk.samtools.*; import htsjdk.samtools.filter.*; import htsjdk.samtools.util.*; import org.broadinstitute.barclay.argparser.Argument; import org.broadinstitute.barclay.argparser.CommandLineParser; import org.broadinstitute.barclay.argparser.CommandLineProgramProperties; import org.broadinstitute.barclay.help.DocumentedFeature; import picard.cmdline.CommandLineProgram; import picard.cmdline.StandardOptionDefinitions; import picard.cmdline.programgroups.ReadDataManipulationProgramGroup; import java.io.BufferedWriter; import java.io.File; import java.io.IOException; import java.text.DecimalFormat; import java.util.*; import java.util.stream.Collectors; /** * <h3>Summary</h3> * Subsets a SAM file by either selecting or excluding certain reads * <p> * <h3>Details</h3> * Subsets a SAM or BAM file by either excluding or selecting reads as specified by FILTER. * Other parameters influence the behavior of the FILTER algorithm as described below. 
* <p>
 * <h3>Usage examples</h3>
 * <h4>Filter by queryname:</h4>
 * <pre>
 * java -jar picard.jar FilterSamReads \
 *       I=input.bam \
 *       O=output.bam \
 *       READ_LIST_FILE=read_names.txt \
 *       FILTER=includeReadList
 * </pre>
 * <h4>Filter by interval:</h4>
 * <pre>
 * java -jar picard.jar FilterSamReads \
 *       I=input.bam \
 *       O=output.bam \
 *       INTERVAL_LIST=regions.interval_list \
 *       FILTER=includePairedIntervals
 * </pre>
 * <h4>Filter reads having a (2-base or more) soft clip on the beginning of the read:</h4>
 * <pre>
 * cat <<EOF > script.js
 * // reads having a soft clip larger than 2 bases in start of read
 * function accept(rec) {
 *     if (rec.getReadUnmappedFlag()) return false;
 *     var cigar = rec.getCigar();
 *     if (cigar == null) return false;
 *     var ce = cigar.getCigarElement(0);
 *     return ce.getOperator().name() == "S" && ce.length() > 2;
 * }
 *
 * accept(record);
 * EOF
 *
 * java -jar picard.jar FilterSamReads \
 *       I=input.bam \
 *       O=output.bam \
 *       JAVASCRIPT_FILE=script.js \
 *       FILTER=includeJavascript
 * </pre>
 */
@CommandLineProgramProperties(
        summary = FilterSamReads.USAGE_SUMMARY + FilterSamReads.USAGE_DETAILS,
        oneLineSummary = FilterSamReads.USAGE_SUMMARY,
        programGroup = ReadDataManipulationProgramGroup.class)
@DocumentedFeature
public class FilterSamReads extends CommandLineProgram {
    static final String USAGE_SUMMARY = "Subsets reads from a SAM or BAM file by applying one of several filters.";
    static final String USAGE_DETAILS = "\nTakes a SAM or BAM file and subsets it by either excluding or " +
            "only including certain reads such as aligned or unaligned reads, specific reads based on a list of reads names, " +
            "an interval list, by Tag Values (type Z / String values only), or using a JavaScript script.\n" +
            "<br />" +
            "<h3>Usage example:</h3>" +
            "<h4>Filter by queryname</h4>" +
            "<pre>" +
            "java -jar picard.jar FilterSamReads \\<br /> " +
            " I=input.bam \\ <br /> " +
            " O=output.bam \\ <br /> " +
            " READ_LIST_FILE=read_names.txt \\ <br />" +
            " FILTER=includeReadList" +
            "</pre> " +
            "<h4>Filter by interval</h4>" +
            "<pre>" +
            "java -jar picard.jar FilterSamReads \\ <br /> " +
            " I=input.bam \\ <br /> " +
            " O=output.bam \\ <br /> " +
            " INTERVAL_LIST=regions.interval_list \\ <br/>" +
            " FILTER=includePairedIntervals" +
            "</pre> " +
            "<h4>Filter by Tag Value (type Z / String values only)</h4>" +
            "<pre>" +
            "java -jar picard.jar FilterSamReads \\ <br /> " +
            " I=input.bam \\ <br /> " +
            " O=output.bam \\ <br /> " +
            " TAG=CR \\ <br/>" +
            " TAG_VALUE=TTTGTCATCTCGAGTA \\ <br/>" +
            " FILTER=includeTagValues" +
            "</pre> " +
            "<h4>Filter reads having a soft clip on the beginning of the read larger than 2 bases with a JavaScript script</h4>" +
            "<pre>" +
            "cat <<EOF > script.js <br/>" +
            "/** reads having a soft clip larger than 2 bases in beginning of read*/ <br/>" +
            "function accept(rec) { <br/>" +
            " if (rec.getReadUnmappedFlag()) return false; <br/>" +
            " var cigar = rec.getCigar(); <br/>" +
            " if (cigar == null) return false; <br/>" +
            " var ce = cigar.getCigarElement(0); <br/>" +
            " return ce.getOperator().name() == \"S\" && ce.length() > 2; <br/>" +
            "} <br />" +
            "<br />" +
            "accept(record); <br/>" +
            "EOF <br/>" +
            "<br/>" +
            "java -jar picard.jar FilterSamReads \\ <br /> " +
            " I=input.bam \\ <br /> " +
            " O=output.bam \\ <br /> " +
            " JAVASCRIPT_FILE=script.js \\ <br/>" +
            " FILTER=includeJavascript" +
            "</pre> ";

    private static final Log log = Log.getInstance(FilterSamReads.class);

    /** The available filtering strategies; each value documents itself for the command-line help. */
    @VisibleForTesting
    protected enum Filter implements CommandLineParser.ClpEnum {
        includeAligned("Output aligned reads only. INPUT SAM/BAM must be in queryname SortOrder. (Note: first and second of paired reads must both be aligned to be included in OUTPUT.)"),
        excludeAligned("Output Unmapped reads only. INPUT SAM/BAM must be in queryname SortOrder. (Note: first and second of pair must both be aligned to be excluded from OUTPUT.)"),
        includeReadList("Output reads with names contained in READ_LIST_FILE. See READ_LIST_FILE for more detail."),
        excludeReadList("Output reads with names *not* contained in READ_LIST_FILE. See READ_LIST_FILE for more detail."),
        includeJavascript("Output reads that have been accepted by the JAVASCRIPT_FILE script, that is, reads for which the value of the script is true. " +
                "See the JAVASCRIPT_FILE argument for more detail. "),
        includePairedIntervals("Output reads that overlap with an interval from INTERVAL_LIST (and their mate). INPUT must be coordinate sorted."),
        includeTagValues("OUTPUT SAM/BAM will contain reads that have a value of tag TAG that is contained in the values for TAG_VALUES"),
        excludeTagValues("OUTPUT SAM/BAM will contain reads that do not have a value of tag TAG that is contained in the values for TAG_VALUES");

        private final String description;

        Filter(final String description) {
            this.description = description;
        }

        @Override
        public String getHelpDoc() {
            return description;
        }
    }

    @Argument(doc = "The SAM or BAM file that will be filtered.",
            shortName = StandardOptionDefinitions.INPUT_SHORT_NAME)
    public File INPUT;

    @Argument(doc = "Which filter to use.")
    public Filter FILTER = null;

    @Argument(doc = "File containing reads that will be included in or excluded from the OUTPUT SAM or BAM file, when using FILTER=includeReadList or FILTER=excludeReadList.",
            optional = true,
            shortName = "RLF")
    public File READ_LIST_FILE;

    @Argument(doc = "Interval List File containing intervals that will be included in the OUTPUT when using FILTER=includePairedIntervals",
            optional = true,
            shortName = "IL")
    public File INTERVAL_LIST;

    @Argument(doc = "The tag to select from input SAM/BAM",
            optional = true,
            shortName = "T")
    public String TAG;

    @Argument(doc = "The tag value(s) to filter by",
            optional = true,
            shortName = "TV")
    public List<String> TAG_VALUE;

    @Argument(
            doc = "SortOrder of the OUTPUT file, otherwise use the SortOrder of the INPUT file.",
            optional = true, shortName = "SO")
    public SAMFileHeader.SortOrder SORT_ORDER;

    @Argument(doc = "SAM or BAM file for resulting reads.",
            shortName = StandardOptionDefinitions.OUTPUT_SHORT_NAME)
    public File OUTPUT;

    @Argument(shortName = "JS",
            doc = "Filters the INPUT with a javascript expression using the java javascript-engine, when using FILTER=includeJavascript. " +
                    " The script puts the following variables in the script context: \n" +
                    " 'record' a SamRecord ( https://samtools.github.io/htsjdk/javadoc/htsjdk/htsjdk/samtools/SAMRecord.html ) and \n " +
                    " 'header' a SAMFileHeader ( https://samtools.github.io/htsjdk/javadoc/htsjdk/htsjdk/samtools/SAMFileHeader.html ).\n" +
                    " all the public members of SamRecord and SAMFileHeader are accessible. " +
                    "A record is accepted if the last value of the script evaluates to true.",
            optional = true)
    public File JAVASCRIPT_FILE = null;

    @Argument(
            doc = "Create <OUTPUT>.reads file containing names of reads from INPUT and OUTPUT (for debugging purposes.)",
            optional = true)
    public boolean WRITE_READS_FILES = false;

    /**
     * Drains the given filtering iterator into OUTPUT, re-sorting if SORT_ORDER differs
     * from the input's sort order. Closes the iterator and the output writer when done.
     *
     * @param filteringIterator iterator over the records that passed the configured FILTER
     */
    private void filterReads(final FilteringSamIterator filteringIterator) {

        // get OUTPUT header from INPUT and overwrite it if necessary
        final SAMFileHeader fileHeader =
                SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).getFileHeader(INPUT);
        final SAMFileHeader.SortOrder inputSortOrder = fileHeader.getSortOrder();
        if (SORT_ORDER != null) {
            fileHeader.setSortOrder(SORT_ORDER);
        }

        if (FILTER == Filter.includePairedIntervals && fileHeader.getSortOrder() != SAMFileHeader.SortOrder.coordinate) {
            throw new UnsupportedOperationException("Input must be coordinate sorted to use includePairedIntervals");
        }

        // presorted tells the writer whether records arrive already in the requested order
        final boolean presorted = inputSortOrder.equals(fileHeader.getSortOrder());
        log.info("Filtering [presorted=" + presorted + "] " + INPUT.getName() + " -> OUTPUT=" +
                OUTPUT.getName() + " [sortorder=" + fileHeader.getSortOrder().name() + "]");

        // create OUTPUT file
        final SAMFileWriter outputWriter = new SAMFileWriterFactory().makeSAMOrBAMWriter(fileHeader, presorted, OUTPUT);

        final ProgressLogger progress = new ProgressLogger(log, (int) 1e6, "Written");

        while (filteringIterator.hasNext()) {
            final SAMRecord rec = filteringIterator.next();
            outputWriter.addAlignment(rec);
            progress.record(rec);
        }

        filteringIterator.close();
        outputWriter.close();

        log.info(new DecimalFormat("#,###").format(progress.getCount()) + " SAMRecords written to " + OUTPUT.getName());
    }

    /**
     * Write out a file of read names for debugging purposes.
     *
     * @param samOrBamFile The SAM or BAM file for which we are going to write out a file of its
     *                     containing read names
     */
    private void writeReadsFile(final File samOrBamFile) throws IOException {
        final File readsFile =
                new File(OUTPUT.getParentFile(), IOUtil.basename(samOrBamFile) + ".reads");
        IOUtil.assertFileIsWritable(readsFile);
        try (final SamReader reader = SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).open(samOrBamFile);
             final BufferedWriter bw = IOUtil.openFileForBufferedWriting(readsFile, false)) {

            for (final SAMRecord rec : reader) {
                bw.write(rec.toString() + "\n");
            }
        }
        IOUtil.assertFileIsReadable(readsFile);
    }

    /** Reads and returns the intervals contained in the given interval-list file. */
    private List<Interval> getIntervalList(final File intervalFile) throws IOException {
        IOUtil.assertFileIsReadable(intervalFile);
        return IntervalList.fromFile(intervalFile).getIntervals();
    }

    @Override
    protected int doWork() {
        try {
            IOUtil.assertFileIsReadable(INPUT);
            IOUtil.assertFileIsWritable(OUTPUT);

            if (WRITE_READS_FILES) writeReadsFile(INPUT);

            // FIX: the reader was previously never closed (only the iterator was);
            // try-with-resources guarantees the underlying file handle is released.
            try (final SamReader samReader = SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).open(INPUT)) {
                final FilteringSamIterator filteringIterator;

                // Used for exclude/include tag filter which expects a List<Object> input so casting here
                // This is done to get around poor constructors of TagFilter that should be addressed in
                // https://github.com/samtools/htsjdk/issues/1082
                List<Object> tagList = (List) TAG_VALUE;

                switch (FILTER) {
                    case includeAligned:
                        filteringIterator = new FilteringSamIterator(samReader.iterator(),
                                new AlignedFilter(true), true);
                        break;
                    case excludeAligned:
                        filteringIterator = new FilteringSamIterator(samReader.iterator(),
                                new AlignedFilter(false), true);
                        break;
                    case includeReadList:
                        filteringIterator = new FilteringSamIterator(samReader.iterator(),
                                new ReadNameFilter(READ_LIST_FILE, true));
                        break;
                    case excludeReadList:
                        filteringIterator = new FilteringSamIterator(samReader.iterator(),
                                new ReadNameFilter(READ_LIST_FILE, false));
                        break;
                    case includeJavascript:
                        filteringIterator = new FilteringSamIterator(samReader.iterator(),
                                new JavascriptSamRecordFilter(
                                        JAVASCRIPT_FILE,
                                        samReader.getFileHeader()));
                        break;
                    case includePairedIntervals:
                        filteringIterator = new FilteringSamIterator(samReader.iterator(),
                                new IntervalKeepPairFilter(getIntervalList(INTERVAL_LIST)));
                        break;
                    case includeTagValues:
                        filteringIterator = new FilteringSamIterator(samReader.iterator(),
                                new TagFilter(TAG, tagList, true));
                        break;
                    case excludeTagValues:
                        filteringIterator = new FilteringSamIterator(samReader.iterator(),
                                new TagFilter(TAG, tagList, false));
                        break;
                    default:
                        throw new UnsupportedOperationException(FILTER.name() + " has not been implemented!");
                }

                filterReads(filteringIterator);
            }

            IOUtil.assertFileIsReadable(OUTPUT);
            if (WRITE_READS_FILES) writeReadsFile(OUTPUT);
            return 0;

        } catch (Exception e) {
            log.error(e, "Failed to filter " + INPUT.getName());
            // remove a possibly truncated/partial OUTPUT so downstream tools don't consume it
            if (OUTPUT.exists() && !OUTPUT.delete()) {
                log.warn("Failed to delete possibly incomplete output file:" + OUTPUT.getAbsolutePath());
            }
            return 1;
        }
    }

    @Override
    protected String[] customCommandLineValidation() {
        List<String> errors = new ArrayList<>();

        if (INPUT.equals(OUTPUT)) errors.add("INPUT file and OUTPUT file must differ!");

        List<Filter> tagFilters = Arrays.asList(Filter.includeTagValues, Filter.excludeTagValues);

        checkInputs(Arrays.asList(Filter.includeReadList, Filter.excludeReadList), READ_LIST_FILE, "READ_LIST_FILE").ifPresent(errors::add);
        checkInputs(Collections.singletonList(Filter.includePairedIntervals), INTERVAL_LIST, "INTERVAL_LIST").ifPresent(errors::add);
        checkInputs(Collections.singletonList(Filter.includeJavascript), JAVASCRIPT_FILE, "JAVASCRIPT_FILE").ifPresent(errors::add);
        checkInputs(tagFilters, TAG, "TAG").ifPresent(errors::add);

        if (tagFilters.contains(FILTER) && TAG_VALUE.isEmpty()) {
            log.warn("Running FilterSamReads with a Tag Filter but no TAG_VALUE argument provided. This " +
                    "will recreate the original input file i.e. not filter anything");
        }

        if (!errors.isEmpty()) return errors.toArray(new String[0]);

        return super.customCommandLineValidation();
    }

    /**
     * Validates that the given auxiliary input is present exactly when one of the given
     * filters is selected.
     *
     * @param filters           the filters for which {@code inputObject} is required
     * @param inputObject       the argument value to check (null means "not provided")
     * @param inputFileVariable the argument name, used in the error message
     * @return an error message if the combination is invalid, otherwise empty
     */
    private Optional<String> checkInputs(final List<Filter> filters, final Object inputObject, final String inputFileVariable) {
        if (filters.contains(FILTER) && inputObject == null)
            return Optional.of(String.format("%s must be specified when using FILTER=%s, but it was null.", inputFileVariable, FILTER));

        if (!filters.contains(FILTER) && inputObject != null)
            return Optional.of(String.format("%s may only be specified when using FILTER from %s, FILTER value: %s, %s value: %s",
                    inputFileVariable,
                    filters.stream().map(Enum::toString).collect(Collectors.joining(", ")),
                    FILTER, inputFileVariable, inputObject));

        return Optional.empty();
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.tinkerpop.gremlin.process.computer.util; import org.apache.commons.configuration.Configuration; import org.apache.tinkerpop.gremlin.process.computer.GraphComputer; import org.apache.tinkerpop.gremlin.process.computer.VertexProgram; import org.apache.tinkerpop.gremlin.structure.Direction; import org.apache.tinkerpop.gremlin.structure.Edge; import org.apache.tinkerpop.gremlin.structure.Element; import org.apache.tinkerpop.gremlin.structure.Graph; import org.apache.tinkerpop.gremlin.structure.Property; import org.apache.tinkerpop.gremlin.structure.Transaction; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.apache.tinkerpop.gremlin.structure.VertexProperty; import org.apache.tinkerpop.gremlin.structure.util.ElementHelper; import org.apache.tinkerpop.gremlin.structure.util.wrapped.WrappedEdge; import org.apache.tinkerpop.gremlin.structure.util.wrapped.WrappedElement; import org.apache.tinkerpop.gremlin.structure.util.wrapped.WrappedProperty; import org.apache.tinkerpop.gremlin.structure.util.wrapped.WrappedVertex; import org.apache.tinkerpop.gremlin.structure.util.wrapped.WrappedVertexProperty; import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils; 
import java.util.Collections; import java.util.Iterator; import java.util.NoSuchElementException; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; /** * @author Marko A. Rodriguez (http://markorodriguez.com) */ public final class ComputerGraph implements Graph { private enum State {VERTEX_PROGRAM, MAP_REDUCE} private ComputerVertex starVertex; private final Set<String> computeKeys; private State state; private ComputerGraph(final State state, final Vertex starVertex, final Optional<VertexProgram> vertexProgram) { this.state = state; this.computeKeys = vertexProgram.isPresent() ? vertexProgram.get().getElementComputeKeys() : Collections.emptySet(); this.starVertex = new ComputerVertex(starVertex); } public static ComputerVertex vertexProgram(final Vertex starVertex, VertexProgram vertexProgram) { return new ComputerGraph(State.VERTEX_PROGRAM, starVertex, Optional.of(vertexProgram)).getStarVertex(); } public static ComputerVertex mapReduce(final Vertex starVertex) { return new ComputerGraph(State.MAP_REDUCE, starVertex, Optional.empty()).getStarVertex(); } public ComputerVertex getStarVertex() { return this.starVertex; } @Override public Vertex addVertex(final Object... keyValues) { throw new UnsupportedOperationException(); } @Override public <C extends GraphComputer> C compute(final Class<C> graphComputerClass) throws IllegalArgumentException { throw new UnsupportedOperationException(); } @Override public GraphComputer compute() throws IllegalArgumentException { throw new UnsupportedOperationException(); } @Override public Iterator<Vertex> vertices(final Object... vertexIds) { throw new UnsupportedOperationException(); } @Override public Iterator<Edge> edges(final Object... 
edgeIds) { throw new UnsupportedOperationException(); } @Override public Transaction tx() { return this.starVertex.graph().tx(); } @Override public Variables variables() { throw new UnsupportedOperationException(); } @Override public Configuration configuration() { throw new UnsupportedOperationException(); } @Override public void close() throws Exception { throw new UnsupportedOperationException(); } private class ComputerElement implements Element, WrappedElement<Element> { private final Element element; public ComputerElement(final Element element) { this.element = element; } @Override public Object id() { return this.element.id(); } @Override public String label() { return this.element.label(); } @Override public Graph graph() { return ComputerGraph.this; } @Override public Set<String> keys() { return this.element.keys().stream().filter(key -> !computeKeys.contains(key)).collect(Collectors.toSet()); } @Override public <V> Property<V> property(final String key) { return new ComputerProperty<>(this.element.property(key)); } @Override public <V> Property<V> property(final String key, final V value) { if (state.equals(State.MAP_REDUCE)) throw GraphComputer.Exceptions.vertexPropertiesCanNotBeUpdatedInMapReduce(); return new ComputerProperty<>(this.element.property(key, value)); } @Override public <V> V value(final String key) throws NoSuchElementException { return this.element.value(key); } @Override public void remove() { this.element.remove(); } @Override public <V> Iterator<? extends Property<V>> properties(final String... propertyKeys) { return (Iterator) IteratorUtils.filter(this.element.properties(propertyKeys), property -> !computeKeys.contains(property.key())); } @Override public <V> Iterator<V> values(final String... 
propertyKeys) { return IteratorUtils.map(this.<V>properties(propertyKeys), property -> property.value()); } @Override public int hashCode() { return this.element.hashCode(); } @Override public String toString() { return this.element.toString(); } @Override public boolean equals(final Object other) { return this.element.equals(other); } @Override public Element getBaseElement() { return this.element; } } /////////////////////////////////// private class ComputerVertex extends ComputerElement implements Vertex, WrappedVertex<Vertex> { public ComputerVertex(final Vertex vertex) { super(vertex); } @Override public <V> VertexProperty<V> property(final String key) { return new ComputerVertexProperty<>(this.getBaseVertex().property(key)); } @Override public <V> VertexProperty<V> property(final String key, final V value) { if (state.equals(State.MAP_REDUCE)) throw GraphComputer.Exceptions.vertexPropertiesCanNotBeUpdatedInMapReduce(); if (!computeKeys.contains(key)) throw GraphComputer.Exceptions.providedKeyIsNotAnElementComputeKey(key); return new ComputerVertexProperty<>(this.getBaseVertex().property(key, value)); } @Override public <V> VertexProperty<V> property(final String key, final V value, final Object... keyValues) { if (state.equals(State.MAP_REDUCE)) throw GraphComputer.Exceptions.vertexPropertiesCanNotBeUpdatedInMapReduce(); if (!computeKeys.contains(key)) throw GraphComputer.Exceptions.providedKeyIsNotAnElementComputeKey(key); return new ComputerVertexProperty<>(this.getBaseVertex().property(key, value, keyValues)); } @Override public <V> VertexProperty<V> property(final VertexProperty.Cardinality cardinality, final String key, final V value, final Object... 
keyValues) { if (state.equals(State.MAP_REDUCE)) throw GraphComputer.Exceptions.vertexPropertiesCanNotBeUpdatedInMapReduce(); if (!computeKeys.contains(key)) throw GraphComputer.Exceptions.providedKeyIsNotAnElementComputeKey(key); return new ComputerVertexProperty<>(this.getBaseVertex().property(cardinality, key, value, keyValues)); } @Override public Edge addEdge(final String label, final Vertex inVertex, final Object... keyValues) { if (state.equals(State.MAP_REDUCE)) throw GraphComputer.Exceptions.incidentAndAdjacentElementsCanNotBeAccessedInMapReduce(); return new ComputerEdge(this.getBaseVertex().addEdge(label, inVertex, keyValues)); } @Override public Iterator<Edge> edges(final Direction direction, final String... edgeLabels) { if (state.equals(State.MAP_REDUCE)) throw GraphComputer.Exceptions.incidentAndAdjacentElementsCanNotBeAccessedInMapReduce(); return IteratorUtils.map(this.getBaseVertex().edges(direction, edgeLabels), ComputerEdge::new); } @Override public Iterator<Vertex> vertices(final Direction direction, final String... edgeLabels) { if (state.equals(State.MAP_REDUCE)) throw GraphComputer.Exceptions.incidentAndAdjacentElementsCanNotBeAccessedInMapReduce(); return IteratorUtils.map(this.getBaseVertex().vertices(direction, edgeLabels), v -> v.equals(starVertex) ? starVertex : new ComputerAdjacentVertex(v)); } @Override public <V> Iterator<VertexProperty<V>> properties(final String... 
propertyKeys) { return IteratorUtils.map(super.properties(propertyKeys), property -> new ComputerVertexProperty<V>((VertexProperty<V>) property)); } @Override public Vertex getBaseVertex() { return (Vertex) this.getBaseElement(); } } //////////////////////////// private class ComputerEdge extends ComputerElement implements Edge, WrappedEdge<Edge> { public ComputerEdge(final Edge edge) { super(edge); } @Override public Iterator<Vertex> vertices(final Direction direction) { if (direction.equals(Direction.OUT)) return IteratorUtils.of(this.outVertex()); if (direction.equals(Direction.IN)) return IteratorUtils.of(this.inVertex()); else return IteratorUtils.of(this.outVertex(), this.inVertex()); } @Override public Vertex outVertex() { return this.getBaseEdge().outVertex().equals(starVertex) ? starVertex : new ComputerAdjacentVertex(this.getBaseEdge().outVertex()); } @Override public Vertex inVertex() { return this.getBaseEdge().inVertex().equals(starVertex) ? starVertex : new ComputerAdjacentVertex(this.getBaseEdge().inVertex()); } @Override public <V> Iterator<Property<V>> properties(final String... 
propertyKeys) { return IteratorUtils.map(super.properties(propertyKeys), property -> new ComputerProperty(property)); } @Override public Edge getBaseEdge() { return (Edge) this.getBaseElement(); } } /////////////////////////// private class ComputerVertexProperty<V> extends ComputerElement implements VertexProperty<V>, WrappedVertexProperty<VertexProperty<V>> { public ComputerVertexProperty(final VertexProperty<V> vertexProperty) { super(vertexProperty); } @Override public String key() { return this.getBaseVertexProperty().key(); } @Override public V value() throws NoSuchElementException { return this.<V>getBaseVertexProperty().value(); } @Override public boolean isPresent() { return this.getBaseVertexProperty().isPresent(); } @Override public Vertex element() { return new ComputerVertex(this.getBaseVertexProperty().element()); } @Override public <U> Iterator<Property<U>> properties(final String... propertyKeys) { return IteratorUtils.map(super.properties(propertyKeys), property -> new ComputerProperty(property)); } @Override public VertexProperty<V> getBaseVertexProperty() { return (VertexProperty<V>) this.getBaseElement(); } } /////////////////////////// private class ComputerProperty<V> implements Property<V>, WrappedProperty<Property<V>> { private final Property<V> property; public ComputerProperty(final Property<V> property) { this.property = property; } @Override public String key() { return this.property.key(); } @Override public V value() throws NoSuchElementException { return this.property.value(); } @Override public boolean isPresent() { return this.property.isPresent(); } @Override public Element element() { final Element element = this.property.element(); if (element instanceof Vertex) return new ComputerVertex((Vertex) element); else if (element instanceof Edge) return new ComputerEdge((Edge) element); else return new ComputerVertexProperty((VertexProperty) element); } @Override public void remove() { this.property.remove(); } @Override public 
Property<V> getBaseProperty() { return this.property; } @Override public String toString() { return this.property.toString(); } @Override public int hashCode() { return this.property.hashCode(); } @Override public boolean equals(final Object other) { return this.property.equals(other); } } /////////////////////////// private class ComputerAdjacentVertex implements Vertex, WrappedVertex<Vertex> { private final Vertex adjacentVertex; public ComputerAdjacentVertex(final Vertex adjacentVertex) { this.adjacentVertex = adjacentVertex; } @Override public Edge addEdge(final String label, final Vertex inVertex, final Object... keyValues) { throw GraphComputer.Exceptions.adjacentVertexEdgesAndVerticesCanNotBeReadOrUpdated(); } @Override public <V> VertexProperty<V> property(final String key, final V value, final Object... keyValues) { throw GraphComputer.Exceptions.adjacentVertexPropertiesCanNotBeReadOrUpdated(); } @Override public <V> VertexProperty<V> property(final VertexProperty.Cardinality cardinality, final String key, final V value, final Object... keyValues) { throw GraphComputer.Exceptions.adjacentVertexPropertiesCanNotBeReadOrUpdated(); } @Override public Iterator<Edge> edges(Direction direction, String... edgeLabels) { throw GraphComputer.Exceptions.adjacentVertexEdgesAndVerticesCanNotBeReadOrUpdated(); } @Override public Iterator<Vertex> vertices(Direction direction, String... edgeLabels) { throw GraphComputer.Exceptions.adjacentVertexEdgesAndVerticesCanNotBeReadOrUpdated(); } @Override public Object id() { return this.adjacentVertex.id(); } @Override public String label() { throw GraphComputer.Exceptions.adjacentVertexLabelsCanNotBeRead(); } @Override public Graph graph() { return ComputerGraph.this; } @Override public void remove() { } @Override public <V> Iterator<VertexProperty<V>> properties(final String... 
propertyKeys) { throw GraphComputer.Exceptions.adjacentVertexPropertiesCanNotBeReadOrUpdated(); } @Override public int hashCode() { return this.adjacentVertex.hashCode(); } @Override public String toString() { return this.adjacentVertex.toString(); } @Override public boolean equals(final Object other) { return this.adjacentVertex.equals(other); } @Override public Vertex getBaseVertex() { return this.adjacentVertex; } } }
/*
 * Copyright 2018 Google LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package com.google.cloud.tools.jib.api;

import com.google.cloud.tools.jib.builder.steps.BuildResult;
import com.google.cloud.tools.jib.builder.steps.StepsRunner;
import com.google.cloud.tools.jib.configuration.BuildContext;
import com.google.cloud.tools.jib.configuration.ImageConfiguration;
import com.google.cloud.tools.jib.docker.DockerClient;
import com.google.cloud.tools.jib.event.EventHandlers;
import com.google.cloud.tools.jib.filesystem.XdgDirectories;
import com.google.common.base.Preconditions;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ListMultimap;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.function.Consumer;
import java.util.function.Function;
import javax.annotation.Nullable;

/** Configures how to containerize. */
public class Containerizer {

  /**
   * The default directory for caching the base image layers, in {@code [user cache
   * home]/google-cloud-tools-java/jib}.
   */
  public static final Path DEFAULT_BASE_CACHE_DIRECTORY = XdgDirectories.getCacheHome();

  public static final String DEFAULT_APPLICATION_CACHE_DIRECTORY_NAME =
      "jib-core-application-layers-cache";

  private static final String DEFAULT_TOOL_NAME = "jib-core";
  // may be null when the implementation version is unavailable (e.g. running from class files)
  private static final String DEFAULT_TOOL_VERSION =
      Containerizer.class.getPackage().getImplementationVersion();

  private static final String DESCRIPTION_FOR_DOCKER_REGISTRY = "Building and pushing image";
  private static final String DESCRIPTION_FOR_DOCKER_DAEMON = "Building image to Docker daemon";
  private static final String DESCRIPTION_FOR_TARBALL = "Building image tarball";

  /**
   * Gets a new {@link Containerizer} that containerizes to a container registry.
   *
   * @param registryImage the {@link RegistryImage} that defines target container registry and
   *     credentials
   * @return a new {@link Containerizer}
   */
  public static Containerizer to(RegistryImage registryImage) {
    ImageConfiguration imageConfiguration =
        ImageConfiguration.builder(registryImage.getImageReference())
            .setCredentialRetrievers(registryImage.getCredentialRetrievers())
            .build();

    Function<BuildContext, StepsRunner> stepsRunnerFactory =
        buildContext -> StepsRunner.begin(buildContext).registryPushSteps();

    // registry pushes are the only target that cannot run offline (mustBeOnline = true)
    return new Containerizer(
        DESCRIPTION_FOR_DOCKER_REGISTRY, imageConfiguration, stepsRunnerFactory, true);
  }

  /**
   * Gets a new {@link Containerizer} that containerizes to a Docker daemon.
   *
   * @param dockerDaemonImage the {@link DockerDaemonImage} that defines target Docker daemon
   * @return a new {@link Containerizer}
   */
  public static Containerizer to(DockerDaemonImage dockerDaemonImage) {
    ImageConfiguration imageConfiguration =
        ImageConfiguration.builder(dockerDaemonImage.getImageReference()).build();

    DockerClient dockerClient =
        new DockerClient(
            dockerDaemonImage.getDockerExecutable(), dockerDaemonImage.getDockerEnvironment());

    Function<BuildContext, StepsRunner> stepsRunnerFactory =
        buildContext -> StepsRunner.begin(buildContext).dockerLoadSteps(dockerClient);

    return new Containerizer(
        DESCRIPTION_FOR_DOCKER_DAEMON, imageConfiguration, stepsRunnerFactory, false);
  }

  /**
   * Gets a new {@link Containerizer} that containerizes to a tarball archive.
   *
   * @param tarImage the {@link TarImage} that defines target output file
   * @return a new {@link Containerizer}
   */
  public static Containerizer to(TarImage tarImage) {
    Optional<ImageReference> imageReference = tarImage.getImageReference();
    if (!imageReference.isPresent()) {
      throw new IllegalArgumentException(
          "Image name must be set when building a TarImage; use TarImage#named(...) to set the name"
              + " of the target image");
    }
    ImageConfiguration imageConfiguration =
        ImageConfiguration.builder(imageReference.get()).build();

    Function<BuildContext, StepsRunner> stepsRunnerFactory =
        buildContext -> StepsRunner.begin(buildContext).tarBuildSteps(tarImage.getPath());

    return new Containerizer(
        DESCRIPTION_FOR_TARBALL, imageConfiguration, stepsRunnerFactory, false);
  }

  private final String description;
  private final ImageConfiguration imageConfiguration;
  private final Function<BuildContext, StepsRunner> stepsRunnerFactory;
  private final boolean mustBeOnline;
  private final Set<String> additionalTags = new HashSet<>();
  private final EventHandlers.Builder eventHandlersBuilder = EventHandlers.builder();
  // final: the multimap itself is mutated via addRegistryMirrors, never reassigned
  private final ListMultimap<String, String> registryMirrors = ArrayListMultimap.create();

  @Nullable private ExecutorService executorService;
  private Path baseImageLayersCacheDirectory = DEFAULT_BASE_CACHE_DIRECTORY;
  @Nullable private Path applicationLayersCacheDirectory;
  private boolean allowInsecureRegistries = false;
  private boolean offline = false;
  private String toolName = DEFAULT_TOOL_NAME;
  @Nullable private String toolVersion = DEFAULT_TOOL_VERSION;
  private boolean alwaysCacheBaseImage = false;

  /** Instantiate with {@link #to}. */
  private Containerizer(
      String description,
      ImageConfiguration imageConfiguration,
      Function<BuildContext, StepsRunner> stepsRunnerFactory,
      boolean mustBeOnline) {
    this.description = description;
    this.imageConfiguration = imageConfiguration;
    this.stepsRunnerFactory = stepsRunnerFactory;
    this.mustBeOnline = mustBeOnline;
  }

  /**
   * Adds an additional tag to tag the target image with. For example, the following would
   * containerize to both {@code gcr.io/my-project/my-image:tag} and {@code
   * gcr.io/my-project/my-image:tag2}:
   *
   * <pre>{@code
   * Containerizer.to(RegistryImage.named("gcr.io/my-project/my-image:tag")).withAdditionalTag("tag2");
   * }</pre>
   *
   * @param tag the additional tag to push to
   * @return this
   */
  public Containerizer withAdditionalTag(String tag) {
    Preconditions.checkArgument(ImageReference.isValidTag(tag), "invalid tag '%s'", tag);
    additionalTags.add(tag);
    return this;
  }

  /**
   * Sets the {@link ExecutorService} Jib executes on. Jib, by default, uses {@link
   * Executors#newCachedThreadPool}.
   *
   * @param executorService the {@link ExecutorService}
   * @return this
   */
  public Containerizer setExecutorService(@Nullable ExecutorService executorService) {
    this.executorService = executorService;
    return this;
  }

  /**
   * Sets the directory to use for caching base image layers. This cache can (and should) be shared
   * between multiple images. The default base image layers cache directory is {@code [user cache
   * home]/google-cloud-tools-java/jib} ({@link #DEFAULT_BASE_CACHE_DIRECTORY}). This directory can
   * be the same directory used for {@link #setApplicationLayersCache}.
   *
   * @param cacheDirectory the cache directory
   * @return this
   */
  public Containerizer setBaseImageLayersCache(Path cacheDirectory) {
    baseImageLayersCacheDirectory = cacheDirectory;
    return this;
  }

  /**
   * Sets the directory to use for caching application layers. This cache can be shared between
   * multiple images. If not set, a temporary directory will be used as the application layers
   * cache. This directory can be the same directory used for {@link #setBaseImageLayersCache}.
   *
   * @param cacheDirectory the cache directory
   * @return this
   */
  public Containerizer setApplicationLayersCache(Path cacheDirectory) {
    applicationLayersCacheDirectory = cacheDirectory;
    return this;
  }

  /**
   * Adds the {@code eventConsumer} to handle the {@link JibEvent} with class {@code eventType}. The
   * order in which handlers are added is the order in which they are called when the event is
   * dispatched.
   *
   * <p><b>Note: Implementations of {@code eventConsumer} must be thread-safe.</b>
   *
   * @param eventType the event type that {@code eventConsumer} should handle
   * @param eventConsumer the event handler
   * @param <E> the type of {@code eventType}
   * @return this
   */
  public <E extends JibEvent> Containerizer addEventHandler(
      Class<E> eventType, Consumer<? super E> eventConsumer) {
    eventHandlersBuilder.add(eventType, eventConsumer);
    return this;
  }

  /**
   * Adds the {@code eventConsumer} to handle all {@link JibEvent} types. See {@link
   * #addEventHandler(Class, Consumer)} for more details.
   *
   * @param eventConsumer the event handler
   * @return this
   */
  public Containerizer addEventHandler(Consumer<JibEvent> eventConsumer) {
    eventHandlersBuilder.add(JibEvent.class, eventConsumer);
    return this;
  }

  /**
   * Sets whether or not to allow communication over HTTP/insecure HTTPS.
   *
   * @param allowInsecureRegistries if {@code true}, insecure connections will be allowed
   * @return this
   */
  public Containerizer setAllowInsecureRegistries(boolean allowInsecureRegistries) {
    this.allowInsecureRegistries = allowInsecureRegistries;
    return this;
  }

  /**
   * Sets whether or not to run the build in offline mode. In offline mode, the base image is
   * retrieved from the cache instead of pulled from a registry, and the build will fail if the base
   * image is not in the cache or if the target is an image registry.
   *
   * @param offline if {@code true}, the build will run in offline mode
   * @return this
   */
  public Containerizer setOfflineMode(boolean offline) {
    if (mustBeOnline && offline) {
      throw new IllegalStateException("Cannot build to a container registry in offline mode");
    }
    this.offline = offline;
    return this;
  }

  /**
   * Sets the name of the tool that is using Jib Core. The tool name is sent as part of the {@code
   * User-Agent} in registry requests and set as the {@code created_by} in the container layer
   * history. Defaults to {@code jib-core}.
   *
   * @param toolName the name of the tool using this library
   * @return this
   */
  public Containerizer setToolName(String toolName) {
    this.toolName = toolName;
    return this;
  }

  /**
   * Sets the version of the tool that is using Jib Core. The tool version is sent as part of the
   * {@code User-Agent} in registry requests and set as the {@code created_by} in the container
   * layer history. Defaults to the current version of jib-core.
   *
   * @param toolVersion the name of the tool using this library
   * @return this
   */
  public Containerizer setToolVersion(@Nullable String toolVersion) {
    this.toolVersion = toolVersion;
    return this;
  }

  /**
   * Controls the optimization which skips downloading base image layers that exist in a target
   * registry. If the user does not set this property, then read as false.
   *
   * @param alwaysCacheBaseImage if {@code true}, base image layers are always pulled and cached. If
   *     {@code false}, base image layers will not be pulled/cached if they already exist on the
   *     target registry.
   * @return this
   */
  public Containerizer setAlwaysCacheBaseImage(boolean alwaysCacheBaseImage) {
    this.alwaysCacheBaseImage = alwaysCacheBaseImage;
    return this;
  }

  /**
   * Adds mirrors for a base image registry. Jib will try its mirrors in the given order before
   * finally trying the registry.
   *
   * @param registry base image registry for which mirrors are configured
   * @param mirrors a list of mirrors, where each element is in the form of {@code host[:port]}
   * @return this
   */
  public Containerizer addRegistryMirrors(String registry, List<String> mirrors) {
    registryMirrors.putAll(registry, mirrors);
    return this;
  }

  /** Returns an immutable snapshot of the additional tags. */
  Set<String> getAdditionalTags() {
    return ImmutableSet.copyOf(additionalTags);
  }

  /** Returns an immutable snapshot of the configured registry mirrors. */
  ListMultimap<String, String> getRegistryMirrors() {
    return ImmutableListMultimap.copyOf(registryMirrors);
  }

  /** Returns the user-supplied executor service, if any. */
  Optional<ExecutorService> getExecutorService() {
    return Optional.ofNullable(executorService);
  }

  Path getBaseImageLayersCacheDirectory() {
    return baseImageLayersCacheDirectory;
  }

  /**
   * Returns the application layers cache directory, lazily creating one under the system temporary
   * directory if the user did not configure one.
   *
   * @throws CacheDirectoryCreationException if the fallback temp directory cannot be created
   */
  Path getApplicationLayersCacheDirectory() throws CacheDirectoryCreationException {
    if (applicationLayersCacheDirectory == null) {
      // Create a directory in temp if application layers cache directory is not set.
      try {
        Path tmp = Paths.get(System.getProperty("java.io.tmpdir"));
        applicationLayersCacheDirectory = tmp.resolve(DEFAULT_APPLICATION_CACHE_DIRECTORY_NAME);
        Files.createDirectories(applicationLayersCacheDirectory);
      } catch (IOException ex) {
        throw new CacheDirectoryCreationException(ex);
      }
    }
    return applicationLayersCacheDirectory;
  }

  EventHandlers buildEventHandlers() {
    return eventHandlersBuilder.build();
  }

  boolean getAllowInsecureRegistries() {
    return allowInsecureRegistries;
  }

  boolean isOfflineMode() {
    return offline;
  }

  String getToolName() {
    return toolName;
  }

  @Nullable
  String getToolVersion() {
    return toolVersion;
  }

  boolean getAlwaysCacheBaseImage() {
    return alwaysCacheBaseImage;
  }

  String getDescription() {
    return description;
  }

  ImageConfiguration getImageConfiguration() {
    return imageConfiguration;
  }

  /** Runs the configured build steps against {@code buildContext}. */
  BuildResult run(BuildContext buildContext) throws ExecutionException, InterruptedException {
    return stepsRunnerFactory.apply(buildContext).run();
  }
}
// Generated from Filter.g4 by ANTLR 4.2.2 package io.rtr.alchemy.filtering; import org.antlr.v4.runtime.atn.*; import org.antlr.v4.runtime.dfa.DFA; import org.antlr.v4.runtime.*; import org.antlr.v4.runtime.misc.*; import org.antlr.v4.runtime.tree.*; import java.util.List; import java.util.Iterator; import java.util.ArrayList; @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) public class FilterParser extends Parser { protected static final DFA[] _decisionToDFA; protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); public static final int T__1=1, T__0=2, AND=3, OR=4, NOT=5, NUMBER=6, STRING=7, BOOLEAN=8, IDENTIFIER=9, COMPARISON=10, WS=11; public static final String[] tokenNames = { "<INVALID>", "')'", "'('", "AND", "OR", "NOT", "NUMBER", "STRING", "BOOLEAN", "IDENTIFIER", "COMPARISON", "WS" }; public static final int RULE_exp = 0, RULE_term = 1, RULE_factor = 2, RULE_comparison = 3, RULE_constant = 4, RULE_value = 5; public static final String[] ruleNames = { "exp", "term", "factor", "comparison", "constant", "value" }; @Override public String getGrammarFileName() { return "Filter.g4"; } @Override public String[] getTokenNames() { return tokenNames; } @Override public String[] getRuleNames() { return ruleNames; } @Override public String getSerializedATN() { return _serializedATN; } @Override public ATN getATN() { return _ATN; } public FilterParser(TokenStream input) { super(input); _interp = new ParserATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache); } public static class ExpContext extends ParserRuleContext { public TermContext term() { return getRuleContext(TermContext.class,0); } public TerminalNode OR() { return getToken(FilterParser.OR, 0); } public ExpContext exp() { return getRuleContext(ExpContext.class,0); } public ExpContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_exp; } @Override public void 
enterRule(ParseTreeListener listener) { if ( listener instanceof FilterListener ) ((FilterListener)listener).enterExp(this); } @Override public void exitRule(ParseTreeListener listener) { if ( listener instanceof FilterListener ) ((FilterListener)listener).exitExp(this); } } public final ExpContext exp() throws RecognitionException { return exp(0); } private ExpContext exp(int _p) throws RecognitionException { ParserRuleContext _parentctx = _ctx; int _parentState = getState(); ExpContext _localctx = new ExpContext(_ctx, _parentState); ExpContext _prevctx = _localctx; int _startState = 0; enterRecursionRule(_localctx, 0, RULE_exp, _p); try { int _alt; enterOuterAlt(_localctx, 1); { { setState(13); term(); } _ctx.stop = _input.LT(-1); setState(20); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); while ( _alt!=2 && _alt!=ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { { _localctx = new ExpContext(_parentctx, _parentState); pushNewRecursionContext(_localctx, _startState, RULE_exp); setState(15); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); setState(16); match(OR); setState(17); term(); } } } setState(22); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); } } } catch (RecognitionException re) { _localctx.exception = re; _errHandler.reportError(this, re); _errHandler.recover(this, re); } finally { unrollRecursionContexts(_parentctx); } return _localctx; } public static class TermContext extends ParserRuleContext { public TermContext term() { return getRuleContext(TermContext.class,0); } public TerminalNode AND() { return getToken(FilterParser.AND, 0); } public FactorContext factor() { return getRuleContext(FactorContext.class,0); } public TermContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_term; } @Override 
public void enterRule(ParseTreeListener listener) { if ( listener instanceof FilterListener ) ((FilterListener)listener).enterTerm(this); } @Override public void exitRule(ParseTreeListener listener) { if ( listener instanceof FilterListener ) ((FilterListener)listener).exitTerm(this); } } public final TermContext term() throws RecognitionException { TermContext _localctx = new TermContext(_ctx, getState()); enterRule(_localctx, 2, RULE_term); try { setState(28); switch ( getInterpreter().adaptivePredict(_input,1,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { setState(23); factor(); } break; case 2: enterOuterAlt(_localctx, 2); { setState(24); factor(); setState(25); match(AND); setState(26); term(); } break; } } catch (RecognitionException re) { _localctx.exception = re; _errHandler.reportError(this, re); _errHandler.recover(this, re); } finally { exitRule(); } return _localctx; } public static class FactorContext extends ParserRuleContext { public ValueContext value() { return getRuleContext(ValueContext.class,0); } public ComparisonContext comparison() { return getRuleContext(ComparisonContext.class,0); } public ExpContext exp() { return getRuleContext(ExpContext.class,0); } public TerminalNode NOT() { return getToken(FilterParser.NOT, 0); } public FactorContext factor() { return getRuleContext(FactorContext.class,0); } public FactorContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_factor; } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof FilterListener ) ((FilterListener)listener).enterFactor(this); } @Override public void exitRule(ParseTreeListener listener) { if ( listener instanceof FilterListener ) ((FilterListener)listener).exitFactor(this); } } public final FactorContext factor() throws RecognitionException { FactorContext _localctx = new FactorContext(_ctx, getState()); enterRule(_localctx, 4, RULE_factor); try { setState(38); 
switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { setState(30); match(2); setState(31); exp(0); setState(32); match(1); } break; case 2: enterOuterAlt(_localctx, 2); { setState(34); value(); } break; case 3: enterOuterAlt(_localctx, 3); { setState(35); comparison(); } break; case 4: enterOuterAlt(_localctx, 4); { setState(36); match(NOT); setState(37); factor(); } break; } } catch (RecognitionException re) { _localctx.exception = re; _errHandler.reportError(this, re); _errHandler.recover(this, re); } finally { exitRule(); } return _localctx; } public static class ComparisonContext extends ParserRuleContext { public ValueContext value(int i) { return getRuleContext(ValueContext.class,i); } public TerminalNode COMPARISON() { return getToken(FilterParser.COMPARISON, 0); } public List<ValueContext> value() { return getRuleContexts(ValueContext.class); } public ComparisonContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_comparison; } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof FilterListener ) ((FilterListener)listener).enterComparison(this); } @Override public void exitRule(ParseTreeListener listener) { if ( listener instanceof FilterListener ) ((FilterListener)listener).exitComparison(this); } } public final ComparisonContext comparison() throws RecognitionException { ComparisonContext _localctx = new ComparisonContext(_ctx, getState()); enterRule(_localctx, 6, RULE_comparison); try { enterOuterAlt(_localctx, 1); { setState(40); value(); setState(41); match(COMPARISON); setState(42); value(); } } catch (RecognitionException re) { _localctx.exception = re; _errHandler.reportError(this, re); _errHandler.recover(this, re); } finally { exitRule(); } return _localctx; } public static class ConstantContext extends ParserRuleContext { public TerminalNode BOOLEAN() { return 
getToken(FilterParser.BOOLEAN, 0); } public TerminalNode NUMBER() { return getToken(FilterParser.NUMBER, 0); } public TerminalNode STRING() { return getToken(FilterParser.STRING, 0); } public ConstantContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_constant; } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof FilterListener ) ((FilterListener)listener).enterConstant(this); } @Override public void exitRule(ParseTreeListener listener) { if ( listener instanceof FilterListener ) ((FilterListener)listener).exitConstant(this); } } public final ConstantContext constant() throws RecognitionException { ConstantContext _localctx = new ConstantContext(_ctx, getState()); enterRule(_localctx, 8, RULE_constant); int _la; try { enterOuterAlt(_localctx, 1); { setState(44); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << NUMBER) | (1L << STRING) | (1L << BOOLEAN))) != 0)) ) { _errHandler.recoverInline(this); } consume(); } } catch (RecognitionException re) { _localctx.exception = re; _errHandler.reportError(this, re); _errHandler.recover(this, re); } finally { exitRule(); } return _localctx; } public static class ValueContext extends ParserRuleContext { public TerminalNode IDENTIFIER() { return getToken(FilterParser.IDENTIFIER, 0); } public ConstantContext constant() { return getRuleContext(ConstantContext.class,0); } public ValueContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_value; } @Override public void enterRule(ParseTreeListener listener) { if ( listener instanceof FilterListener ) ((FilterListener)listener).enterValue(this); } @Override public void exitRule(ParseTreeListener listener) { if ( listener instanceof FilterListener ) ((FilterListener)listener).exitValue(this); } } public final ValueContext value() throws RecognitionException { 
ValueContext _localctx = new ValueContext(_ctx, getState()); enterRule(_localctx, 10, RULE_value); try { setState(48); switch (_input.LA(1)) { case NUMBER: case STRING: case BOOLEAN: enterOuterAlt(_localctx, 1); { setState(46); constant(); } break; case IDENTIFIER: enterOuterAlt(_localctx, 2); { setState(47); match(IDENTIFIER); } break; default: throw new NoViableAltException(this); } } catch (RecognitionException re) { _localctx.exception = re; _errHandler.reportError(this, re); _errHandler.recover(this, re); } finally { exitRule(); } return _localctx; } public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { switch (ruleIndex) { case 0: return exp_sempred((ExpContext)_localctx, predIndex); } return true; } private boolean exp_sempred(ExpContext _localctx, int predIndex) { switch (predIndex) { case 0: return precpred(_ctx, 1); } return true; } public static final String _serializedATN = "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3\r\65\4\2\t\2\4\3"+ "\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\3\2\3\2\3\2\3\2\3\2\3\2\7\2\25\n"+ "\2\f\2\16\2\30\13\2\3\3\3\3\3\3\3\3\3\3\5\3\37\n\3\3\4\3\4\3\4\3\4\3\4"+ "\3\4\3\4\3\4\5\4)\n\4\3\5\3\5\3\5\3\5\3\6\3\6\3\7\3\7\5\7\63\n\7\3\7\2"+ "\3\2\b\2\4\6\b\n\f\2\3\3\2\b\n\64\2\16\3\2\2\2\4\36\3\2\2\2\6(\3\2\2\2"+ "\b*\3\2\2\2\n.\3\2\2\2\f\62\3\2\2\2\16\17\b\2\1\2\17\20\5\4\3\2\20\26"+ "\3\2\2\2\21\22\f\3\2\2\22\23\7\6\2\2\23\25\5\4\3\2\24\21\3\2\2\2\25\30"+ "\3\2\2\2\26\24\3\2\2\2\26\27\3\2\2\2\27\3\3\2\2\2\30\26\3\2\2\2\31\37"+ "\5\6\4\2\32\33\5\6\4\2\33\34\7\5\2\2\34\35\5\4\3\2\35\37\3\2\2\2\36\31"+ "\3\2\2\2\36\32\3\2\2\2\37\5\3\2\2\2 !\7\4\2\2!\"\5\2\2\2\"#\7\3\2\2#)"+ "\3\2\2\2$)\5\f\7\2%)\5\b\5\2&\'\7\7\2\2\')\5\6\4\2( \3\2\2\2($\3\2\2\2"+ "(%\3\2\2\2(&\3\2\2\2)\7\3\2\2\2*+\5\f\7\2+,\7\f\2\2,-\5\f\7\2-\t\3\2\2"+ "\2./\t\2\2\2/\13\3\2\2\2\60\63\5\n\6\2\61\63\7\13\2\2\62\60\3\2\2\2\62"+ "\61\3\2\2\2\63\r\3\2\2\2\6\26\36(\62"; public static final ATN _ATN = new 
ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { _decisionToDFA = new DFA[_ATN.getNumberOfDecisions()]; for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) { _decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i); } } }
package org.ovirt.engine.api.restapi.resource.aaa;

import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;

import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;

import org.junit.Test;
import org.ovirt.engine.api.model.Domain;
import org.ovirt.engine.api.model.Fault;
import org.ovirt.engine.api.model.Group;
import org.ovirt.engine.api.model.User;
import org.ovirt.engine.api.restapi.resource.AbstractBackendCollectionResourceTest;
import org.ovirt.engine.api.restapi.utils.DirectoryEntryIdUtils;
import org.ovirt.engine.core.aaa.DirectoryUser;
import org.ovirt.engine.core.common.action.AddUserParameters;
import org.ovirt.engine.core.common.action.VdcActionType;
import org.ovirt.engine.core.common.businessentities.aaa.DbUser;
import org.ovirt.engine.core.common.interfaces.SearchType;
import org.ovirt.engine.core.common.queries.IdQueryParameters;
import org.ovirt.engine.core.common.queries.VdcQueryParametersBase;
import org.ovirt.engine.core.common.queries.VdcQueryType;

/**
 * Unit tests for the {@code BackendUsersResource} collection resource.
 *
 * <p>Exercises adding a user three different ways (domain object with name, "user@domain"
 * syntax, and name-then-domain ordering) plus the inherited list/query/failure scenarios.
 * Fixture constants such as {@code DOMAIN}, {@code NAMES}, {@code GUIDS},
 * {@code EXTERNAL_IDS}, {@code FAILURE} and the {@code locales} list are inherited from
 * the abstract base test class (not visible in this file).
 */
public class BackendUsersResourceTest
    extends AbstractBackendCollectionResourceTest<User, DbUser, BackendUsersResource> {

    // Directory namespace wildcard used when building the expected DirectoryUser.
    static final String NAMESPACE = "*";

    // Comma-separated group list as stored on the DbUser entity ...
    static final String GROUPS =
        "Schema Admins@Maghreb/Users," +
        "Group Policy Creator Owners@Maghreb/Users," +
        "Enterprise Admins@Maghreb/Users";

    // ... and the same groups as the API model is expected to report them after parsing.
    static final String[] PARSED_GROUPS = {
        "Schema Admins@Maghreb/Users",
        "Group Policy Creator Owners@Maghreb/Users",
        "Enterprise Admins@Maghreb/Users",
    };

    // Search expression used by testQuery(); QUERY is the default filter used by the
    // plain list() tests.
    protected static final String SEARCH_QUERY = "name=s* AND id=*0 and usrname != \"\"";
    protected static final String QUERY = "usrname != \"\"";

    public BackendUsersResourceTest() {
        super(new BackendUsersResource(), SearchType.DBUser, "Users : ");
    }

    /** Add a user by setting a Domain object (name + encoded id) and a plain user name. */
    @Test
    public void testAddUser2() throws Exception {
        setUpEntityQueryExpectations(VdcQueryType.GetDomainList,
                VdcQueryParametersBase.class,
                new String[] {},
                new Object[] {},
                setUpDomains());
        setUpAddUserExpectations("ADUSER@" + DOMAIN + ":: username=" + NAMES[0]);
        User model = new User();
        Domain domain = new Domain();
        domain.setName(DOMAIN);
        domain.setId(DirectoryEntryIdUtils.encode(domain.getName()));
        model.setDomain(domain);
        model.setUserName(NAMES[0]);
        Response response = collection.add(model);
        verifyAddUser(response);
    }

    /** Add a user using the "username@domain" syntax with no explicit Domain object. */
    @Test
    public void testAddUser3() throws Exception {
        setUpEntityQueryExpectations(VdcQueryType.GetDomainList,
                VdcQueryParametersBase.class,
                new String[] {},
                new Object[] {},
                setUpDomains());
        setUpAddUserExpectations("ADUSER@" + DOMAIN + ":: username=" + NAMES[0]);
        User model = new User();
        model.setUserName(NAMES[0] + "@" + DOMAIN);
        Response response = collection.add(model);
        verifyAddUser(response);
    }

    /** Same as testAddUser2 but sets the user name before the Domain object. */
    @Test
    public void testAddUser4() throws Exception {
        setUpEntityQueryExpectations(VdcQueryType.GetDomainList,
                VdcQueryParametersBase.class,
                new String[] { },
                new Object[] { },
                setUpDomains());
        setUpAddUserExpectations("ADUSER@" + DOMAIN + ":: username=" + NAMES[0]);
        User model = new User();
        model.setUserName(NAMES[0]);
        Domain domain = new Domain();
        domain.setName(DOMAIN);
        domain.setId(DirectoryEntryIdUtils.encode(domain.getName()));
        model.setDomain(domain);
        Response response = collection.add(model);
        verifyAddUser(response);
    }

    /** Domain list returned by the mocked GetDomainList query; includes the test DOMAIN. */
    private List<String> setUpDomains() {
        List<String> domains = new LinkedList<>();
        domains.add("some.domain");
        domains.add(DOMAIN);
        return domains;
    }

    /** Asserts a 201 Created response carrying a User entity matching fixture index 0. */
    private void verifyAddUser(Response response) {
        assertEquals(201, response.getStatus());
        assertTrue(response.getEntity() instanceof User);
        verifyModel((User) response.getEntity(), 0);
    }

    /**
     * Wires the mock expectations for an add: the directory-user lookup for {@code query},
     * the AddUser backend action, and the follow-up GetDbUserByUserId query that the
     * resource performs to build the response entity.
     */
    private void setUpAddUserExpectations(String query) throws Exception {
        setUriInfo(setUpBasicUriExpectations());
        setUpGetEntityExpectations(
            query,
            SearchType.DirectoryUser,
            getDirectoryUser(0)
        );
        setUpCreationExpectations(
            VdcActionType.AddUser,
            AddUserParameters.class,
            new String[] { "UserToAdd" },
            new Object[] { new DbUser(getDirectoryUser(0)) },
            true,
            true,
            GUIDS[0],
            VdcQueryType.GetDbUserByUserId,
            IdQueryParameters.class,
            new String[] { "Id" },
            new Object[] { GUIDS[0] },
            getEntity(0)
        );
    }

    @Override
    protected List<User> getCollection() {
        return collection.list().getUsers();
    }

    /** Backend entity fixture: a DbUser built from the directory user, with groups and id set. */
    @Override
    protected DbUser getEntity(int index) {
        DbUser entity = new DbUser(getDirectoryUser(index));
        entity.setGroupNames(new LinkedList<>(Arrays.asList(GROUPS.split(","))));
        entity.setId(GUIDS[index]);
        return entity;
    }

    private DirectoryUser getDirectoryUser(int index) {
        return new DirectoryUser(DOMAIN, NAMESPACE, EXTERNAL_IDS[index], NAMES[index], NAMES[index], "");
    }

    /** Verifies id, qualified user name, encoded domain id, parsed group set and links. */
    @Override
    protected void verifyModel(User model, int index) {
        assertEquals(GUIDS[index].toString(), model.getId());
        assertEquals(NAMES[index] + "@" + DOMAIN, model.getUserName());
        assertNotNull(model.getDomain());
        assertEquals(DirectoryEntryIdUtils.encode(DOMAIN), model.getDomain().getId());
        assertTrue(model.isSetGroups());
        assertEquals(PARSED_GROUPS.length, model.getGroups().getGroups().size());
        // Compare as sets: group ordering in the model is not asserted.
        HashSet<String> groupNames = new HashSet<>();
        for (Group group : model.getGroups().getGroups()) {
            groupNames.add(group.getName());
        }
        assertEquals(new HashSet<>(Arrays.asList(PARSED_GROUPS)), groupNames);
        verifyLinks(model);
    }

    @Override
    protected void setUpQueryExpectations(String query) throws Exception {
        setUpQueryExpectations(query, null);
    }

    /** Overridden to use SEARCH_QUERY instead of the base class default. */
    @Override
    @Test
    public void testQuery() throws Exception {
        UriInfo uriInfo = setUpUriExpectations(SEARCH_QUERY);
        setUpQueryExpectations(SEARCH_QUERY);
        collection.setUriInfo(uriInfo);
        verifyCollection(getCollection());
    }

    /** Overridden because a plain list still applies the QUERY filter for users. */
    @Override
    @Test
    public void testList() throws Exception {
        UriInfo uriInfo = setUpUriExpectations(null);
        setUpQueryExpectations(QUERY);
        collection.setUriInfo(uriInfo);
        verifyCollection(getCollection());
    }

    /** Backend reports a failure: expect a WebApplicationException carrying a Fault. */
    @Override
    @Test
    public void testListFailure() throws Exception {
        UriInfo uriInfo = setUpUriExpectations(null);
        setUpQueryExpectations(QUERY, FAILURE);
        collection.setUriInfo(uriInfo);
        try {
            getCollection();
            fail("expected WebApplicationException");
        } catch (WebApplicationException wae) {
            assertTrue(wae.getResponse().getEntity() instanceof Fault);
            assertEquals(mockl10n(FAILURE), ((Fault) wae.getResponse().getEntity()).getDetail());
        }
    }

    /** Backend throws: expect the fault localized with the server locale. */
    @Override
    @Test
    public void testListCrash() throws Exception {
        UriInfo uriInfo = setUpUriExpectations(null);
        Throwable t = new RuntimeException(FAILURE);
        setUpQueryExpectations(QUERY, t);
        collection.setUriInfo(uriInfo);
        try {
            getCollection();
            fail("expected WebApplicationException");
        } catch (WebApplicationException wae) {
            verifyFault(wae, BACKEND_FAILED_SERVER_LOCALE, t);
        }
    }

    /** Same as testListCrash but with a client locale registered; cleared in finally. */
    @Override
    @Test
    public void testListCrashClientLocale() throws Exception {
        UriInfo uriInfo = setUpUriExpectations(null);
        locales.add(CLIENT_LOCALE);
        Throwable t = new RuntimeException(FAILURE);
        setUpQueryExpectations(QUERY, t);
        collection.setUriInfo(uriInfo);
        try {
            getCollection();
            fail("expected WebApplicationException");
        } catch (WebApplicationException wae) {
            verifyFault(wae, BACKEND_FAILED_CLIENT_LOCALE, t);
        } finally {
            locales.clear();
        }
    }
}
// Decompiled by Jad v1.5.8e. Copyright 2001 Pavel Kouznetsov.
// Jad home page: http://www.geocities.com/kpdus/jad.html
// Decompiler options: braces fieldsfirst space lnc

package android.support.v4.app;

import android.app.Activity;
import android.content.Intent;
import android.net.Uri;
import android.os.Parcelable;
import android.text.Html;
import java.util.ArrayList;

// Referenced classes of package android.support.v4.app:
//            ShareCompat

// NOTE(review): this is broken decompiler output and does NOT compile as-is. The
// class name ("mIntent") and many method return types (e.g. "<init>", "getIntent",
// "combineArrayExtra") are field/method names the decompiler substituted for the
// real type. Judging by the package, the extras used and the fluent setters, this
// appears to be ShareCompat.IntentBuilder (a builder for ACTION_SEND /
// ACTION_SEND_MULTIPLE share intents) — TODO confirm against the real source; the
// fluent methods presumably all return the builder type.
public static class mIntent
{

    private Activity mActivity;           // activity used for startChooser() and calling-identity extras
    private ArrayList mBccAddresses;      // pending BCC addresses, merged into the intent in getIntent()
    private ArrayList mCcAddresses;       // pending CC addresses
    private CharSequence mChooserTitle;   // title passed to Intent.createChooser()
    private Intent mIntent;               // the share intent under construction
    private ArrayList mStreams;           // pending EXTRA_STREAM Uris (multiple-stream case)
    private ArrayList mToAddresses;       // pending EMAIL (To:) addresses

    // Appends the collected addresses in `arraylist` after any values already present
    // in the string-array extra `s` on the intent.
    private void combineArrayExtra(String s, ArrayList arraylist)
    {
        String as[] = mIntent.getStringArrayExtra(s);
        String as1[];
        int i;
        if (as != null)
        {
            i = as.length;
        } else
        {
            i = 0;
        }
        as1 = new String[arraylist.size() + i];
        arraylist.toArray(as1);
        if (as != null)
        {
            // existing values are copied AFTER the new ones
            System.arraycopy(as, 0, as1, arraylist.size(), i);
        }
        mIntent.putExtra(s, as1);
    }

    // Appends the array `as` after any values already present in extra `s`.
    // Note the opposite ordering from the ArrayList overload above: here the
    // existing values come first.
    private void combineArrayExtra(String s, String as[])
    {
        Intent intent = getIntent();
        String as1[] = intent.getStringArrayExtra(s);
        String as2[];
        int i;
        if (as1 != null)
        {
            i = as1.length;
        } else
        {
            i = 0;
        }
        as2 = new String[as.length + i];
        if (as1 != null)
        {
            System.arraycopy(as1, 0, as2, 0, i);
        }
        System.arraycopy(as, 0, as2, i, as.length);
        intent.putExtra(s, as2);
    }

    // Static factory: creates a builder bound to `activity`.
    // NOTE(review): "getIntent" here is a mangled return type and "new <init>(...)"
    // is the decompiler's rendering of invoking the private constructor below.
    public static getIntent from(Activity activity)
    {
        return new <init>(activity);
    }

    // Adds a single BCC address (lazily creating the list); fluent.
    public <init> addEmailBcc(String s)
    {
        if (mBccAddresses == null)
        {
            mBccAddresses = new ArrayList();
        }
        mBccAddresses.add(s);
        return this;
    }

    // Appends BCC addresses directly onto the intent extra; fluent.
    public mBccAddresses addEmailBcc(String as[])
    {
        combineArrayExtra("android.intent.extra.BCC", as);
        return this;
    }

    // Adds a single CC address (lazily creating the list); fluent.
    public combineArrayExtra addEmailCc(String s)
    {
        if (mCcAddresses == null)
        {
            mCcAddresses = new ArrayList();
        }
        mCcAddresses.add(s);
        return this;
    }

    // Appends CC addresses directly onto the intent extra; fluent.
    public mCcAddresses addEmailCc(String as[])
    {
        combineArrayExtra("android.intent.extra.CC", as);
        return this;
    }

    // Adds a single To: address (lazily creating the list); fluent.
    public combineArrayExtra addEmailTo(String s)
    {
        if (mToAddresses == null)
        {
            mToAddresses = new ArrayList();
        }
        mToAddresses.add(s);
        return this;
    }

    // Appends To: addresses directly onto the intent extra; fluent.
    public mToAddresses addEmailTo(String as[])
    {
        combineArrayExtra("android.intent.extra.EMAIL", as);
        return this;
    }

    // Adds a stream Uri. The first stream goes straight into EXTRA_STREAM via
    // setStream(); a second one moves the existing extra into mStreams so that
    // getIntent() can switch the action to SEND_MULTIPLE.
    public combineArrayExtra addStream(Uri uri)
    {
        Uri uri1 = (Uri)mIntent.getParcelableExtra("android.intent.extra.STREAM");
        if (uri1 == null)
        {
            return setStream(uri);
        }
        if (mStreams == null)
        {
            mStreams = new ArrayList();
        }
        if (uri1 != null)
        {
            mIntent.removeExtra("android.intent.extra.STREAM");
            mStreams.add(uri1);
        }
        mStreams.add(uri);
        return this;
    }

    // Wraps the built intent in a system chooser using mChooserTitle.
    public Intent createChooserIntent()
    {
        return Intent.createChooser(getIntent(), mChooserTitle);
    }

    Activity getActivity()
    {
        return mActivity;
    }

    // Finalizes and returns the intent: flushes the pending To/CC/BCC lists into
    // their extras and reconciles the action (SEND vs SEND_MULTIPLE) with the
    // number of collected streams.
    public Intent getIntent()
    {
        boolean flag = true;
        if (mToAddresses != null)
        {
            combineArrayExtra("android.intent.extra.EMAIL", mToAddresses);
            mToAddresses = null;
        }
        if (mCcAddresses != null)
        {
            combineArrayExtra("android.intent.extra.CC", mCcAddresses);
            mCcAddresses = null;
        }
        if (mBccAddresses != null)
        {
            combineArrayExtra("android.intent.extra.BCC", mBccAddresses);
            mBccAddresses = null;
        }
        boolean flag1;
        // flag: more than one stream collected; flag1: action currently SEND_MULTIPLE
        if (mStreams == null || mStreams.size() <= 1)
        {
            flag = false;
        }
        flag1 = mIntent.getAction().equals("android.intent.action.SEND_MULTIPLE");
        if (!flag && flag1)
        {
            // downgrade SEND_MULTIPLE -> SEND, keeping the single stream if any
            mIntent.setAction("android.intent.action.SEND");
            if (mStreams != null && !mStreams.isEmpty())
            {
                mIntent.putExtra("android.intent.extra.STREAM", (Parcelable)mStreams.get(0));
            } else
            {
                mIntent.removeExtra("android.intent.extra.STREAM");
            }
            mStreams = null;
        }
        if (flag && !flag1)
        {
            // upgrade SEND -> SEND_MULTIPLE with the full stream list
            mIntent.setAction("android.intent.action.SEND_MULTIPLE");
            if (mStreams != null && !mStreams.isEmpty())
            {
                mIntent.putParcelableArrayListExtra("android.intent.extra.STREAM", mStreams);
            } else
            {
                mIntent.removeExtra("android.intent.extra.STREAM");
            }
        }
        return mIntent;
    }

    // Chooser title from a string resource id; fluent.
    public mIntent setChooserTitle(int i)
    {
        return setChooserTitle(mActivity.getText(i));
    }

    // Chooser title from a CharSequence; fluent.
    public mActivity setChooserTitle(CharSequence charsequence)
    {
        mChooserTitle = charsequence;
        return this;
    }

    // Replaces (not appends) the BCC extra; fluent.
    public mChooserTitle setEmailBcc(String as[])
    {
        mIntent.putExtra("android.intent.extra.BCC", as);
        return this;
    }

    // Replaces the CC extra; fluent.
    public mIntent setEmailCc(String as[])
    {
        mIntent.putExtra("android.intent.extra.CC", as);
        return this;
    }

    // Replaces the To: extra and drops any pending addEmailTo() values; fluent.
    public mIntent setEmailTo(String as[])
    {
        if (mToAddresses != null)
        {
            mToAddresses = null;
        }
        mIntent.putExtra("android.intent.extra.EMAIL", as);
        return this;
    }

    // Sets HTML body; also derives a plain-text EXTRA_TEXT if none was set; fluent.
    public mIntent setHtmlText(String s)
    {
        mIntent.putExtra("android.intent.extra.HTML_TEXT", s);
        if (!mIntent.hasExtra("android.intent.extra.TEXT"))
        {
            setText(Html.fromHtml(s));
        }
        return this;
    }

    // Sets a single stream, forcing the action back to SEND and discarding any
    // previously collected multi-stream list; fluent.
    public setText setStream(Uri uri)
    {
        if (!mIntent.getAction().equals("android.intent.action.SEND"))
        {
            mIntent.setAction("android.intent.action.SEND");
        }
        mStreams = null;
        mIntent.putExtra("android.intent.extra.STREAM", uri);
        return this;
    }

    // Sets EXTRA_SUBJECT; fluent.
    public mIntent setSubject(String s)
    {
        mIntent.putExtra("android.intent.extra.SUBJECT", s);
        return this;
    }

    // Sets EXTRA_TEXT; fluent.
    public mIntent setText(CharSequence charsequence)
    {
        mIntent.putExtra("android.intent.extra.TEXT", charsequence);
        return this;
    }

    // Sets the MIME type of the shared content; fluent.
    public mIntent setType(String s)
    {
        mIntent.setType(s);
        return this;
    }

    // Launches the chooser from the bound activity.
    public void startChooser()
    {
        mActivity.startActivity(createChooserIntent());
    }

    // NOTE(review): mangled private constructor (name lost by the decompiler).
    // Initializes an ACTION_SEND intent and records the calling package/activity.
    // 0x80000 is presumably FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET — TODO confirm.
    private (Activity activity)
    {
        mActivity = activity;
        mIntent = (new Intent()).setAction("android.intent.action.SEND");
        mIntent.putExtra("android.support.v4.app.EXTRA_CALLING_PACKAGE", activity.getPackageName());
        mIntent.putExtra("android.support.v4.app.EXTRA_CALLING_ACTIVITY", activity.getComponentName());
        mIntent.addFlags(0x80000);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.hive.hcatalog.mapreduce;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hive.hcatalog.common.ErrorType;
import org.apache.hive.hcatalog.common.HCatException;
import org.apache.hive.hcatalog.data.DefaultHCatRecord;
import org.apache.hive.hcatalog.data.HCatRecord;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchemaUtils;
import org.junit.Test;

import static junit.framework.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

/**
 * Tests writing to and reading from a partitioned HCatalog table through
 * MapReduce, covering duplicate-partition publishes, invalid/missing partition
 * keys, dynamic (null partition map) publishes, schema evolution, column-order
 * changes, and a final read back through Hive itself.
 *
 * <p>NOTE: {@code writeRecords} and {@code partitionColumns} are static and are
 * mutated by several test helpers in sequence; the methods below depend on being
 * run in the order invoked from {@link #testHCatPartitionedTable()}. Expected row
 * counts differ depending on {@code isTableImmutable()} (inherited from
 * {@code HCatMapReduceTest}, not visible in this file), because a mutable table
 * accepts the duplicate publish instead of rejecting it.
 */
public class TestHCatPartitioned extends HCatMapReduceTest {

  // Records written by the next runMRCreate call; rebuilt by each test phase.
  private static List<HCatRecord> writeRecords;
  // Column schema of the data being written; rebuilt by each test phase.
  private static List<HCatFieldSchema> partitionColumns;

  /**
   * Parameterized constructor (storage format under test is chosen by the base
   * class). Seeds 20 two-column records (int c1, string c2) and the matching
   * write schema.
   */
  public TestHCatPartitioned(String formatName, String serdeClass, String inputFormatClass,
                 String outputFormatClass) throws Exception {
    super(formatName, serdeClass, inputFormatClass, outputFormatClass);
    tableName = "testHCatPartitionedTable_" + formatName;
    writeRecords = new ArrayList<HCatRecord>();

    for (int i = 0; i < 20; i++) {
      List<Object> objList = new ArrayList<Object>();

      objList.add(i);
      objList.add("strvalue" + i);
      writeRecords.add(new DefaultHCatRecord(objList));
    }

    partitionColumns = new ArrayList<HCatFieldSchema>();
    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, "")));
    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, "")));
  }

  /**
   * Partition keys for the test table. Mixed case ("PaRT1") is deliberate: the
   * tests later address the keys as part1/PART1 to exercise case-insensitive
   * partition key handling.
   */
  @Override
  protected List<FieldSchema> getPartitionKeys() {
    List<FieldSchema> fields = new ArrayList<FieldSchema>();
    //Defining partition names in unsorted order
    fields.add(new FieldSchema("PaRT1", serdeConstants.STRING_TYPE_NAME, ""));
    fields.add(new FieldSchema("part0", serdeConstants.INT_TYPE_NAME, ""));
    return fields;
  }

  /** Non-partition columns of the test table (int c1, string c2). */
  @Override
  protected List<FieldSchema> getTableColumns() {
    List<FieldSchema> fields = new ArrayList<FieldSchema>();
    fields.add(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, ""));
    fields.add(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, ""));
    return fields;
  }

  /**
   * Main driver: publishes two partitions, then walks through the error cases
   * (duplicate publish, bad key name, missing key values, null partition map)
   * and verifies read counts with and without partition filters. Finishes by
   * invoking the schema-evolution, column-order and Hive-read phases.
   */
  @Test
  public void testHCatPartitionedTable() throws Exception {

    Map<String, String> partitionMap = new HashMap<String, String>();
    partitionMap.put("part1", "p1value1");
    partitionMap.put("part0", "501");

    runMRCreate(partitionMap, partitionColumns, writeRecords, 10, true);

    // Second partition published with upper-case key names (case-insensitivity).
    partitionMap.clear();
    partitionMap.put("PART1", "p1value2");
    partitionMap.put("PART0", "502");

    runMRCreate(partitionMap, partitionColumns, writeRecords, 20, true);

    //Test for duplicate publish -- this will either fail on job creation time
    // and throw an exception, or will fail at runtime, and fail the job.

    IOException exc = null;
    try {
      Job j = runMRCreate(partitionMap, partitionColumns, writeRecords, 20, true);
      // Immutable table: job must fail; mutable table: duplicate publish succeeds.
      assertEquals(!isTableImmutable(),j.isSuccessful());
    } catch (IOException e) {
      exc = e;
      assertTrue(exc instanceof HCatException);
      assertTrue(ErrorType.ERROR_DUPLICATE_PARTITION.equals(((HCatException) exc).getErrorType()));
    }
    if (!isTableImmutable()){
      // A mutable table must not have thrown at job-creation time.
      assertNull(exc);
    }

    //Test for publish with invalid partition key name
    exc = null;
    partitionMap.clear();
    partitionMap.put("px1", "p1value2");
    partitionMap.put("px0", "502");

    try {
      Job j = runMRCreate(partitionMap, partitionColumns, writeRecords, 20, true);
      assertFalse(j.isSuccessful());
    } catch (IOException e) {
      exc = e;
      assertNotNull(exc);
      assertTrue(exc instanceof HCatException);
      assertEquals(ErrorType.ERROR_MISSING_PARTITION_KEY, ((HCatException) exc).getErrorType());
    }

    //Test for publish with missing partition key values
    exc = null;
    partitionMap.clear();
    partitionMap.put("px", "512");

    try {
      runMRCreate(partitionMap, partitionColumns, writeRecords, 20, true);
    } catch (IOException e) {
      exc = e;
    }
    assertNotNull(exc);
    assertTrue(exc instanceof HCatException);
    assertEquals(ErrorType.ERROR_INVALID_PARTITION_VALUES, ((HCatException) exc).getErrorType());


    //Test for null partition value map
    exc = null;
    try {
      runMRCreate(null, partitionColumns, writeRecords, 20, false);
    } catch (IOException e) {
      exc = e;
    }
    // Null partition map means dynamic partitioning, which is legal.
    assertTrue(exc == null);
//    assertTrue(exc instanceof HCatException);
//    assertEquals(ErrorType.ERROR_PUBLISHING_PARTITION, ((HCatException) exc).getErrorType());
    // With Dynamic partitioning, this isn't an error that the keyValues specified didn't values

    //Read should get 10 + 20 rows if immutable, 50 (10+20+20) if mutable
    if (isTableImmutable()){
      runMRRead(30);
    } else {
      runMRRead(50);
    }

    //Read with partition filter
    runMRRead(10, "part1 = \"p1value1\"");
    runMRRead(10, "part0 = \"501\"");
    if (isTableImmutable()){
      runMRRead(20, "part1 = \"p1value2\"");
      runMRRead(30, "part1 = \"p1value1\" or part1 = \"p1value2\"");
      runMRRead(20, "part0 = \"502\"");
      runMRRead(30, "part0 = \"501\" or part0 = \"502\"");
    } else {
      // Mutable table kept the duplicate publish, so p1value2/502 have 40 rows.
      runMRRead(40, "part1 = \"p1value2\"");
      runMRRead(50, "part1 = \"p1value1\" or part1 = \"p1value2\"");
      runMRRead(40, "part0 = \"502\"");
      runMRRead(50, "part0 = \"501\" or part0 = \"502\"");
    }

    tableSchemaTest();
    columnOrderChangeTest();
    hiveReadTest();
  }


  //test that new columns gets added to table schema
  private void tableSchemaTest() throws Exception {

    HCatSchema tableSchema = getTableSchema();

    // 2 data columns + 2 partition keys from the earlier publishes.
    assertEquals(4, tableSchema.getFields().size());

    //Update partition schema to have 3 fields
    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c3", serdeConstants.STRING_TYPE_NAME, "")));

    writeRecords = new ArrayList<HCatRecord>();

    for (int i = 0; i < 20; i++) {
      List<Object> objList = new ArrayList<Object>();

      objList.add(i);
      objList.add("strvalue" + i);
      objList.add("str2value" + i);
      writeRecords.add(new DefaultHCatRecord(objList));
    }

    Map<String, String> partitionMap = new HashMap<String, String>();
    partitionMap.put("part1", "p1value5");
    partitionMap.put("part0", "505");

    runMRCreate(partitionMap, partitionColumns, writeRecords, 10, true);

    tableSchema = getTableSchema();

    //assert that c3 has got added to table schema
    assertEquals(5, tableSchema.getFields().size());
    assertEquals("c1", tableSchema.getFields().get(0).getName());
    assertEquals("c2", tableSchema.getFields().get(1).getName());
    assertEquals("c3", tableSchema.getFields().get(2).getName());
    assertEquals("part1", tableSchema.getFields().get(3).getName());
    assertEquals("part0", tableSchema.getFields().get(4).getName());

    //Test that changing column data type fails
    partitionMap.clear();
    partitionMap.put("part1", "p1value6");
    partitionMap.put("part0", "506");

    partitionColumns = new ArrayList<HCatFieldSchema>();
    // c2 declared as INT here, conflicting with the table's STRING c2.
    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, "")));
    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", serdeConstants.INT_TYPE_NAME, "")));

    IOException exc = null;
    try {
      runMRCreate(partitionMap, partitionColumns, writeRecords, 20, true);
    } catch (IOException e) {
      exc = e;
    }

    assertTrue(exc != null);
    assertTrue(exc instanceof HCatException);
    assertEquals(ErrorType.ERROR_SCHEMA_TYPE_MISMATCH, ((HCatException) exc).getErrorType());

    //Test that partition key is not allowed in data
    partitionColumns = new ArrayList<HCatFieldSchema>();
    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, "")));
    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, "")));
    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c3", serdeConstants.STRING_TYPE_NAME, "")));
    // "part1" is a partition key and must not appear as a data column.
    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("part1", serdeConstants.STRING_TYPE_NAME, "")));

    List<HCatRecord> recordsContainingPartitionCols = new ArrayList<HCatRecord>(20);
    for (int i = 0; i < 20; i++) {
      List<Object> objList = new ArrayList<Object>();

      objList.add(i);
      objList.add("c2value" + i);
      objList.add("c3value" + i);
      objList.add("p1value6");
      recordsContainingPartitionCols.add(new DefaultHCatRecord(objList));
    }

    exc = null;
    try {
      runMRCreate(partitionMap, partitionColumns, recordsContainingPartitionCols, 20, true);
    } catch (IOException e) {
      exc = e;
    }

    // NOTE(review): no assertion on exc here — presumably the write is expected
    // to succeed with the partition column stripped/filled from the partition
    // map; the read-back below verifies the stored values. TODO confirm intent.
    List<HCatRecord> records = runMRRead(20, "part1 = \"p1value6\"");
    assertEquals(20, records.size());
    records = runMRRead(20, "part0 = \"506\"");
    assertEquals(20, records.size());
    Integer i = 0;
    for (HCatRecord rec : records) {
      assertEquals(5, rec.size());
      assertEquals(rec.get(0), i);
      assertEquals(rec.get(1), "c2value" + i);
      assertEquals(rec.get(2), "c3value" + i);
      assertEquals(rec.get(3), "p1value6");
      assertEquals(rec.get(4), 506);
      i++;
    }
  }

  //check behavior while change the order of columns
  private void columnOrderChangeTest() throws Exception {

    HCatSchema tableSchema = getTableSchema();

    assertEquals(5, tableSchema.getFields().size());

    // Write schema with c3 before c2 — order conflicts with the table schema.
    partitionColumns = new ArrayList<HCatFieldSchema>();
    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, "")));
    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c3", serdeConstants.STRING_TYPE_NAME, "")));
    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, "")));


    writeRecords = new ArrayList<HCatRecord>();

    for (int i = 0; i < 10; i++) {
      List<Object> objList = new ArrayList<Object>();

      objList.add(i);
      objList.add("co strvalue" + i);
      objList.add("co str2value" + i);
      writeRecords.add(new DefaultHCatRecord(objList));
    }

    Map<String, String> partitionMap = new HashMap<String, String>();
    partitionMap.put("part1", "p1value8");
    partitionMap.put("part0", "508");

    Exception exc = null;
    try {
      runMRCreate(partitionMap, partitionColumns, writeRecords, 10, true);
    } catch (IOException e) {
      exc = e;
    }

    assertTrue(exc != null);
    assertTrue(exc instanceof HCatException);
    assertEquals(ErrorType.ERROR_SCHEMA_COLUMN_MISMATCH, ((HCatException) exc).getErrorType());

    // Writing a prefix of the table schema (c1, c2 only) is allowed.
    partitionColumns = new ArrayList<HCatFieldSchema>();
    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c1", serdeConstants.INT_TYPE_NAME, "")));
    partitionColumns.add(HCatSchemaUtils.getHCatFieldSchema(new FieldSchema("c2", serdeConstants.STRING_TYPE_NAME, "")));

    writeRecords = new ArrayList<HCatRecord>();

    for (int i = 0; i < 10; i++) {
      List<Object> objList = new ArrayList<Object>();

      objList.add(i);
      objList.add("co strvalue" + i);
      writeRecords.add(new DefaultHCatRecord(objList));
    }

    runMRCreate(partitionMap, partitionColumns, writeRecords, 10, true);

    if (isTableImmutable()){
      //Read should get 10 + 20 + 10 + 10 + 20 rows
      runMRRead(70);
    } else {
      runMRRead(90); // +20 from the duplicate publish
    }
  }

  //Test that data inserted through hcatoutputformat is readable from hive
  private void hiveReadTest() throws Exception {

    String query = "select * from " + tableName;

    driver.run(query);

    ArrayList<String> res = new ArrayList<String>();
    driver.getResults(res);
    if (isTableImmutable()){
      //Read should get 10 + 20 + 10 + 10 + 20 rows
      assertEquals(70, res.size());
    } else {
      assertEquals(90, res.size()); // +20 from the duplicate publish
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.codehaus.groovy.runtime;

import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.Writer;
import java.net.URI;
import java.nio.charset.Charset;
import java.nio.file.FileSystem;
import java.nio.file.Files;
import java.nio.file.LinkOption;
import java.nio.file.Path;
import java.nio.file.WatchEvent;
import java.nio.file.WatchKey;
import java.nio.file.WatchService;
import java.util.Iterator;

import groovy.lang.Writable;

/**
 * A Writable Path: wraps a {@link Path} and makes its file content writable to any
 * {@link Writer} via the {@link Writable} contract. All {@link Path} operations are
 * delegated unchanged to the wrapped instance.
 *
 * @author Paolo Di Tommaso &lt;paolo.ditommaso@gmail.com&gt;
 * @author John Wilson
 */
public class WritablePath implements Path, Writable {

    /** Charset name used when reading the file; {@code null} means platform default. */
    private final String encoding;
    /** The wrapped path that all Path operations delegate to. */
    private final Path delegate;

    /**
     * Creates a writable view of {@code delegate} using the platform default charset.
     *
     * @param delegate the path to wrap
     */
    public WritablePath(final Path delegate) {
        this(delegate, null);
    }

    /**
     * Creates a writable view of {@code delegate}.
     *
     * @param delegate the path to wrap
     * @param encoding charset name used to decode the file content, or {@code null}
     *                 for the platform default
     */
    public WritablePath(final Path delegate, final String encoding) {
        this.encoding = encoding;
        this.delegate = delegate;
    }

    /**
     * Copies the file's character content to {@code out}.
     *
     * @param out the destination writer (not closed by this method)
     * @return {@code out}, to allow chaining
     * @throws IOException if the file cannot be read or the writer fails
     */
    public Writer writeTo(final Writer out) throws IOException {
        // try-with-resources guarantees the reader (and underlying stream) is closed
        // even if the copy fails part-way through.
        try (Reader reader = (this.encoding == null)
                ? new InputStreamReader(Files.newInputStream(this))
                : new InputStreamReader(Files.newInputStream(this), Charset.forName(this.encoding))) {
            // Buffered block copy instead of the original char-at-a-time loop: same
            // output, far fewer calls through the (undecorated) stream.
            final char[] buffer = new char[8192];
            int n;
            while ((n = reader.read(buffer)) != -1) {
                out.write(buffer, 0, n);
            }
        }
        return out;
    }

    // ----- pure delegation to the wrapped Path from here on -----

    @Override
    public FileSystem getFileSystem() {
        return delegate.getFileSystem();
    }

    @Override
    public boolean isAbsolute() {
        return delegate.isAbsolute();
    }

    @Override
    public Path getRoot() {
        return delegate.getRoot();
    }

    @Override
    public Path getFileName() {
        return delegate.getFileName();
    }

    @Override
    public Path getParent() {
        return delegate.getParent();
    }

    @Override
    public int getNameCount() {
        return delegate.getNameCount();
    }

    @Override
    public Path getName(int index) {
        return delegate.getName(index);
    }

    @Override
    public Path subpath(int beginIndex, int endIndex) {
        return delegate.subpath(beginIndex, endIndex);
    }

    @Override
    public boolean startsWith(Path other) {
        return delegate.startsWith(other);
    }

    @Override
    public boolean startsWith(String other) {
        return delegate.startsWith(other);
    }

    @Override
    public boolean endsWith(Path other) {
        return delegate.endsWith(other);
    }

    @Override
    public boolean endsWith(String other) {
        return delegate.endsWith(other);
    }

    @Override
    public Path normalize() {
        return delegate.normalize();
    }

    @Override
    public Path resolve(Path other) {
        return delegate.resolve(other);
    }

    @Override
    public Path resolve(String other) {
        return delegate.resolve(other);
    }

    @Override
    public Path resolveSibling(Path other) {
        return delegate.resolveSibling(other);
    }

    @Override
    public Path resolveSibling(String other) {
        return delegate.resolveSibling(other);
    }

    @Override
    public Path relativize(Path other) {
        return delegate.relativize(other);
    }

    @Override
    public URI toUri() {
        return delegate.toUri();
    }

    @Override
    public Path toAbsolutePath() {
        return delegate.toAbsolutePath();
    }

    @Override
    public Path toRealPath(LinkOption... options) throws IOException {
        return delegate.toRealPath(options);
    }

    @Override
    public File toFile() {
        return delegate.toFile();
    }

    @Override
    public WatchKey register(WatchService watcher, WatchEvent.Kind<?>[] events, WatchEvent.Modifier... modifiers) throws IOException {
        return delegate.register(watcher, events, modifiers);
    }

    @Override
    public WatchKey register(WatchService watcher, WatchEvent.Kind<?>... events) throws IOException {
        return delegate.register(watcher, events);
    }

    @Override
    public Iterator<Path> iterator() {
        return delegate.iterator();
    }

    @Override
    public int compareTo(Path other) {
        return delegate.compareTo(other);
    }

    // equals/hashCode intentionally delegate so a WritablePath compares equal to the
    // Path it wraps, matching the original implementation's behavior.
    @Override
    public boolean equals(Object other) {
        return delegate.equals(other);
    }

    @Override
    public int hashCode() {
        return delegate.hashCode();
    }

    @Override
    public String toString() {
        return delegate.toString();
    }
}
/* Copyright 2021 Telstra Open Source
 *
 *   Licensed under the Apache License, Version 2.0 (the "License");
 *   you may not use this file except in compliance with the License.
 *   You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 *   Unless required by applicable law or agreed to in writing, software
 *   distributed under the License is distributed on an "AS IS" BASIS,
 *   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *   See the License for the specific language governing permissions and
 *   limitations under the License.
 */

package org.openkilda.wfm.topology.flowmonitoring;

import static org.openkilda.wfm.topology.flowmonitoring.FlowMonitoringTopology.Stream.ACTION_STREAM_ID;
import static org.openkilda.wfm.topology.flowmonitoring.FlowMonitoringTopology.Stream.FLOW_REMOVE_STREAM_ID;
import static org.openkilda.wfm.topology.flowmonitoring.FlowMonitoringTopology.Stream.FLOW_UPDATE_STREAM_ID;
import static org.openkilda.wfm.topology.flowmonitoring.FlowMonitoringTopology.Stream.ISL_UPDATE_STREAM_ID;
import static org.openkilda.wfm.topology.flowmonitoring.FlowMonitoringTopology.Stream.STATS_STREAM_ID;
import static org.openkilda.wfm.topology.flowmonitoring.bolt.FlowCacheBolt.FLOW_ID_FIELD;
import static org.openkilda.wfm.topology.flowmonitoring.bolt.IslDataSplitterBolt.ISL_KEY_FIELD;

import org.openkilda.persistence.PersistenceManager;
import org.openkilda.wfm.LaunchEnvironment;
import org.openkilda.wfm.share.zk.ZkStreams;
import org.openkilda.wfm.share.zk.ZooKeeperBolt;
import org.openkilda.wfm.share.zk.ZooKeeperSpout;
import org.openkilda.wfm.topology.AbstractTopology;
import org.openkilda.wfm.topology.flowmonitoring.bolt.ActionBolt;
import org.openkilda.wfm.topology.flowmonitoring.bolt.FlowCacheBolt;
import org.openkilda.wfm.topology.flowmonitoring.bolt.FlowSplitterBolt;
import org.openkilda.wfm.topology.flowmonitoring.bolt.FlowStateCacheBolt;
import org.openkilda.wfm.topology.flowmonitoring.bolt.IslCacheBolt;
import org.openkilda.wfm.topology.flowmonitoring.bolt.IslDataSplitterBolt;
import org.openkilda.wfm.topology.flowmonitoring.bolt.RerouteEncoder;
import org.openkilda.wfm.topology.flowmonitoring.bolt.TickBolt;

import org.apache.storm.generated.StormTopology;
import org.apache.storm.kafka.bolt.KafkaBolt;
import org.apache.storm.topology.TopologyBuilder;
import org.apache.storm.tuple.Fields;

import java.time.Duration;

/**
 * Storm topology for flow SLA / latency monitoring. Wires Kafka spouts (flow and ISL
 * update/latency feeds) through splitter and cache bolts into an action bolt that can
 * emit reroute requests, plus a stats sink and ZooKeeper lifecycle coordination.
 */
public class FlowMonitoringTopology extends AbstractTopology<FlowMonitoringTopologyConfig> {

    // Grouping fields used for fieldsGrouping so tuples for the same flow / ISL
    // always land on the same bolt instance.
    private static final Fields FLOW_ID_FIELDS = new Fields(FLOW_ID_FIELD);
    private static final Fields ISL_KEY_FIELDS = new Fields(ISL_KEY_FIELD);

    public FlowMonitoringTopology(LaunchEnvironment env) {
        super(env, "flowmonitoring-topology", FlowMonitoringTopologyConfig.class);
    }

    /**
     * Builds the topology. Declaration order here is wiring only; Storm resolves
     * component references by name, so spouts/bolts may be declared in any order.
     */
    @Override
    public StormTopology createTopology() {
        TopologyBuilder tb = new TopologyBuilder();

        flowSpout(tb);
        flowLatencySpout(tb);
        islSpout(tb);
        islLatencySpout(tb);
        zooKeeperSpout(tb);

        islSplitterBolt(tb);
        flowSplitterBolt(tb);
        tickBolt(tb);

        // Shared persistence manager for every bolt that touches the database.
        PersistenceManager persistenceManager = new PersistenceManager(configurationProvider);
        flowStateCacheBolt(tb, persistenceManager);
        flowCacheBolt(tb, persistenceManager);
        islCacheBolt(tb, persistenceManager);
        actionBolt(tb, persistenceManager);

        outputReroute(tb);
        statsBolt(tb);
        zooKeeperBolt(tb);

        return tb.createTopology();
    }

    /** Kafka spout: flow create/update/remove notifications from the flow HS topology. */
    private void flowSpout(TopologyBuilder topologyBuilder) {
        declareKafkaSpout(topologyBuilder,
                getConfig().getKafkaFlowHsToFlowMonitoringTopic(), ComponentId.FLOW_SPOUT.name());
    }

    /** Kafka spout: server42 flow RTT measurements. */
    private void flowLatencySpout(TopologyBuilder topologyBuilder) {
        declareKafkaSpout(topologyBuilder,
                getConfig().getServer42StatsFlowRttTopic(), ComponentId.FLOW_LATENCY_SPOUT.name());
    }

    /** Kafka spout: ISL change notifications from the network topology. */
    private void islSpout(TopologyBuilder topologyBuilder) {
        declareKafkaSpout(topologyBuilder,
                getConfig().getNetworkFlowMonitoringNotifyTopic(), ComponentId.ISL_SPOUT.name());
    }

    /** Kafka spout: ISL latency measurements. */
    private void islLatencySpout(TopologyBuilder topologyBuilder) {
        declareKafkaSpout(topologyBuilder,
                getConfig().getTopoIslLatencyTopic(), ComponentId.ISL_LATENCY_SPOUT.name());
    }

    /** Splits both ISL input feeds into per-ISL keyed tuples. */
    private void islSplitterBolt(TopologyBuilder topologyBuilder) {
        declareBolt(topologyBuilder, new IslDataSplitterBolt(), ComponentId.ISL_SPLITTER_BOLT.name())
                .shuffleGrouping(ComponentId.ISL_SPOUT.name())
                .shuffleGrouping(ComponentId.ISL_LATENCY_SPOUT.name());
    }

    /** Splits both flow input feeds into per-flow keyed tuples. */
    private void flowSplitterBolt(TopologyBuilder topologyBuilder) {
        declareBolt(topologyBuilder, new FlowSplitterBolt(), ComponentId.FLOW_SPLITTER_BOLT.name())
                .shuffleGrouping(ComponentId.FLOW_SPOUT.name())
                .shuffleGrouping(ComponentId.FLOW_LATENCY_SPOUT.name());
    }

    /**
     * Periodic tick source driving SLA checks. The second constructor argument shards
     * the check interval across getFlowSlaCheckShardCount() slices.
     */
    private void tickBolt(TopologyBuilder topologyBuilder) {
        declareBolt(topologyBuilder, new TickBolt(getConfig().getFlowSlaCheckIntervalSeconds(),
                        getConfig().getFlowSlaCheckIntervalSeconds() / getConfig().getFlowSlaCheckShardCount()),
                ComponentId.TICK_BOLT.name());
    }

    /**
     * Caches the known flow set. Uses allGrouping so every instance sees every
     * flow update/remove event (each instance keeps the full picture).
     */
    private void flowStateCacheBolt(TopologyBuilder topologyBuilder, PersistenceManager persistenceManager) {
        FlowStateCacheBolt flowStateCacheBolt = new FlowStateCacheBolt(persistenceManager, ZooKeeperSpout.SPOUT_ID);
        declareBolt(topologyBuilder, flowStateCacheBolt, ComponentId.FLOW_STATE_CACHE_BOLT.name())
                .allGrouping(ComponentId.FLOW_SPLITTER_BOLT.name(), FLOW_UPDATE_STREAM_ID.name())
                .allGrouping(ComponentId.FLOW_SPLITTER_BOLT.name(), FLOW_REMOVE_STREAM_ID.name())
                .allGrouping(ComponentId.TICK_BOLT.name())
                .allGrouping(ZooKeeperSpout.SPOUT_ID);
    }

    /**
     * Per-flow latency cache; fieldsGrouping on flow id keeps each flow's state on
     * a single instance.
     */
    private void flowCacheBolt(TopologyBuilder topologyBuilder, PersistenceManager persistenceManager) {
        FlowCacheBolt flowCacheBolt = new FlowCacheBolt(
                persistenceManager, ZooKeeperSpout.SPOUT_ID,
                Duration.ofSeconds(getConfig().getFlowRttStatsExpirationSeconds()), getConfig().getMetricPrefix());
        declareBolt(topologyBuilder, flowCacheBolt, ComponentId.FLOW_CACHE_BOLT.name())
                .fieldsGrouping(ComponentId.FLOW_STATE_CACHE_BOLT.name(), FLOW_UPDATE_STREAM_ID.name(),
                        FLOW_ID_FIELDS)
                .fieldsGrouping(ComponentId.FLOW_STATE_CACHE_BOLT.name(), FLOW_REMOVE_STREAM_ID.name(),
                        FLOW_ID_FIELDS)
                .fieldsGrouping(ComponentId.FLOW_STATE_CACHE_BOLT.name(), FLOW_ID_FIELDS)
                .fieldsGrouping(ComponentId.FLOW_SPLITTER_BOLT.name(), FLOW_ID_FIELDS)
                .fieldsGrouping(ComponentId.ISL_CACHE_BOLT.name(), FLOW_ID_FIELDS)
                .allGrouping(ZooKeeperSpout.SPOUT_ID);
    }

    /** Per-ISL latency cache keyed by ISL key; expires RTT data after the configured TTL. */
    private void islCacheBolt(TopologyBuilder topologyBuilder, PersistenceManager persistenceManager) {
        IslCacheBolt islCacheBolt = new IslCacheBolt(persistenceManager,
                Duration.ofSeconds(getConfig().getIslRttLatencyExpirationSeconds()), ZooKeeperSpout.SPOUT_ID);
        declareBolt(topologyBuilder, islCacheBolt, ComponentId.ISL_CACHE_BOLT.name())
                .fieldsGrouping(ComponentId.ISL_SPLITTER_BOLT.name(), ISL_KEY_FIELDS)
                .fieldsGrouping(ComponentId.ISL_SPLITTER_BOLT.name(), ISL_UPDATE_STREAM_ID.name(), ISL_KEY_FIELDS)
                .fieldsGrouping(ComponentId.FLOW_CACHE_BOLT.name(), ISL_KEY_FIELDS)
                .allGrouping(ZooKeeperSpout.SPOUT_ID);
    }

    /** Decides on SLA violations and emits actions (e.g. reroute requests). */
    private void actionBolt(TopologyBuilder topologyBuilder, PersistenceManager persistenceManager) {
        declareBolt(topologyBuilder,
                new ActionBolt(persistenceManager, Duration.ofSeconds(getConfig().getFlowLatencySlaTimeoutSeconds()),
                        getConfig().getFlowLatencySlaThresholdPercent(), ZooKeeperSpout.SPOUT_ID,
                        getConfig().getFlowSlaCheckShardCount()),
                ComponentId.ACTION_BOLT.name())
                .fieldsGrouping(ComponentId.FLOW_CACHE_BOLT.name(), ACTION_STREAM_ID.name(), FLOW_ID_FIELDS)
                .fieldsGrouping(ComponentId.FLOW_CACHE_BOLT.name(), FLOW_UPDATE_STREAM_ID.name(), FLOW_ID_FIELDS)
                .fieldsGrouping(ComponentId.FLOW_CACHE_BOLT.name(), FLOW_REMOVE_STREAM_ID.name(), FLOW_ID_FIELDS)
                .allGrouping(ComponentId.TICK_BOLT.name())
                .allGrouping(ZooKeeperSpout.SPOUT_ID);
    }

    /** Encodes reroute actions and forwards them to the reroute Kafka topic. */
    private void outputReroute(TopologyBuilder topology) {
        RerouteEncoder bolt = new RerouteEncoder();
        // NOTE(review): the encoder is registered under RerouteEncoder.BOLT_ID, not
        // ComponentId.REROUTE_ENCODER — that enum constant appears unused; confirm.
        declareBolt(topology, bolt, RerouteEncoder.BOLT_ID)
                .shuffleGrouping(ComponentId.ACTION_BOLT.name());

        KafkaBolt output = buildKafkaBolt(getConfig().getKafkaTopoRerouteTopic());
        declareBolt(topology, output, ComponentId.REROUTE_BOLT.name())
                .shuffleGrouping(RerouteEncoder.BOLT_ID);
    }

    /** Forwards latency metrics from the flow cache to the OpenTSDB Kafka topic. */
    private void statsBolt(TopologyBuilder topologyBuilder) {
        declareBolt(topologyBuilder, createKafkaBolt(getConfig().getKafkaOtsdbTopic()),
                ComponentId.STATS_BOLT.name())
                .shuffleGrouping(ComponentId.FLOW_CACHE_BOLT.name(), STATS_STREAM_ID.name());
    }

    /** Blue/green lifecycle event source from ZooKeeper. */
    private void zooKeeperSpout(TopologyBuilder topology) {
        ZooKeeperSpout zooKeeperSpout = new ZooKeeperSpout(getConfig().getBlueGreenMode(), getZkTopoName(),
                getZookeeperConfig());
        declareSpout(topology, zooKeeperSpout, ZooKeeperSpout.SPOUT_ID);
    }

    /**
     * Reports lifecycle state back to ZooKeeper; the expected-readiness count covers
     * every bolt that consumes the ZooKeeper spout.
     */
    private void zooKeeperBolt(TopologyBuilder topology) {
        ZooKeeperBolt zooKeeperBolt = new ZooKeeperBolt(getConfig().getBlueGreenMode(), getZkTopoName(),
                getZookeeperConfig(),
                getBoltInstancesCount(ComponentId.ISL_CACHE_BOLT.name(), ComponentId.FLOW_CACHE_BOLT.name(),
                        ComponentId.ACTION_BOLT.name(), ComponentId.FLOW_STATE_CACHE_BOLT.name()));
        declareBolt(topology, zooKeeperBolt, ZooKeeperBolt.BOLT_ID)
                .allGrouping(ComponentId.ISL_CACHE_BOLT.name(), ZkStreams.ZK.toString())
                .allGrouping(ComponentId.FLOW_STATE_CACHE_BOLT.name(), ZkStreams.ZK.toString())
                .allGrouping(ComponentId.FLOW_CACHE_BOLT.name(), ZkStreams.ZK.toString())
                .allGrouping(ComponentId.ACTION_BOLT.name(), ZkStreams.ZK.toString());
    }

    @Override
    protected String getZkTopoName() {
        return "flowmonitoring";
    }

    /** Storm component (spout/bolt) identifiers for this topology. */
    public enum ComponentId {
        FLOW_SPOUT("flow.spout"),
        FLOW_LATENCY_SPOUT("flow.latency.spout"),
        ISL_SPOUT("isl.spout"),
        ISL_LATENCY_SPOUT("isl.latency.spout"),

        ISL_SPLITTER_BOLT("isl.splitter.bolt"),
        FLOW_SPLITTER_BOLT("flow.splitter.bolt"),
        FLOW_STATE_CACHE_BOLT("flow.state.cache.bolt"),
        FLOW_CACHE_BOLT("flow.cache.bolt"),
        ISL_CACHE_BOLT("isl.cache.bolt"),
        ACTION_BOLT("action.bolt"),
        STATS_BOLT("stats.bolt"),
        REROUTE_ENCODER("reroute.encoder"),
        REROUTE_BOLT("reroute.bolt"),

        TICK_BOLT("tick.bolt");

        private final String value;

        ComponentId(String value) {
            this.value = value;
        }

        @Override
        public String toString() {
            return value;
        }
    }

    /** Named inter-bolt streams. */
    public enum Stream {
        ACTION_STREAM_ID,
        STATS_STREAM_ID,
        FLOW_UPDATE_STREAM_ID,
        FLOW_REMOVE_STREAM_ID,
        ISL_UPDATE_STREAM_ID
    }

    /**
     * Launches and sets up the topology.
     *
     * @param args the command-line arguments.
     */
    public static void main(String[] args) {
        try {
            LaunchEnvironment env = new LaunchEnvironment(args);
            new FlowMonitoringTopology(env).setup();
        } catch (Exception e) {
            System.exit(handleLaunchException(e));
        }
    }
}
// Copyright (c) 2011, David J. Pearce (djp@ecs.vuw.ac.nz) // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // * Neither the name of the <organization> nor the // names of its contributors may be used to endorse or promote products // derived from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE // DISCLAIMED. IN NO EVENT SHALL DAVID J. PEARCE BE LIABLE FOR ANY // DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES // (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; // LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND // ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. package wyfs.io; import java.io.*; public class BinaryOutputStream extends OutputStream { protected OutputStream output; protected int value; protected int count; /** * Write out data in big-endian format. * @param output */ public BinaryOutputStream(OutputStream output) { this.output = output; } /** * Write an unsigned integer value using 8bits using a big-endian encoding. 
* * @param w * @throws IOException */ public void write(int i) throws IOException { if(count == 0) { output.write(i & 0xFF); } else { write_un(i & 0xFF,8); } } public void write(byte[] bytes) throws IOException { for(byte b : bytes) { write(b); } } public void write(byte[] bytes, int offset, int length) throws IOException { for(;offset < length;++offset) { write(bytes[offset]); } } /** * Write an unsigned integer value using 8bits using a big-endian encoding. * * @param w * @throws IOException */ public void write_u8(int w) throws IOException { if(count == 0) { output.write(w & 0xFF); } else { write_un(w & 0xFF,8); } } /** * Write an unsigned integer value using 16bits using a big-endian encoding. * * @param w * @throws IOException */ public void write_u16(int w) throws IOException { write_u8((w >> 8) & 0xFF); write_u8(w & 0xFF); } /** * Write an unsigned integer value using 32bits using a big-endian encoding. * * @param w * @throws IOException */ public void write_u32(int w) throws IOException { write_u8((w >> 24) & 0xFF); write_u8((w >> 16) & 0xFF); write_u8((w >> 8) & 0xFF); write_u8(w & 0xFF); } /** * Write an unsigned integer value using a variable amount of space. The * value is split into 4 bit (big-endian) chunks, where the msb of each * chunk is a flag indicating whether there are more chunks. Therefore, * values between 0 and 7 fit into 4 bits. Similarly, values between 8 and * 63 fit into 8 bits, etc * * @param w * --- number to convert (which cannot be negative) * @throws IOException */ public void write_uv(int w) throws IOException { if(w < 0) { throw new IllegalArgumentException("cannot write negative number in a variable amount of space"); } do { int t = w & 7; w = w >> 3; if(w != 0) { write_un(8|t,4); } else { write_un(t,4); } } while(w != 0); } /** * Write an unsigned integer value using n bits using a big-endian encoding. 
* * @param w * @throws IOException */ public void write_un(int bits, int n) throws IOException { int mask = 1; for(int i=0;i<n;++i) { boolean bit = (bits & mask) != 0; write_bit(bit); mask = mask << 1; } } public void write_bit(boolean bit) throws IOException { value = value >> 1; if(bit) { value |= 128; } count = count + 1; if(count == 8) { count = 0; output.write(value); value = 0; } } /** * Pad out stream to nearest byte boundary * @throws IOException */ public void pad_u8() throws IOException { if (count > 0) { output.write(value >>> (8-count)); value = 0; count = 0; } } public void close() throws IOException { flush(); output.close(); } public void flush() throws IOException { if(count != 0) { // In this case, we're closing but we have a number of bits left to // write. This means we have to pad out the remainder of a byte. // Instead of padding with zeros, I pad with ones. The reason for // this is that it forces an EOF when reading back in with read_uv(). value = value >>> (8-count); int mask = 0xff & ((~0) << count); value = value | mask; output.write(value); } } public static String bin2str(int v) { if(v == 0) { return "0"; } int mask = 1 << 31; String r = ""; boolean leading = true; for(int i=0;i!=32;++i) { if((v&mask) != 0) { r = r + "1"; leading=false; } else if(!leading) { r = r + "0"; } v = v << 1; } return r; } public static void main(String[] argss) { try { ByteArrayOutputStream bout = new ByteArrayOutputStream(); BinaryOutputStream binout = new BinaryOutputStream(bout); binout.write_bit(true); binout.write_bit(false); binout.write_bit(true); binout.pad_u8(); binout.write_bit(true); binout.write_bit(false); binout.write_bit(true); binout.write_bit(true); binout.close(); ByteArrayInputStream bin = new ByteArrayInputStream(bout.toByteArray()); BinaryInputStream binin = new BinaryInputStream(bin); System.out.println(binin.read_bit()); System.out.println(binin.read_bit()); System.out.println(binin.read_bit()); binin.pad_u8(); 
System.out.println(binin.read_bit()); System.out.println(binin.read_bit()); System.out.println(binin.read_bit()); System.out.println(binin.read_bit()); } catch(IOException e) { } } }
package com.example.android.sunshine.app.data; import android.content.ComponentName; import android.content.ContentUris; import android.content.ContentValues; import android.content.pm.PackageManager; import android.content.pm.ProviderInfo; import android.database.Cursor; import android.database.sqlite.SQLiteDatabase; import android.net.Uri; import android.os.Build; import android.test.AndroidTestCase; import android.util.Log; import com.example.android.sunshine.app.data.WeatherContract.LocationEntry; import com.example.android.sunshine.app.data.WeatherContract.WeatherEntry; /* Note: This is not a complete set of tests of the Sunshine ContentProvider, but it does test that at least the basic functionality has been implemented correctly. Students: Uncomment the tests in this class as you implement the functionality in your ContentProvider to make sure that you've implemented things reasonably correctly. */ public class TestProvider extends AndroidTestCase { public static final String LOG_TAG = TestProvider.class.getSimpleName(); /* This helper function deletes all records from both database tables using the ContentProvider. It also queries the ContentProvider to make sure that the database has been successfully deleted, so it cannot be used until the Query and Delete functions have been written in the ContentProvider. Students: Replace the calls to deleteAllRecordsFromDB with this one after you have written the delete functionality in the ContentProvider. 
*/ public void deleteAllRecordsFromProvider() { mContext.getContentResolver().delete( WeatherEntry.CONTENT_URI, null, null ); mContext.getContentResolver().delete( LocationEntry.CONTENT_URI, null, null ); Cursor cursor = mContext.getContentResolver().query( WeatherEntry.CONTENT_URI, null, null, null, null ); assertEquals("Error: Records not deleted from Weather table during delete", 0, cursor.getCount()); cursor.close(); cursor = mContext.getContentResolver().query( LocationEntry.CONTENT_URI, null, null, null, null ); assertEquals("Error: Records not deleted from Location table during delete", 0, cursor.getCount()); cursor.close(); } /* This helper function deletes all records from both database tables using the database functions only. This is designed to be used to reset the state of the database until the delete functionality is available in the ContentProvider. */ /* public void deleteAllRecordsFromDB() { WeatherDbHelper dbHelper = new WeatherDbHelper(mContext); SQLiteDatabase db = dbHelper.getWritableDatabase(); db.delete(WeatherEntry.TABLE_NAME, null, null); db.delete(LocationEntry.TABLE_NAME, null, null); db.close(); }*/ /* Student: Refactor this function to use the deleteAllRecordsFromProvider functionality once you have implemented delete functionality there. */ public void deleteAllRecords() { deleteAllRecordsFromProvider(); } // Since we want each test to start with a clean slate, run deleteAllRecords // in setUp (called by the test runner before each test). @Override protected void setUp() throws Exception { super.setUp(); deleteAllRecords(); } /* This test checks to make sure that the content provider is registered correctly. Students: Uncomment this test to make sure you've correctly registered the WeatherProvider. */ public void testProviderRegistry() { PackageManager pm = mContext.getPackageManager(); // We define the component name based on the package name from the context and the // WeatherProvider class. 
ComponentName componentName = new ComponentName(mContext.getPackageName(), WeatherProvider.class.getName()); try { // Fetch the provider info using the component name from the PackageManager // This throws an exception if the provider isn't registered. ProviderInfo providerInfo = pm.getProviderInfo(componentName, 0); // Make sure that the registered authority matches the authority from the Contract. assertEquals("Error: WeatherProvider registered with authority: " + providerInfo.authority + " instead of authority: " + WeatherContract.CONTENT_AUTHORITY, providerInfo.authority, WeatherContract.CONTENT_AUTHORITY); } catch (PackageManager.NameNotFoundException e) { // I guess the provider isn't registered correctly. assertTrue("Error: WeatherProvider not registered at " + mContext.getPackageName(), false); } } /* This test doesn't touch the database. It verifies that the ContentProvider returns the correct type for each type of URI that it can handle. Students: Uncomment this test to verify that your implementation of GetType is functioning correctly. 
*/ public void testGetType() { // content://com.example.android.sunshine.app/weather/ String type = mContext.getContentResolver().getType(WeatherEntry.CONTENT_URI); // vnd.android.cursor.dir/com.example.android.sunshine.app/weather assertEquals("Error: the WeatherEntry CONTENT_URI should return WeatherEntry.CONTENT_TYPE", WeatherEntry.CONTENT_TYPE, type); String testLocation = "94074"; // content://com.example.android.sunshine.app/weather/94074 type = mContext.getContentResolver().getType( WeatherEntry.buildWeatherLocation(testLocation)); // vnd.android.cursor.dir/com.example.android.sunshine.app/weather assertEquals("Error: the WeatherEntry CONTENT_URI with location should return WeatherEntry.CONTENT_TYPE", WeatherEntry.CONTENT_TYPE, type); long testDate = 1419120000L; // December 21st, 2014 // content://com.example.android.sunshine.app/weather/94074/20140612 type = mContext.getContentResolver().getType( WeatherEntry.buildWeatherLocationWithDate(testLocation, testDate)); // vnd.android.cursor.item/com.example.android.sunshine.app/weather/1419120000 assertEquals("Error: the WeatherEntry CONTENT_URI with location and date should return WeatherEntry.CONTENT_ITEM_TYPE", WeatherEntry.CONTENT_ITEM_TYPE, type); // content://com.example.android.sunshine.app/location/ type = mContext.getContentResolver().getType(LocationEntry.CONTENT_URI); // vnd.android.cursor.dir/com.example.android.sunshine.app/location assertEquals("Error: the LocationEntry CONTENT_URI should return LocationEntry.CONTENT_TYPE", LocationEntry.CONTENT_TYPE, type); } /* This test uses the database directly to insert and then uses the ContentProvider to read out the data. Uncomment this test to see if the basic weather query functionality given in the ContentProvider is working correctly. 
*/ public void testBasicWeatherQuery() { // insert our test records into the database WeatherDbHelper dbHelper = new WeatherDbHelper(mContext); SQLiteDatabase db = dbHelper.getWritableDatabase(); ContentValues testValues = TestUtilities.createNorthPoleLocationValues(); long locationRowId = TestUtilities.insertNorthPoleLocationValues(mContext); // Fantastic. Now that we have a location, add some weather! ContentValues weatherValues = TestUtilities.createWeatherValues(locationRowId); long weatherRowId = db.insert(WeatherEntry.TABLE_NAME, null, weatherValues); assertTrue("Unable to Insert WeatherEntry into the Database", weatherRowId != -1); db.close(); // Test the basic content provider query Cursor weatherCursor = mContext.getContentResolver().query( WeatherEntry.CONTENT_URI, null, null, null, null ); // Make sure we get the correct cursor out of the database TestUtilities.validateCursor("testBasicWeatherQuery", weatherCursor, weatherValues); } /* This test uses the database directly to insert and then uses the ContentProvider to read out the data. Uncomment this test to see if your location queries are performing correctly. */ public void testBasicLocationQueries() { // insert our test records into the database WeatherDbHelper dbHelper = new WeatherDbHelper(mContext); SQLiteDatabase db = dbHelper.getWritableDatabase(); ContentValues testValues = TestUtilities.createNorthPoleLocationValues(); long locationRowId = TestUtilities.insertNorthPoleLocationValues(mContext); // Test the basic content provider query Cursor locationCursor = mContext.getContentResolver().query( LocationEntry.CONTENT_URI, null, null, null, null ); // Make sure we get the correct cursor out of the database TestUtilities.validateCursor("testBasicLocationQueries, location query", locationCursor, testValues); // Has the NotificationUri been set correctly? --- we can only test this easily against API // level 19 or greater because getNotificationUri was added in API level 19. 
if ( Build.VERSION.SDK_INT >= 19 ) { assertEquals("Error: Location Query did not properly set NotificationUri", locationCursor.getNotificationUri(), LocationEntry.CONTENT_URI); } } /* This test uses the provider to insert and then update the data. Uncomment this test to see if your update location is functioning correctly. */ public void testUpdateLocation() { // Create a new map of values, where column names are the keys ContentValues values = TestUtilities.createNorthPoleLocationValues(); Uri locationUri = mContext.getContentResolver(). insert(LocationEntry.CONTENT_URI, values); long locationRowId = ContentUris.parseId(locationUri); // Verify we got a row back. assertTrue(locationRowId != -1); Log.d(LOG_TAG, "New row id: " + locationRowId); ContentValues updatedValues = new ContentValues(values); updatedValues.put(LocationEntry._ID, locationRowId); updatedValues.put(LocationEntry.COLUMN_CITY_NAME, "Santa's Village"); // Create a cursor with observer to make sure that the content provider is notifying // the observers as expected Cursor locationCursor = mContext.getContentResolver().query(LocationEntry.CONTENT_URI, null, null, null, null); TestUtilities.TestContentObserver tco = TestUtilities.getTestContentObserver(); locationCursor.registerContentObserver(tco); int count = mContext.getContentResolver().update( LocationEntry.CONTENT_URI, updatedValues, LocationEntry._ID + "= ?", new String[] { Long.toString(locationRowId)}); assertEquals(count, 1); // Test to make sure our observer is called. If not, we throw an assertion. // // Students: If your code is failing here, it means that your content provider // isn't calling getContext().getContentResolver().notifyChange(uri, null); tco.waitForNotificationOrFail(); locationCursor.unregisterContentObserver(tco); locationCursor.close(); // A cursor is your primary interface to the query results. 
        // Re-query the single updated row and check every column round-tripped.
        Cursor cursor = mContext.getContentResolver().query(
                LocationEntry.CONTENT_URI,
                null,                                        // projection
                LocationEntry._ID + " = " + locationRowId,
                null,                                        // Values for the "where" clause
                null                                         // sort order
        );

        TestUtilities.validateCursor("testUpdateLocation. Error validating location entry update.",
                cursor, updatedValues);

        cursor.close();
    }

    // Make sure we can still delete after adding/updating stuff
    //
    // Student: Uncomment this test after you have completed writing the insert functionality
    // in your provider. It relies on insertions with testInsertReadProvider, so insert and
    // query functionality must also be complete before this test can be used.
    public void testInsertReadProvider() {
        ContentValues testValues = TestUtilities.createNorthPoleLocationValues();

        // Register a content observer for our insert. This time, directly with the content resolver
        TestUtilities.TestContentObserver tco = TestUtilities.getTestContentObserver();
        mContext.getContentResolver().registerContentObserver(LocationEntry.CONTENT_URI, true, tco);
        Uri locationUri = mContext.getContentResolver().insert(LocationEntry.CONTENT_URI, testValues);

        // Did our content observer get called? Students: If this fails, your insert location
        // isn't calling getContext().getContentResolver().notifyChange(uri, null);
        tco.waitForNotificationOrFail();
        mContext.getContentResolver().unregisterContentObserver(tco);

        long locationRowId = ContentUris.parseId(locationUri);

        // Verify we got a row back.
        assertTrue(locationRowId != -1);

        // Data's inserted. IN THEORY. Now pull some out to stare at it and verify it made
        // the round trip.

        // A cursor is your primary interface to the query results.
        Cursor cursor = mContext.getContentResolver().query(
                LocationEntry.CONTENT_URI,
                null, // leaving "columns" null just returns all the columns.
                null, // cols for "where" clause
                null, // values for "where" clause
                null  // sort order
        );

        TestUtilities.validateCursor("testInsertReadProvider. Error validating LocationEntry.",
                cursor, testValues);

        // Fantastic. Now that we have a location, add some weather!
        ContentValues weatherValues = TestUtilities.createWeatherValues(locationRowId);
        // The TestContentObserver is a one-shot class
        tco = TestUtilities.getTestContentObserver();

        mContext.getContentResolver().registerContentObserver(WeatherEntry.CONTENT_URI, true, tco);

        Uri weatherInsertUri = mContext.getContentResolver()
                .insert(WeatherEntry.CONTENT_URI, weatherValues);
        assertTrue(weatherInsertUri != null);

        // Did our content observer get called? Students: If this fails, your insert weather
        // in your ContentProvider isn't calling
        // getContext().getContentResolver().notifyChange(uri, null);
        tco.waitForNotificationOrFail();
        mContext.getContentResolver().unregisterContentObserver(tco);

        // A cursor is your primary interface to the query results.
        Cursor weatherCursor = mContext.getContentResolver().query(
                WeatherEntry.CONTENT_URI,  // Table to Query
                null, // leaving "columns" null just returns all the columns.
                null, // cols for "where" clause
                null, // values for "where" clause
                null  // columns to group by
        );

        TestUtilities.validateCursor("testInsertReadProvider. Error validating WeatherEntry insert.",
                weatherCursor, weatherValues);

        // Add the location values in with the weather data so that we can make
        // sure that the join worked and we actually get all the values back
        weatherValues.putAll(testValues);

        // Get the joined Weather and Location data
        weatherCursor = mContext.getContentResolver().query(
                WeatherEntry.buildWeatherLocation(TestUtilities.TEST_LOCATION),
                null, // leaving "columns" null just returns all the columns.
                null, // cols for "where" clause
                null, // values for "where" clause
                null  // sort order
        );
        TestUtilities.validateCursor("testInsertReadProvider. Error validating joined Weather and Location Data.",
                weatherCursor, weatherValues);

        // Get the joined Weather and Location data with a start date
        weatherCursor = mContext.getContentResolver().query(
                WeatherEntry.buildWeatherLocationWithStartDate(
                        TestUtilities.TEST_LOCATION, TestUtilities.TEST_DATE),
                null, // leaving "columns" null just returns all the columns.
                null, // cols for "where" clause
                null, // values for "where" clause
                null  // sort order
        );
        TestUtilities.validateCursor("testInsertReadProvider. Error validating joined Weather and Location Data with start date.",
                weatherCursor, weatherValues);

        // Get the joined Weather data for a specific date
        weatherCursor = mContext.getContentResolver().query(
                WeatherEntry.buildWeatherLocationWithDate(TestUtilities.TEST_LOCATION, TestUtilities.TEST_DATE),
                null,
                null,
                null,
                null
        );
        TestUtilities.validateCursor("testInsertReadProvider. Error validating joined Weather and Location data for a specific date.",
                weatherCursor, weatherValues);
        // NOTE(review): none of the cursors in this test are close()d — minor leak in test code.
    }

    // Make sure we can still delete after adding/updating stuff
    //
    // Student: Uncomment this test after you have completed writing the delete functionality
    // in your provider. It relies on insertions with testInsertReadProvider, so insert and
    // query functionality must also be complete before this test can be used.
    public void testDeleteRecords() {
        // Seed the tables first so there is something to delete.
        testInsertReadProvider();

        // Register a content observer for our location delete.
        TestUtilities.TestContentObserver locationObserver = TestUtilities.getTestContentObserver();
        mContext.getContentResolver().registerContentObserver(LocationEntry.CONTENT_URI, true, locationObserver);

        // Register a content observer for our weather delete.
        TestUtilities.TestContentObserver weatherObserver = TestUtilities.getTestContentObserver();
        mContext.getContentResolver().registerContentObserver(WeatherEntry.CONTENT_URI, true, weatherObserver);

        deleteAllRecordsFromProvider();

        // Students: If either of these fail, you most-likely are not calling the
        // getContext().getContentResolver().notifyChange(uri, null); in the ContentProvider
        // delete. (only if the insertReadProvider is succeeding)
        locationObserver.waitForNotificationOrFail();
        weatherObserver.waitForNotificationOrFail();

        mContext.getContentResolver().unregisterContentObserver(locationObserver);
        mContext.getContentResolver().unregisterContentObserver(weatherObserver);
    }

    static private final int BULK_INSERT_RECORDS_TO_INSERT = 10;

    // Builds BULK_INSERT_RECORDS_TO_INSERT weather rows, one per consecutive day starting
    // at TEST_DATE, all keyed to the given location row. Values are deterministic functions
    // of the loop index so validateCurrentRecord can check them after a bulkInsert.
    static ContentValues[] createBulkInsertWeatherValues(long locationRowId) {
        long currentTestDate = TestUtilities.TEST_DATE;
        // 86,400,000 ms — fits comfortably in an int before widening to long.
        long millisecondsInADay = 1000*60*60*24;
        ContentValues[] returnContentValues = new ContentValues[BULK_INSERT_RECORDS_TO_INSERT];

        for ( int i = 0; i < BULK_INSERT_RECORDS_TO_INSERT; i++, currentTestDate+= millisecondsInADay ) {
            ContentValues weatherValues = new ContentValues();
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_LOC_KEY, locationRowId);
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_DATE, currentTestDate);
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_DEGREES, 1.1);
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_HUMIDITY, 1.2 + 0.01 * (float) i);
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_PRESSURE, 1.3 - 0.01 * (float) i);
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_MAX_TEMP, 75 + i);
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_MIN_TEMP, 65 - i);
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_SHORT_DESC, "Asteroids");
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_WIND_SPEED, 5.5 + 0.2 * (float) i);
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_WEATHER_ID, 321);
            returnContentValues[i] = weatherValues;
        }
        return returnContentValues;
    }

    // Student: Uncomment this test after you have completed writing the BulkInsert functionality
    // in your provider. Note that this test will work with the built-in (default) provider
    // implementation, which just inserts records one-at-a-time, so really do implement the
    // BulkInsert ContentProvider function.
//    public void testBulkInsert() {
//        // first, let's create a location value
//        ContentValues testValues = TestUtilities.createNorthPoleLocationValues();
//        Uri locationUri = mContext.getContentResolver().insert(LocationEntry.CONTENT_URI, testValues);
//        long locationRowId = ContentUris.parseId(locationUri);
//
//        // Verify we got a row back.
//        assertTrue(locationRowId != -1);
//
//        // Data's inserted. IN THEORY. Now pull some out to stare at it and verify it made
//        // the round trip.
//
//        // A cursor is your primary interface to the query results.
//        Cursor cursor = mContext.getContentResolver().query(
//                LocationEntry.CONTENT_URI,
//                null, // leaving "columns" null just returns all the columns.
//                null, // cols for "where" clause
//                null, // values for "where" clause
//                null  // sort order
//        );
//
//        TestUtilities.validateCursor("testBulkInsert. Error validating LocationEntry.",
//                cursor, testValues);
//
//        // Now we can bulkInsert some weather. In fact, we only implement BulkInsert for weather
//        // entries. With ContentProviders, you really only have to implement the features you
//        // use, after all.
//        ContentValues[] bulkInsertContentValues = createBulkInsertWeatherValues(locationRowId);
//
//        // Register a content observer for our bulk insert.
//        TestUtilities.TestContentObserver weatherObserver = TestUtilities.getTestContentObserver();
//        mContext.getContentResolver().registerContentObserver(WeatherEntry.CONTENT_URI, true, weatherObserver);
//
//        int insertCount = mContext.getContentResolver().bulkInsert(WeatherEntry.CONTENT_URI, bulkInsertContentValues);
//
//        // Students: If this fails, it means that you most-likely are not calling the
//        // getContext().getContentResolver().notifyChange(uri, null); in your BulkInsert
//        // ContentProvider method.
//        weatherObserver.waitForNotificationOrFail();
//        mContext.getContentResolver().unregisterContentObserver(weatherObserver);
//
//        assertEquals(insertCount, BULK_INSERT_RECORDS_TO_INSERT);
//
//        // A cursor is your primary interface to the query results.
//        cursor = mContext.getContentResolver().query(
//                WeatherEntry.CONTENT_URI,
//                null, // leaving "columns" null just returns all the columns.
//                null, // cols for "where" clause
//                null, // values for "where" clause
//                WeatherEntry.COLUMN_DATE + " ASC"  // sort order == by DATE ASCENDING
//        );
//
//        // we should have as many records in the database as we've inserted
//        assertEquals(cursor.getCount(), BULK_INSERT_RECORDS_TO_INSERT);
//
//        // and let's make sure they match the ones we created
//        cursor.moveToFirst();
//        for ( int i = 0; i < BULK_INSERT_RECORDS_TO_INSERT; i++, cursor.moveToNext() ) {
//            TestUtilities.validateCurrentRecord("testBulkInsert.  Error validating WeatherEntry " + i,
//                    cursor, bulkInsertContentValues[i]);
//        }
//        cursor.close();
//    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.aggregations.bucket; import com.carrotsearch.hppc.LongHashSet; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.max.Max; import org.elasticsearch.search.aggregations.metrics.stats.Stats; import org.elasticsearch.search.aggregations.metrics.sum.Sum; import org.elasticsearch.test.ElasticsearchIntegrationTest; import org.hamcrest.Matchers; import org.junit.Test; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static 
org.elasticsearch.search.aggregations.AggregationBuilders.filter;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.max;
import static org.elasticsearch.search.aggregations.AggregationBuilders.stats;
import static org.elasticsearch.search.aggregations.AggregationBuilders.sum;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.core.IsNull.notNullValue;

/**
 * Integration tests for the {@code histogram} bucket aggregation: bucketing,
 * ordering, offsets, value scripts and sub-aggregations, on single- and
 * multi-valued long fields.
 */
@ElasticsearchIntegrationTest.SuiteScopeTest
public class HistogramTests extends ElasticsearchIntegrationTest {

    private static final String SINGLE_VALUED_FIELD_NAME = "l_value";
    private static final String MULTI_VALUED_FIELD_NAME = "l_values";

    // Randomized per suite: document count, histogram interval, and the
    // expected per-bucket counts derived from them (suite-scoped, hence static).
    static int numDocs;
    static int interval;
    static int numValueBuckets, numValuesBuckets;
    static long[] valueCounts, valuesCounts;

    /**
     * Indexes docs with values 1..numDocs in the single-valued field and
     * {i+1, i+2} in the multi-valued field, and precomputes the expected
     * doc count for each histogram bucket.
     */
    @Override
    public void setupSuiteScopeCluster() throws Exception {
        createIndex("idx");
        createIndex("idx_unmapped");

        numDocs = randomIntBetween(6, 20);
        interval = randomIntBetween(2, 5);

        // Bucket index for value v is v / interval; values run 1..numDocs.
        numValueBuckets = numDocs / interval + 1;
        valueCounts = new long[numValueBuckets];
        for (int i = 0; i < numDocs; i++) {
            final int bucket = (i + 1) / interval;
            valueCounts[bucket]++;
        }

        // Multi-valued field holds {i+1, i+2}; a doc counts once per distinct bucket.
        numValuesBuckets = (numDocs + 1) / interval + 1;
        valuesCounts = new long[numValuesBuckets];
        for (int i = 0; i < numDocs; i++) {
            final int bucket1 = (i + 1) / interval;
            final int bucket2 = (i + 2) / interval;
            valuesCounts[bucket1]++;
            if (bucket1 != bucket2) {
                valuesCounts[bucket2]++;
            }
        }

        List<IndexRequestBuilder> builders = new ArrayList<>();
        for (int i = 0; i < numDocs; i++) {
            builders.add(client().prepareIndex("idx", "type").setSource(jsonBuilder()
                    .startObject()
                    .field(SINGLE_VALUED_FIELD_NAME, i + 1)
                    .startArray(MULTI_VALUED_FIELD_NAME).value(i + 1).value(i + 2).endArray()
                    .field("tag", "tag" + i)
                    .endObject()));
        }

        assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer"));
        for (int i = 0; i < 2; i++) {
            builders.add(client().prepareIndex("empty_bucket_idx", "type", "" + i).setSource(jsonBuilder()
                    .startObject()
                    .field(SINGLE_VALUED_FIELD_NAME, i * 2)
                    .endObject()));
        }
        indexRandom(true, builders);
        ensureSearchable();
    }

    // Baseline: each bucket key is i*interval and holds the precomputed count.
    @Test
    public void singleValuedField() throws Exception {
        SearchResponse response = client().prepareSearch("idx")
                .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval))
                .execute().actionGet();

        assertSearchResponse(response);

        Histogram histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        List<? extends Bucket> buckets = histo.getBuckets();
        assertThat(buckets.size(), equalTo(numValueBuckets));

        for (int i = 0; i < numValueBuckets; ++i) {
            Histogram.Bucket bucket = buckets.get(i);
            assertThat(bucket, notNullValue());
            assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
            assertThat(bucket.getDocCount(), equalTo(valueCounts[i]));
        }
    }

    // NOTE(review): this method has no @Test annotation, so it never runs — and its
    // expectations look stale (it mixes the fixed interval1=10 with the random suite
    // `interval`). Confirm intent before annotating it.
    public void singleValuedField_withOffset() throws Exception {
        int interval1 = 10;
        int offset = 5;
        SearchResponse response = client()
                .prepareSearch("idx")
                .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval1).offset(offset))
                .execute().actionGet();

        // from setup we have between 6 and 20 documents, each with value 1 in test field
        int expectedNumberOfBuckets = (offset >= (numDocs % interval + 1)) ?
                numValueBuckets : numValueBuckets + 1;

        Histogram histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        assertThat(histo.getBuckets().size(), equalTo(expectedNumberOfBuckets));

        // first bucket should start at -5, contain 4 documents
        Histogram.Bucket bucket = histo.getBuckets().get(0);
        assertThat(bucket, notNullValue());
        assertThat(((Number) bucket.getKey()).longValue(), equalTo(-5L));
        assertThat(bucket.getDocCount(), equalTo(4L));

        // last bucket should have (numDocs % interval + 1) docs
        // NOTE(review): this fetches get(0) again — the FIRST bucket — while the comment and
        // the key/count assertions below clearly target the LAST bucket
        // (getBuckets().get(histo.getBuckets().size() - 1)). Also note the mix of `interval`
        // (random) and `interval1` (10) in the expectations. Confirm and fix before enabling
        // this test (it currently lacks @Test, so the bug is latent).
        bucket = histo.getBuckets().get(0);
        assertThat(bucket, notNullValue());
        assertThat(((Number) bucket.getKey()).longValue(), equalTo(numDocs%interval1 + 5L));
        assertThat(bucket.getDocCount(), equalTo((numDocs % interval) + 1L));
    }

    /**
     * Shift buckets by random offset between [2..interval]. From setup we have 1 doc per values from 1..numdocs.
     * Special care needs to be taken for expectations on counts in first and last bucket.
     */
    @Test
    public void singleValuedField_withRandomOffset() throws Exception {
        int offset = randomIntBetween(2, interval);
        SearchResponse response = client()
                .prepareSearch("idx")
                .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).offset(offset))
                .execute().actionGet();

        assertSearchResponse(response);
        // shifting by offset>2 creates new extra bucket [0,offset-1]
        // if offset is >= number of values in original last bucket, that effect is canceled
        int expectedNumberOfBuckets = (offset >= (numDocs % interval + 1)) ?
                numValueBuckets : numValueBuckets + 1;

        Histogram histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        assertThat(histo.getBuckets().size(), equalTo(expectedNumberOfBuckets));

        // Walk the buckets: first bucket holds values 1..offset-1, middle buckets are full,
        // the last bucket holds whatever is left over.
        int docsCounted = 0;
        for (int i = 0; i < expectedNumberOfBuckets; ++i) {
            Histogram.Bucket bucket = histo.getBuckets().get(i);
            assertThat(bucket, notNullValue());
            assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) ((i-1) * interval + offset)));
            if (i==0) {
                // first bucket
                long expectedFirstBucketCount = offset-1;
                assertThat(bucket.getDocCount(), equalTo(expectedFirstBucketCount));
                docsCounted += expectedFirstBucketCount;
            } else if(i<expectedNumberOfBuckets-1) {
                assertThat(bucket.getDocCount(), equalTo((long) interval));
                docsCounted += interval;
            } else {
                assertThat(bucket.getDocCount(), equalTo((long) numDocs - docsCounted));
            }
        }
    }

    // Explicit KEY_ASC order must match the default ascending-key iteration.
    @Test
    public void singleValuedField_OrderedByKeyAsc() throws Exception {
        SearchResponse response = client().prepareSearch("idx")
                .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.KEY_ASC))
                .execute().actionGet();

        assertSearchResponse(response);

        Histogram histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));

        // TODO: use diamond once JI-9019884 is fixed
        List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets());
        for (int i = 0; i < numValueBuckets; ++i) {
            Histogram.Bucket bucket = buckets.get(i);
            assertThat(bucket, notNullValue());
            assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
            assertThat(bucket.getDocCount(), equalTo(valueCounts[i]));
        }
    }

    // KEY_DESC: bucket i of the response is the (numValueBuckets-i-1)-th ascending bucket.
    @Test
    public void singleValuedField_OrderedByKeyDesc() throws Exception {
        SearchResponse response = client().prepareSearch("idx")
                .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.KEY_DESC))
                .execute().actionGet();

        assertSearchResponse(response);

        Histogram histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));

        // TODO: use diamond once JI-9019884 is fixed
        List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets());
        for (int i = 0; i < numValueBuckets; ++i) {
            // Reverse index: compare against the ascending expectation arrays.
            Histogram.Bucket bucket = buckets.get(numValueBuckets - i - 1);
            assertThat(bucket, notNullValue());
            assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
            assertThat(bucket.getDocCount(), equalTo(valueCounts[i]));
        }
    }

    // COUNT_ASC: keys arrive in arbitrary order, so track seen keys in a set and
    // only assert that doc counts are non-decreasing.
    @Test
    public void singleValuedField_OrderedByCountAsc() throws Exception {
        SearchResponse response = client().prepareSearch("idx")
                .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.COUNT_ASC))
                .execute().actionGet();

        assertSearchResponse(response);

        Histogram histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));

        LongHashSet buckets = new LongHashSet();
        // TODO: use diamond once JI-9019884 is fixed
        List<Histogram.Bucket> histoBuckets = new ArrayList<Histogram.Bucket>(histo.getBuckets());
        long previousCount = Long.MIN_VALUE;
        for (int i = 0; i < numValueBuckets; ++i) {
            Histogram.Bucket bucket = histoBuckets.get(i);
            assertThat(bucket, notNullValue());
            long key = ((Number) bucket.getKey()).longValue();
            assertEquals(0, key % interval);
            assertTrue(buckets.add(key));   // each key must appear exactly once
            assertThat(bucket.getDocCount(), equalTo(valueCounts[(int) (key / interval)]));
            assertThat(bucket.getDocCount(), greaterThanOrEqualTo(previousCount));
            previousCount = bucket.getDocCount();
        }
    }

    // COUNT_DESC: mirror of the above with non-increasing doc counts.
    @Test
    public void singleValuedField_OrderedByCountDesc() throws Exception {
        SearchResponse response = client().prepareSearch("idx")
                .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.COUNT_DESC))
                .execute().actionGet();

        assertSearchResponse(response);

        Histogram histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));

        LongHashSet buckets = new LongHashSet();
        // TODO: use diamond once JI-9019884 is fixed
        List<Histogram.Bucket> histoBuckets = new ArrayList<Histogram.Bucket>(histo.getBuckets());
        long previousCount = Long.MAX_VALUE;
        for (int i = 0; i < numValueBuckets; ++i) {
            Histogram.Bucket bucket = histoBuckets.get(i);
            assertThat(bucket, notNullValue());
            long key = ((Number) bucket.getKey()).longValue();
            assertEquals(0, key % interval);
            assertTrue(buckets.add(key));
            assertThat(bucket.getDocCount(), equalTo(valueCounts[(int) (key / interval)]));
            assertThat(bucket.getDocCount(), lessThanOrEqualTo(previousCount));
            previousCount = bucket.getDocCount();
        }
    }

    // Per-bucket sum sub-aggregation; also checks the aggregation property paths
    // (_key, _count, sum.value) expose the same data.
    @Test
    public void singleValuedField_WithSubAggregation() throws Exception {
        SearchResponse response = client().prepareSearch("idx")
                .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
                        .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
                .execute().actionGet();

        assertSearchResponse(response);

        Histogram histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));
        Object[] propertiesKeys = (Object[]) histo.getProperty("_key");
        Object[] propertiesDocCounts = (Object[]) histo.getProperty("_count");
        Object[] propertiesCounts = (Object[]) histo.getProperty("sum.value");

        // TODO: use diamond once JI-9019884 is fixed
        List<Histogram.Bucket> buckets = new
                ArrayList<Histogram.Bucket>(histo.getBuckets());
        for (int i = 0; i < numValueBuckets; ++i) {
            Histogram.Bucket bucket = buckets.get(i);
            assertThat(bucket, notNullValue());
            assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
            assertThat(bucket.getDocCount(), equalTo(valueCounts[i]));
            assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
            Sum sum = bucket.getAggregations().get("sum");
            assertThat(sum, notNullValue());
            // Recompute the expected sum of the values that fall into bucket i.
            long s = 0;
            for (int j = 0; j < numDocs; ++j) {
                if ((j + 1) / interval == i) {
                    s += j + 1;
                }
            }
            assertThat(sum.getValue(), equalTo((double) s));
            assertThat((long) propertiesKeys[i], equalTo((long) i * interval));
            assertThat((long) propertiesDocCounts[i], equalTo(valueCounts[i]));
            assertThat((double) propertiesCounts[i], equalTo((double) s));
        }
    }

    // Same as above, but the sum() has no explicit field and must inherit the
    // histogram's value source.
    @Test
    public void singleValuedField_WithSubAggregation_Inherited() throws Exception {
        SearchResponse response = client().prepareSearch("idx")
                .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
                        .subAggregation(sum("sum")))
                .execute().actionGet();

        assertSearchResponse(response);

        Histogram histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));

        // TODO: use diamond once JI-9019884 is fixed
        List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets());
        for (int i = 0; i < numValueBuckets; ++i) {
            Histogram.Bucket bucket = buckets.get(i);
            assertThat(bucket, notNullValue());
            assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
            assertThat(bucket.getDocCount(), equalTo(valueCounts[i]));
            assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
            Sum sum = bucket.getAggregations().get("sum");
            assertThat(sum, notNullValue());
            long s = 0;
            for (int j = 0; j < numDocs; ++j) {
                if ((j + 1) / interval == i) {
                    s += j + 1;
                }
            }
            assertThat(sum.getValue(), equalTo((double) s));
        }
    }

    // Buckets ordered by ascending value of the "sum" sub-aggregation.
    @Test
    public void singleValuedField_OrderedBySubAggregationAsc() throws Exception {
        SearchResponse response = client().prepareSearch("idx")
                .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.aggregation("sum", true))
                        .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
                .execute().actionGet();

        assertSearchResponse(response);

        Histogram histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));

        LongHashSet visited = new LongHashSet();
        double previousSum = Double.NEGATIVE_INFINITY;
        // TODO: use diamond once JI-9019884 is fixed
        List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets());
        for (int i = 0; i < numValueBuckets; ++i) {
            Histogram.Bucket bucket = buckets.get(i);
            assertThat(bucket, notNullValue());
            long key = ((Number) bucket.getKey()).longValue();
            assertTrue(visited.add(key));
            int b = (int) (key / interval);
            assertThat(bucket.getDocCount(), equalTo(valueCounts[b]));
            assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
            Sum sum = bucket.getAggregations().get("sum");
            assertThat(sum, notNullValue());
            long s = 0;
            for (int j = 0; j < numDocs; ++j) {
                if ((j + 1) / interval == b) {
                    s += j + 1;
                }
            }
            assertThat(sum.getValue(), equalTo((double) s));
            assertThat(sum.getValue(), greaterThanOrEqualTo(previousSum));
            previousSum = s;
        }
    }

    // Buckets ordered by descending value of the "sum" sub-aggregation.
    @Test
    public void singleValuedField_OrderedBySubAggregationDesc() throws Exception {
        SearchResponse response = client().prepareSearch("idx")
                .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.aggregation("sum", false))
                        .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
                .execute().actionGet();

        assertSearchResponse(response);

        Histogram histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));

        LongHashSet visited = new LongHashSet();
        double previousSum = Double.POSITIVE_INFINITY;
        // TODO: use diamond once JI-9019884 is fixed
        List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets());
        for (int i = 0; i < numValueBuckets; ++i) {
            Histogram.Bucket bucket = buckets.get(i);
            assertThat(bucket, notNullValue());
            long key = ((Number) bucket.getKey()).longValue();
            assertTrue(visited.add(key));
            int b = (int) (key / interval);
            assertThat(bucket.getDocCount(), equalTo(valueCounts[b]));
            assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
            Sum sum = bucket.getAggregations().get("sum");
            assertThat(sum, notNullValue());
            long s = 0;
            for (int j = 0; j < numDocs; ++j) {
                if ((j + 1) / interval == b) {
                    s += j + 1;
                }
            }
            assertThat(sum.getValue(), equalTo((double) s));
            assertThat(sum.getValue(), lessThanOrEqualTo(previousSum));
            previousSum = s;
        }
    }

    // Order by a single value ("stats.sum") of a multi-valued sub-aggregation,
    // with the stats source inherited from the histogram.
    @Test
    public void singleValuedField_OrderedByMultiValuedSubAggregationAsc_Inherited() throws Exception {
        SearchResponse response = client().prepareSearch("idx")
                .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.aggregation("stats.sum", true))
                        .subAggregation(stats("stats")))
                .execute().actionGet();

        assertSearchResponse(response);

        Histogram histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));

        LongHashSet visited = new LongHashSet();
        double previousSum = Double.NEGATIVE_INFINITY;
        // TODO: use diamond once JI-9019884 is fixed
        List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets());
        for (int i = 0; i < numValueBuckets; ++i) {
            Histogram.Bucket bucket = buckets.get(i);
            assertThat(bucket, notNullValue());
            long key = ((Number) bucket.getKey()).longValue();
            assertTrue(visited.add(key));
            int b = (int) (key / interval);
            assertThat(bucket.getDocCount(), equalTo(valueCounts[b]));
            assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
            Stats stats = bucket.getAggregations().get("stats");
            assertThat(stats, notNullValue());
            long s = 0;
            for (int j = 0; j < numDocs; ++j) {
                if ((j + 1) / interval == b) {
                    s += j + 1;
                }
            }
            assertThat(stats.getSum(), equalTo((double) s));
            assertThat(stats.getSum(), greaterThanOrEqualTo(previousSum));
            previousSum = s;
        }
    }

    // Descending variant of stats.sum ordering, with an explicit stats field.
    @Test
    public void singleValuedField_OrderedByMultiValuedSubAggregationDesc() throws Exception {
        SearchResponse response = client().prepareSearch("idx")
                .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.aggregation("stats.sum", false))
                        .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)))
                .execute().actionGet();

        assertSearchResponse(response);

        Histogram histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));

        LongHashSet visited = new LongHashSet();
        double previousSum = Double.POSITIVE_INFINITY;
        // TODO: use diamond once JI-9019884 is fixed
        List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets());
        for (int i = 0; i < numValueBuckets; ++i) {
            Histogram.Bucket bucket = buckets.get(i);
            assertThat(bucket, notNullValue());
            long key = ((Number) bucket.getKey()).longValue();
            assertTrue(visited.add(key));
            int b = (int) (key / interval);
            assertThat(bucket.getDocCount(), equalTo(valueCounts[b]));
            assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
            Stats stats = bucket.getAggregations().get("stats");
            assertThat(stats, notNullValue());
            long s = 0;
            for (int j = 0; j < numDocs; ++j) {
                if ((j + 1) / interval == b) {
                    s += j + 1;
                }
            }
            assertThat(stats.getSum(), equalTo((double) s));
            assertThat(stats.getSum(), lessThanOrEqualTo(previousSum));
            previousSum = s;
        }
    }

    // Order by a deep aggregation path ("filter>max") with a randomized direction.
    @Test
    public void singleValuedField_OrderedBySubAggregationDesc_DeepOrderPath() throws Exception {
        boolean asc = randomBoolean();
        SearchResponse response = client().prepareSearch("idx")
                .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.aggregation("filter>max", asc))
                        .subAggregation(filter("filter").filter(matchAllQuery())
                                .subAggregation(max("max").field(SINGLE_VALUED_FIELD_NAME))))
                .execute().actionGet();

        assertSearchResponse(response);

        Histogram histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));

        LongHashSet visited = new LongHashSet();
        double prevMax = asc ? Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY;
        // TODO: use diamond once JI-9019884 is fixed
        List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets());
        for (int i = 0; i < numValueBuckets; ++i) {
            Histogram.Bucket bucket = buckets.get(i);
            assertThat(bucket, notNullValue());
            long key = ((Number) bucket.getKey()).longValue();
            assertTrue(visited.add(key));
            int b = (int) (key / interval);
            assertThat(bucket.getDocCount(), equalTo(valueCounts[b]));
            assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
            Filter filter = bucket.getAggregations().get("filter");
            assertThat(filter, notNullValue());
            // match_all filter keeps every doc in the bucket.
            assertThat(bucket.getDocCount(), equalTo(filter.getDocCount()));
            Max max = filter.getAggregations().get("max");
            assertThat(max, Matchers.notNullValue());
            assertThat(max.getValue(), asc ? greaterThanOrEqualTo(prevMax) : lessThanOrEqualTo(prevMax));
            prevMax = max.getValue();
        }
    }

    // Value script shifts every value by +1, so bucket boundaries shift accordingly.
    @Test
    public void singleValuedField_WithValueScript() throws Exception {
        SearchResponse response = client().prepareSearch("idx")
                .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).script("_value + 1").interval(interval))
                .execute().actionGet();

        assertSearchResponse(response);

        // Scripted values run 2..numDocs+1; recompute bucket counts for that range.
        final int numBuckets = (numDocs + 1) / interval - 2 / interval + 1;
        final long[] counts = new long[(numDocs + 1) / interval + 1];
        for (int i = 0; i < numDocs; ++i) {
            ++counts[(i + 2) / interval];
        }

        Histogram histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        List<? extends Bucket> buckets = histo.getBuckets();
        assertThat(buckets.size(), equalTo(numBuckets));

        for (int i = 0; i < numBuckets; i++) {
            Histogram.Bucket bucket = buckets.get(i);
            assertThat(bucket, notNullValue());
            int key = ((2 / interval) + i) * interval;
            assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) key));
            assertThat(bucket.getDocCount(), equalTo(counts[key / interval]));
        }
    }

    // Multi-valued field: a doc lands in each distinct bucket of its values.
    @Test
    public void multiValuedField() throws Exception {
        SearchResponse response = client().prepareSearch("idx")
                .addAggregation(histogram("histo").field(MULTI_VALUED_FIELD_NAME).interval(interval))
                .execute().actionGet();

        assertSearchResponse(response);

        Histogram histo = response.getAggregations().get("histo");
        assertThat(histo, notNullValue());
        assertThat(histo.getName(), equalTo("histo"));
        List<?
extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(numValuesBuckets)); for (int i = 0; i < numValuesBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valuesCounts[i])); } } @Test public void multiValuedField_OrderedByKeyDesc() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(MULTI_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.KEY_DESC)) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValuesBuckets)); // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets()); for (int i = 0; i < numValuesBuckets; ++i) { Histogram.Bucket bucket = buckets.get(numValuesBuckets - i - 1); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valuesCounts[i])); } } @Test public void multiValuedField_WithValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(MULTI_VALUED_FIELD_NAME).script("_value + 1").interval(interval)) .execute().actionGet(); assertSearchResponse(response); final int numBuckets = (numDocs + 2) / interval - 2 / interval + 1; final long[] counts = new long[(numDocs + 2) / interval + 1]; for (int i = 0; i < numDocs; ++i) { final int bucket1 = (i + 2) / interval; final int bucket2 = (i + 3) / interval; ++counts[bucket1]; if (bucket1 != bucket2) { ++counts[bucket2]; } } Histogram histo = response.getAggregations().get("histo"); assertThat(histo, 
notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(numBuckets)); for (int i = 0; i < numBuckets; i++) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); int key = ((2 / interval) + i) * interval; assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) key)); assertThat(bucket.getDocCount(), equalTo(counts[key / interval])); } } @Test public void multiValuedField_WithValueScript_WithInheritedSubAggregator() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(MULTI_VALUED_FIELD_NAME).script("_value + 1").interval(interval) .subAggregation(terms(MULTI_VALUED_FIELD_NAME) .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.term(true)))) .execute().actionGet(); assertSearchResponse(response); final int numBuckets = (numDocs + 2) / interval - 2 / interval + 1; final long[] counts = new long[(numDocs + 2) / interval + 1]; for (int i = 0; i < numDocs; ++i) { final int bucket1 = (i + 2) / interval; final int bucket2 = (i + 3) / interval; ++counts[bucket1]; if (bucket1 != bucket2) { ++counts[bucket2]; } } Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? 
extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(numBuckets)); for (int i = 0; i < numBuckets; i++) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); int key = ((2 / interval) + i) * interval; assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) key)); assertThat(bucket.getDocCount(), equalTo(counts[key / interval])); Terms terms = bucket.getAggregations().get(MULTI_VALUED_FIELD_NAME); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo(MULTI_VALUED_FIELD_NAME)); int minTerm = Math.max(2, key - 1); int maxTerm = Math.min(numDocs + 2, (key / interval + 1) * interval); assertThat(terms.getBuckets().size(), equalTo(maxTerm - minTerm + 1)); Iterator<Terms.Bucket> iter = terms.getBuckets().iterator(); for (int j = minTerm; j <= maxTerm; ++j) { assertThat(iter.next().getKeyAsNumber().longValue(), equalTo((long) j)); } } } @Test public void script_SingleValue() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value").interval(interval)) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? 
extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(numValueBuckets)); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); } } @Test public void script_SingleValue_WithSubAggregator_Inherited() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value").interval(interval) .subAggregation(sum("sum"))) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); assertThat(histo.getBuckets().size(), equalTo(numValueBuckets)); // TODO: use diamond once JI-9019884 is fixed List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets()); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); Sum sum = bucket.getAggregations().get("sum"); assertThat(sum, notNullValue()); long s = 0; for (int j = 0; j < numDocs; ++j) { if ((j + 1) / interval == i) { s += j + 1; } } assertThat(sum.getValue(), equalTo((double) s)); } } @Test public void script_MultiValued() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").script("doc['" + MULTI_VALUED_FIELD_NAME + "']").interval(interval)) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), 
equalTo("histo")); List<? extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(numValuesBuckets)); for (int i = 0; i < numValuesBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valuesCounts[i])); } } @Test public void script_MultiValued_WithAggregatorInherited() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").script("doc['" + MULTI_VALUED_FIELD_NAME + "']").interval(interval) .subAggregation(sum("sum"))) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(numValuesBuckets)); for (int i = 0; i < numValuesBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valuesCounts[i])); assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); Sum sum = bucket.getAggregations().get("sum"); assertThat(sum, notNullValue()); long s = 0; for (int j = 0; j < numDocs; ++j) { if ((j + 1) / interval == i || (j + 2) / interval == i) { s += j + 1; s += j + 2; } } assertThat(sum.getValue(), equalTo((double) s)); } } @Test public void unmapped() throws Exception { SearchResponse response = client().prepareSearch("idx_unmapped") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); 
assertThat(histo.getBuckets().size(), equalTo(0)); } @Test public void partiallyUnmapped() throws Exception { SearchResponse response = client().prepareSearch("idx", "idx_unmapped") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(numValueBuckets)); for (int i = 0; i < numValueBuckets; ++i) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); } } @Test public void emptyAggregation() throws Exception { SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") .setQuery(matchAllQuery()) .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1l).minDocCount(0) .subAggregation(histogram("sub_histo").interval(1l))) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l)); Histogram histo = searchResponse.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); List<? 
extends Bucket> buckets = histo.getBuckets(); Histogram.Bucket bucket = buckets.get(1); assertThat(bucket, Matchers.notNullValue()); histo = bucket.getAggregations().get("sub_histo"); assertThat(histo, Matchers.notNullValue()); assertThat(histo.getName(), equalTo("sub_histo")); assertThat(histo.getBuckets().isEmpty(), is(true)); } @Test public void singleValuedField_WithExtendedBounds() throws Exception { int lastDataBucketKey = (numValueBuckets - 1) * interval; // randomizing the number of buckets on the min bound // (can sometimes fall within the data range, but more frequently will fall before the data range) int addedBucketsLeft = randomIntBetween(0, numValueBuckets); long boundsMinKey = addedBucketsLeft * interval; if (frequently()) { boundsMinKey = -boundsMinKey; } else { addedBucketsLeft = 0; } long boundsMin = boundsMinKey + randomIntBetween(0, interval - 1); // randomizing the number of buckets on the max bound // (can sometimes fall within the data range, but more frequently will fall after the data range) int addedBucketsRight = randomIntBetween(0, numValueBuckets); long boundsMaxKeyDelta = addedBucketsRight * interval; if (rarely()) { addedBucketsRight = 0; boundsMaxKeyDelta = -boundsMaxKeyDelta; } long boundsMaxKey = lastDataBucketKey + boundsMaxKeyDelta; long boundsMax = boundsMaxKey + randomIntBetween(0, interval - 1); // it could be that the random bounds.min we chose ended up greater than bounds.max - this should cause an // error boolean invalidBoundsError = boundsMin > boundsMax; // constructing the newly expected bucket list int bucketsCount = numValueBuckets + addedBucketsLeft + addedBucketsRight; long[] extendedValueCounts = new long[bucketsCount]; System.arraycopy(valueCounts, 0, extendedValueCounts, addedBucketsLeft, valueCounts.length); SearchResponse response = null; try { response = client().prepareSearch("idx") .addAggregation(histogram("histo") .field(SINGLE_VALUED_FIELD_NAME) .interval(interval) .minDocCount(0) 
.extendedBounds(boundsMin, boundsMax)) .execute().actionGet(); if (invalidBoundsError) { fail("Expected an exception to be thrown when bounds.min is greater than bounds.max"); return; } } catch (Exception e) { if (invalidBoundsError) { // expected return; } else { throw e; } } assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); List<? extends Bucket> buckets = histo.getBuckets(); assertThat(buckets.size(), equalTo(bucketsCount)); long key = Math.min(boundsMinKey, 0); for (int i = 0; i < bucketsCount; i++) { Histogram.Bucket bucket = buckets.get(i); assertThat(bucket, notNullValue()); assertThat(((Number) bucket.getKey()).longValue(), equalTo(key)); assertThat(bucket.getDocCount(), equalTo(extendedValueCounts[i])); key += interval; } } /** * see issue #9634, negative interval in histogram should raise exception */ public void testExeptionOnNegativerInterval() { try { client().prepareSearch("empty_bucket_idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(-1).minDocCount(0)).execute().actionGet(); fail(); } catch (SearchPhaseExecutionException e) { assertThat(e.toString(), containsString("Missing required field [interval]")); } } }
package support; import java.util.ArrayList; import java.util.Comparator; import java.util.HashMap; import java.util.List; // TODO convert to enum public class Genres { private static HashMap<String, Integer> nameToId; private static void initializeMapping() { nameToId = new HashMap<>(); nameToId.put("Blues", 0); nameToId.put("Classic Rock", 1); nameToId.put("Country", 2); nameToId.put("Dance", 3); nameToId.put("Disco", 4); nameToId.put("Funk", 5); nameToId.put("Grunge", 6); nameToId.put("Hip-Hop", 7); nameToId.put("Jazz", 8); nameToId.put("Metal", 9); nameToId.put("New Age", 10); nameToId.put("Oldies", 11); nameToId.put("Other", 12); nameToId.put("Pop", 13); nameToId.put("R&B", 14); nameToId.put("Rap", 15); nameToId.put("Reggae", 16); nameToId.put("Rock", 17); nameToId.put("Techno", 18); nameToId.put("Industrial", 19); nameToId.put("Alternative", 20); nameToId.put("Ska", 21); nameToId.put("Death Metal", 22); nameToId.put("Pranks", 23); nameToId.put("Soundtrack", 24); nameToId.put("Euro-Techno", 25); nameToId.put("Ambient", 26); nameToId.put("Trip-Hop", 27); nameToId.put("Vocal", 28); nameToId.put("Jazz+Funk", 29); nameToId.put("Fusion", 30); nameToId.put("Trance", 31); nameToId.put("Classical", 32); nameToId.put("Instrumental", 33); nameToId.put("Acid", 34); nameToId.put("House", 35); nameToId.put("Game", 36); nameToId.put("Sound Clip", 37); nameToId.put("Gospel", 38); nameToId.put("Noise", 39); nameToId.put("Alternative Rock", 40); nameToId.put("Bass", 41); nameToId.put("Soul", 42); nameToId.put("Punk", 43); nameToId.put("Space", 44); nameToId.put("Meditative", 45); nameToId.put("Instrumental Pop", 46); nameToId.put("Instrumental Rock", 47); nameToId.put("Ethnic", 48); nameToId.put("Gothic", 49); nameToId.put("Darkwave", 50); nameToId.put("Techno-Industrial", 51); nameToId.put("Electronic", 52); nameToId.put("Pop-Folk", 53); nameToId.put("Eurodance", 54); nameToId.put("Dream", 55); nameToId.put("Southern Rock", 56); nameToId.put("Comedy", 57); 
nameToId.put("Cult", 58); nameToId.put("Gangsta", 59); nameToId.put("Top 40", 60); nameToId.put("Christian Rap", 61); nameToId.put("Pop/Funk", 62); nameToId.put("Jungle", 63); nameToId.put("Native US", 64); nameToId.put("Cabaret", 65); nameToId.put("New Wave", 66); nameToId.put("Psychadelic", 67); nameToId.put("Rave", 68); nameToId.put("Showtunes", 69); nameToId.put("Trailer", 70); nameToId.put("Lo-Fi", 71); nameToId.put("Tribal", 72); nameToId.put("Acid Punk", 73); nameToId.put("Acid Jazz", 74); nameToId.put("Polka", 75); nameToId.put("Retro", 76); nameToId.put("Musical", 77); nameToId.put("Rock & Roll", 78); nameToId.put("Hard Rock", 79); nameToId.put("Folk", 80); nameToId.put("Folk-Rock", 81); nameToId.put("National Folk", 82); nameToId.put("Swing", 83); nameToId.put("Fast Fusion", 84); nameToId.put("Bebob", 85); nameToId.put("Latin", 86); nameToId.put("Revival", 87); nameToId.put("Celtic", 88); nameToId.put("Bluegrass", 89); nameToId.put("Avantgarde", 90); nameToId.put("Gothic Rock", 91); nameToId.put("Progressive Rock", 92); nameToId.put("Psychedelic Rock", 93); nameToId.put("Symphonic Rock", 94); nameToId.put("Slow Rock", 95); nameToId.put("Big Band", 96); nameToId.put("Chorus", 97); nameToId.put("Easy Listening", 98); nameToId.put("Acoustic", 99); nameToId.put("Humour", 100); nameToId.put("Speech", 101); nameToId.put("Chanson", 102); nameToId.put("Opera", 103); nameToId.put("Chamber Music", 104); nameToId.put("Sonata", 105); nameToId.put("Symphony", 106); nameToId.put("Booty Bass", 107); nameToId.put("Primus", 108); nameToId.put("Porn Groove", 109); nameToId.put("Satire", 110); nameToId.put("Slow Jam", 111); nameToId.put("Club", 112); nameToId.put("Tango", 113); nameToId.put("Samba", 114); nameToId.put("Folklore", 115); nameToId.put("Ballad", 116); nameToId.put("Power Ballad", 117); nameToId.put("Rhytmic Soul", 118); nameToId.put("Freestyle", 119); nameToId.put("Duet", 120); nameToId.put("Punk Rock", 121); nameToId.put("Drum Solo", 122); 
nameToId.put("Acapella", 123); nameToId.put("Euro-House", 124); nameToId.put("Dance Hall", 125); nameToId.put("Goa", 126); nameToId.put("Drum & Bass", 127); nameToId.put("Club-House", 128); nameToId.put("Hardcore", 129); nameToId.put("Terror", 130); nameToId.put("Indie", 131); nameToId.put("BritPop", 132); nameToId.put("Negerpunk", 133); nameToId.put("Polsk Punk", 134); nameToId.put("Beat", 135); nameToId.put("Christian Gangsta", 136); nameToId.put("Heavy Metal", 137); nameToId.put("Black Metal", 138); nameToId.put("Crossover", 139); nameToId.put("Contemporary C", 140); nameToId.put("Christian Rock", 141); nameToId.put("Merengue", 142); nameToId.put("Salsa", 143); nameToId.put("Thrash Metal", 144); nameToId.put("Anime", 145); nameToId.put("JPop", 146); nameToId.put("SynthPop", 147); } public static int getIDForName(String name) { if(nameToId == null) { initializeMapping(); } Integer id = nameToId.get(name); return id == null ? -1 : id; } public static List<String> containsIgnoreCase(String name) { List<String> possibles = new ArrayList<>(); if(nameToId == null) { initializeMapping(); } for(String str : nameToId.keySet()) { if(str.toLowerCase().contains(name.toLowerCase())) { possibles.add(str); } } possibles.sort(new Comparator<String>() { @Override public int compare(String o1, String o2) { return o1.compareToIgnoreCase(o2); } }); return possibles; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * Forked from OkHttp 2.5.0
 */

package io.grpc.okhttp.internal;

import java.security.cert.Certificate;
import java.security.cert.CertificateParsingException;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.regex.Pattern;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.SSLException;
import javax.net.ssl.SSLSession;
import javax.security.auth.x500.X500Principal;

/**
 * A HostnameVerifier consistent with <a
 * href="http://www.ietf.org/rfc/rfc2818.txt">RFC 2818</a>.
 *
 * <p>NOTE(review): security-critical code forked from OkHttp; the code below is
 * intentionally left byte-for-byte unchanged — only documentation was added.
 */
public final class OkHostnameVerifier implements HostnameVerifier {
  public static final OkHostnameVerifier INSTANCE = new OkHostnameVerifier();

  /**
   * Quick and dirty pattern to differentiate IP addresses from hostnames. This
   * is an approximation of Android's private InetAddress#isNumeric API.
   *
   * <p>This matches IPv6 addresses as a hex string containing at least one
   * colon, and possibly including dots after the first colon. It matches IPv4
   * addresses as strings containing only decimal digits and dots. This pattern
   * matches strings like "a:.23" and "54" that are neither IP addresses nor
   * hostnames; they will be verified as IP addresses (which is a more strict
   * verification).
   */
  private static final Pattern VERIFY_AS_IP_ADDRESS = Pattern.compile(
      "([0-9a-fA-F]*:[0-9a-fA-F:.]*)|([\\d.]+)");

  // subjectAltName entry-type codes used below; the names indicate these are the
  // DNS-name and IP-address GeneralName tags — TODO confirm against RFC 5280.
  private static final int ALT_DNS_NAME = 2;
  private static final int ALT_IPA_NAME = 7;

  // Singleton: use INSTANCE.
  private OkHostnameVerifier() {
  }

  /**
   * Verifies the session's peer certificate (the first certificate in the
   * chain) against {@code host}. Returns {@code false} if the peer is
   * unverified (SSLException) rather than propagating the error.
   */
  @Override
  public boolean verify(String host, SSLSession session) {
    try {
      Certificate[] certificates = session.getPeerCertificates();
      return verify(host, (X509Certificate) certificates[0]);
    } catch (SSLException e) {
      return false;
    }
  }

  /** Dispatches to IP-address or DNS-hostname verification based on the shape of {@code host}. */
  public boolean verify(String host, X509Certificate certificate) {
    return verifyAsIpAddress(host)
        ? verifyIpAddress(host, certificate)
        : verifyHostName(host, certificate);
  }

  /** Returns true if {@code host} looks like an IP address (see VERIFY_AS_IP_ADDRESS). */
  static boolean verifyAsIpAddress(String host) {
    return VERIFY_AS_IP_ADDRESS.matcher(host).matches();
  }

  /**
   * Returns true if {@code certificate} matches {@code ipAddress}.
   */
  private boolean verifyIpAddress(String ipAddress, X509Certificate certificate) {
    // Only subjectAltName iPAddress entries are consulted; the CN is never
    // used for IP addresses.
    List<String> altNames = getSubjectAltNames(certificate, ALT_IPA_NAME);
    for (int i = 0, size = altNames.size(); i < size; i++) {
      if (ipAddress.equalsIgnoreCase(altNames.get(i))) {
        return true;
      }
    }
    return false;
  }

  /**
   * Returns true if {@code certificate} matches {@code hostName}.
   */
  private boolean verifyHostName(String hostName, X509Certificate certificate) {
    hostName = hostName.toLowerCase(Locale.US);
    boolean hasDns = false;
    List<String> altNames = getSubjectAltNames(certificate, ALT_DNS_NAME);
    for (int i = 0, size = altNames.size(); i < size; i++) {
      hasDns = true;
      if (verifyHostName(hostName, altNames.get(i))) {
        return true;
      }
    }

    // Fall back to the subject CN only when the certificate carries no DNS
    // subjectAltName entries at all.
    if (!hasDns) {
      X500Principal principal = certificate.getSubjectX500Principal();
      // RFC 2818 advises using the most specific name for matching.
      String cn = new DistinguishedNameParser(principal).findMostSpecific("cn");
      if (cn != null) {
        return verifyHostName(hostName, cn);
      }
    }

    return false;
  }

  /** Returns all subjectAltName values of type iPAddress and dNSName, in that order. */
  @SuppressWarnings("MixedMutabilityReturnType")
  public static List<String> allSubjectAltNames(X509Certificate certificate) {
    List<String> altIpaNames = getSubjectAltNames(certificate, ALT_IPA_NAME);
    List<String> altDnsNames = getSubjectAltNames(certificate, ALT_DNS_NAME);
    List<String> result = new ArrayList<>(altIpaNames.size() + altDnsNames.size());
    result.addAll(altIpaNames);
    result.addAll(altDnsNames);
    return result;
  }

  /**
   * Extracts subjectAltName string values of the given {@code type} from the
   * certificate. Malformed entries are skipped; a parsing failure or an absent
   * extension yields an empty list (i.e. verification will simply not match).
   */
  @SuppressWarnings("MixedMutabilityReturnType")
  private static List<String> getSubjectAltNames(X509Certificate certificate, int type) {
    List<String> result = new ArrayList<>();
    try {
      Collection<?> subjectAltNames = certificate.getSubjectAlternativeNames();
      if (subjectAltNames == null) {
        return Collections.emptyList();
      }
      for (Object subjectAltName : subjectAltNames) {
        // Each entry is a 2-element list: [Integer type, value].
        List<?> entry = (List<?>) subjectAltName;
        if (entry == null || entry.size() < 2) {
          continue;
        }
        Integer altNameType = (Integer) entry.get(0);
        if (altNameType == null) {
          continue;
        }
        if (altNameType == type) {
          String altName = (String) entry.get(1);
          if (altName != null) {
            result.add(altName);
          }
        }
      }
      return result;
    } catch (CertificateParsingException e) {
      return Collections.emptyList();
    }
  }

  /**
   * Returns {@code true} iff {@code hostName} matches the domain name {@code pattern}.
   *
   * @param hostName lower-case host name.
   * @param pattern domain name pattern from certificate. May be a wildcard pattern such as
   *     {@code *.android.com}.
   */
  private boolean verifyHostName(String hostName, String pattern) {
    // Basic sanity checks
    // Check length == 0 instead of .isEmpty() to support Java 5.
    if (hostName == null || hostName.length() == 0 || hostName.startsWith(".")
        || hostName.endsWith("..")) {
      // Invalid domain name
      return false;
    }
    if (pattern == null || pattern.length() == 0 || pattern.startsWith(".")
        || pattern.endsWith("..")) {
      // Invalid pattern/domain name
      return false;
    }

    // Normalize hostName and pattern by turning them into absolute domain names if they are not
    // yet absolute. This is needed because server certificates do not normally contain absolute
    // names or patterns, but they should be treated as absolute. At the same time, any hostName
    // presented to this method should also be treated as absolute for the purposes of matching
    // to the server certificate.
    //   www.android.com  matches www.android.com
    //   www.android.com  matches www.android.com.
    //   www.android.com. matches www.android.com.
    //   www.android.com. matches www.android.com
    if (!hostName.endsWith(".")) {
      hostName += '.';
    }
    if (!pattern.endsWith(".")) {
      pattern += '.';
    }
    // hostName and pattern are now absolute domain names.

    pattern = pattern.toLowerCase(Locale.US);
    // hostName and pattern are now in lower case -- domain names are case-insensitive.

    if (!pattern.contains("*")) {
      // Not a wildcard pattern -- hostName and pattern must match exactly.
      return hostName.equals(pattern);
    }
    // Wildcard pattern

    // WILDCARD PATTERN RULES:
    // 1. Asterisk (*) is only permitted in the left-most domain name label and must be the
    //    only character in that label (i.e., must match the whole left-most label).
    //    For example, *.example.com is permitted, while *a.example.com, a*.example.com,
    //    a*b.example.com, a.*.example.com are not permitted.
    // 2. Asterisk (*) cannot match across domain name labels.
    //    For example, *.example.com matches test.example.com but does not match
    //    sub.test.example.com.
    // 3. Wildcard patterns for single-label domain names are not permitted.

    if (!pattern.startsWith("*.") || pattern.indexOf('*', 1) != -1) {
      // Asterisk (*) is only permitted in the left-most domain name label and must be the only
      // character in that label
      return false;
    }

    // Optimization: check whether hostName is too short to match the pattern. hostName must be at
    // least as long as the pattern because asterisk must match the whole left-most label and
    // hostName starts with a non-empty label. Thus, asterisk has to match one or more characters.
    if (hostName.length() < pattern.length()) {
      // hostName too short to match the pattern.
      return false;
    }

    if ("*.".equals(pattern)) {
      // Wildcard pattern for single-label domain name -- not permitted.
      return false;
    }

    // hostName must end with the region of pattern following the asterisk.
    String suffix = pattern.substring(1);
    if (!hostName.endsWith(suffix)) {
      // hostName does not end with the suffix
      return false;
    }

    // Check that asterisk did not match across domain name labels.
    int suffixStartIndexInHostName = hostName.length() - suffix.length();
    if (suffixStartIndexInHostName > 0
        && hostName.lastIndexOf('.', suffixStartIndexInHostName - 1) != -1) {
      // Asterisk is matching across domain name labels -- not permitted.
      return false;
    }

    // hostName matches pattern
    return true;
  }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.addthis.hydra.job.web.resources; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import com.addthis.codec.json.CodecJSON; import com.addthis.hydra.job.HostFailWorker; import com.addthis.hydra.job.RebalanceOutcome; import com.addthis.hydra.job.auth.PermissionsManager; import com.addthis.hydra.job.spawn.Spawn; import com.addthis.hydra.job.mq.HostState; import com.addthis.maljson.JSONArray; import com.addthis.maljson.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @Path("/host") public class HostResource { private static final Logger log = LoggerFactory.getLogger(HostResource.class); private final Spawn spawn; private final HostFailWorker hostFailWorker; private final PermissionsManager permissionsManager; public HostResource(Spawn spawn) { this.spawn = spawn; this.hostFailWorker = spawn.getHostFailWorker(); this.permissionsManager = spawn.getPermissionsManager(); } @GET @Path("/rebalance") @Produces(MediaType.APPLICATION_JSON) public Response rebalanceHost(@QueryParam("id") String hostUuid, @QueryParam("user") String user, @QueryParam("token") String token, @QueryParam("sudo") String sudo) throws Exception { try { if (!permissionsManager.adminAction(user, token, sudo)) { return Response.status(Response.Status.UNAUTHORIZED).entity("insufficient privileges").build(); } 
String[] hostUuids = hostUuid.split(","); JSONArray outcomes = new JSONArray(); for (String uuid : hostUuids) { emitLogLineForAction(user, "host rebalance on " + hostUuid); RebalanceOutcome outcome = spawn.rebalanceHost(uuid); JSONObject json = CodecJSON.encodeJSON(outcome); outcomes.put(json); } return Response.ok(outcomes.toString()).build(); } catch (Exception ex) { log.warn("", ex); return Response.serverError().entity("Host Rebalance Error: " + ex.getMessage()).build(); } } @GET @Path("/fail") @Produces(MediaType.APPLICATION_JSON) public Response failHost(@QueryParam("id") String hostUuids, @QueryParam("deadFs") boolean filesystemDead, @QueryParam("user") String user, @QueryParam("token") String token, @QueryParam("sudo") String sudo) throws Exception { try { if (!permissionsManager.adminAction(user, token, sudo)) { return Response.status(Response.Status.UNAUTHORIZED).entity("insufficient privileges").build(); } emitLogLineForAction(user, "fail host on " + hostUuids); HostFailWorker.FailState failureMode; if (filesystemDead) { failureMode = HostFailWorker.FailState.FAILING_FS_DEAD; } else { failureMode = HostFailWorker.FailState.FAILING_FS_OKAY; } hostFailWorker.markHostsToFail(hostUuids, failureMode); JSONObject json = new JSONObject(); json.put("success", hostUuids.split(",").length); return Response.ok(json.toString()).build(); } catch (Exception ex) { log.warn("", ex); return Response.serverError().entity("Host Fail Error: " + ex.getMessage()).build(); } } @GET @Path("/failcancel") @Produces(MediaType.APPLICATION_JSON) public Response cancelFailHost(@QueryParam("id") String hostUuids, @QueryParam("user") String user, @QueryParam("token") String token, @QueryParam("sudo") String sudo) throws Exception { try { if (!permissionsManager.adminAction(user, token, sudo)) { return Response.status(Response.Status.UNAUTHORIZED).entity("insufficient privileges").build(); } hostFailWorker.removeHostsForFailure(hostUuids); JSONObject json = new JSONObject(); 
json.put("success", hostUuids.split(",").length); return Response.ok(json.toString()).build(); } catch (Exception ex) { log.warn("", ex); return Response.serverError().entity("Host Fail Error: " + ex.getMessage()).build(); } } @GET @Path("/failinfo") @Produces(MediaType.APPLICATION_JSON) public Response hostFailInfo(@QueryParam("id") String hostUuids, @QueryParam("deadFs") boolean filesystemDead, @QueryParam("user") String user, @QueryParam("token") String token, @QueryParam("sudo") String sudo) throws Exception { try { if (!permissionsManager.adminAction(user, token, sudo)) { Response.status(Response.Status.UNAUTHORIZED).entity("insufficient privileges").build(); } return Response.ok(spawn.getHostFailWorker().getInfoForHostFailure(hostUuids, filesystemDead).toString()).build(); } catch (Exception ex) { log.warn("", ex); return Response.serverError().entity("Host Fail Error: " + ex.getMessage()).build(); } } @GET @Path("/drop") @Produces(MediaType.APPLICATION_JSON) public Response dropHosts(@QueryParam("id") String hostUuid, @QueryParam("user") String user, @QueryParam("token") String token, @QueryParam("sudo") String sudo) throws Exception { try { if (!permissionsManager.adminAction(user, token, sudo)) { return Response.status(Response.Status.UNAUTHORIZED).entity("insufficient privileges").build(); } String[] hostUuids = hostUuid.split(","); for (String uuid : hostUuids) { emitLogLineForAction(user, "delete host on " + uuid); spawn.deleteHost(uuid); } return Response.ok().build(); } catch (Exception ex) { log.warn("", ex); return Response.serverError().entity("Host Drop Error: " + ex.getMessage()).build(); } } @GET @Path("/toggle") @Produces(MediaType.APPLICATION_JSON) public Response enableHosts(@QueryParam("id") String hostUuid, @QueryParam("user") String user, @QueryParam("token") String token, @QueryParam("sudo") String sudo, @QueryParam("disable") boolean disable) throws Exception { try { if (!permissionsManager.adminAction(user, token, sudo)) { return 
Response.status(Response.Status.UNAUTHORIZED).entity("insufficient privileges").build(); } emitLogLineForAction(user, "toggle hosts"); spawn.toggleHosts(hostUuid, disable); return Response.ok().build(); } catch (Exception ex) { log.warn("", ex); return Response.serverError().entity("Host Disable Error: " + ex.getMessage()).build(); } } @GET @Path("/list") @Produces(MediaType.APPLICATION_JSON) public Response listHosts() { JSONArray hosts = new JSONArray(); try { for (HostState host : spawn.hostManager.listHostStatus(null)) { hosts.put(spawn.getHostStateUpdateEvent(host)); } return Response.ok(hosts.toString()).build(); } catch (Exception ex) { log.warn("", ex); return Response.serverError().entity(ex.toString()).build(); } } private static void emitLogLineForAction(String user, String desc) { log.warn("User " + user + " initiated action: " + desc); } }
package org.broadinstitute.hellbender.tools.spark.sv.integration;

import htsjdk.samtools.util.CloseableIterator;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.vcf.VCFFileReader;
import org.apache.hadoop.fs.Path;
import org.broadinstitute.hellbender.CommandLineProgramTest;
import org.broadinstitute.hellbender.GATKBaseTest;
import org.broadinstitute.hellbender.utils.Utils;
import org.broadinstitute.hellbender.utils.gcs.BucketUtils;
import org.broadinstitute.hellbender.utils.io.IOUtils;
import org.broadinstitute.hellbender.testutils.ArgumentsBuilder;
import org.broadinstitute.hellbender.testutils.BaseTest;
import org.broadinstitute.hellbender.testutils.MiniClusterUtils;
import org.broadinstitute.hellbender.testutils.VariantContextTestUtils;
import org.testng.Assert;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;

import static org.broadinstitute.hellbender.tools.spark.sv.integration.DiscoverVariantsFromContigAlignmentsSAMSparkIntegrationTest.annotationsToIgnoreWhenComparingVariants;

/**
 * Integration tests for the end-to-end structural variation discovery pipeline,
 * run both locally and on an isolated HDFS mini cluster.
 */
public class StructuralVariationDiscoveryPipelineSparkIntegrationTest extends CommandLineProgramTest {

    /** Bundles the input/output locations needed to build a pipeline command line for one test case. */
    private static final class StructuralVariationDiscoveryPipelineSparkIntegrationTestArgs {
        final String bamLoc;
        final String kmerIgnoreListLoc;
        final String alignerRefIndexImgLoc;
        final String outputDir;
        final String cnvCallsLoc;

        StructuralVariationDiscoveryPipelineSparkIntegrationTestArgs(final String bamLoc,
                                                                     final String kmerIgnoreListLoc,
                                                                     final String alignerRefIndexImgLoc,
                                                                     final String cnvCallsLoc,
                                                                     final String outputDir) {
            this.bamLoc = bamLoc;
            this.kmerIgnoreListLoc = kmerIgnoreListLoc;
            this.alignerRefIndexImgLoc = alignerRefIndexImgLoc;
            this.outputDir = outputDir;
            this.cnvCallsLoc = cnvCallsLoc;
        }

        /** Builds the full tool command line; the CNV-calls argument is included only when a location was given. */
        String getCommandLine() {
            return " -R " + SVIntegrationTestDataProvider.reference_2bit +
                    " -I " + bamLoc +
                    " -O " + outputDir + "/StructuralVariationDiscoveryPipelineSparkIntegrationTest/" +
                    " --aligner-index-image " + alignerRefIndexImgLoc +
                    " --kmers-to-ignore " + kmerIgnoreListLoc +
                    " --contig-sam-file " + outputDir + "/assemblies.bam" +
                    " --breakpoint-intervals " + outputDir + "/intervals" +
                    " --fastq-dir " + outputDir + "/fastq" +
                    (cnvCallsLoc == null ? "" : " --cnv-calls " + cnvCallsLoc) +
                    " --exp-interpret";
        }

        @Override
        public String toString() {
            return "StructuralVariationDiscoveryPipelineSparkIntegrationTestArgs{" +
                    "bam-loc='" + bamLoc + '\'' +
                    ", kmer-ignore-list-loc='" + kmerIgnoreListLoc + '\'' +
                    // BUG FIX: label previously read "aligner-fef-index-img-loc"
                    ", aligner-ref-index-img-loc='" + alignerRefIndexImgLoc + '\'' +
                    ", cnv-calls-loc='" + cnvCallsLoc + '\'' +
                    ", output-dir='" + outputDir + '\'' +
                    '}';
        }
    }

    /** Provides a single test case driven by the shared SV integration test data. */
    @DataProvider(name = "svDiscoverPipelineSparkIntegrationTest")
    public Object[][] createTestData() throws IOException {
        List<Object[]> tests = new ArrayList<>();
        final File tempDirNew = BaseTest.createTempDir("new");
        tempDirNew.deleteOnExit();
        // the pipeline expects the fastq output directory to already exist
        Files.createDirectories(Paths.get(tempDirNew.getAbsolutePath() + "/fastq"));
        tests.add(new Object[]{
                new StructuralVariationDiscoveryPipelineSparkIntegrationTestArgs(
                        SVIntegrationTestDataProvider.TEST_BAM,
                        SVIntegrationTestDataProvider.KMER_KILL_LIST,
                        SVIntegrationTestDataProvider.ALIGNER_INDEX_IMG,
                        SVIntegrationTestDataProvider.EXTERNAL_CNV_CALLS,
                        tempDirNew.getAbsolutePath()
                )
        });
        return tests.toArray(new Object[][]{});
    }

    /** Runs the pipeline locally and checks the emitted VCFs and the assembly BAM index. */
    @Test(dataProvider = "svDiscoverPipelineSparkIntegrationTest", groups = "sv")
    public void testSVDiscoverPipelineRunnableLocal(
            final StructuralVariationDiscoveryPipelineSparkIntegrationTestArgs params) throws IOException {
        final List<String> args = Arrays.asList(
                new ArgumentsBuilder().addRaw(params.getCommandLine()).getArgsArray()
        );
        runCommandLine(args);

        svDiscoveryVCFEquivalenceTest(
                args.get(args.indexOf("-O") + 1) + "sample_inv_del_ins.vcf",
                SVIntegrationTestDataProvider.EXPECTED_SIMPLE_DEL_VCF,
                args.get(args.indexOf("-O") + 1).concat("sample_experimentalInterpretation_NonComplex.vcf"),
                annotationsToIgnoreWhenComparingVariants,
                false);

        // the assembly BAM must have been indexed alongside its .bam
        Assert.assertTrue(Files.exists(IOUtils.getPath(
                args.get(args.indexOf("--contig-sam-file") + 1).replace(".bam", ".bai")
        )));
    }

    /** Runs the pipeline on an isolated HDFS mini cluster, copying inputs in and prefixing outputs with HDFS paths. */
    @Test(dataProvider = "svDiscoverPipelineSparkIntegrationTest", groups = "sv")
    public void testSVDiscoverPipelineRunnableMiniCluster(
            final StructuralVariationDiscoveryPipelineSparkIntegrationTestArgs params) throws Exception {

        MiniClusterUtils.runOnIsolatedMiniCluster(cluster -> {

            final List<String> argsToBeModified = Arrays.asList(
                    new ArgumentsBuilder().addRaw(params.getCommandLine()).getArgsArray()
            );
            final Path workingDirectory = MiniClusterUtils.getWorkingDir(cluster);

            int idx = 0;

            // inputs, copy to mini cluster
            idx = argsToBeModified.indexOf("-I");
            Path path = new Path(workingDirectory, "hdfs.bam");
            File file = new File(argsToBeModified.get(idx + 1));
            cluster.getFileSystem().copyFromLocalFile(new Path(file.toURI()), path);
            argsToBeModified.set(idx + 1, path.toUri().toString());

            idx = argsToBeModified.indexOf("-R");
            path = new Path(workingDirectory, "reference.2bit");
            file = new File(argsToBeModified.get(idx + 1));
            cluster.getFileSystem().copyFromLocalFile(new Path(file.toURI()), path);
            argsToBeModified.set(idx + 1, path.toUri().toString());

            idx = argsToBeModified.indexOf("--kmers-to-ignore");
            path = new Path(workingDirectory, "dummy.kill.kmers");
            file = new File(argsToBeModified.get(idx + 1));
            cluster.getFileSystem().copyFromLocalFile(new Path(file.toURI()), path);
            argsToBeModified.set(idx + 1, path.toUri().toString());

            idx = argsToBeModified.indexOf("--cnv-calls");
            path = new Path(workingDirectory, "cnvVariants");
            file = new File(argsToBeModified.get(idx + 1));
            cluster.getFileSystem().copyFromLocalFile(new Path(file.toURI()), path);
            argsToBeModified.set(idx + 1, path.toUri().toString());

            // outputs, prefix with hdfs address
            idx = argsToBeModified.indexOf("-O");
            path = new Path(workingDirectory, "test");
            final String vcfOnHDFS = path.toUri().toString() + "/sample_inv_del_ins.vcf";
            argsToBeModified.set(idx + 1, path.toUri().toString());

            idx = argsToBeModified.indexOf("--contig-sam-file");
            path = new Path(workingDirectory, "assemblies.bam");
            argsToBeModified.set(idx + 1, path.toUri().toString());

            idx = argsToBeModified.indexOf("--breakpoint-intervals");
            path = new Path(workingDirectory, "intervals");
            argsToBeModified.set(idx + 1, path.toUri().toString());

            idx = argsToBeModified.indexOf("--fastq-dir");
            path = new Path(workingDirectory, "fastq");
            argsToBeModified.set(idx + 1, path.toUri().toString());

            runCommandLine(argsToBeModified);

            svDiscoveryVCFEquivalenceTest(vcfOnHDFS,
                    SVIntegrationTestDataProvider.EXPECTED_SIMPLE_DEL_VCF,
                    vcfOnHDFS.replace("_inv_del_ins.vcf", "_experimentalInterpretation_NonComplex.vcf"),
                    annotationsToIgnoreWhenComparingVariants,
                    true);
            Assert.assertTrue(cluster.getFileSystem().exists(new Path(workingDirectory, "assemblies.bai")));
        });
    }

    // TODO: 8/27/18 swap the when making the switch to the new interpretation tool
    /**
     * Exists because testing equivalence between VCF is hard, so we do some customization here.
     * @param generatedVCFPath path to VCF generated by integration tested tool
     * @param expectedVCFPath path to VCF holding expected results
     * @param experimentalOutputPathForNonComplex path to an VCF that is produced by experimental interpretation tool, can be {@code null} if not applicable
     * @param attributesToIgnore attributes to ignore when comparing actual and expected VariantContexts, applied to all variants
     * @param onHDFS whether {@code generatedVCFPath} and {@code experimentalOutputPathForNonComplex} are on HDFS system
     * @throws IOException if reading from any of the VCF fails
     */
    static void svDiscoveryVCFEquivalenceTest(final String generatedVCFPath,
                                              final String expectedVCFPath,
                                              final String experimentalOutputPathForNonComplex,
                                              final List<String> attributesToIgnore,
                                              final boolean onHDFS) throws IOException {

        final List<VariantContext> expectedVcs;
        if (expectedVCFPath == null) {
            expectedVcs = Collections.emptyList();
        } else {
            try (final VCFFileReader fileReader = new VCFFileReader(new File(expectedVCFPath), false);
                 final CloseableIterator<VariantContext> iterator = fileReader.iterator()) {
                expectedVcs = Utils.stream(iterator).collect(Collectors.toList());
            }
        }

        List<VariantContext> actualVcs = extractActualVCs(generatedVCFPath, onHDFS);

        // with no expectation, the tool must also have produced nothing
        if (expectedVCFPath == null) Assert.assertTrue(actualVcs.isEmpty());

        GATKBaseTest.assertCondition(actualVcs, expectedVcs,
                (a, e) -> VariantContextTestUtils.assertVariantContextsAreEqual(a, e,
                        attributesToIgnore, Collections.emptyList()));

        if ( experimentalOutputPathForNonComplex != null ) {
            final java.nio.file.Path path = IOUtils.getPath(experimentalOutputPathForNonComplex);
            final String experimentalInsDelVcf = onHDFS ? path.toUri().toString() : path.toString();
            actualVcs = extractActualVCs(experimentalInsDelVcf, onHDFS);

            // TODO: 1/28/18 see ticket #4228
            final List<String> moreAttributesToIgnoreForNow = new ArrayList<>(attributesToIgnore);
            moreAttributesToIgnoreForNow.addAll(Collections.singletonList("EXTERNAL_CNV_CALLS"));
            GATKBaseTest.assertCondition(actualVcs, expectedVcs,
                    (a, e) -> VariantContextTestUtils.assertVariantContextsAreEqual(a, e,
                            moreAttributesToIgnoreForNow, Collections.emptyList()));
        }
    }

    /**
     * Reads all VariantContexts from {@code generatedVCFPath}; when the VCF lives on HDFS
     * it is first copied to a local temp file so the reader can open it.
     */
    static List<VariantContext> extractActualVCs(final String generatedVCFPath, final boolean onHDFS)
            throws IOException {

        final File appropriateVCF;
        if (onHDFS) {
            appropriateVCF = GATKBaseTest.createTempFile("variants", "vcf");
            appropriateVCF.deleteOnExit();
            BucketUtils.copyFile(generatedVCFPath, appropriateVCF.getAbsolutePath());
        } else {
            appropriateVCF = new File(generatedVCFPath);
        }
        try (final VCFFileReader fileReader = new VCFFileReader(appropriateVCF, false)) {
            try (final CloseableIterator<VariantContext> iterator = fileReader.iterator()) {
                return Utils.stream(iterator).collect(Collectors.toList());
            }
        }
    }
}
package no.noen.lms;

import java.io.Serializable;

import org.komsa.domain.*;

/**
 * LMS load product: an {@link Entity} describing a controllable load — the
 * terminal/port it is wired to, the meter measuring it, and the contract,
 * customer and load group it belongs to.
 */
public class LoadProduct extends Entity implements Serializable {
    /* Attribute names */
    public static final String F_PORTIO = "portIO";
    public static final String F_PORTNR = "portNr";
    public static final String F_TERMINALID = "termId";
    public static final String F_TERMINALNAME = "termName";
    public static final String F_TERMINALIPADDR = "ipAddr";
    public static final String F_BUILDINGID = "buildingId";
    public static final String F_BUILDINGNAME = "buildingName";
    public static final String F_METERID = "meterId";
    public static final String F_METERNAME = "meterName";
    public static final String F_METEROUTPUT = "meterOutput";
    protected static final String F_METERFACTOR = "meterFactor";
    protected static final String F_MAINTAINSTART = "mpStart";
    protected static final String F_MAINTAINEND = "mpEnd";
    public static final String F_CONTRACTID = "contractId";
    protected static final String F_CONTRACTNAME = "contractName";
    public static final String F_CATEGORY = "category";
    public static final String F_LOADGROUPID = "loadGroupId";
    public static final String F_CUSTOMERID = "customerId";
    protected static final String F_CUSTOMERNAME = "customerName";
    public static final String F_OUTPUTLIMIT = "outputLimit";
    protected static final String F_OWNERID = "ownerId";

    /** Constructor. Registers one cell per attribute with its domain/default. */
    public LoadProduct() {
        super("lms", "LoadProduct");

        Cell[] defaultCells = {
            new Cell(F_PORTIO, new StringDomain("DO")),
            new Cell(F_PORTNR, new IntegerDomain()),
            new Cell(F_TERMINALID, new IDDomain()),
            new Cell(F_TERMINALNAME, new StringDomain()),
            new Cell(F_TERMINALIPADDR, new StringDomain()),
            new Cell(F_BUILDINGID, new IDDomain()),
            new Cell(F_BUILDINGNAME, new StringDomain()),
            new Cell(F_METERID, new IDDomain()),
            new Cell(F_METERNAME, new StringDomain()),
            new Cell(F_METEROUTPUT, new IntegerDomain()),
            new Cell(F_METERFACTOR, new FloatDomain(1.0f)),
            new Cell(F_MAINTAINSTART, new TimestampDomain(null)),
            new Cell(F_MAINTAINEND, new TimestampDomain(null)),
            new Cell(F_CONTRACTID, new IDDomain()),
            new Cell(F_CONTRACTNAME, new StringDomain()),
            new Cell(F_CATEGORY, new ShortDomain(Contract.CAT_RK)),
            new Cell(F_LOADGROUPID, new IDDomain()),
            new Cell(F_CUSTOMERID, new IDDomain()),
            new Cell(F_OWNERID, new IDDomain()),
            new Cell(F_CUSTOMERNAME, new StringDomain()),
            new Cell(F_OUTPUTLIMIT, new IntegerDomain(0))
        };
        for (Cell cell : defaultCells)
            addCell(cell);
    }

    /** @return Port IO type as string. */
    public final String getPortIO() { return get(F_PORTIO); }

    /** @return Port nr as short integer. */
    public final short getPortNr() { return getShort(F_PORTNR); }

    /** Set port nr. */
    public final void setPortNr(short nr) {
        // Short.valueOf instead of the deprecated new Short(...)
        set(F_PORTNR, Short.valueOf(nr));
    }

    /** @return Terminal ID as long integer. */
    public final long getTerminalId() { return getLong(F_TERMINALID); }

    /** @return Terminal name as string. */
    public final String getTerminalName() { return get(F_TERMINALNAME); }

    /** @return IP-address for terminal. */
    public final String getIPAddr() { return get(F_TERMINALIPADDR); }

    /** @return Building ID as long integer. */
    public final long getBuildingId() { return getLong(F_BUILDINGID); }

    /** @return Building name. */
    public final String getBuildingName() { return get(F_BUILDINGNAME); }

    /** @return Meter ID as long integer. */
    public final long getMeterId() { return getLong(F_METERID); }

    /** @return Meter name. */
    public final String getMeterName() { return get(F_METERNAME); }

    /** @return Meter output value as integer. */
    public final int getMeterOutput() { return getInt(F_METEROUTPUT); }

    /** @return Factor as float. */
    public final float getMeterFactor() { return getFloat(F_METERFACTOR); }

    /** @return Terminal mainteinance start time. */
    public final java.util.Date getMaintenanceStart() {
        return (java.util.Date)getObj(F_MAINTAINSTART);
    }

    /** @return Terminal mainteinance end time. */
    public final java.util.Date getMaintenanceEnd() {
        return (java.util.Date)getObj(F_MAINTAINEND);
    }

    /** @return true if terminal is on mainteinance (now is within the maintenance window). */
    public final boolean isOnMaintenance() {
        boolean fVal = false;
        java.util.Date startTime = getMaintenanceStart();
        java.util.Date endTime = getMaintenanceEnd();
        // both bounds must be set; otherwise no maintenance window is defined
        if (startTime != null && endTime != null) {
            java.util.Date now = new java.util.Date();
            fVal = now.after(startTime) && now.before(endTime);
        }
        return fVal;
    }

    /**
     * @param dateFrm Date format
     * @param strSep Separator between start and end time; {@code null} gives " - "
     * @return Mainteinance periode, if defined, as string; empty string otherwise.
     */
    public final String getMaintenancePeriodStr(String dateFrm, String strSep) {
        StringBuilder buf = new StringBuilder();
        java.util.Date startTime = getMaintenanceStart();
        java.util.Date endTime = getMaintenanceEnd();
        if (startTime != null && endTime != null) {
            String sep = (strSep != null) ? strSep : " - ";
            // Norwegian locale matches the system's reporting conventions
            java.util.Locale loc = new java.util.Locale("no", "NO", "");
            java.text.DateFormat df = new java.text.SimpleDateFormat(dateFrm, loc);
            buf.append(df.format(startTime)).append(sep)
               .append(df.format(endTime));
        }
        return buf.toString();
    }

    /** @return Contract ID as long integer. */
    public final long getContractId() { return getLong(F_CONTRACTID); }

    /** @return Contract name as string. */
    public final String getContractName() { return get(F_CONTRACTNAME); }

    /** @return Category as short integer. */
    public final short getCategory() { return getShort(F_CATEGORY); }

    /** @return Loadgroup ID as long integer. */
    public final long getLoadGroupId() { return getLong(F_LOADGROUPID); }

    /** Set loadgroup ID as integer. */
    public final void setLoadGroupId(long groupId) {
        // Long.valueOf instead of the deprecated new Long(...)
        set(F_LOADGROUPID, Long.valueOf(groupId));
    }

    /** @return Customer ID as long integer. */
    public final long getCustomerId() { return getLong(F_CUSTOMERID); }

    /** @return Customer name. */
    public final String getCustomerName() { return get(F_CUSTOMERNAME); }

    /** @return Net owner/operator ID as long integer. */
    public final long getNetOwnerId() { return getLong(F_OWNERID); }

    /** @return Max output value valid on activation. */
    public final int getOutputLimit() { return getInt(F_OUTPUTLIMIT); }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.aggregations; import org.apache.lucene.index.AssertingDirectoryReader; import org.apache.lucene.index.CompositeReaderContext; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.AssertingIndexSearcher; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryCache; import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.Weight; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.MockPageCacheRecycler; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import 
org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.cache.bitset.BitsetFilterCache.Listener; import org.elasticsearch.index.cache.query.DisabledQueryCache; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper.BuilderContext; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ObjectMapper; import org.elasticsearch.index.mapper.ObjectMapper.Nested; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.support.NestedScope; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.mock.orig.Mockito; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.search.fetch.subphase.DocValueFieldsFetchSubPhase; import org.elasticsearch.search.fetch.subphase.FetchSourceSubPhase; import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.aggregations.MultiBucketConsumerService.MultiBucketConsumer; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalAggregationTestCase; import org.junit.After; import org.mockito.Matchers; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import static org.mockito.Matchers.anyObject; import static 
org.mockito.Matchers.anyString; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import static org.elasticsearch.test.InternalAggregationTestCase.DEFAULT_MAX_BUCKETS; /** * Base class for testing {@link Aggregator} implementations. * Provides helpers for constructing and searching an {@link Aggregator} implementation based on a provided * {@link AggregationBuilder} instance. */ public abstract class AggregatorTestCase extends ESTestCase { private static final String NESTEDFIELD_PREFIX = "nested_"; private List<Releasable> releasables = new ArrayList<>(); private static final String TYPE_NAME = "type"; protected AggregatorFactory<?> createAggregatorFactory(AggregationBuilder aggregationBuilder, IndexSearcher indexSearcher, MappedFieldType... fieldTypes) throws IOException { return createAggregatorFactory(aggregationBuilder, indexSearcher, createIndexSettings(), new MultiBucketConsumer(DEFAULT_MAX_BUCKETS), fieldTypes); } /** Create a factory for the given aggregation builder. */ protected AggregatorFactory<?> createAggregatorFactory(AggregationBuilder aggregationBuilder, IndexSearcher indexSearcher, IndexSettings indexSettings, MultiBucketConsumer bucketConsumer, MappedFieldType... 
fieldTypes) throws IOException { SearchContext searchContext = createSearchContext(indexSearcher, indexSettings); CircuitBreakerService circuitBreakerService = new NoneCircuitBreakerService(); when(searchContext.aggregations()) .thenReturn(new SearchContextAggregations(AggregatorFactories.EMPTY, bucketConsumer)); when(searchContext.bigArrays()).thenReturn(new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), circuitBreakerService)); // TODO: now just needed for top_hits, this will need to be revised for other agg unit tests: MapperService mapperService = mapperServiceMock(); when(mapperService.getIndexSettings()).thenReturn(indexSettings); when(mapperService.hasNested()).thenReturn(false); when(mapperService.types()).thenReturn(Collections.singleton(TYPE_NAME)); when(searchContext.mapperService()).thenReturn(mapperService); IndexFieldDataService ifds = new IndexFieldDataService(indexSettings, new IndicesFieldDataCache(Settings.EMPTY, new IndexFieldDataCache.Listener() { }), circuitBreakerService, mapperService); when(searchContext.getForField(Mockito.any(MappedFieldType.class))) .thenAnswer(invocationOnMock -> ifds.getForField((MappedFieldType) invocationOnMock.getArguments()[0])); SearchLookup searchLookup = new SearchLookup(mapperService, ifds::getForField, new String[]{TYPE_NAME}); when(searchContext.lookup()).thenReturn(searchLookup); QueryShardContext queryShardContext = queryShardContextMock(mapperService, fieldTypes, circuitBreakerService); when(queryShardContext.getIndexSettings()).thenReturn(indexSettings); when(searchContext.getQueryShardContext()).thenReturn(queryShardContext); for (MappedFieldType fieldType : fieldTypes) { when(searchContext.smartNameFieldType(fieldType.name())).thenReturn(fieldType); } return aggregationBuilder.build(searchContext, null); } protected <A extends Aggregator> A createAggregator(AggregationBuilder aggregationBuilder, IndexSearcher indexSearcher, MappedFieldType... 
fieldTypes) throws IOException { return createAggregator(aggregationBuilder, indexSearcher, createIndexSettings(), new MultiBucketConsumer(DEFAULT_MAX_BUCKETS), fieldTypes); } protected <A extends Aggregator> A createAggregator(AggregationBuilder aggregationBuilder, IndexSearcher indexSearcher, IndexSettings indexSettings, MappedFieldType... fieldTypes) throws IOException { return createAggregator(aggregationBuilder, indexSearcher, indexSettings, new MultiBucketConsumer(DEFAULT_MAX_BUCKETS), fieldTypes); } protected <A extends Aggregator> A createAggregator(AggregationBuilder aggregationBuilder, IndexSearcher indexSearcher, MultiBucketConsumer bucketConsumer, MappedFieldType... fieldTypes) throws IOException { return createAggregator(aggregationBuilder, indexSearcher, createIndexSettings(), bucketConsumer, fieldTypes); } protected <A extends Aggregator> A createAggregator(AggregationBuilder aggregationBuilder, IndexSearcher indexSearcher, IndexSettings indexSettings, MultiBucketConsumer bucketConsumer, MappedFieldType... 
fieldTypes) throws IOException { @SuppressWarnings("unchecked") A aggregator = (A) createAggregatorFactory(aggregationBuilder, indexSearcher, indexSettings, bucketConsumer, fieldTypes) .create(null, true); return aggregator; } protected SearchContext createSearchContext(IndexSearcher indexSearcher, IndexSettings indexSettings) { Engine.Searcher searcher = new Engine.Searcher("aggregator_test", indexSearcher); QueryCache queryCache = new DisabledQueryCache(indexSettings); QueryCachingPolicy queryCachingPolicy = new QueryCachingPolicy() { @Override public void onUse(Query query) { } @Override public boolean shouldCache(Query query) throws IOException { // never cache a query return false; } }; ContextIndexSearcher contextIndexSearcher = new ContextIndexSearcher(searcher, queryCache, queryCachingPolicy); SearchContext searchContext = mock(SearchContext.class); when(searchContext.numberOfShards()).thenReturn(1); when(searchContext.searcher()).thenReturn(contextIndexSearcher); when(searchContext.fetchPhase()) .thenReturn(new FetchPhase(Arrays.asList(new FetchSourceSubPhase(), new DocValueFieldsFetchSubPhase()))); when(searchContext.getObjectMapper(anyString())).thenAnswer(invocation -> { String fieldName = (String) invocation.getArguments()[0]; if (fieldName.startsWith(NESTEDFIELD_PREFIX)) { BuilderContext context = new BuilderContext(indexSettings.getSettings(), new ContentPath()); return new ObjectMapper.Builder<>(fieldName).nested(Nested.newNested(false, false)).build(context); } return null; }); when(searchContext.bitsetFilterCache()).thenReturn(new BitsetFilterCache(indexSettings, mock(Listener.class))); doAnswer(invocation -> { /* Store the releasables so we can release them at the end of the test case. This is important because aggregations don't * close their sub-aggregations. This is fairly similar to what the production code does. 
*/ releasables.add((Releasable) invocation.getArguments()[0]); return null; }).when(searchContext).addReleasable(anyObject(), anyObject()); return searchContext; } protected IndexSettings createIndexSettings() { return new IndexSettings( IndexMetaData.builder("_index").settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) .numberOfShards(1) .numberOfReplicas(0) .creationDate(System.currentTimeMillis()) .build(), Settings.EMPTY ); } /** * sub-tests that need a more complex mock can overwrite this */ protected MapperService mapperServiceMock() { return mock(MapperService.class); } /** * sub-tests that need a more complex mock can overwrite this */ protected QueryShardContext queryShardContextMock(MapperService mapperService, MappedFieldType[] fieldTypes, CircuitBreakerService circuitBreakerService) { QueryShardContext queryShardContext = mock(QueryShardContext.class); when(queryShardContext.getMapperService()).thenReturn(mapperService); for (MappedFieldType fieldType : fieldTypes) { when(queryShardContext.fieldMapper(fieldType.name())).thenReturn(fieldType); when(queryShardContext.getForField(fieldType)).then(invocation -> fieldType.fielddataBuilder(mapperService.getIndexSettings() .getIndex().getName()) .build(mapperService.getIndexSettings(), fieldType, new IndexFieldDataCache.None(), circuitBreakerService, mapperService)); } NestedScope nestedScope = new NestedScope(); when(queryShardContext.isFilter()).thenCallRealMethod(); Mockito.doCallRealMethod().when(queryShardContext).setIsFilter(Matchers.anyBoolean()); when(queryShardContext.nestedScope()).thenReturn(nestedScope); return queryShardContext; } protected <A extends InternalAggregation, C extends Aggregator> A search(IndexSearcher searcher, Query query, AggregationBuilder builder, MappedFieldType... 
fieldTypes) throws IOException { return search(searcher, query, builder, DEFAULT_MAX_BUCKETS, fieldTypes); } protected <A extends InternalAggregation, C extends Aggregator> A search(IndexSearcher searcher, Query query, AggregationBuilder builder, int maxBucket, MappedFieldType... fieldTypes) throws IOException { MultiBucketConsumer bucketConsumer = new MultiBucketConsumer(maxBucket); C a = createAggregator(builder, searcher, bucketConsumer, fieldTypes); a.preCollection(); searcher.search(query, a); a.postCollection(); @SuppressWarnings("unchecked") A internalAgg = (A) a.buildAggregation(0L); InternalAggregationTestCase.assertMultiBucketConsumer(internalAgg, bucketConsumer); return internalAgg; } protected <A extends InternalAggregation, C extends Aggregator> A searchAndReduce(IndexSearcher searcher, Query query, AggregationBuilder builder, MappedFieldType... fieldTypes) throws IOException { return searchAndReduce(searcher, query, builder, DEFAULT_MAX_BUCKETS, fieldTypes); } /** * Divides the provided {@link IndexSearcher} in sub-searcher, one for each segment, * builds an aggregator for each sub-searcher filtered by the provided {@link Query} and * returns the reduced {@link InternalAggregation}. */ protected <A extends InternalAggregation, C extends Aggregator> A searchAndReduce(IndexSearcher searcher, Query query, AggregationBuilder builder, int maxBucket, MappedFieldType... 
fieldTypes) throws IOException { final IndexReaderContext ctx = searcher.getTopReaderContext(); final ShardSearcher[] subSearchers; if (ctx instanceof LeafReaderContext) { subSearchers = new ShardSearcher[1]; subSearchers[0] = new ShardSearcher((LeafReaderContext) ctx, ctx); } else { final CompositeReaderContext compCTX = (CompositeReaderContext) ctx; final int size = compCTX.leaves().size(); subSearchers = new ShardSearcher[size]; for(int searcherIDX=0;searcherIDX<subSearchers.length;searcherIDX++) { final LeafReaderContext leave = compCTX.leaves().get(searcherIDX); subSearchers[searcherIDX] = new ShardSearcher(leave, compCTX); } } List<InternalAggregation> aggs = new ArrayList<> (); Query rewritten = searcher.rewrite(query); Weight weight = searcher.createWeight(rewritten, true, 1f); MultiBucketConsumer bucketConsumer = new MultiBucketConsumer(maxBucket); C root = createAggregator(builder, searcher, bucketConsumer, fieldTypes); for (ShardSearcher subSearcher : subSearchers) { MultiBucketConsumer shardBucketConsumer = new MultiBucketConsumer(maxBucket); C a = createAggregator(builder, subSearcher, shardBucketConsumer, fieldTypes); a.preCollection(); subSearcher.search(weight, a); a.postCollection(); InternalAggregation agg = a.buildAggregation(0L); aggs.add(agg); InternalAggregationTestCase.assertMultiBucketConsumer(agg, shardBucketConsumer); } if (aggs.isEmpty()) { return null; } else { if (randomBoolean() && aggs.size() > 1) { // sometimes do an incremental reduce int toReduceSize = aggs.size(); Collections.shuffle(aggs, random()); int r = randomIntBetween(1, toReduceSize); List<InternalAggregation> toReduce = aggs.subList(0, r); MultiBucketConsumer reduceBucketConsumer = new MultiBucketConsumer(maxBucket); InternalAggregation.ReduceContext context = new InternalAggregation.ReduceContext(root.context().bigArrays(), null, reduceBucketConsumer, false); A reduced = (A) aggs.get(0).doReduce(toReduce, context); 
InternalAggregationTestCase.assertMultiBucketConsumer(reduced, reduceBucketConsumer); aggs = new ArrayList<>(aggs.subList(r, toReduceSize)); aggs.add(reduced); } // now do the final reduce MultiBucketConsumer reduceBucketConsumer = new MultiBucketConsumer(maxBucket); InternalAggregation.ReduceContext context = new InternalAggregation.ReduceContext(root.context().bigArrays(), null, reduceBucketConsumer, true); @SuppressWarnings("unchecked") A internalAgg = (A) aggs.get(0).doReduce(aggs, context); InternalAggregationTestCase.assertMultiBucketConsumer(internalAgg, reduceBucketConsumer); return internalAgg; } } private static class ShardSearcher extends IndexSearcher { private final List<LeafReaderContext> ctx; ShardSearcher(LeafReaderContext ctx, IndexReaderContext parent) { super(parent); this.ctx = Collections.singletonList(ctx); } public void search(Weight weight, Collector collector) throws IOException { search(ctx, weight, collector); } @Override public String toString() { return "ShardSearcher(" + ctx.get(0) + ")"; } } protected static DirectoryReader wrap(DirectoryReader directoryReader) throws IOException { return ElasticsearchDirectoryReader.wrap(directoryReader, new ShardId(new Index("_index", "_na_"), 0)); } /** * Added to randomly run with more assertions on the index searcher level, * like {@link org.apache.lucene.util.LuceneTestCase#newSearcher(IndexReader)}, which can't be used because it also * wraps in the IndexSearcher's IndexReader with other implementations that we can't handle. (e.g. 
ParallelCompositeReader)
 */
protected static IndexSearcher newIndexSearcher(IndexReader indexReader) {
    // Coin flip: half of the runs use Lucene's asserting searcher, which checks
    // searcher/scorer API contracts while running; the other half uses a plain searcher.
    if (randomBoolean()) {
        return new AssertingIndexSearcher(random(), indexReader);
    } else {
        return new IndexSearcher(indexReader);
    }
}

/**
 * Added to randomly run with more assertions on the index reader level,
 * like {@link org.apache.lucene.util.LuceneTestCase#wrapReader(IndexReader)}, which can't be used
 * because it also wraps in the IndexReader with other implementations that we can't handle.
 * (e.g. ParallelCompositeReader)
 */
protected static IndexReader maybeWrapReaderEs(DirectoryReader reader) throws IOException {
    // Coin flip: sometimes wrap in Lucene's asserting reader for extra invariant
    // checking; otherwise return the reader unchanged.
    if (randomBoolean()) {
        return new AssertingDirectoryReader(reader);
    } else {
        return reader;
    }
}

// Runs after each test: closes every Releasable registered during the test and
// clears the list so resources do not leak across test methods.
@After
private void cleanupReleasables() {
    Releasables.close(releasables);
    releasables.clear();
}
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.alibaba.wasp.messagequeue; import com.alibaba.wasp.EntityGroupInfo; import com.alibaba.wasp.FConstants; import com.alibaba.wasp.fserver.EntityGroup; import com.alibaba.wasp.fserver.LeaseException; import com.alibaba.wasp.fserver.LeaseListener; import com.alibaba.wasp.fserver.Leases; import com.alibaba.wasp.fserver.Leases.LeaseStillHeldException; import com.alibaba.wasp.fserver.OnlineEntityGroups; import com.alibaba.wasp.storage.StorageTableNotFoundException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.io.hfile.Compression; import java.io.Closeable; import java.io.IOException; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.concurrent.ConcurrentHashMap; /** * Message broker, it receives message from message queue and send message to * subscriber. 
* */ public class MessageBroker extends Thread implements Closeable, Broker { public static final Log LOG = LogFactory.getLog(MessageBroker.class); /** entityGroup server **/ private final OnlineEntityGroups service; /** for register **/ protected final ConcurrentHashMap<String, Subscriber> subscribers = new ConcurrentHashMap<String, Subscriber>(); private boolean closed = false; private Leases leases; private final int subscriberLeaseTimeoutPeriod; private final RenewRunnable renew; private final Configuration conf; /** * @param service * entityGroup server instance * @param conf * configuration */ public MessageBroker(OnlineEntityGroups service, Configuration conf) { this.service = service; this.conf = conf; this.leases = new Leases(conf.getInt(FConstants.THREAD_WAKE_FREQUENCY, 10 * 1000)); this.subscriberLeaseTimeoutPeriod = conf.getInt( FConstants.WASP_FSEVER_SUBSCRIBER_TIMEOUT_PERIOD, FConstants.DEFAULT_WASP_FSEVER_SUBSCRIBER_TIMEOUT_PERIOD); this.renew = new RenewRunnable(); this.renew.start(); } public void initlize() throws IOException { HBaseAdmin admin = new HBaseAdmin(this.conf); if (!admin.tableExists(FConstants.MESSAGEQUEUE_TABLENAME)) { HColumnDescriptor family = new HColumnDescriptor(FConstants.MESSAGEQUEUE_FAMILIY); family.setCompressionType(Compression.Algorithm.GZ); HTableDescriptor tableDes = new HTableDescriptor(FConstants.MESSAGEQUEUE_TABLENAME); tableDes.addFamily(family); admin.createTable(tableDes); } admin.close(); } private class SubscriberListener implements LeaseListener { private Subscriber subscriber; /** * @param subscriber */ public SubscriberListener(Subscriber subscriber) { super(); this.subscriber = subscriber; } /** * @see com.alibaba.wasp.fserver.LeaseListener#leaseExpired() */ @Override public void leaseExpired() { Subscriber s = subscribers.remove(this.subscriber.getEntityGroup() .getEntityGroupNameAsString()); LOG.info("Subscriber " + s.getEntityGroup().getEntityGroupNameAsString() + " lease expired"); } } private class 
RenewRunnable extends Thread implements Closeable { private boolean closed = false; @Override public void run() { while (!closed) { // renew lease,if some entityGroups hang then remove it by lease // manager. Collection<EntityGroup> entityGroups; try { entityGroups = service.getOnlineEntityGroups(); for (EntityGroup entityGroup : entityGroups) { leases.renewLease(entityGroup.getEntityGroupNameAsString()); } Thread.sleep(60 * 1000); } catch (Exception e) { LOG.error("RenewRunnable running.", e); try { close(); } catch (IOException e1) { LOG.error("RenewRunnable doClosing.", e1); } return; } } } /** * @see java.io.Closeable#close() */ @Override public void close() throws IOException { closed = true; } } /** * @see */ @Override public void register(Subscriber subscriber) throws LeaseStillHeldException { String subscriberName = subscriber.getEntityGroup() .getEntityGroupNameAsString(); Subscriber existing = subscribers.putIfAbsent(subscriberName, subscriber); if (existing == null) { this.leases .createLease(subscriberName, this.subscriberLeaseTimeoutPeriod, new SubscriberListener(subscriber)); } } /** * @see */ @Override public void remove(Subscriber subscriber) throws LeaseException { String sbuscriberName = subscriber.getEntityGroup() .getEntityGroupNameAsString(); subscribers.remove(sbuscriberName); leases.removeLease(sbuscriberName); } /** * @see */ @Override public void remove(EntityGroupInfo entityGroupInfo) throws LeaseException { String sbuscriberName = entityGroupInfo.getEntityGroupNameAsString(); subscribers.remove(sbuscriberName); leases.removeLease(sbuscriberName); } /** * @see Thread#run() */ @Override public void run() { while (!closed) { try { // fetch message from message queue then notify entityGroup. 
selectCurrentMessages(); // wait for 500ms Thread.sleep(500); } catch (Exception e) { LOG.error("Messagebroker running.", e); try { close(); } catch (IOException e1) { LOG.error("Messagebroker doClosing.", e1); } return; } } } /** * Fetch messages which were subscribed. * * @throws HBaseTableNotFoundException * @throws java.io.IOException */ private void selectCurrentMessages() throws StorageTableNotFoundException, IOException { Iterator<Subscriber> iter = subscribers.values().iterator(); while (iter.hasNext()) { Subscriber subscriber = iter.next(); List<Message> messages = subscriber.receive(); for (Message message : messages) { subscriber.setCurrentMessageRow(message.getMessageID().getMessageId()); subscriber.doAsynchronous(message); } } } @Override public void close() throws IOException { this.closed = true; this.renew.close(); this.interrupt(); this.renew.interrupt(); } }
/* * Copyright (c) 2002, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package javax.swing.plaf.synth; import javax.swing.*; import javax.swing.colorchooser.*; import javax.swing.plaf.*; import javax.swing.plaf.basic.BasicColorChooserUI; import java.awt.*; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; /** * Provides the Synth L&amp;F UI delegate for * {@link javax.swing.JColorChooser}. * * @author Tom Santos * @author Steve Wilson * @since 1.7 */ public class SynthColorChooserUI extends BasicColorChooserUI implements PropertyChangeListener, SynthUI { private SynthStyle style; /** * * Constructs a {@code SynthColorChooserUI}. */ public SynthColorChooserUI() {} /** * Creates a new UI object for the given component. 
 *
 * @param c component to create UI object for
 * @return the UI object
 */
public static ComponentUI createUI(JComponent c) {
    return new SynthColorChooserUI();
}

/**
 * {@inheritDoc}
 */
@Override
protected AbstractColorChooserPanel[] createDefaultChoosers() {
    // The Synth style may supply custom chooser panels under the
    // "ColorChooser.panels" key; fall back to the standard panels otherwise.
    SynthContext context = getContext(chooser, ENABLED);
    AbstractColorChooserPanel[] panels = (AbstractColorChooserPanel[])
        context.getStyle().get(context, "ColorChooser.panels");
    if (panels == null) {
        panels = ColorChooserComponentFactory.getDefaultChooserPanels();
    }
    return panels;
}

/**
 * {@inheritDoc}
 */
@Override
protected void installDefaults() {
    super.installDefaults();
    updateStyle(chooser);
}

// Fetches (or re-fetches) the SynthStyle for this component; called on install
// and whenever a property change requires a style update.
private void updateStyle(JComponent c) {
    SynthContext context = getContext(c, ENABLED);
    style = SynthLookAndFeel.updateStyle(context, this);
}

/**
 * {@inheritDoc}
 */
@Override
protected void uninstallDefaults() {
    SynthContext context = getContext(chooser, ENABLED);
    style.uninstallDefaults(context);
    style = null;
    super.uninstallDefaults();
}

/**
 * {@inheritDoc}
 */
@Override
protected void installListeners() {
    super.installListeners();
    // Listen for property changes so the style can be refreshed (see propertyChange).
    chooser.addPropertyChangeListener(this);
}

/**
 * {@inheritDoc}
 */
@Override
protected void uninstallListeners() {
    chooser.removePropertyChangeListener(this);
    super.uninstallListeners();
}

/**
 * {@inheritDoc}
 */
@Override
public SynthContext getContext(JComponent c) {
    return getContext(c, getComponentState(c));
}

// Builds a SynthContext for the given component and state using the cached style.
private SynthContext getContext(JComponent c, int state) {
    return SynthContext.getContext(c, style, state);
}

private int getComponentState(JComponent c) {
    return SynthLookAndFeel.getComponentState(c);
}

/**
 * Notifies this UI delegate to repaint the specified component.
 * This method paints the component background, then calls
 * the {@link #paint(SynthContext,Graphics)} method.
 *
 * <p>In general, this method does not need to be overridden by subclasses.
 * All Look and Feel rendering code should reside in the {@code paint} method.
 *
 * @param g the {@code Graphics} object used for painting
 * @param c the component being painted
 * @see #paint(SynthContext,Graphics)
 */
@Override
public void update(Graphics g, JComponent c) {
    SynthContext context = getContext(c);
    SynthLookAndFeel.update(context, g);
    context.getPainter().paintColorChooserBackground(context,
                      g, 0, 0, c.getWidth(), c.getHeight());
    paint(context, g);
}

/**
 * Paints the specified component according to the Look and Feel.
 * <p>This method is not used by Synth Look and Feel.
 * Painting is handled by the {@link #paint(SynthContext,Graphics)} method.
 *
 * @param g the {@code Graphics} object used for painting
 * @param c the component being painted
 * @see #paint(SynthContext,Graphics)
 */
@Override
public void paint(Graphics g, JComponent c) {
    SynthContext context = getContext(c);
    paint(context, g);
}

/**
 * Paints the specified component.
 * This implementation does not perform any actions.
 *
 * @param context context for the component being painted
 * @param g the {@code Graphics} object used for painting
 * @see #update(Graphics,JComponent)
 */
protected void paint(SynthContext context, Graphics g) {
}

/**
 * {@inheritDoc}
 */
@Override
public void paintBorder(SynthContext context, Graphics g, int x,
                        int y, int w, int h) {
    context.getPainter().paintColorChooserBorder(context, g, x, y, w, h);
}

/**
 * {@inheritDoc}
 */
@Override
public void propertyChange(PropertyChangeEvent e) {
    // Refresh the cached style when a relevant property (e.g. name, ancestor,
    // or look-and-feel related key) changes.
    if (SynthLookAndFeel.shouldUpdateStyle(e)) {
        updateStyle((JColorChooser)e.getSource());
    }
}
}
/* * #%L * ELK Proofs Package * $Id:$ * $HeadURL:$ * %% * Copyright (C) 2011 - 2014 Department of Computer Science, University of Oxford * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package org.semanticweb.elk.owlapi.proofs; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.util.ArrayList; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Queue; import java.util.Set; import org.liveontologies.owlapi.proof.OWLProver; import org.liveontologies.puli.Inference; import org.liveontologies.puli.InferenceJustifier; import org.liveontologies.puli.InferenceJustifiers; import org.liveontologies.puli.Proof; import org.liveontologies.puli.Proofs; import org.liveontologies.puli.pinpointing.InterruptMonitor; import org.liveontologies.puli.pinpointing.MinimalSubsetCollector; import org.liveontologies.puli.pinpointing.MinimalSubsetEnumerators; import org.semanticweb.elk.owl.inferences.TestUtils; import org.semanticweb.elk.owlapi.ElkProver; import org.semanticweb.elk.owlapi.ElkReasoner; import org.semanticweb.elk.reasoner.completeness.TestIncompleteness; import org.semanticweb.owlapi.model.AddAxiom; import org.semanticweb.owlapi.model.OWLAxiom; import org.semanticweb.owlapi.model.OWLClass; import org.semanticweb.owlapi.model.OWLDataFactory; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyChange; import 
org.semanticweb.owlapi.model.OWLOntologyManager;
import org.semanticweb.owlapi.model.RemoveAxiom;
import org.semanticweb.owlapi.model.parameters.Imports;
import org.semanticweb.owlapi.reasoner.Node;

/**
 * TODO this is adapted from {@link TestUtils}, see if we can get rid of
 * copy-paste.
 *
 * @author Pavel Klinov
 *
 *         pavel.klinov@uni-ulm.de
 * @author Peter Skocovsky
 *
 * @author Yevgeny Kazakov
 *
 */
public class ProofTestUtils {

	/** Asserts that the given conclusion is derivable in the given proof. */
	public static void provabilityTest(final Proof<?> proof,
			final Object conclusion) {
		assertTrue(String.format("Conclusion %s not derivable!", conclusion),
				isDerivable(proof, conclusion));
	}

	/** @return whether the conclusion is derivable in the proof. */
	public static boolean isDerivable(final Proof<?> proof,
			final Object conclusion) {
		return Proofs.isDerivable(proof, conclusion);
	}

	/** Asserts that the prover can derive the given entailed axiom. */
	public static void provabilityTest(OWLProver prover, final OWLAxiom axiom) {
		assertTrue(String.format("Entailment %s not derivable!", axiom),
				isDerivable(prover.getProof(axiom), axiom));
	}

	/**
	 * Walks the inferred class hierarchy breadth-first from owl:Thing and
	 * reports subsumption pairs to the visitor: pairs of equivalent classes
	 * within each node (both directions) and direct sub/super pairs between
	 * nodes. Trivial tautologies (X ⊑ owl:Thing, owl:Nothing ⊑ X) are skipped.
	 * For an inconsistent ontology only (owl:Thing, owl:Nothing) is visited.
	 */
	public static void visitAllSubsumptionsForProofTests(
			final ElkReasoner reasoner, final OWLDataFactory factory,
			final ProofTestVisitor visitor) {

		if (!TestIncompleteness.getValue(reasoner.checkIsConsistent())) {
			visitor.visit(factory.getOWLThing(), factory.getOWLNothing());
			return;
		}

		Set<Node<OWLClass>> visited = new HashSet<Node<OWLClass>>();
		Queue<Node<OWLClass>> toDo = new LinkedList<Node<OWLClass>>();

		toDo.add(TestIncompleteness.getValue(reasoner.computeTopClassNode()));
		visited.add(
				TestIncompleteness.getValue(reasoner.computeTopClassNode()));

		for (;;) {
			Node<OWLClass> nextNode = toDo.poll();

			if (nextNode == null) {
				break;
			}

			List<OWLClass> membersList = new ArrayList<OWLClass>(
					nextNode.getEntities());

			if (nextNode.isBottomNode()) {
				// do not check inconsistent concepts for now
				continue;
			}

			// else visit all subsumptions within the node (members are mutually
			// equivalent, so both directions hold)
			for (int i = 0; i < membersList.size() - 1; i++) {
				for (int j = i + 1; j < membersList.size(); j++) {
					OWLClass sub = membersList.get(i);
					OWLClass sup = membersList.get(j);

					if (!sub.equals(sup)) {
						if (!sup.equals(factory.getOWLThing())
								&& !sub.equals(factory.getOWLNothing())) {
							visitor.visit(sub, sup);
						}
						if (!sub.equals(factory.getOWLThing())
								&& !sup.equals(factory.getOWLNothing())) {
							visitor.visit(sup, sub);
						}
					}
				}
			}

			// go one level down
			for (Node<OWLClass> subNode : TestIncompleteness
					.getValue(reasoner.computeSubClasses(
							nextNode.getRepresentativeElement(), true))) {
				if (visited.add(subNode)) {
					toDo.add(subNode);
				}
			}

			// visit direct sub/super pairs between this node and its children
			for (OWLClass sup : nextNode.getEntities()) {
				for (Node<OWLClass> subNode : TestIncompleteness
						.getValue(reasoner.computeSubClasses(sup, true))) {
					if (subNode.isBottomNode())
						continue;
					for (OWLClass sub : subNode.getEntitiesMinusBottom()) {
						if (!sup.equals(factory.getOWLThing())) {
							visitor.visit(sub, sup);
						}
					}
				}
			}

		}
	}

	/** Same as the three-argument variant with {@code mustNotBeATautology == false}. */
	public static void proofCompletenessTest(final ElkProver prover,
			final OWLAxiom conclusion) {
		proofCompletenessTest(prover, conclusion, false);
	}

	/**
	 * Checks proof completeness for the conclusion using the prover's proof
	 * with asserted inferences removed and re-justified from the ontology.
	 */
	public static void proofCompletenessTest(final ElkProver prover,
			final OWLAxiom conclusion, final boolean mustNotBeATautology) {
		final OWLOntology ontology = prover.getRootOntology();
		Proof<Inference<OWLAxiom>> proof = Proofs.removeAssertedInferences(
				prover.getProof(conclusion),
				ontology.getAxioms(Imports.INCLUDED));
		final InferenceJustifier<Inference<OWLAxiom>, ? extends Set<? extends OWLAxiom>> justifier = InferenceJustifiers
				.justifyAssertedInferences();
		proofCompletenessTest(prover.getDelegate(), conclusion, conclusion,
				proof, justifier, mustNotBeATautology);
	}

	/** Same as the six-argument variant with {@code mustNotBeATautology == false}. */
	public static <I extends Inference<?>> void proofCompletenessTest(
			final ElkReasoner reasoner, final OWLAxiom entailment,
			final Object conclusion, final Proof<? extends I> proof,
			final InferenceJustifier<? super I, ? extends Set<? extends OWLAxiom>> justifier) {
		proofCompletenessTest(reasoner, entailment, conclusion, proof,
				justifier, false);
	}

	/**
	 * Verifies that the proof captures ALL ways to derive the conclusion:
	 * enumerates all repairs (minimal axiom sets whose removal breaks every
	 * proof), removes each repair from the ontology, and asserts the reasoner
	 * no longer derives the entailment. The ontology is restored after each
	 * check. If {@code mustNotBeATautology}, additionally asserts that at
	 * least one repair exists.
	 */
	public static <I extends Inference<?>> void proofCompletenessTest(
			final ElkReasoner reasoner, final OWLAxiom entailment,
			final Object conclusion, final Proof<? extends I> proof,
			final InferenceJustifier<? super I, ? extends Set<? extends OWLAxiom>> justifier,
			final boolean mustNotBeATautology) {
		final OWLOntology ontology = reasoner.getRootOntology();
		final OWLOntologyManager manager = ontology.getOWLOntologyManager();

		// compute repairs
		final Set<Set<? extends OWLAxiom>> repairs = new HashSet<Set<? extends OWLAxiom>>();
		MinimalSubsetEnumerators.enumerateRepairs(conclusion, proof, justifier,
				InterruptMonitor.DUMMY,
				new MinimalSubsetCollector<OWLAxiom>(repairs));

		if (mustNotBeATautology) {
			assertFalse("Entailment is a tautology; there are no repairs!",
					repairs.isEmpty());
		}

		for (final Set<? extends OWLAxiom> repair : repairs) {
			// prepare matching delete/re-add change lists so the ontology can
			// be restored exactly after the check
			final List<OWLOntologyChange> deletions = new ArrayList<OWLOntologyChange>();
			final List<OWLOntologyChange> additions = new ArrayList<OWLOntologyChange>();
			for (final OWLAxiom axiom : repair) {
				deletions.add(new RemoveAxiom(ontology, axiom));
				additions.add(new AddAxiom(ontology, axiom));
			}
			manager.applyChanges(deletions);
			final boolean conclusionDerived = TestIncompleteness
					.getValue(reasoner.checkEntailment(entailment));
			manager.applyChanges(additions);
			assertFalse("Not all proofs were found!\n" + "Conclusion: "
					+ conclusion + "\n" + "Repair: " + repair,
					conclusionDerived);
		}

	}

}
package algorithms.graphs;

import algorithms.imageProcessing.ImageExt;
import algorithms.misc.Misc;
import algorithms.util.PairInt;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 *
 * class to hold a list of region nodes and calculate an adjacency map.
 * The classes that extend it hold the edge values.
 *
 * @author nichole
 */
public class RegionAdjacencyGraph {

    //NOTE: may change to use more compact structures in future

    protected final List<Region> regions;

    // region index -> indexes of regions whose perimeters are 8-adjacent to it
    protected final Map<Integer, Set<Integer>> adjacencyMap;

    protected final int imageWidth;

    protected final int imageHeight;

    //NOTE: these are transcribed to format [row][col]
    protected final int[][] labels;

    /**
     * constructor
     * @param img source image whose dimensions and pixel indexing are used
     * @param labels1D array of labels for each pixel using the convention
     * labels1D[pixelIndex]. Note that the largest label must be less than
     * the number of pixels in the image.
     * NOTE(review): despite the original comment, the values ARE copied —
     * they are transcribed into an internal [row][col] array, so later graph
     * changes do not modify the caller's array.
     */
    public RegionAdjacencyGraph(ImageExt img, int[] labels1D) {

        imageWidth = img.getWidth();

        imageHeight = img.getHeight();

        // transcribe 1-D pixel labels into [row][col] form
        this.labels = new int[imageHeight][];
        for (int i = 0; i < imageHeight; ++i) {
            labels[i] = new int[imageWidth];
            for (int j = 0; j < imageWidth; ++j) {
                int pixIdx = img.getInternalIndex(j, i);
                labels[i][j] = labels1D[pixIdx];
            }
        }

        this.regions = createRegionsList(img, labels);

        this.adjacencyMap = createAdjacencyMap(this.regions);
    }

    /*
    public void mergeRegions(int regionIndex1, int regionIndex2) {

        Region region1 = regions.get(regionIndex1);

        Region region2 = regions.get(regionIndex2);

        // update region2 pixel labels to regionIndex1
        for (PairInt p : region2.getPoints()) {
            labels[p.getX()][p.getY()] = regionIndex1;
        }

        // update the regions
        region1.mergeIntoThis(region2);

        Integer index1 = Integer.valueOf(regionIndex1);
        Integer index2 = Integer.valueOf(regionIndex2);

        // update the adjacency map
        Set<Integer> indexes1 = adjacencyMap.get(index1);
        Set<Integer> indexes2 = adjacencyMap.get(index2);
        indexes1.addAll(indexes2);
        indexes1.remove(index2);

        for (Integer index3 : indexes2) {
            int idx3 = index3.intValue();
            if (regionIndex1 == idx3 || regionIndex2 == idx3) {
                continue;
            }
            Set<Integer> indexes4 = adjacencyMap.get(index3);
            if (indexes4 != null) {
                indexes4.remove(index2);
                indexes4.add(index1);
            }
        }

        adjacencyMap.remove(index2);
    }
    */

    /**
     * Builds the adjacency map: two regions are adjacent when a perimeter
     * point of one has an 8-neighbor that is a perimeter point of the other.
     */
    public Map<Integer, Set<Integer>> createAdjacencyMap(List<Region> aRegion) {

        Map<Integer, Set<Integer>> map = new HashMap<Integer, Set<Integer>>();

        int[] dxs = Misc.dx8;
        int[] dys = Misc.dy8;

        Map<PairInt, Integer> pToIMap = createPerimetersPointToIndexMap(aRegion);

        for (Map.Entry<PairInt, Integer> entry : pToIMap.entrySet()) {

            PairInt p = entry.getKey();
            Integer index = entry.getValue();

            Set<Integer> indexes = map.get(index);
            if (indexes == null) {
                indexes = new HashSet<Integer>();
                map.put(index, indexes);
            }

            int x = p.getX();
            int y = p.getY();

            // look at all 8 neighbors; record neighbors belonging to other regions
            for (int k = 0; k < dxs.length; ++k) {
                int x2 = x + dxs[k];
                int y2 = y + dys[k];
                PairInt p2 = new PairInt(x2, y2);
                Integer index2 = pToIMap.get(p2);
                if ((index2 != null) && !index.equals(index2)) {
                    indexes.add(index2);
                }
            }
        }

        return map;
    }

    // Maps every perimeter point of every region to that region's index.
    private Map<PairInt, Integer> createPerimetersPointToIndexMap(List<Region> regionsList) {

        Map<PairInt, Integer> map = new HashMap<PairInt, Integer>();

        for (int i = 0; i < regionsList.size(); ++i) {
            Integer index = Integer.valueOf(i);
            Region region = regionsList.get(i);
            for (PairInt p : region.getPerimeter()) {
                map.put(p, index);
            }
        }

        return map;
    }

    /**
     * @param img
     * @param labels array of format [row][col] = label where label
     * is less than the number of pixels in an image.
     * (NOTE(review): earlier javadoc said [xcoord][ycoord], but the code
     * below indexes labels[j][i] with j over rows and i over columns.)
     * @return one Region per label value 0..maxLabel; labels with no pixels
     * yield an empty Region so indexes line up with label values.
     */
    private List<Region> createRegionsList(ImageExt img, int[][] labels) {

        // NOTE(review): nPix is unused in this method
        int nPix = img.getNPixels();
        int w = img.getWidth();
        int h = img.getHeight();

        int maxLabel = Integer.MIN_VALUE;
        for (int i = 0; i < w; ++i) {
            for (int j = 0; j < h; ++j) {
                int label = labels[j][i];
                if (label > maxLabel) {
                    maxLabel = label;
                }
            }
        }

        Map<Integer, Set<PairInt>> map = createRegionsMap(img, labels);

        List<Region> regionList = new ArrayList<Region>();

        for (int i = 0; i <= maxLabel; ++i) {
            Integer label = Integer.valueOf(i);
            Set<PairInt> set = map.get(label);
            if (set == null) {
                regionList.add(new Region(new HashSet<PairInt>()));
            } else {
                regionList.add(new Region(set));
            }
        }

        return regionList;
    }

    /**
     * expecting labels[row][col], row major format.
     * Groups pixel coordinates (as (x=col, y=row) PairInts) by label value.
     * @param img
     * @param labels
     * @return
     */
    private Map<Integer, Set<PairInt>> createRegionsMap(ImageExt img, int[][] labels) {

        Map<Integer, Set<PairInt>> map = new HashMap<Integer, Set<PairInt>>();

        int w = img.getWidth();
        int h = img.getHeight();

        for (int i = 0; i < w; ++i) {
            for (int j = 0; j < h; ++j) {
                int label = labels[j][i];
                Integer index = Integer.valueOf(label);
                Set<PairInt> set = map.get(index);
                if (set == null) {
                    set = new HashSet<PairInt>();
                    map.put(index, set);
                }
                set.add(new PairInt(i, j));
            }
        }

        return map;
    }

    public Set<Integer> getAdjacentIndexes(Integer index) {
        return adjacencyMap.get(index);
    }

    public int getNumberOfRegions() {
        return regions.size();
    }
}
package com.inepex.ineForm.client.form.widgets.datetime; import com.google.gwt.dom.client.Style.Cursor; import com.google.gwt.event.dom.client.ClickEvent; import com.google.gwt.event.dom.client.ClickHandler; import com.google.gwt.user.client.ui.FlowPanel; import com.google.gwt.user.client.ui.Image; import com.google.gwt.user.client.ui.InlineLabel; import com.google.gwt.user.client.ui.Widget; import com.inepex.ineForm.client.form.widgets.datetime.IneDateGWT.Precision; import com.inepex.ineForm.client.resources.ResourceHelper; import com.inepex.ineFrame.client.misc.HandlerAwareComposite; abstract class AbstractField extends HandlerAwareComposite implements DateTimeFieldInterface { protected final FlowPanel panel_main = new FlowPanel(); protected final Precision PRECISION; protected final int stepcount; protected final IneDateGWT inedate; protected final DateTimeFieldParentInterface parent; protected final boolean usetextbox; protected InlineLabel lbl_datetime; protected DateTimeTextBox tb_datetime; protected final boolean showstepbuttons; protected Image img_step_fwd; protected Image img_step_bck; protected boolean enabled = true; protected String prevValue = ""; public AbstractField( IneDateGWT date, Precision precision, boolean showstepbuttons, int stepcount, boolean usetextbox, DateTimeFieldParentInterface parent, boolean enableselectmanager) { this.PRECISION = precision; this.inedate = date; this.usetextbox = usetextbox; this.parent = parent; this.enabled = true; this.showstepbuttons = showstepbuttons; this.stepcount = stepcount; initWidget(panel_main); if (showstepbuttons) { img_step_bck = new Image(); img_step_fwd = new Image(); img_step_bck.setResource(ResourceHelper.ineformRes().arrowLeft()); img_step_fwd.setResource(ResourceHelper.ineformRes().arrowRight()); img_step_bck.addStyleName(ResourceHelper.ineformRes().style().clickable()); img_step_fwd.addStyleName(ResourceHelper.ineformRes().style().clickable()); } if (usetextbox) { tb_datetime = new 
DateTimeTextBox(precision, enableselectmanager, this); } else { lbl_datetime = new InlineLabel(); lbl_datetime .setStyleName(ResourceHelper.ineformRes().style().abstractField_dateLabel()); } if (showstepbuttons) panel_main.add(img_step_bck); if (usetextbox) panel_main.add(tb_datetime); else panel_main.add(lbl_datetime); if (showstepbuttons) panel_main.add(img_step_fwd); setEnabled(true); } @Override public boolean isInReadOnlyMode() { return !showstepbuttons && !usetextbox; } @Override public void refresh(boolean empty, boolean initialValue) { if (usetextbox) { if (empty) { tb_datetime.setStringValue(""); } else { tb_datetime.setStringValue(inedate.getText(PRECISION)); } prevValue = tb_datetime.getStringValue(); } else { lbl_datetime.setText(inedate.getText(PRECISION)); } } @Override protected void onAttach() { super.onAttach(); if (showstepbuttons) { registerHandler(img_step_bck.addClickHandler(new StepDateTimeClickHandler(false))); registerHandler(img_step_fwd.addClickHandler(new StepDateTimeClickHandler(true))); } if (usetextbox) { registerHandler( tb_datetime.addDateTimeTextBoxEventHandler(new DateTimeTextBoxEventHandler())); } } @Override public Widget asWidget() { return this; } private class StepDateTimeClickHandler implements ClickHandler { private final boolean forward; public StepDateTimeClickHandler(boolean forward) { this.forward = forward; } @Override public void onClick(ClickEvent event) { if (!enabled) return; if (forward) { inedate.stepForward(PRECISION, stepcount); parent.childValueChanged(true, false); } else { inedate.stepForward(PRECISION, -stepcount); parent.childValueChanged(true, false); } } } @Override public void setEnabled(boolean enabled) { this.enabled = enabled; if (usetextbox) { tb_datetime.setEnabled(enabled); } if (showstepbuttons) { if (enabled) { img_step_bck.getElement().getStyle().setOpacity(1); img_step_bck.getElement().getStyle().setCursor(Cursor.POINTER); img_step_fwd.getElement().getStyle().setOpacity(1); 
img_step_fwd.getElement().getStyle().setCursor(Cursor.POINTER); } else { img_step_bck.getElement().getStyle().setOpacity(0.3); img_step_bck.getElement().getStyle().setCursor(Cursor.DEFAULT); img_step_fwd.getElement().getStyle().setOpacity(0.3); img_step_fwd.getElement().getStyle().setCursor(Cursor.DEFAULT); } } } private class DateTimeTextBoxEventHandler implements DateTimeTextBox.DateTimeTextBoxEventHandler { public DateTimeTextBoxEventHandler() {} @Override public void onChange() { textBoxChange(); } } public void textBoxChange() { if (prevValue.equals(tb_datetime.getStringValue())) return; if (tb_datetime.getStringValue().length() == 0) { inedate.setDateNull(PRECISION); } else { try { inedate.setDate( PRECISION, PRECISION.getFormatter().parse(tb_datetime.getStringValue())); } catch (Exception e) { e.printStackTrace(); } } parent.childValueChanged(true, false); } @Override public boolean isEmpty() { return inedate.isEmpty(PRECISION); } @Override public boolean isNull() { // TODO implement more suitable behave return inedate.isEmpty(PRECISION); } @Override public boolean isFocusable() { return usetextbox; } @Override public void setFocus(boolean focused) { tb_datetime.setFocus(focused); } @Override public boolean isTextBox() { return usetextbox; } }
package org.basex.query.value.item;

import static org.basex.query.QueryError.*;

import java.math.*;
import java.util.*;
import java.util.regex.*;

import javax.xml.datatype.*;

import org.basex.query.*;
import org.basex.query.util.collation.*;
import org.basex.query.value.type.*;
import org.basex.util.*;

/**
 * Abstract super class for date items.
 *
 * <p>Stores date/time components in a normalized internal representation:
 * months and days are zero-based, BC years have {@code +1} added (so the year
 * {@code 0} internally denotes 1 BC), and "undefined" is encoded via sentinel
 * values ({@code -1}, {@link Long#MAX_VALUE}, {@link Short#MAX_VALUE}).</p>
 *
 * @author BaseX Team 2005-16, BSD License
 * @author Christian Gruen
 */
public abstract class ADate extends ADateDur {
  /** Maximum value for computations on year value based on long range. */
  static final long MAX_YEAR = (long) (Long.MAX_VALUE / 365.2425) - 2;
  /** Minimum year value. */
  static final long MIN_YEAR = -MAX_YEAR;
  /** Constant for counting negative years (divisible by 400, so leap-year
   * arithmetic is preserved when it is added as an offset). */
  private static final long ADD_NEG = (MAX_YEAR / 400 + 1) * 400;

  /** Pattern for two digits. */
  static final String DD = "(\\d{2})";
  /** Year pattern (at least four digits; no leading zeros beyond that; optional sign). */
  static final String YEAR = "(-?(000[1-9]|00[1-9]\\d|0[1-9]\\d{2}|[1-9]\\d{3,}))";
  /** Timezone pattern ({@code +hh:mm}, {@code -hh:mm} or {@code Z}; optional). */
  static final String ZONE = "((\\+|-)" + DD + ':' + DD + "|Z)?";
  /** Days per month (non-leap year); leap years are handled in {@link #dpm(long, int)}. */
  static final byte[] DAYS = { 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 };

  /** Date pattern. */
  private static final Pattern DATE = Pattern.compile(YEAR + '-' + DD + '-' + DD + ZONE);
  /** Time pattern. */
  private static final Pattern TIME = Pattern.compile(
      DD + ':' + DD + ':' + "(\\d{2}(\\.\\d+)?)" + ZONE);

  /** Year.
   * <ul>
   *   <li> 1 - {@code Long#MAX_VALUE}-1: AD</li>
   *   <li> 0 - {@link Long#MIN_VALUE}: BC, +1 added</li>
   *   <li> {@link Long#MAX_VALUE}: undefined</li>
   * </ul> */
  long yea = Long.MAX_VALUE;
  /** Month ({@code 0-11}). {@code -1}: undefined. */
  byte mon = -1;
  /** Day ({@code 0-30}). {@code -1}: undefined. */
  byte day = -1;
  /** Hour ({@code 0-23}). {@code -1}: undefined. */
  byte hou = -1;
  /** Minute ({@code 0-59}). {@code -1}: undefined. */
  byte min = -1;
  /** Timezone in minutes ({@code -14*60} to {@code 14*60}).
   * {@link Short#MAX_VALUE}: undefined. */
  short tz = Short.MAX_VALUE;

  /** Datatype factory (used to convert instances to {@link XMLGregorianCalendar}). */
  static final DatatypeFactory DF;

  static {
    try {
      DF = DatatypeFactory.newInstance();
    } catch(final Exception ex) {
      // a JAXP implementation is part of the JDK; failure here is a broken runtime
      throw Util.notExpected(ex);
    }
  }

  /**
   * Constructor (copies all components of the given date).
   * @param type item type
   * @param date date reference
   */
  ADate(final Type type, final ADate date) {
    super(type);
    yea = date.yea;
    mon = date.mon;
    day = date.day;
    hou = date.hou;
    min = date.min;
    sec = date.sec;
    tz = date.tz;
  }

  /**
   * Constructor (all components stay undefined).
   * @param type item type
   */
  ADate(final Type type) {
    super(type);
  }

  /**
   * Initializes the date format (year, month, day and optional timezone).
   * @param d input
   * @param e example format
   * @param ii input info
   * @throws QueryException query exception
   */
  final void date(final byte[] d, final String e, final InputInfo ii) throws QueryException {
    final Matcher mt = DATE.matcher(Token.string(d).trim());
    if(!mt.matches()) throw dateError(d, e, ii);
    yea = toLong(mt.group(1), false, ii);
    // +1 is added to BC values to simplify computations
    if(yea < 0) yea++;
    // groups 3/4: month and day (stored zero-based)
    mon = (byte) (Strings.toInt(mt.group(3)) - 1);
    day = (byte) (Strings.toInt(mt.group(4)) - 1);
    if(mon < 0 || mon >= 12 || day < 0 || day >= dpm(yea, mon)) throw dateError(d, e, ii);
    if(yea <= MIN_YEAR || yea > MAX_YEAR) throw DATERANGE_X_X.get(ii, type, chop(d, ii));
    zone(mt, 5, d, ii);
  }

  /**
   * Initializes the time format (hour, minute, second and optional timezone).
   * An input of {@code 24:00:00} is normalized to {@code 00:00:00} of the next day.
   * @param d input format
   * @param e expected format
   * @param ii input info
   * @throws QueryException query exception
   */
  final void time(final byte[] d, final String e, final InputInfo ii) throws QueryException {
    final Matcher mt = TIME.matcher(Token.string(d).trim());
    if(!mt.matches()) throw dateError(d, e, ii);
    hou = (byte) Strings.toInt(mt.group(1));
    min = (byte) Strings.toInt(mt.group(2));
    sec = toDecimal(mt.group(3), false, ii);
    // hour 24 is only valid with minutes and seconds equal to zero
    if(min >= 60 || sec.compareTo(BD60) >= 0 || hou > 24 ||
       hou == 24 && (min > 0 || sec.compareTo(BigDecimal.ZERO) > 0)) throw dateError(d, e, ii);
    zone(mt, 5, d, ii);
    if(hou == 24) {
      hou = 0;
      add(DAYSECONDS);
    }
  }

  /**
   * Initializes the timezone from the given matcher groups.
   * @param matcher matcher
   * @param pos first matching position (group of the whole zone expression)
   * @param value value (used in error messages)
   * @param ii input info
   * @throws QueryException query exception
   */
  final void zone(final Matcher matcher, final int pos, final byte[] value, final InputInfo ii)
      throws QueryException {
    final String z = matcher.group(pos);
    // no timezone specified: leave tz undefined
    if(z == null) return;
    if("Z".equals(z)) {
      tz = 0;
    } else {
      // groups pos+2/pos+3: hours and minutes of the offset; maximum offset is 14:00
      final int th = Strings.toInt(matcher.group(pos + 2));
      final int tm = Strings.toInt(matcher.group(pos + 3));
      if(th > 14 || tm > 59 || th == 14 && tm != 0) throw INVALIDZONE_X.get(ii, value);
      final int mn = th * 60 + tm;
      tz = (short) ("-".equals(matcher.group(pos + 1)) ? -mn : mn);
    }
  }

  /**
   * Adds/subtracts the specified dayTime duration.
   * @param dur duration
   * @param plus plus/minus flag
   */
  final void calc(final DTDur dur, final boolean plus) {
    add(plus ? dur.sec : dur.sec.negate());
  }

  /**
   * Adds/subtracts the specified yearMonth duration. The day is clamped to the
   * last valid day of the resulting month.
   * @param dur duration
   * @param plus plus/minus flag
   * @param ii input info
   * @throws QueryException query exception
   */
  final void calc(final YMDur dur, final boolean plus, final InputInfo ii) throws QueryException {
    final long m = plus ? dur.mon : -dur.mon;
    final long mn = mon + m;
    mon = (byte) mod(mn, 12);
    yea += div(mn, 12);
    day = (byte) Math.min(dpm(yea, mon) - 1, day);
    if(yea <= MIN_YEAR || yea > MAX_YEAR) throw YEARRANGE_X.get(ii, yea);
  }

  /**
   * Adds the specified dayTime duration (in seconds), carrying overflow into
   * minutes, hours, and finally the date components.
   * @param add value to be added
   */
  private void add(final BigDecimal add) {
    // normalized modulo: sc % 60 vs. (-sc + sc % 60 + 60 + sc) % 60
    final BigDecimal sc = sec().add(add);
    sec = sc.signum() >= 0 ? sc.remainder(BD60) :
      sc.negate().add(sc.remainder(BD60)).add(BD60).add(sc).remainder(BD60);
    // carry whole minutes, hours and days; undefined components count as 0
    final long mn = Math.max(min(), 0) + div(sc.longValue(), 60);
    min = (byte) mod(mn, 60);
    final long ho = Math.max(hou, 0) + div(mn, 60);
    hou = (byte) mod(ho, 24);
    final long da = div(ho, 24);
    final long[] ymd = ymd(days().add(BigDecimal.valueOf(da)));
    yea = ymd[0];
    mon = (byte) ymd[1];
    day = (byte) ymd[2];
  }

  /**
   * Returns a normalized module value for negative and positive values.
   * @param value input value
   * @param mod modulo
   * @return result
   */
  private static long mod(final long value, final int mod) {
    // for negative input, shift by a large multiple of mod to stay non-negative
    return value > 0 ? value % mod : (Long.MAX_VALUE / mod * mod + value) % mod;
  }

  /**
   * Returns a normalized division value for negative and positive values
   * (i.e., floor division rather than Java's truncating division).
   * @param value input value
   * @param div divisor
   * @return result
   */
  private static long div(final long value, final int div) {
    return value < 0 ? (value + 1) / div - 1 : value / div;
  }

  /**
   * Adjusts the timezone.
   * @param zone timezone
   * @param spec indicates if zone has been specified (may be {@code null})
   * @param ii input info
   * @throws QueryException query exception
   */
  public abstract void timeZone(final DTDur zone, final boolean spec, final InputInfo ii)
      throws QueryException;

  /**
   * Adjusts the timezone. If no zone is given, the JVM's current default zone
   * (including DST) is used; if a zone was already set, the time is shifted
   * accordingly so that the instant stays the same.
   * @param zone timezone
   * @param spec indicates if zone has been specified (may be {@code null})
   * @param ii input info
   * @throws QueryException query exception
   */
  void tz(final DTDur zone, final boolean spec, final InputInfo ii) throws QueryException {
    final short t;
    if(spec && zone == null) {
      // explicitly specified empty zone: drop the timezone
      t = Short.MAX_VALUE;
    } else {
      if(zone == null) {
        final Calendar c = Calendar.getInstance();
        t = (short) ((c.get(Calendar.ZONE_OFFSET) + c.get(Calendar.DST_OFFSET)) / 60000);
      } else {
        t = (short) (zone.min() + zone.hou() * 60);
        if(zone.sec().signum() != 0) throw ZONESEC_X.get(ii, zone);
        if(Math.abs(t) > 60 * 14 || zone.day() != 0) throw INVALZONE_X.get(ii, zone);
      }
      // change time if two competing time zones exist
      if(tz != Short.MAX_VALUE) add(BigDecimal.valueOf(60L * (t - tz)));
    }
    tz = t;
  }

  @Override
  public final long yea() {
    // undo the internal +1 offset for BC years
    return yea > 0 ? yea : yea - 1;
  }

  @Override
  public final long mon() {
    return mon + 1;
  }

  @Override
  public final long day() {
    return day + 1;
  }

  @Override
  public final long hou() {
    return hou;
  }

  @Override
  public final long min() {
    return min;
  }

  @Override
  public final BigDecimal sec() {
    return sec == null ? BigDecimal.ZERO : sec;
  }

  /**
   * Returns the timezone in minutes.
   * @return time zone
   */
  public final int tz() {
    return tz;
  }

  /**
   * Returns if the timezone is defined.
   * @return result of check
   */
  public final boolean hasTz() {
    return tz != Short.MAX_VALUE;
  }

  @Override
  public byte[] string(final InputInfo ii) {
    final TokenBuilder tb = new TokenBuilder();
    final boolean ymd = yea != Long.MAX_VALUE;
    if(ymd) {
      if(yea <= 0) tb.add('-');
      prefix(tb, Math.abs(yea()), 4);
      tb.add('-');
      prefix(tb, mon(), 2);
      tb.add('-');
      prefix(tb, day(), 2);
    }
    if(hou >= 0) {
      if(ymd) tb.add('T');
      prefix(tb, hou(), 2);
      tb.add(':');
      prefix(tb, min(), 2);
      tb.add(':');
      // NOTE(review): relies on sec != null whenever hou >= 0 — holds for
      // values parsed via time(), which always assigns sec; confirm for
      // all subclass construction paths
      if(sec.intValue() < 10) tb.add('0');
      tb.addExt(Token.chopNumber(Token.token(sec().abs().toPlainString())));
    }
    zone(tb);
    return tb.finish();
  }

  /**
   * Adds the time zone to the specified token builder.
   * @param tb token builder
   */
  void zone(final TokenBuilder tb) {
    if(tz == Short.MAX_VALUE) return;
    if(tz == 0) {
      tb.add('Z');
    } else {
      tb.add(tz > 0 ? '+' : '-');
      prefix(tb, Math.abs(tz) / 60, 2);
      tb.add(':');
      prefix(tb, Math.abs(tz) % 60, 2);
    }
  }

  /**
   * Prefixes the specified number of zero digits before a number.
   * @param tb token builder
   * @param number number to be printed
   * @param zero maximum number of zero digits
   */
  static void prefix(final TokenBuilder tb, final long number, final int zero) {
    final byte[] t = Token.token(number);
    for(int i = t.length; i < zero; i++) tb.add('0');
    tb.add(t);
  }

  @Override
  public final boolean eq(final Item it, final Collation coll, final StaticContext sc,
      final InputInfo ii) throws QueryException {
    // compare the absolute instants (days + seconds, timezone-adjusted)
    final ADate d = (ADate) (it instanceof ADate ? it : type.cast(it, null, null, ii));
    final BigDecimal d1 = seconds().add(days().multiply(DAYSECONDS));
    final BigDecimal d2 = d.seconds().add(d.days().multiply(DAYSECONDS));
    return d1.compareTo(d2) == 0;
  }

  @Override
  public boolean sameKey(final Item it, final InputInfo ii) throws QueryException {
    // values with and without timezone never share a key
    return it instanceof ADate && hasTz() == ((ADate) it).hasTz() && eq(it, null, null, ii);
  }

  @Override
  public int hash(final InputInfo ii) throws QueryException {
    return seconds().add(days().multiply(DAYSECONDS)).intValue();
  }

  @Override
  public int diff(final Item it, final Collation coll, final InputInfo ii) throws QueryException {
    final ADate d = (ADate) (it instanceof ADate ? it : type.cast(it, null, null, ii));
    final BigDecimal d1 = seconds().add(days().multiply(DAYSECONDS));
    final BigDecimal d2 = d.seconds().add(d.days().multiply(DAYSECONDS));
    return d1.compareTo(d2);
  }

  @Override
  public final XMLGregorianCalendar toJava() {
    // undefined components are mapped to the respective JAXP sentinels
    return DF.newXMLGregorianCalendar(
      yea == Long.MAX_VALUE ? null : BigInteger.valueOf(yea > 0 ? yea : yea - 1),
      mon >= 0 ? mon + 1 : Integer.MIN_VALUE,
      day >= 0 ? day + 1 : Integer.MIN_VALUE,
      hou >= 0 ? hou : Integer.MIN_VALUE,
      min >= 0 ? min : Integer.MIN_VALUE,
      sec != null ? sec.intValue() : Integer.MIN_VALUE,
      sec != null ? sec.remainder(BigDecimal.ONE) : null,
      tz == Short.MAX_VALUE ? Integer.MIN_VALUE : tz);
  }

  /**
   * Returns the date in seconds, adjusted by the timezone offset.
   * If no timezone is defined, the JVM's current default offset is applied.
   * @return seconds
   */
  final BigDecimal seconds() {
    int z = tz;
    if(z == Short.MAX_VALUE) {
      // [CG] XQuery, DateTime: may be removed
      final long n = System.currentTimeMillis();
      z = Calendar.getInstance().getTimeZone().getOffset(n) / 60000;
    }
    return (sec == null ? BigDecimal.ZERO : sec).add(
        BigDecimal.valueOf(Math.max(0, hou) * 3600 + Math.max(0, min) * 60 - z * 60));
  }

  /**
   * Returns a day count for this value (with the {@link #ADD_NEG} year offset applied
   * so the computation stays in the non-negative range).
   * @return days
   */
  final BigDecimal days() {
    final long y = yea == Long.MAX_VALUE ? 1 : yea;
    return days(y + ADD_NEG, Math.max(mon, 0), Math.max(day, 0));
  }

  /**
   * Returns a day count for the specified years, months and days.
   * All values must be specified in their internal representation
   * (undefined values are supported, too).
   * Algorithm is derived from J R Stockton (http://www.merlyn.demon.co.uk/daycount.htm).
   * @param year year
   * @param month month
   * @param day days
   * @return days
   */
  private static BigDecimal days(final long year, final int month, final int day) {
    // shift January/February to the end of the previous year (months 13/14)
    final long y = year - (month < 2 ? 1 : 0);
    final int m = month + (month < 2 ? 13 : 1);
    final int d = day + 1;
    return BD365.multiply(BigDecimal.valueOf(y)).add(
        BigDecimal.valueOf(y / 4 - y / 100 + y / 400 - 92 + d + (153 * m - 2) / 5));
  }

  /**
   * Converts a day count into year, month and day components.
   * Algorithm is derived from J R Stockton (http://www.merlyn.demon.co.uk/daycount.htm).
   * @param days day count
   * @return result array
   */
  private static long[] ymd(final BigDecimal days) {
    BigDecimal d = days;
    // centuries within the 400-year Gregorian cycle
    BigDecimal t = d.add(BD36525).multiply(BD4).
        divideToIntegralValue(BD146097).subtract(BigDecimal.ONE);
    BigDecimal y = BD100.multiply(t);
    d = d.subtract(BD36524.multiply(t).add(t.divideToIntegralValue(BD4)));
    // years within the century
    t = d.add(BD366).multiply(BD4).divideToIntegralValue(BD1461).subtract(BigDecimal.ONE);
    y = y.add(t);
    d = d.subtract(BD365.multiply(t).add(t.divideToIntegralValue(BD4)));
    // months within the (March-based) year
    final BigDecimal m = BD5.multiply(d).add(BD2).divideToIntegralValue(BD153);
    d = d.subtract(BD153.multiply(m).add(BD2).divideToIntegralValue(BD5));
    long mm = m.longValue();
    if(mm > 9) {
      mm -= 12;
      y = y.add(BigDecimal.ONE);
    }
    return new long[] {
      y.subtract(BigDecimal.valueOf(ADD_NEG)).longValue(), mm + 2, d.longValue() };
  }

  /**
   * Returns days per month, considering leap years.
   * @param yea year
   * @param mon month (zero-based; {@code 1} = February)
   * @return days
   */
  public static int dpm(final long yea, final int mon) {
    final byte l = DAYS[mon];
    return mon == 1 && yea % 4 == 0 && (yea % 100 != 0 || yea % 400 == 0) ? l + 1 : l;
  }

  @Override
  public final String toString() {
    return Util.info("\"%\"", string(null));
  }
}
/*
 * Copyright (C) 2015 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package com.google.cloud.dataflow.sdk.util;

import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertThat;

import com.google.cloud.dataflow.sdk.TestUtils.KvMatcher;
import com.google.cloud.dataflow.sdk.WindowMatchers;
import com.google.cloud.dataflow.sdk.options.PipelineOptionsFactory;
import com.google.cloud.dataflow.sdk.transforms.Combine.CombineFn;
import com.google.cloud.dataflow.sdk.transforms.windowing.BoundedWindow;
import com.google.cloud.dataflow.sdk.transforms.windowing.FixedWindows;
import com.google.cloud.dataflow.sdk.transforms.windowing.IntervalWindow;
import com.google.cloud.dataflow.sdk.transforms.windowing.OutputTimeFn;
import com.google.cloud.dataflow.sdk.transforms.windowing.OutputTimeFns;
import com.google.cloud.dataflow.sdk.transforms.windowing.PaneInfo;
import com.google.cloud.dataflow.sdk.transforms.windowing.Sessions;
import com.google.cloud.dataflow.sdk.transforms.windowing.SlidingWindows;
import com.google.cloud.dataflow.sdk.util.common.CounterSet;
import com.google.cloud.dataflow.sdk.values.KV;
import com.google.cloud.dataflow.sdk.values.TupleTag;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;

import org.hamcrest.BaseMatcher;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.joda.time.Duration;
import org.joda.time.Instant;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

/**
 * Properties of {@link GroupAlsoByWindowsDoFn}.
 *
 * <p>Some properties may not hold of some implementations, due to restrictions on the context
 * in which the implementation is applicable. For example,
 * {@link GroupAlsoByWindowsViaIteratorsDoFn} does not support merging window functions.
 *
 * <p>Each property method drives the implementation under test through
 * {@link #runGABW} and asserts on the produced windowed values.
 */
public class GroupAlsoByWindowsProperties {

  /**
   * A factory of {@link GroupAlsoByWindowsDoFn} so that the various properties can provide
   * the appropriate windowing strategy under test.
   */
  public interface GroupAlsoByWindowsDoFnFactory<K, InputT, OutputT> {
    <W extends BoundedWindow> GroupAlsoByWindowsDoFn<K, InputT, OutputT, W>
    forStrategy(WindowingStrategy<?, W> strategy);
  }

  /**
   * Tests that for empty input and the given {@link WindowingStrategy}, the provided GABW
   * implementation produces no output.
   *
   * <p>The input type is deliberately left as a wildcard, since it is not relevant.
   */
  public static <K, InputT, OutputT> void emptyInputEmptyOutput(
      GroupAlsoByWindowsDoFnFactory<K, InputT, OutputT> gabwFactory)
          throws Exception {
    WindowingStrategy<?, IntervalWindow> windowingStrategy =
        WindowingStrategy.of(FixedWindows.of(Duration.millis(10)));

    List<?> result = runGABW(
        gabwFactory,
        windowingStrategy,
        (K) null, // key should never be used
        Collections.<WindowedValue<InputT>>emptyList());

    assertThat(result.size(), equalTo(0));
  }

  /**
   * Tests that for a simple sequence of elements on the same key, the given GABW implementation
   * correctly groups them according to fixed windows.
   */
  public static void groupsElementsIntoFixedWindows(
      GroupAlsoByWindowsDoFnFactory<String, String, Iterable<String>> gabwFactory)
          throws Exception {
    WindowingStrategy<?, IntervalWindow> windowingStrategy =
        WindowingStrategy.of(FixedWindows.of(Duration.millis(10)));

    List<WindowedValue<KV<String, Iterable<String>>>> result =
        runGABW(gabwFactory, windowingStrategy, "key",
            WindowedValue.of(
                "v1", new Instant(1), Arrays.asList(window(0, 10)), PaneInfo.NO_FIRING),
            WindowedValue.of(
                "v2", new Instant(2), Arrays.asList(window(0, 10)), PaneInfo.NO_FIRING),
            WindowedValue.of(
                "v3", new Instant(13), Arrays.asList(window(10, 20)), PaneInfo.NO_FIRING));

    assertThat(result.size(), equalTo(2));
    // default output timestamp policy: earliest input timestamp per window
    assertThat(result, containsInAnyOrder(
        gabwResult(window(0, 10), new Instant(1), "v1", "v2"),
        gabwResult(window(10, 20), new Instant(13), "v3")));
  }

  /**
   * Tests that for a simple sequence of elements on the same key, the given GABW implementation
   * correctly groups them into sliding windows.
   *
   * <p>In the input here, each element occurs in multiple windows.
   */
  public static void groupsElementsIntoSlidingWindows(
      GroupAlsoByWindowsDoFnFactory<String, String, Iterable<String>> gabwFactory)
          throws Exception {
    WindowingStrategy<?, IntervalWindow> windowingStrategy = WindowingStrategy.of(
        SlidingWindows.of(Duration.millis(20)).every(Duration.millis(10)));

    List<WindowedValue<KV<String, Iterable<String>>>> result =
        runGABW(gabwFactory, windowingStrategy, "key",
            WindowedValue.of(
                "v1", new Instant(5),
                Arrays.asList(window(-10, 10), window(0, 20)), PaneInfo.NO_FIRING),
            WindowedValue.of(
                "v2", new Instant(15),
                Arrays.asList(window(0, 20), window(10, 30)), PaneInfo.NO_FIRING));

    assertThat(result.size(), equalTo(3));
    assertThat(result, containsInAnyOrder(
        gabwResult(window(-10, 10), new Instant(5), "v1"),
        gabwResult(window(0, 20), new Instant(10), "v1", "v2"),
        gabwResult(window(10, 30), new Instant(20), "v2")));
  }

  /**
   * Tests that for a simple sequence of elements on the same key, the given GABW implementation
   * correctly groups and combines them according to sliding windows.
   *
   * <p>In the input here, each element occurs in multiple windows.
   */
  public static void combinesElementsInSlidingWindows(
      GroupAlsoByWindowsDoFnFactory<String, Long, Long> gabwFactory,
      CombineFn<Long, ?, Long> combineFn)
          throws Exception {
    WindowingStrategy<?, IntervalWindow> windowingStrategy = WindowingStrategy.of(
        SlidingWindows.of(Duration.millis(20)).every(Duration.millis(10)));

    List<WindowedValue<KV<String, Long>>> result =
        runGABW(gabwFactory, windowingStrategy, "k",
            WindowedValue.of(
                1L, new Instant(5),
                Arrays.asList(window(-10, 10), window(0, 20)), PaneInfo.NO_FIRING),
            WindowedValue.of(
                2L, new Instant(15),
                Arrays.asList(window(0, 20), window(10, 30)), PaneInfo.NO_FIRING),
            WindowedValue.of(
                4L, new Instant(18),
                Arrays.asList(window(0, 20), window(10, 30)), PaneInfo.NO_FIRING));

    assertThat(result.size(), equalTo(3));
    assertThat(result, contains(
        WindowMatchers.isSingleWindowedValue(
            KvMatcher.isKv(
                equalTo("k"),
                equalTo(combineFn.apply(ImmutableList.of(1L)))),
            5, // aggregate timestamp
            -10, // window start
            10), // window end
        WindowMatchers.isSingleWindowedValue(
            KvMatcher.isKv(
                equalTo("k"),
                equalTo(combineFn.apply(ImmutableList.of(1L, 2L, 4L)))),
            10, // aggregate timestamp
            0, // window start
            20), // window end
        WindowMatchers.isSingleWindowedValue(
            KvMatcher.isKv(
                equalTo("k"),
                equalTo(combineFn.apply(ImmutableList.of(2L, 4L)))),
            20, // aggregate timestamp
            10, // window start
            30))); // window end
  }

  /**
   * Tests that the given GABW implementation correctly groups elements that fall into overlapping
   * windows that are not merged.
   */
  public static void groupsIntoOverlappingNonmergingWindows(
      GroupAlsoByWindowsDoFnFactory<String, String, Iterable<String>> gabwFactory)
          throws Exception {
    WindowingStrategy<?, IntervalWindow> windowingStrategy =
        WindowingStrategy.of(FixedWindows.of(Duration.millis(10)));

    List<WindowedValue<KV<String, Iterable<String>>>> result =
        runGABW(gabwFactory, windowingStrategy, "key",
            WindowedValue.of(
                "v1", new Instant(1), Arrays.asList(window(0, 5)), PaneInfo.NO_FIRING),
            WindowedValue.of(
                "v2", new Instant(4), Arrays.asList(window(1, 5)), PaneInfo.NO_FIRING),
            WindowedValue.of(
                "v3", new Instant(4), Arrays.asList(window(0, 5)), PaneInfo.NO_FIRING));

    assertThat(result.size(), equalTo(2));
    // windows (0,5) and (1,5) overlap but must stay distinct (no merging)
    assertThat(result,
        containsInAnyOrder(gabwResult(window(0, 5), new Instant(1), "v1", "v3"),
            gabwResult(window(1, 5), new Instant(4), "v2")));
  }

  /**
   * Tests that the given GABW implementation correctly groups elements into merged sessions.
   */
  public static void groupsElementsInMergedSessions(
      GroupAlsoByWindowsDoFnFactory<String, String, Iterable<String>> gabwFactory)
          throws Exception {
    WindowingStrategy<?, IntervalWindow> windowingStrategy =
        WindowingStrategy.of(Sessions.withGapDuration(Duration.millis(10)));

    List<WindowedValue<KV<String, Iterable<String>>>> result =
        runGABW(gabwFactory, windowingStrategy, "key",
            WindowedValue.of(
                "v1", new Instant(0), Arrays.asList(window(0, 10)), PaneInfo.NO_FIRING),
            WindowedValue.of(
                "v2", new Instant(5), Arrays.asList(window(5, 15)), PaneInfo.NO_FIRING),
            WindowedValue.of(
                "v3", new Instant(15), Arrays.asList(window(15, 25)), PaneInfo.NO_FIRING));

    assertThat(result.size(), equalTo(2));
    // sessions (0,10) and (5,15) overlap and must merge into (0,15)
    assertThat(result, containsInAnyOrder(
        gabwResult(window(0, 15), new Instant(0), "v1", "v2"),
        gabwResult(window(15, 25), new Instant(15), "v3")));
  }

  /**
   * Tests that the given {@link GroupAlsoByWindowsDoFn} implementation combines elements per
   * session window correctly according to the provided {@link CombineFn}.
   */
  public static void combinesElementsPerSession(
      GroupAlsoByWindowsDoFnFactory<String, Long, Long> gabwFactory,
      CombineFn<Long, ?, Long> combineFn)
          throws Exception {
    WindowingStrategy<?, IntervalWindow> windowingStrategy =
        WindowingStrategy.of(Sessions.withGapDuration(Duration.millis(10)));

    List<WindowedValue<KV<String, Long>>> result =
        runGABW(gabwFactory, windowingStrategy, "k",
            WindowedValue.of(
                1L, new Instant(0), Arrays.asList(window(0, 10)), PaneInfo.NO_FIRING),
            WindowedValue.of(
                2L, new Instant(5), Arrays.asList(window(5, 15)), PaneInfo.NO_FIRING),
            WindowedValue.of(
                4L, new Instant(15), Arrays.asList(window(15, 25)), PaneInfo.NO_FIRING));

    assertThat(result, contains(
        WindowMatchers.isSingleWindowedValue(
            KvMatcher.isKv(
                equalTo("k"),
                equalTo(combineFn.apply(ImmutableList.of(1L, 2L)))),
            0, // aggregate timestamp
            0, // window start
            15), // window end
        WindowMatchers.isSingleWindowedValue(
            KvMatcher.isKv(
                equalTo("k"),
                equalTo(combineFn.apply(ImmutableList.of(4L)))),
            15, // aggregate timestamp
            15, // window start
            25))); // window end
  }

  /**
   * Tests that for a simple sequence of elements on the same key, the given GABW implementation
   * correctly groups them according to fixed windows and also sets the output timestamp
   * according to the policy {@link OutputTimeFns#outputAtEndOfWindow()}.
   */
  public static void groupsElementsIntoFixedWindowsWithEndOfWindowTimestamp(
      GroupAlsoByWindowsDoFnFactory<String, String, Iterable<String>> gabwFactory)
          throws Exception {
    WindowingStrategy<?, IntervalWindow> windowingStrategy =
        WindowingStrategy.of(FixedWindows.of(Duration.millis(10)))
        .withOutputTimeFn(OutputTimeFns.outputAtEndOfWindow());

    List<WindowedValue<KV<String, Iterable<String>>>> result =
        runGABW(gabwFactory, windowingStrategy, "key",
            WindowedValue.of(
                "v1", new Instant(1), Arrays.asList(window(0, 10)), PaneInfo.NO_FIRING),
            WindowedValue.of(
                "v2", new Instant(2), Arrays.asList(window(0, 10)), PaneInfo.NO_FIRING),
            WindowedValue.of(
                "v3", new Instant(13), Arrays.asList(window(10, 20)), PaneInfo.NO_FIRING));

    assertThat(result.size(), equalTo(2));
    assertThat(result, containsInAnyOrder(
        gabwResult(window(0, 10), window(0, 10).maxTimestamp(), "v1", "v2"),
        gabwResult(window(10, 20), window(10, 20).maxTimestamp(), "v3")));
  }

  /**
   * Tests that for a simple sequence of elements on the same key, the given GABW implementation
   * correctly groups them according to fixed windows and also sets the output timestamp
   * according to a custom {@link OutputTimeFn}.
   */
  public static void groupsElementsIntoFixedWindowsWithCustomTimestamp(
      GroupAlsoByWindowsDoFnFactory<String, String, Iterable<String>> gabwFactory)
      throws Exception {
    WindowingStrategy<?, IntervalWindow> windowingStrategy =
        WindowingStrategy.of(FixedWindows.of(Duration.millis(10)))
            .withOutputTimeFn(new OutputTimeFn.Defaults<IntervalWindow>() {
              // shifts each input timestamp forward by 1ms, capped at the window end
              @Override
              public Instant assignOutputTime(Instant inputTimestamp, IntervalWindow window) {
                return inputTimestamp.isBefore(window.maxTimestamp())
                    ? inputTimestamp.plus(1) : window.maxTimestamp();
              }

              @Override
              public Instant combine(Instant outputTime, Instant otherOutputTime) {
                return outputTime.isBefore(otherOutputTime) ? outputTime : otherOutputTime;
              }

              @Override
              public boolean dependsOnlyOnEarliestInputTimestamp() {
                return true;
              }
            });

    List<WindowedValue<KV<String, Iterable<String>>>> result =
        runGABW(gabwFactory, windowingStrategy, "key",
            WindowedValue.of("v1", new Instant(1), Arrays.asList(window(0, 10)),
                PaneInfo.NO_FIRING),
            WindowedValue.of("v2", new Instant(2), Arrays.asList(window(0, 10)),
                PaneInfo.NO_FIRING),
            WindowedValue.of("v3", new Instant(13), Arrays.asList(window(10, 20)),
                PaneInfo.NO_FIRING));

    assertThat(result.size(), equalTo(2));
    // expected timestamps are the earliest input + 1 per window (see assignOutputTime)
    assertThat(result, containsInAnyOrder(
        gabwResult(window(0, 10), new Instant(2), "v1", "v2"),
        gabwResult(window(10, 20), new Instant(14), "v3")));
  }

  /**
   * Tests that for a simple sequence of elements on the same key, the given GABW implementation
   * correctly groups them according to fixed windows and also sets the output timestamp
   * according to the policy {@link OutputTimeFns#outputAtLatestInputTimestamp()}.
   */
  public static void groupsElementsIntoFixedWindowsWithLatestTimestamp(
      GroupAlsoByWindowsDoFnFactory<String, String, Iterable<String>> gabwFactory)
          throws Exception {
    WindowingStrategy<?, IntervalWindow> windowingStrategy =
        WindowingStrategy.of(FixedWindows.of(Duration.millis(10)))
        .withOutputTimeFn(OutputTimeFns.outputAtLatestInputTimestamp());

    List<WindowedValue<KV<String, Iterable<String>>>> result =
        runGABW(gabwFactory, windowingStrategy, "k",
            WindowedValue.of(
                "v1", new Instant(1), Arrays.asList(window(0, 10)), PaneInfo.NO_FIRING),
            WindowedValue.of(
                "v2", new Instant(2), Arrays.asList(window(0, 10)), PaneInfo.NO_FIRING),
            WindowedValue.of(
                "v3", new Instant(13), Arrays.asList(window(10, 20)), PaneInfo.NO_FIRING));

    assertThat(result.size(), equalTo(2));
    assertThat(result, containsInAnyOrder(
        gabwResult(window(0, 10), new Instant(2), "v1", "v2"),
        gabwResult(window(10, 20), new Instant(13), "v3")));
  }

  /**
   * Tests that the given GABW implementation correctly groups elements into merged sessions
   * with output timestamps at the end of the merged window.
   */
  public static void groupsElementsInMergedSessionsWithEndOfWindowTimestamp(
      GroupAlsoByWindowsDoFnFactory<String, String, Iterable<String>> gabwFactory)
          throws Exception {
    WindowingStrategy<?, IntervalWindow> windowingStrategy =
        WindowingStrategy.of(Sessions.withGapDuration(Duration.millis(10)))
            .withOutputTimeFn(OutputTimeFns.outputAtEndOfWindow());

    List<WindowedValue<KV<String, Iterable<String>>>> result =
        runGABW(gabwFactory, windowingStrategy, "k",
            WindowedValue.of(
                "v1", new Instant(0), Arrays.asList(window(0, 10)), PaneInfo.NO_FIRING),
            WindowedValue.of(
                "v2", new Instant(5), Arrays.asList(window(5, 15)), PaneInfo.NO_FIRING),
            WindowedValue.of(
                "v3", new Instant(15), Arrays.asList(window(15, 25)), PaneInfo.NO_FIRING));

    assertThat(result.size(), equalTo(2));
    assertThat(result, containsInAnyOrder(
        gabwResult(window(0, 15), window(0, 15).maxTimestamp(), "v1", "v2"),
        gabwResult(window(15, 25), window(15, 25).maxTimestamp(), "v3")));
  }

  /**
   * Tests that the given GABW implementation correctly groups elements into merged sessions
   * with output timestamps at the latest input timestamp of the merged window.
   */
  public static void groupsElementsInMergedSessionsWithLatestTimestamp(
      GroupAlsoByWindowsDoFnFactory<String, String, Iterable<String>> gabwFactory)
          throws Exception {
    WindowingStrategy<?, IntervalWindow> windowingStrategy =
        WindowingStrategy.of(Sessions.withGapDuration(Duration.millis(10)))
            .withOutputTimeFn(OutputTimeFns.outputAtLatestInputTimestamp());

    List<WindowedValue<KV<String, Iterable<String>>>> result =
        runGABW(gabwFactory, windowingStrategy, "k",
            WindowedValue.of(
                "v1", new Instant(0), Arrays.asList(window(0, 10)), PaneInfo.NO_FIRING),
            WindowedValue.of(
                "v2", new Instant(5), Arrays.asList(window(5, 15)), PaneInfo.NO_FIRING),
            WindowedValue.of(
                "v3", new Instant(15), Arrays.asList(window(15, 25)), PaneInfo.NO_FIRING));

    assertThat(result.size(), equalTo(2));
    assertThat(result, containsInAnyOrder(
        gabwResult(window(0, 15), new Instant(5), "v1", "v2"),
        gabwResult(window(15, 25), new Instant(15), "v3")));
  }

  /**
   * Tests that the given {@link GroupAlsoByWindowsDoFn} implementation combines elements per
   * session window correctly according to the provided {@link CombineFn}.
   */
  public static void combinesElementsPerSessionWithEndOfWindowTimestamp(
      GroupAlsoByWindowsDoFnFactory<String, Long, Long> gabwFactory,
      CombineFn<Long, ?, Long> combineFn)
          throws Exception {
    WindowingStrategy<?, IntervalWindow> windowingStrategy =
        WindowingStrategy.of(Sessions.withGapDuration(Duration.millis(10)))
        .withOutputTimeFn(OutputTimeFns.outputAtEndOfWindow());

    List<WindowedValue<KV<String, Long>>> result =
        runGABW(gabwFactory, windowingStrategy, "k",
            WindowedValue.of(
                1L, new Instant(0), Arrays.asList(window(0, 10)), PaneInfo.NO_FIRING),
            WindowedValue.of(
                2L, new Instant(5), Arrays.asList(window(5, 15)), PaneInfo.NO_FIRING),
            WindowedValue.of(
                4L, new Instant(15), Arrays.asList(window(15, 25)), PaneInfo.NO_FIRING));

    assertThat(result.size(), equalTo(2));

    // TODO: Rewrite to use matchers rather than order-based inspection
    WindowedValue<KV<String, Long>> item0;
    WindowedValue<KV<String, Long>> item1;
    // output order is unspecified; identify items by their window
    if (result.get(0).getWindows().iterator().next().equals(window(0, 15))) {
      item0 = result.get(0);
      item1 = result.get(1);
    } else {
      item0 = result.get(1);
      item1 = result.get(0);
    }

    assertThat(item0.getValue().getValue(), equalTo(combineFn.apply(ImmutableList.of(1L, 2L))));
    assertThat(item0.getWindows(), contains(window(0, 15)));
    assertThat(item0.getTimestamp(),
        equalTo(Iterables.getOnlyElement(item0.getWindows()).maxTimestamp()));

    assertThat(item1.getValue().getValue(), equalTo(combineFn.apply(ImmutableList.of(4L))));
    assertThat(item1.getWindows(), contains(window(15, 25)));
    assertThat(item1.getTimestamp(),
        equalTo(Iterables.getOnlyElement(item1.getWindows()).maxTimestamp()));
  }

  /** Varargs convenience overload of {@link #runGABW(GroupAlsoByWindowsDoFnFactory,
   * WindowingStrategy, Object, Collection)}. */
  @SafeVarargs
  private static <K, InputT, OutputT, W extends BoundedWindow>
      List<WindowedValue<KV<K, OutputT>>> runGABW(
          GroupAlsoByWindowsDoFnFactory<K, InputT, OutputT> gabwFactory,
          WindowingStrategy<?, W> windowingStrategy,
          K key,
          WindowedValue<InputT>... values) {
    return runGABW(gabwFactory, windowingStrategy, key, Arrays.asList(values));
  }

  /** Runs the GABW DoFn under test over a single key and its windowed values,
   * returning the collected output for that key. */
  private static <K, InputT, OutputT, W extends BoundedWindow>
      List<WindowedValue<KV<K, OutputT>>> runGABW(
          GroupAlsoByWindowsDoFnFactory<K, InputT, OutputT> gabwFactory,
          WindowingStrategy<?, W> windowingStrategy,
          K key,
          Collection<WindowedValue<InputT>> values) {

    TupleTag<KV<K, OutputT>> outputTag = new TupleTag<>();
    DoFnRunnerBase.ListOutputManager outputManager = new DoFnRunnerBase.ListOutputManager();

    DoFnRunner<KV<K, Iterable<WindowedValue<InputT>>>, KV<K, OutputT>> runner =
        makeRunner(
            gabwFactory.forStrategy(windowingStrategy),
            windowingStrategy, outputTag, outputManager);

    runner.startBundle();

    if (values.size() > 0) {
      runner.processElement(WindowedValue.valueInEmptyWindows(
          KV.of(key, (Iterable<WindowedValue<InputT>>) values)));
    }

    runner.finishBundle();

    List<WindowedValue<KV<K, OutputT>>> result = outputManager.getOutput(outputTag);

    // Sanity check for corruption
    for (WindowedValue<KV<K, OutputT>> elem : result) {
      assertThat(elem.getValue().getKey(), equalTo(key));
    }

    return result;
  }

  /** Builds a simple {@link DoFnRunner} around the given DoFn with a direct-mode
   * execution context and a fresh counter set. */
  private static <K, InputT, OutputT, W extends BoundedWindow>
      DoFnRunner<KV<K, Iterable<WindowedValue<InputT>>>, KV<K, OutputT>> makeRunner(
          GroupAlsoByWindowsDoFn<K, InputT, OutputT, W> fn,
          WindowingStrategy<?, W> windowingStrategy,
          TupleTag<KV<K, OutputT>> outputTag,
          DoFnRunners.OutputManager outputManager) {

    ExecutionContext executionContext = DirectModeExecutionContext.create();
    CounterSet counters = new CounterSet();

    return DoFnRunners.simpleRunner(
        PipelineOptionsFactory.create(),
        fn,
        NullSideInputReader.empty(),
        outputManager,
        outputTag,
        new ArrayList<TupleTag<?>>(),
        executionContext.getOrCreateStepContext("GABWStep", "GABWTransform", null),
        counters.getAddCounterMutator(),
        windowingStrategy);
  }

  /** Shorthand for an {@link IntervalWindow} over {@code [start, end)} in millis. */
  private static BoundedWindow window(long start, long end) {
    return new IntervalWindow(new Instant(start), new Instant(end));
  }

  /** Matcher for a grouped output: the given values (any order) in exactly the
   * given window with exactly the given timestamp. */
  private static <T> Matcher<WindowedValue<KV<String, Iterable<T>>>> gabwResult(
      BoundedWindow window, Instant timestamp, T... values) {
    return new GroupAlsoByWindowResultMatcher<>(values, new BoundedWindow[] {window}, timestamp);
  }

  /** Hamcrest matcher backing {@link #gabwResult}; compares value multiset,
   * windows, and timestamp of a {@link WindowedValue}. */
  private static class GroupAlsoByWindowResultMatcher<T>
      extends BaseMatcher<WindowedValue<KV<String, Iterable<T>>>> {
    private final T[] values;
    private final BoundedWindow[] windows;
    private final Instant timestamp;

    private GroupAlsoByWindowResultMatcher(
        T[] values, BoundedWindow[] windows, Instant timestamp) {
      this.values = values;
      this.windows = windows;
      this.timestamp = timestamp;
    }

    @Override
    public boolean matches(Object item) {
      if (item instanceof WindowedValue
          && ((WindowedValue) item).getValue() instanceof KV) {
        WindowedValue<KV<String, Iterable<T>>> that =
            (WindowedValue<KV<String, Iterable<T>>>) item;
        return containsInAnyOrder(values).matches(that.getValue().getValue())
            && timestamp.equals(that.getTimestamp())
            && containsInAnyOrder(windows).matches(that.getWindows());
      }
      return false;
    }

    @Override
    public void describeTo(Description description) {
      description.appendText("A Windowed Value containing values ")
          .appendValueList("[", ", ", "]", values)
          .appendText(" in windows ")
          .appendValueList("[", ", ", "]", windows)
          .appendText(" with timestamp ")
          .appendValue(timestamp);
    }
  }
}
// Copyright (c) 2003-2014, Jodd Team (jodd.org). All Rights Reserved.

package jodd.db.oom;

import jodd.db.DbQuery;
import jodd.db.DbSession;
import jodd.db.DbUtil;
import jodd.db.oom.mapper.DefaultResultSetMapper;
import jodd.db.oom.mapper.ResultSetMapper;
import jodd.db.oom.sqlgen.ParameterValue;
import jodd.db.type.SqlType;
import jodd.util.StringUtil;
import jodd.log.Logger;
import jodd.log.LoggerFactory;

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.Map;

import static jodd.db.oom.DbOomUtil.initialCollectionSize;

/**
 * A simple ORM extension for {@link DbQuery}.
 * <p>
 * OOM extension may map results to objects in two ways:
 * <ul>
 * <li><i>auto</i> mode - when result set is mapped to provided types, and</li>
 * <li><i>mapped</i> mode - requires explicit mapping definitions.</li>
 * </ul>
 */
public class DbOomQuery extends DbQuery {

	private static final Logger log = LoggerFactory.getLogger(DbOomQuery.class);

	// ---------------------------------------------------------------- default ctors

	/** Creates a query over an explicit JDBC connection. */
	public DbOomQuery(Connection conn, String sqlString) {
		super(conn, sqlString);
	}
	/** Static factory equivalent of {@link #DbOomQuery(Connection, String)}. */
	public static DbOomQuery query(Connection conn, String sqlString) {
		return new DbOomQuery(conn, sqlString);
	}

	/** Creates a query bound to a {@link DbSession}. */
	public DbOomQuery(DbSession session, String sqlString) {
		super(session, sqlString);
	}
	/** Static factory equivalent of {@link #DbOomQuery(DbSession, String)}. */
	public static DbOomQuery query(DbSession session, String sqlString) {
		return new DbOomQuery(session, sqlString);
	}

	/** Creates a query that resolves its session/connection from defaults. */
	public DbOomQuery(String sqlString) {
		super(sqlString);
	}
	/** Static factory equivalent of {@link #DbOomQuery(String)}. */
	public static DbOomQuery query(String sqlString) {
		return new DbOomQuery(sqlString);
	}

	// ---------------------------------------------------------------- sqlgen ctors

	// Optional SQL generator; when set, it supplies the SQL string, join hints,
	// query parameters and column aliases used throughout this query.
	protected DbSqlGenerator sqlgen;

	/** Creates a query from a {@link DbSqlGenerator} over an explicit connection. */
	public DbOomQuery(Connection conn, DbSqlGenerator sqlgen) {
		super(conn, sqlgen.generateQuery());
		this.sqlgen = sqlgen;
	}
	/** Static factory equivalent of {@link #DbOomQuery(Connection, DbSqlGenerator)}. */
	public static DbOomQuery query(Connection conn, DbSqlGenerator sqlgen) {
		return new DbOomQuery(conn, sqlgen);
	}

	/** Creates a query from a {@link DbSqlGenerator} bound to a session. */
	public DbOomQuery(DbSession session, DbSqlGenerator sqlgen) {
		super(session, sqlgen.generateQuery());
		this.sqlgen = sqlgen;
	}
	/** Static factory equivalent of {@link #DbOomQuery(DbSession, DbSqlGenerator)}. */
	public static DbOomQuery query(DbSession session, DbSqlGenerator sqlgen) {
		return new DbOomQuery(session, sqlgen);
	}

	/** Creates a query from a {@link DbSqlGenerator} using default session resolution. */
	public DbOomQuery(DbSqlGenerator sqlgen) {
		super(sqlgen.generateQuery());
		this.sqlgen = sqlgen;
	}
	/** Static factory equivalent of {@link #DbOomQuery(DbSqlGenerator)}. */
	public static DbOomQuery query(DbSqlGenerator sqlgen) {
		return new DbOomQuery(sqlgen);
	}

	// ---------------------------------------------------------------- initialization

	protected DbOomManager dbOomManager = DbOomManager.getInstance();

	/**
	 * Returns used ORM manager.
	 */
	public DbOomManager getManager() {
		return dbOomManager;
	}

	/**
	 * Prepares the query after initialization. Besides default work, it checks if sql generator
	 * is used, and if so, generator hints and query parameters will be used for this query.
	 * Note regarding hints: since hints can be added manually, generators hints will be ignored
	 * if there exists some manually set hints.
	 */
	@Override
	protected void prepareQuery() {
		super.prepareQuery();
		if (sqlgen == null) {
			return;
		}
		// manually-set hints win over generator hints
		if (hints == null) {
			String[] joinHints = sqlgen.getJoinHints();
			if (joinHints != null) {
				withHints(joinHints);
			}
		}
		// insert parameters
		Map<String, ParameterValue> parameters = sqlgen.getQueryParameters();
		if (parameters == null) {
			return;
		}
		for (Map.Entry<String, ParameterValue> entry : parameters.entrySet()) {
			String paramName = entry.getKey();
			ParameterValue param = entry.getValue();
			DbEntityColumnDescriptor dec = param.getColumnDescriptor();
			if (dec == null) {
				// plain parameter, no column metadata available
				setObject(paramName, param.getValue());
			} else {
				// entity column: make sure the db sql type is known, then bind with type info
				resolveColumnDbSqlType(connection, dec);
				setObject(paramName, param.getValue(), dec.getSqlTypeClass(), dec.getDbSqlType());
			}
		}
	}

	/**
	 * Resolves column db sql type and populates it in column descriptor if missing.
	 * Looks the type up via {@link DatabaseMetaData#getColumns}; on lookup failure or
	 * absence the descriptor is marked {@code DB_SQLTYPE_NOT_AVAILABLE} so the lookup
	 * is not retried, and a warning is logged.
	 */
	protected void resolveColumnDbSqlType(Connection connection, DbEntityColumnDescriptor dec) {
		if (dec.dbSqlType != SqlType.DB_SQLTYPE_UNKNOWN) {
			return; // already resolved (or already marked unavailable)
		}
		ResultSet rs = null;
		DbEntityDescriptor ded = dec.getDbEntityDescriptor();
		try {
			DatabaseMetaData dmd = connection.getMetaData();
			rs = dmd.getColumns(null, ded.getSchemaName(), ded.getTableName(), dec.getColumnName());
			if (rs.next()) {
				dec.dbSqlType = rs.getInt("DATA_TYPE");
			} else {
				dec.dbSqlType = SqlType.DB_SQLTYPE_NOT_AVAILABLE;
				if (log.isWarnEnabled()) {
					log.warn("Column SQL type not available: " + ded.toString() + '.' + dec.getColumnName());
				}
			}
		} catch (SQLException sex) {
			dec.dbSqlType = SqlType.DB_SQLTYPE_NOT_AVAILABLE;
			if (log.isWarnEnabled()) {
				log.warn("Column SQL type not resolved: " + ded.toString() + '.' + dec.getColumnName(), sex);
			}
		} finally {
			DbUtil.close(rs);
		}
	}

	// ---------------------------------------------------------------- join hints

	protected String[] hints;

	protected JoinHintResolver hintResolver = dbOomManager.getHintResolver();

	/**
	 * Specifies hints for the query. Provided string is
	 * split on ',' separator.
	 */
	public DbOomQuery withHints(String hint) {
		this.hints = StringUtil.splitc(hint, ',');
		return this;
	}

	/**
	 * Specifies multiple hints for the query.
	 */
	public DbOomQuery withHints(String... hints) {
		this.hints = hints;
		return this;
	}

	/**
	 * Prepares a row (array of rows mapped object) using hints.
	 * Returns either single object or objects array.
	 */
	protected Object resolveRowResults(Object[] row) {
		row = hintResolver.join(row, hints);
		// single-type rows are unwrapped to the bare object
		return row.length == 1 ? row[0] : row;
	}

	// ---------------------------------------------------------------- result set

	protected boolean cacheEntities = dbOomManager.isCacheEntitiesInResultSet();

	/**
	 * Defines if entities should be cached in {@link ResultSetMapper}.
	 * Overrides default value in {@link DbOomManager}.
	 */
	public DbOomQuery cacheEntities(boolean cacheEntities) {
		this.cacheEntities = cacheEntities;
		return this;
	}

	/**
	 * Executes the query and returns {@link #createResultSetMapper(java.sql.ResultSet) builded ResultSet mapper}.
	 */
	protected ResultSetMapper executeAndBuildResultSetMapper() {
		ResultSet resultSet = execute();
		return createResultSetMapper(resultSet);
	}

	/**
	 * Factory for result sets mapper. Column aliases are taken from the sql
	 * generator when one is in use.
	 */
	protected ResultSetMapper createResultSetMapper(ResultSet resultSet) {
		Map<String, ColumnData> columnAliases = sqlgen != null ? sqlgen.getColumnData() : null;
		return new DefaultResultSetMapper(resultSet, columnAliases, cacheEntities, this);
	}

	// ---------------------------------------------------------------- db list

	protected boolean entityAwareMode = dbOomManager.isEntityAwareMode();

	/**
	 * Defines entity-aware mode for entities tracking in result collection.
	 * Enabling it forces entity caching, since tracking relies on cached instances.
	 * @see DbOomManager#setEntityAwareMode(boolean)
	 */
	public DbOomQuery entityAwareMode(boolean entityAware) {
		if (entityAware) {
			this.cacheEntities = true;
		}
		this.entityAwareMode = entityAware;
		return this;
	}

	// ---------------------------------------------------------------- iterator

	/** Returns a lazy iterator mapped to given types; query stays open. */
	public <T> Iterator<T> iterate(Class... types) {
		return iterate(types, false);
	}
	/** Returns a lazy iterator mapped to given types; query is closed at the end. */
	public <T> Iterator<T> iterateAndClose(Class... types) {
		return iterate(types, true);
	}
	/** Returns a lazy iterator with auto-resolved types; query stays open. */
	public <T> Iterator<T> iterate() {
		return iterate(null, false);
	}
	/** Returns a lazy iterator with auto-resolved types; query is closed at the end. */
	public <T> Iterator<T> iterateAndClose() {
		return iterate(null, true);
	}
	protected <T> Iterator<T> iterate(Class[] types, boolean close) {
		return new DbListIterator<T>(this, types, close);
	}

	// ---------------------------------------------------------------- list

	public <T> List<T> list(Class... types) {
		return list(types, -1, false);
	}
	public <T> List<T> listAndClose(Class... types) {
		return list(types, -1, true);
	}
	public <T> List<T> list() {
		return list(null, -1, false);
	}
	public <T> List<T> listAndClose() {
		return list(null, -1, true);
	}
	public <T> List<T> list(int max, Class... types) {
		return list(types, max, false);
	}
	public <T> List<T> listAndClose(int max, Class... types) {
		return list(types, max, true);
	}
	public <T> List<T> list(int max) {
		return list(null, max, false);
	}
	public <T> List<T> listAndClose(int max) {
		return list(null, max, true);
	}

	/**
	 * Iterates result set, maps rows to classes and populates resulting array list.
	 * @param types mapping types
	 * @param max max number of rows to collect, <code>-1</code> for all
	 * @param close <code>true</code> if query is closed at the end, otherwise <code>false</code>.
	 * @return list of mapped entities or array of entities
	 */
	@SuppressWarnings({"unchecked"})
	protected <T> List<T> list(Class[] types, int max, boolean close) {
		List<T> result = new ArrayList<T>(initialCollectionSize(max));

		ResultSetMapper rsm = executeAndBuildResultSetMapper();
		if (types == null) {
			types = rsm.resolveTables();
		}

		Object previousElement = null;

		while (rsm.next()) {
			Object[] objects = rsm.parseObjects(types);
			Object row = resolveRowResults(objects);

			int size = result.size();

			T newElement = (T) row;

			// In entity-aware mode, consecutive rows that map to equal elements
			// (typical for joins fanning out a parent row) are collapsed into one.
			if (entityAwareMode && size > 0) {
				if (previousElement != null && newElement != null) {
					boolean equals;
					if (newElement.getClass().isArray()) {
						equals = Arrays.equals((Object[]) previousElement, (Object[]) newElement);
					} else {
						equals = previousElement.equals(newElement);
					}
					if (equals) {
						continue;
					}
				}
			}

			// max is checked only when a *distinct* element is about to be added
			if (size == max) {
				break;
			}

			result.add(newElement);
			previousElement = newElement;
		}

		close(rsm, close);
		return result;
	}

	// ---------------------------------------------------------------- set

	public <T> Set<T> listSet(Class... types) {
		return listSet(types, -1, false);
	}
	public <T> Set<T> listSetAndClose(Class... types) {
		return listSet(types, -1, true);
	}
	public <T> Set<T> listSet() {
		return listSet(null, -1, false);
	}
	public <T> Set<T> listSetAndClose() {
		return listSet(null, -1, true);
	}
	public <T> Set<T> listSet(int max, Class... types) {
		return listSet(types, max, false);
	}
	public <T> Set<T> listSetAndClose(int max, Class... types) {
		return listSet(types, max, true);
	}
	public <T> Set<T> listSet(int max) {
		return listSet(null, max, false);
	}
	public <T> Set<T> listSetAndClose(int max) {
		return listSet(null, max, true);
	}

	/**
	 * Same contract as {@link #list(Class[], int, boolean)} but collects into a
	 * {@link LinkedHashSet}, so iteration order matches the result set while
	 * duplicate elements are dropped by the set itself.
	 */
	@SuppressWarnings({"unchecked"})
	protected <T> Set<T> listSet(Class[] types, int max, boolean close) {
		Set<T> result = new LinkedHashSet<T>(initialCollectionSize(max));

		ResultSetMapper rsm = executeAndBuildResultSetMapper();
		if (types == null) {
			types = rsm.resolveTables();
		}

		Object previousElement = null;

		while (rsm.next()) {
			Object[] objects = rsm.parseObjects(types);
			Object row = resolveRowResults(objects);

			int size = result.size();

			T newElement = (T) row;

			// entity-aware collapse of consecutive equal elements, as in list()
			if (entityAwareMode && size > 0) {
				if (previousElement != null && newElement != null) {
					boolean equals;
					if (newElement.getClass().isArray()) {
						equals = Arrays.equals((Object[]) previousElement, (Object[]) newElement);
					} else {
						equals = previousElement.equals(newElement);
					}
					if (equals) {
						continue;
					}
				}
			}

			if (size == max) {
				break;
			}

			result.add(newElement);
			previousElement = newElement;
		}

		close(rsm, close);
		return result;
	}

	// ---------------------------------------------------------------- find

	public <T> T find(Class... types) {
		return find(types, false, null);
	}
	public <T> T findAndClose(Class... types) {
		return find(types, true, null);
	}
	public <T> T find() {
		return find(null, false, null);
	}
	public <T> T findAndClose() {
		return find(null, true, null);
	}

	/**
	 * Returns the first mapped row of the result set, or {@code null} when empty.
	 * When {@code resultSet} is {@code null}, the query is executed first.
	 */
	protected <T> T find(Class[] types, boolean close, ResultSet resultSet) {
		if (resultSet == null) {
			resultSet = execute();
		}
		ResultSetMapper rsm = createResultSetMapper(resultSet);

		Iterator<T> iterator = new DbListIterator<T>(this, types, rsm, false);

		T result = null;

		if (iterator.hasNext()) {
			result = iterator.next();
		}

		close(rsm, close);
		return result;
	}

	// ---------------------------------------------------------------- generated columns

	/**
	 * Finds generated key column of given type.
	 */
	public <T> T findGeneratedKey(Class<T> type) {
		return find(new Class[] {type}, false, getGeneratedColumns());
	}

	/**
	 * Finds generated columns.
	 */
	public Object findGeneratedColumns(Class... types) {
		return find(types, false, getGeneratedColumns());
	}

	// ---------------------------------------------------------------- util

	/**
	 * Closes results set or whole query. When {@code closeQuery} is {@code false}
	 * only the mapper's result set is released, keeping the query reusable.
	 */
	protected void close(ResultSetMapper rsm, boolean closeQuery) {
		if (closeQuery == true) {
			close();
		} else {
			closeResultSet(rsm.getResultSet());
		}
	}

}
package com.esri.ges.manager.vehicles;

import java.util.Date;

import com.esri.ges.spatial.Point;

/**
 * Default mutable implementation of {@link Vehicle}: a plain data holder backing
 * a tracked vehicle (identity, position, routing state and cost attributes).
 * All fields are nullable until explicitly set.
 */
public class DefaultVehicle implements Vehicle
{
  // Routing / event state
  private Integer nextStopSequenceNumber;
  private String eventName;
  private Double cumulativeMinutes;
  private String panic;
  private Double speed;

  // Cost and capability attributes
  private String fuelType;
  private Double fuelEconomy;
  private Double fixedCost;
  private String specialties;
  private String capacity;

  // Identity
  private String vehicleGroupName;
  private String vehicleName;
  private String deviceType;
  private String deviceId;

  // Last known position/update
  private Point location;
  private Date lastUpdated;
  private String note;

  @Override
  public Date getLastUpdated()
  {
    return lastUpdated;
  }

  @Override
  public Point getLocation()
  {
    return location;
  }

  @Override
  public String getDeviceId()
  {
    return deviceId;
  }

  @Override
  public String getDeviceType()
  {
    return deviceType;
  }

  @Override
  public String getVehicleName()
  {
    return vehicleName;
  }

  @Override
  public String getVehicleGroupName()
  {
    return vehicleGroupName;
  }

  @Override
  public String getCapacity()
  {
    return capacity;
  }

  @Override
  public String getSpecialties()
  {
    return specialties;
  }

  @Override
  public Double getFixedCost()
  {
    return fixedCost;
  }

  @Override
  public Double getFuelEconomy()
  {
    return fuelEconomy;
  }

  @Override
  public String getFuelType()
  {
    return fuelType;
  }

  @Override
  public Double getSpeed()
  {
    return speed;
  }

  @Override
  public String getPanic()
  {
    return panic;
  }

  @Override
  public Double getCumulativeMinutes()
  {
    return cumulativeMinutes;
  }

  @Override
  public String getEventName()
  {
    return eventName;
  }

  @Override
  public Integer getNextStopSequenceNumber()
  {
    return nextStopSequenceNumber;
  }

  @Override
  public String getNote()
  {
    return note;
  }

  @Override
  public void setLastUpdated(Date date)
  {
    this.lastUpdated = date;
  }

  @Override
  public void setLocation(Point point)
  {
    this.location = point;
  }

  @Override
  public void setDeviceId(String deviceId)
  {
    this.deviceId = deviceId;
  }

  @Override
  public void setDeviceType(String deviceType)
  {
    this.deviceType = deviceType;
  }

  // Fixed parameter-name typo ("vehicleGropuName" -> "vehicleGroupName");
  // parameter names are not part of the binary interface, behavior unchanged.
  @Override
  public void setVehicleGroupName(String vehicleGroupName)
  {
    this.vehicleGroupName = vehicleGroupName;
  }

  @Override
  public void setSpeed(Double speed)
  {
    this.speed = speed;
  }

  @Override
  public void setPanic(String panic)
  {
    this.panic = panic;
  }

  @Override
  public void setCumulativeMinutes(Double cumulativeMinutes)
  {
    this.cumulativeMinutes = cumulativeMinutes;
  }

  @Override
  public void setEventName(String eventName)
  {
    this.eventName = eventName;
  }

  @Override
  public void setNextStopSequenceNumber(Integer nextStopSequenceNumber)
  {
    this.nextStopSequenceNumber = nextStopSequenceNumber;
  }

  // NOTE(review): not annotated @Override in the original — presumably not part of
  // the Vehicle interface; kept as-is to avoid a compile break if so.
  public void setVehicleName( String vehicleName )
  {
    this.vehicleName = vehicleName;
  }

  @Override
  public void setNote(String note)
  {
    this.note = note;
  }

  public void setFuelType(String fuelType)
  {
    this.fuelType = fuelType;
  }

  public void setFuelEconomy(Double fuelEconomy)
  {
    this.fuelEconomy = fuelEconomy;
  }

  public void setFixedCost(Double fixedCost)
  {
    this.fixedCost = fixedCost;
  }

  public void setSpecialties(String specialties)
  {
    this.specialties = specialties;
  }

  public void setCapacity(String capacity)
  {
    this.capacity = capacity;
  }
}
/**
 * Copyright 2014 Groupn.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.arpnetworking.tsdcore.limiter.legacy;

import com.arpnetworking.logback.annotations.LogValue;
import com.arpnetworking.steno.LogValueMapFactory;
import com.arpnetworking.steno.Logger;
import com.arpnetworking.steno.LoggerFactory;
import com.arpnetworking.tsdcore.limiter.legacy.LegacyMetricsLimiter.Mark;
import com.arpnetworking.utility.OvalBuilder;
import com.google.common.base.Charsets;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import org.joda.time.Duration;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Manages the state of the metrics limiter. Persists the limiter's mark map to a
 * state file (one "time count metric" line per mark), either on demand via
 * {@link #requestWrite()} or periodically through a background auto-writer thread.
 *
 * @author Joe Frisbie (jfrisbie at groupon dot com)
 */
public final class MetricsLimiterStateManager implements Runnable {

    /**
     * Request a write. Releases a permit so the auto-writer wakes up and flushes.
     */
    public void requestWrite() {
        _writeRequests.release(1);
    }

    /**
     * Flush marks to state file. Writes to a sibling temp file first, then moves it
     * over the state file so readers never observe a partially-written file.
     */
    public void writeState() {
        // Write the current mark times to a temp file
        final Path newFilePath = _stateFile.resolveSibling(_stateFile.getFileName() + ".tmp");
        try {
            com.google.common.io.Files.asCharSink(newFilePath.toFile(), Charsets.UTF_8).writeLines(
                    Iterables.transform(
                            _marks.entrySet(),
                            aggregationMarkTime -> String.format(
                                    "%d %d %s",
                                    aggregationMarkTime.getValue().getTime(),
                                    aggregationMarkTime.getValue().getCount(),
                                    aggregationMarkTime.getKey()))
            );
        } catch (final IOException e) {
            throw new IllegalArgumentException(
                    String.format(
                            "Failed to flush state file; could not write temp file; stateFile=%s, tempFile=%s",
                            _stateFile,
                            newFilePath),
                    e);
        }

        // Make the temp file the current file
        try {
            Files.move(newFilePath, _stateFile, StandardCopyOption.REPLACE_EXISTING);
        } catch (final IOException e) {
            throw new IllegalArgumentException(
                    String.format(
                            "Failed to flush state file; could not move temp file; stateFile=%s, tempFile=%s",
                            _stateFile,
                            newFilePath),
                    e);
        }

        LOGGER.info()
                .setMessage("State file flushed")
                .addData("file", _stateFile)
                .log();
    }

    /**
     * Read marks from the state file. Unparsable lines are logged and skipped;
     * any I/O failure yields an empty map rather than an exception.
     *
     * @return <code>Map</code> of the metric to its <code>Mark</code>.
     */
    public Map<String, Mark> readState() {
        // If there's no file, return the empty map
        if (!Files.exists(_stateFile)) {
            return Collections.emptyMap();
        }

        final Map<String, Mark> marks = Maps.newHashMap();
        try {
            for (final String line : Files.readAllLines(_stateFile, Charsets.UTF_8)) {
                final Matcher match = STATE_FILE_LINE_PATTERN.matcher(line);
                if (!match.lookingAt()) {
                    LOGGER.warn()
                            .setMessage("Ignoring unparsable line in state file")
                            .addData("file", _stateFile)
                            .addData("line", line)
                            .log();
                    continue;
                }
                final String metric = match.group(3);
                final Mark mark;
                try {
                    final int count = Integer.parseInt(match.group(2));
                    final long time = Long.parseLong(match.group(1));
                    mark = new Mark(count, time);
                } catch (final NumberFormatException e) {
                    LOGGER.warn()
                            .setMessage("Parsing error on line in state file")
                            .addData("file", _stateFile)
                            .addData("line", line)
                            .setThrowable(e)
                            .log();
                    continue;
                }
                marks.put(metric, mark);
            }
            return marks;
        } catch (final IOException e) {
            LOGGER.error()
                    .setMessage("Could not read state file")
                    .addData("file", _stateFile)
                    .setThrowable(e)
                    .log();
            return Collections.emptyMap();
        }
    }

    /**
     * Start the auto writer. Idempotent: a second call while running only logs a warning.
     * Also registers a JVM shutdown hook that stops the writer (flushing one last time).
     */
    public void startAutoWriter() {
        synchronized (_autoWriterMutex) {
            if (_autoWriterThread == null) {
                LOGGER.info()
                        .setMessage("AutoWriter starting")
                        .addData("file", _stateFile)
                        .log();
                _stop = false;
                _autoWriterThread = new Thread(this, "AutoWriter");
                Runtime.getRuntime().addShutdownHook(_autoWriterShutdown);
                _autoWriterThread.start();
            } else {
                LOGGER.warn()
                        .setMessage("AutoWriter already started")
                        .addData("file", _stateFile)
                        .log();
            }
        }
    }

    // We typically remove the shutdown hook while testing, but when the for-real
    // shutdown happens, the hook can't be removed.
    void stopAutoWriter(final boolean removeShutdownHook) {
        synchronized (_autoWriterMutex) {
            if (_autoWriterThread != null) {
                LOGGER.info()
                        .setMessage("AutoWriter stopping")
                        .addData("file", _stateFile)
                        .log();
                _stop = true;
                _autoWriterThread.interrupt();
                // Wait up to 500ms for thread to die
                try {
                    _autoWriterThread.join(500);
                } catch (final InterruptedException e) {
                    LOGGER.warn()
                            .setMessage("AutoWriter failed to terminate")
                            .addData("file", _stateFile)
                            .setThrowable(e)
                            .log();
                }
                if (removeShutdownHook) {
                    Runtime.getRuntime().removeShutdownHook(_autoWriterShutdown);
                }
                _autoWriterThread = null;
            } else {
                LOGGER.warn()
                        .setMessage("AutoWriter not running")
                        .addData("file", _stateFile)
                        .log();
            }
        }
    }

    /**
     * {@inheritDoc}
     *
     * Auto-writer loop: waits for a write request or the flush-interval timeout,
     * flushes state, and throttles itself to at most one write per 500ms. The
     * finally block guarantees one final flush on exit.
     */
    @Override
    public void run() {
        LOGGER.debug()
                .setMessage("AutoWriter running")
                .addData("file", _stateFile)
                .log();
        try {
            while (true) {
                try {
                    if (_stop) {
                        return;
                    }
                    waitForRequestOrTimeout();
                    // Either we timed-out or somebody incremented the semaphore, either way, we want to write the
                    // file. We want to write the file one last time, so we don't check the stop flag until after
                    // the write
                    writeState();
                    if (_stop) {
                        return;
                    }
                    // And we don't want to run more frequently than every 500ms
                    Thread.sleep(500);
                } catch (final InterruptedException e) {
                    // Interrupt is the stop signal from stopAutoWriter(); loop re-checks _stop.
                    if (_stop) {
                        return;
                    }
                }
            }
        } finally {
            // Write the file one last time
            writeState();
            LOGGER.debug()
                    .setMessage("AutoWriter stopped")
                    .addData("file", _stateFile)
                    .log();
        }
    }

    /**
     * Generate a Steno log compatible representation.
     *
     * @return Steno log compatible representation.
     */
    @LogValue
    public Object toLogValue() {
        return LogValueMapFactory.of(
                "id", Integer.toHexString(System.identityHashCode(this)),
                "class", this.getClass(),
                "StateFile", _stateFile,
                "FlushInterval", _stateFileFlushInterval);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String toString() {
        return toLogValue().toString();
    }

    // Blocks up to the flush interval for a write request; drains any extra permits
    // so queued requests collapse into a single write.
    @SuppressFBWarnings(value = "RV_RETURN_VALUE_IGNORED")
    private void waitForRequestOrTimeout() throws InterruptedException {
        _writeRequests.tryAcquire(_stateFileFlushInterval.getMillis(), TimeUnit.MILLISECONDS);
        _writeRequests.drainPermits();
    }

    boolean isAlive() {
        return _autoWriterThread != null && _autoWriterThread.isAlive();
    }

    private MetricsLimiterStateManager(final Builder builder) {
        this._stateFile = builder._stateFile;
        this._stateFileFlushInterval = builder._stateFileFlushInterval;
        this._marks = builder._marks;

        // Create the parent directories if necessary
        final Path dir = _stateFile.getParent();
        try {
            Files.createDirectories(dir);
        } catch (final IOException e) {
            throw new IllegalArgumentException(
                    String.format(
                            "State file path could not be created; stateFile=%s",
                            _stateFile),
                    e);
        }

        // Using tmp defeats the purpose
        if (_stateFile.startsWith("/tmp")) {
            // BUGFIX: the two concatenated literals were missing a separating space
            // ("...becauseon many platforms..."); added the trailing space.
            LOGGER.warn(
                    "Storing the aggregator state file in /tmp is not recommended because "
                            + "on many platforms /tmp is not persisted across reboots");
        }
    }

    // Permits represent pending write requests; drained in waitForRequestOrTimeout().
    private final Semaphore _writeRequests = new Semaphore(0);
    // Guards _autoWriterThread start/stop transitions.
    private final Object _autoWriterMutex = new Object();
    private volatile boolean _stop = false;
    private Thread _autoWriterThread;
    // Shutdown hook cannot remove itself, hence stopAutoWriter(false).
    private final Thread _autoWriterShutdown = new Thread() {
        @Override
        public void run() {
            stopAutoWriter(false);
        }
    };

    private final Path _stateFile;
    private final Duration _stateFileFlushInterval;
    private final ConcurrentMap<String, Mark> _marks;

    private static final Path DEFAULT_STATE_FILE = Paths.get("/var/db/tsd-aggregator/tsd-aggregator-state");
    private static final Duration DEFAULT_STATE_FILE_FLUSH_INTERVAL = Duration.standardMinutes(5);
    // Matches "time count metric"; metric may contain spaces (greedy tail group).
    private static final Pattern STATE_FILE_LINE_PATTERN = Pattern.compile("(\\d+)\\s+(\\d+)\\s+(.+)");
    private static final Logger LOGGER = LoggerFactory.getLogger(MetricsLimiterStateManager.class);

    /**
     * Builder for <code>MetricsStateLimiterManager</code>.
     *
     * @author Joe Frisbie (jfrisbie at groupon dot com)
     */
    public static final class Builder extends OvalBuilder<MetricsLimiterStateManager> {

        /**
         * Public constructor.
         */
        public Builder() {
            super(MetricsLimiterStateManager.class);
        }

        /**
         * Build instance of <code>MetricsLimiterStateManager</code>.
         *
         * @param marks <code>Map</code> of metric to <code>Mark</code>.
         * @return Instance of <code>MetricsLimiterStateManager</code>.
         */
        public MetricsLimiterStateManager build(final ConcurrentMap<String, Mark> marks) {
            this._marks = marks;
            return build();
        }

        /**
         * Set the state file.
         *
         * @param stateFile The state file.
         * @return This instance of <code>Builder</code>.
         */
        public Builder setStateFile(final Path stateFile) {
            this._stateFile = stateFile;
            return this;
        }

        /**
         * Set the state file flush interval. Optional.
         *
         * @param stateFileFlushInterval The state file flush interval.
         * @return This instance of <code>Builder</code>.
         */
        public Builder setStateFileFlushInterval(final Duration stateFileFlushInterval) {
            this._stateFileFlushInterval = stateFileFlushInterval;
            return this;
        }

        private Path _stateFile = DEFAULT_STATE_FILE;
        private Duration _stateFileFlushInterval = DEFAULT_STATE_FILE_FLUSH_INTERVAL;
        private ConcurrentMap<String, Mark> _marks;
    }
}
package mods.alice.infiniteorb.item;

import ic2.api.info.IEnergyValueProvider;
import ic2.api.item.IElectricItemManager;
import ic2.api.item.ISpecialElectricItem;

import java.util.List;

import cpw.mods.fml.common.registry.GameRegistry;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import mods.alice.infiniteorb.ItemList;
import mods.alice.infiniteorb.ItemManager;
import mods.alice.infiniteorb.creativetab.CreativeTabInfiniteOrb;
import net.minecraft.client.renderer.texture.IIconRegister;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;

/**
 * An IC2 electric item with effectively infinite stored EU. Charge/discharge always
 * succeed (clamped only by the per-operation transfer limit) and the reported charge
 * is always {@code Integer.MAX_VALUE}. The orb's tier is stored in the stack's NBT
 * under the byte key "Tier" (defaults to 1 when absent).
 */
public final class ItemInfiniteOrb extends Item implements ISpecialElectricItem, IElectricItemManager, IEnergyValueProvider
{
	public ItemInfiniteOrb()
	{
		super();
		setCreativeTab(CreativeTabInfiniteOrb.INSTANCE);
//		setMaxStackSize(1);
		setUnlocalizedName("infiniteorb");
		ItemManager.addItem(this);
		GameRegistry.registerItem(this, ItemList.INFINITEORB.itemName);
	}

	/* IEnergyValueProvider implementations */
	@Override
	public int getEnergyValue(ItemStack itemStack)
	{
		return Integer.MAX_VALUE;
	}
	/* End of IEenergyValueProvider implementations */

	/* ISpecialElectricItem implementations */
	@Override
	public IElectricItemManager getManager(ItemStack paramItemStack)
	{
		// This item acts as its own electric-item manager.
		return this;
	}
	/* End of ISpecialElectricItem implementations */

	/* IElectricItemManager implementations */

	// Shared clamp used by charge() and discharge(): the requested amount is accepted
	// in full unless it exceeds the transfer limit and the limit is not ignored.
	// (simulate is irrelevant here — the orb holds no real state to mutate.)
	private int clampToTransferLimit(ItemStack itemStack, int amount, boolean ignoreTransferLimit)
	{
		int limit = getTransferLimit(itemStack);
		if((amount > limit) && !ignoreTransferLimit)
		{
			return limit;
		}
		return amount;
	}

	@Override
	public int charge(ItemStack itemStack, int amount, int tier, boolean ignoreTransferLimit, boolean simulate)
	{
		return clampToTransferLimit(itemStack, amount, ignoreTransferLimit);
	}

	@Override
	public int discharge(ItemStack itemStack, int amount, int tier, boolean ignoreTransferLimit, boolean simulate)
	{
		return clampToTransferLimit(itemStack, amount, ignoreTransferLimit);
	}

	@Override
	public int getCharge(ItemStack itemStack)
	{
		return Integer.MAX_VALUE;
	}

	@Override
	public boolean canUse(ItemStack itemStack, int amount)
	{
		// Infinite energy: any amount is available.
		return true;
	}

	@Override
	public boolean use(ItemStack itemStack, int amount, EntityLivingBase entity)
	{
		return true;
	}

	@Override
	public void chargeFromArmor(ItemStack itemStack, EntityLivingBase entity)
	{
		// Nothing to do — the orb never needs charging.
	}

	@Override
	public String getToolTip(ItemStack itemStack)
	{
		// BUGFIX: corrected grammar in the user-visible tooltip ("have" -> "has").
		return "This orb has infinite EU!";
	}
	/* End of IElectricItemManager implementations */

	/* IElectricItem implementations */
	@Override
	public boolean canProvideEnergy(ItemStack itemStack)
	{
		return true;
	}

	@Override
	public Item getChargedItem(ItemStack itemStack)
	{
		return this;
	}

	@Override
	public Item getEmptyItem(ItemStack itemStack)
	{
		return this;
	}

	@Override
	public int getMaxCharge(ItemStack itemStack)
	{
		return Integer.MAX_VALUE;
	}

	@Override
	public int getTier(ItemStack itemStack)
	{
		NBTTagCompound tag;

		tag = itemStack.getTagCompound();
		if(tag == null)
		{
			// No NBT yet: default to tier 1.
			return 1;
		}

		return tag.getByte("Tier");
	}

	@Override
	public int getTransferLimit(ItemStack itemStack)
	{
		return 65536;
	}
	/* End of IElectricItem implementations */

	@Override
	@SideOnly(Side.CLIENT)
	public void addInformation(ItemStack itemStack, EntityPlayer player, @SuppressWarnings("rawtypes") List info, boolean detailMode)
	{
		@SuppressWarnings("unchecked")
		List<String> _list = (List<String>)info;
		NBTTagCompound tag;
		byte tier;

		tag = itemStack.getTagCompound();
		if(tag == null)
		{
			tier = 1;
		}
		else
		{
			tier = tag.getByte("Tier");
		}

		_list.add(String.format("Tier: %d", tier));
	}

	@Override
	@SideOnly(Side.CLIENT)
	public void getSubItems(Item id, CreativeTabs tab, @SuppressWarnings("rawtypes") List list)
	{
		@SuppressWarnings("unchecked")
		List<ItemStack> _list = (List<ItemStack>)list;

		// One creative-tab entry per tier 1..4. All entries use damage value 1,
		// matching the original behavior (the tier lives only in NBT).
		for(byte tier = 1; tier <= 4; tier++)
		{
			ItemStack i = new ItemStack(id, 1, 1);
			NBTTagCompound tag = new NBTTagCompound();
			tag.setByte("Tier", tier);
			i.setTagCompound(tag);
			_list.add(i);
		}
	}

	@Override
	public boolean isValidArmor(ItemStack stack, int armorType, Entity entity)
	{
		// Chest slot only (armorType 1).
		return (armorType == 1);
	}

	@Override
	@SideOnly(Side.CLIENT)
	public void registerIcons(IIconRegister iconReg)
	{
		itemIcon = iconReg.registerIcon("infiniteorb:infiniteorb");
	}
}
package cmu.arktweetnlp.util; import java.io.*; import java.nio.charset.Charset; import java.util.logging.Logger; import java.util.zip.GZIPInputStream; import java.util.zip.GZIPOutputStream; import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.core.JsonParser; import edu.cmu.geoparser.io.GetReader; /** * * @author Dipanjan Das * */ public class BasicFileIO { // taken from Dipanjan's codebase /* * A logger for the class. */ private static Logger log = Logger.getLogger(BasicFileIO.class.getCanonicalName()); public static BufferedReader openFileToRead(String file) { try { BufferedReader bReader = null; if (file.endsWith(".gz")) { bReader = new BufferedReader(new InputStreamReader( new GZIPInputStream(new FileInputStream(file)))); } else { bReader = new BufferedReader(new FileReader(file)); } return bReader; } catch (IOException e) { e.printStackTrace(); log.severe("Could not open file:" + file); System.exit(-1); } return null; } public static BufferedReader openFileToReadUTF8(String file) { try { BufferedReader bReader = null; if (file.endsWith(".gz")) { bReader = new BufferedReader(new InputStreamReader( new GZIPInputStream(new FileInputStream(file)), "UTF-8")); } else { bReader = new BufferedReader( new InputStreamReader(new FileInputStream(file), "UTF-8")); } return bReader; } catch (IOException e) { e.printStackTrace(); log.severe("Could not open file:" + file); System.exit(-1); } return null; } public static BufferedWriter openFileToWrite(String file) { try { BufferedWriter bWriter = null; if (file.endsWith(".gz")) { bWriter = new BufferedWriter(new OutputStreamWriter( new GZIPOutputStream(new FileOutputStream(file)))); } else { bWriter = new BufferedWriter(new FileWriter(file)); } return bWriter; } catch (IOException e) { e.printStackTrace(); log.severe("Could not open file for writing:" + file); System.exit(-1); } return null; } public static BufferedWriter openFileToWriteUTF8(String file) { try { BufferedWriter bWriter = null; if 
(file.endsWith(".gz")) { bWriter = new BufferedWriter(new OutputStreamWriter( new GZIPOutputStream(new FileOutputStream(file)), "UTF-8")); } else { bWriter = new BufferedWriter(new OutputStreamWriter( new FileOutputStream(file), "UTF-8")); } return bWriter; } catch (IOException e) { e.printStackTrace(); log.severe("Could not open file for writing:" + file); System.exit(-1); } return null; } public static void closeFileAlreadyRead(BufferedReader bReader) { try { bReader.close(); } catch (IOException e) { e.printStackTrace(); log.severe("Could not close file."); System.exit(-1); } } public static void closeFileAlreadyWritten(BufferedWriter bWriter) { try { bWriter.close(); } catch (IOException e) { e.printStackTrace(); log.severe("Could not close file."); System.exit(-1); } } public static String getLine(BufferedReader bReader) { try { String line = bReader.readLine(); return line; } catch(IOException e) { e.printStackTrace(); log.severe("Could not read line from file."); System.exit(-1); } return null; } public static String getLine(JsonParser jParse) { //returns the next "text" field or null if none left try { while(jParse.getText()!=null){ if ("hashtags".equals(jParse.getCurrentName()) |"retweeted_status".equals(jParse.getCurrentName())) { jParse.nextToken(); jParse.skipChildren(); } if ("text".equals(jParse.getCurrentName())) { jParse.nextToken(); // move to value String tweet = jParse.getText(); jParse.nextToken(); if(tweet.length()>0) //because tagger crashes on 0-length tweets return tweet; } jParse.nextToken(); } } catch(JsonParseException e){ e.printStackTrace(); log.severe("Error parsing JSON."); System.exit(-1); } catch(IOException e) { e.printStackTrace(); log.severe("Could not read line from file."); System.exit(-1); } return null; //jParse is null (EOF) } public static void writeLine(BufferedWriter bWriter, String line) { try { bWriter.write(line + "\n"); } catch(IOException e) { e.printStackTrace(); log.severe("Could not write line to file."); 
System.exit(-1); } } public static void writeSerializedObject(String file, Object object) { try{ OutputStream oFile = new FileOutputStream(file); OutputStream buffer = new BufferedOutputStream(oFile); ObjectOutput output = new ObjectOutputStream(buffer); try{ output.writeObject(object); } finally{ output.close(); } } catch(IOException ex){ log.severe("Cannot perform output."); ex.printStackTrace(); System.exit(-1); } } public static Object readSerializedObject(String file) { try { return readSerializedObject(new FileInputStream(file)); } catch (FileNotFoundException e) { log.severe("Cannot perform input."); throw new RuntimeException(e); } } public static Object readSerializedObject(InputStream iFile) { Object object = null; try{ InputStream buffer = new BufferedInputStream(iFile); ObjectInput input = new ObjectInputStream(buffer); try{ object = input.readObject(); } finally{ input.close(); } } catch (ClassNotFoundException e) { log.severe("Cannot perform input."); throw new RuntimeException(e); } catch(IOException ex){ log.severe("Cannot perform input."); throw new RuntimeException(ex); } return object; } /** * Please only use absolute paths, e.g. /cmu/arktweetnlp/6mpaths * * e.g. http://stackoverflow.com/questions/1464291/how-to-really-read-text-file-from-classpath-in-java * * (added by Brendan 2012-08-14) * @throws IOException */ public static BufferedReader getResourceReader(String resourceName) throws IOException { // assert resourceName.startsWith("/") : "Absolute path needed for resource"; return GetReader.getUTF8FileReader(resourceName); } /** Try to get a file, if it doesn't exist, backoff to a resource. 
* @throws IOException **/ public static BufferedReader openFileOrResource(String fileOrResource) throws IOException { try { if (new File(fileOrResource).exists()) { return openFileToReadUTF8(fileOrResource); } else { return getResourceReader(fileOrResource); } } catch (IOException e) { throw new IOException("Neither file nor resource found for: " + fileOrResource); } } }
package org.visallo.web.clientapi.codegen; import org.visallo.web.clientapi.codegen.ApiException; import org.visallo.web.clientapi.ApiInvoker; import org.visallo.web.clientapi.model.ClientApiLongRunningProcessSubmitResponse; import org.visallo.web.clientapi.model.ClientApiVerticesExistsResponse; import org.visallo.web.clientapi.model.ClientApiElement; import org.visallo.web.clientapi.model.ClientApiElementAcl; import org.visallo.web.clientapi.model.ClientApiHistoricalPropertyResults; import org.visallo.web.clientapi.model.ClientApiVertexEdges; import org.visallo.web.clientapi.model.ClientApiVertexCountsByConceptType; import org.visallo.web.clientapi.model.ClientApiArtifactImportResponse; import org.visallo.web.clientapi.model.ClientApiVertexMultipleResponse; import org.visallo.web.clientapi.model.ClientApiElementFindRelatedResponse; import org.visallo.web.clientapi.model.ClientApiDetectedObjects; import org.visallo.web.clientapi.model.ClientApiTermMentionsResponse; import org.visallo.web.clientapi.model.ClientApiElementSearchResponse; import com.sun.jersey.multipart.FormDataMultiPart; import javax.ws.rs.core.MediaType; import java.io.File; import java.util.*; public class VertexApi { protected String basePath = "http://visallo-dev:8889"; protected ApiInvoker apiInvoker = ApiInvoker.getInstance(); public ApiInvoker getInvoker() { return apiInvoker; } public void setBasePath(String basePath) { this.basePath = basePath; } public String getBasePath() { return basePath; } //error info- code: 404 reason: "Vertex not found" model: <none> public ClientApiElement getByVertexId (String graphVertexId) throws ApiException { Object postBody = null; // verify required params are set if(graphVertexId == null ) { throw new ApiException(400, "missing required params"); } // create path and map variables String path = "/vertex/properties".replaceAll("\\{format\\}","json"); // query params Map<String, String> queryParams = new HashMap<String, String>(); Map<String, String> 
headerParams = new HashMap<String, String>(); Map<String, String> formParams = new HashMap<String, String>(); if(!"null".equals(String.valueOf(graphVertexId))) queryParams.put("graphVertexId", String.valueOf(graphVertexId)); String[] contentTypes = { "application/json"}; String contentType = contentTypes.length > 0 ? contentTypes[0] : "application/json"; if(contentType.startsWith("multipart/form-data")) { boolean hasFields = false; FormDataMultiPart mp = new FormDataMultiPart(); if(hasFields) postBody = mp; } else { } try { String response = apiInvoker.invokeAPI(basePath, path, "GET", queryParams, postBody, headerParams, formParams, contentType); if(response != null){ return (ClientApiElement) ApiInvoker.deserialize(response, "", ClientApiElement.class); } else { return null; } } catch (ApiException ex) { if(ex.getCode() == 404) { return null; } else { throw ex; } } } //error info- code: 404 reason: "Vertex not found" model: <none> public ClientApiVertexEdges getEdges (String graphVertexId, String edgeLabel, Integer offset, Integer size) throws ApiException { Object postBody = null; // verify required params are set if(graphVertexId == null ) { throw new ApiException(400, "missing required params"); } // create path and map variables String path = "/vertex/edges".replaceAll("\\{format\\}","json"); // query params Map<String, String> queryParams = new HashMap<String, String>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, String> formParams = new HashMap<String, String>(); if(!"null".equals(String.valueOf(graphVertexId))) queryParams.put("graphVertexId", String.valueOf(graphVertexId)); if(!"null".equals(String.valueOf(edgeLabel))) queryParams.put("edgeLabel", String.valueOf(edgeLabel)); if(!"null".equals(String.valueOf(offset))) queryParams.put("offset", String.valueOf(offset)); if(!"null".equals(String.valueOf(size))) queryParams.put("size", String.valueOf(size)); String[] contentTypes = { "application/json"}; String contentType = 
contentTypes.length > 0 ? contentTypes[0] : "application/json"; if(contentType.startsWith("multipart/form-data")) { boolean hasFields = false; FormDataMultiPart mp = new FormDataMultiPart(); if(hasFields) postBody = mp; } else { } try { String response = apiInvoker.invokeAPI(basePath, path, "GET", queryParams, postBody, headerParams, formParams, contentType); if(response != null){ return (ClientApiVertexEdges) ApiInvoker.deserialize(response, "", ClientApiVertexEdges.class); } else { return null; } } catch (ApiException ex) { if(ex.getCode() == 404) { return null; } else { throw ex; } } } public ClientApiElement create (String conceptType, String visibilitySource, String justificationText, String vertexId, org.visallo.web.clientapi.model.ClientApiAddElementProperties properties) throws ApiException { Object postBody = null; // verify required params are set if(conceptType == null || visibilitySource == null || justificationText == null ) { throw new ApiException(400, "missing required params"); } // create path and map variables String path = "/vertex/new".replaceAll("\\{format\\}","json"); // query params Map<String, String> queryParams = new HashMap<String, String>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, String> formParams = new HashMap<String, String>(); if(!"null".equals(String.valueOf(conceptType))) queryParams.put("conceptType", String.valueOf(conceptType)); if(!"null".equals(String.valueOf(visibilitySource))) queryParams.put("visibilitySource", String.valueOf(visibilitySource)); if(!"null".equals(String.valueOf(justificationText))) queryParams.put("justificationText", String.valueOf(justificationText)); if(!"null".equals(String.valueOf(vertexId))) queryParams.put("vertexId", String.valueOf(vertexId)); if(!"null".equals(String.valueOf(properties))) queryParams.put("properties", properties == null ? 
null : ApiInvoker.serialize(properties)); String[] contentTypes = { "application/json"}; String contentType = contentTypes.length > 0 ? contentTypes[0] : "application/json"; if(contentType.startsWith("multipart/form-data")) { boolean hasFields = false; FormDataMultiPart mp = new FormDataMultiPart(); if(hasFields) postBody = mp; } else { } try { String response = apiInvoker.invokeAPI(basePath, path, "POST", queryParams, postBody, headerParams, formParams, contentType); if(response != null){ return (ClientApiElement) ApiInvoker.deserialize(response, "", ClientApiElement.class); } else { return null; } } catch (ApiException ex) { if(ex.getCode() == 404) { return null; } else { throw ex; } } } public ClientApiElement setProperty (String graphVertexId, String propertyKey, String propertyName, String value, String visibilitySource, String justificationText, String sourceInfo, String metadata) throws ApiException { Object postBody = null; // verify required params are set if(graphVertexId == null || propertyKey == null || propertyName == null || value == null || visibilitySource == null || justificationText == null ) { throw new ApiException(400, "missing required params"); } // create path and map variables String path = "/vertex/property".replaceAll("\\{format\\}","json"); // query params Map<String, String> queryParams = new HashMap<String, String>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, String> formParams = new HashMap<String, String>(); if(!"null".equals(String.valueOf(graphVertexId))) queryParams.put("graphVertexId", String.valueOf(graphVertexId)); if(!"null".equals(String.valueOf(propertyKey))) queryParams.put("propertyKey", String.valueOf(propertyKey)); if(!"null".equals(String.valueOf(propertyName))) queryParams.put("propertyName", String.valueOf(propertyName)); if(!"null".equals(String.valueOf(value))) queryParams.put("value", String.valueOf(value)); if(!"null".equals(String.valueOf(visibilitySource))) 
queryParams.put("visibilitySource", String.valueOf(visibilitySource)); if(!"null".equals(String.valueOf(justificationText))) queryParams.put("justificationText", String.valueOf(justificationText)); if(!"null".equals(String.valueOf(sourceInfo))) queryParams.put("sourceInfo", String.valueOf(sourceInfo)); if(!"null".equals(String.valueOf(metadata))) queryParams.put("metadata", String.valueOf(metadata)); String[] contentTypes = { "application/json"}; String contentType = contentTypes.length > 0 ? contentTypes[0] : "application/json"; if(contentType.startsWith("multipart/form-data")) { boolean hasFields = false; FormDataMultiPart mp = new FormDataMultiPart(); if(hasFields) postBody = mp; } else { } try { String response = apiInvoker.invokeAPI(basePath, path, "POST", queryParams, postBody, headerParams, formParams, contentType); if(response != null){ return (ClientApiElement) ApiInvoker.deserialize(response, "", ClientApiElement.class); } else { return null; } } catch (ApiException ex) { if(ex.getCode() == 404) { return null; } else { throw ex; } } } public void deleteProperty (String graphVertexId, String propertyKey, String propertyName) throws ApiException { Object postBody = null; // verify required params are set if(graphVertexId == null || propertyKey == null || propertyName == null ) { throw new ApiException(400, "missing required params"); } // create path and map variables String path = "/vertex/property".replaceAll("\\{format\\}","json"); // query params Map<String, String> queryParams = new HashMap<String, String>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, String> formParams = new HashMap<String, String>(); if(!"null".equals(String.valueOf(graphVertexId))) queryParams.put("graphVertexId", String.valueOf(graphVertexId)); if(!"null".equals(String.valueOf(propertyKey))) queryParams.put("propertyKey", String.valueOf(propertyKey)); if(!"null".equals(String.valueOf(propertyName))) queryParams.put("propertyName", 
String.valueOf(propertyName)); String[] contentTypes = { "application/json"}; String contentType = contentTypes.length > 0 ? contentTypes[0] : "application/json"; if(contentType.startsWith("multipart/form-data")) { boolean hasFields = false; FormDataMultiPart mp = new FormDataMultiPart(); if(hasFields) postBody = mp; } else { } try { String response = apiInvoker.invokeAPI(basePath, path, "DELETE", queryParams, postBody, headerParams, formParams, contentType); if(response != null){ return ; } else { return ; } } catch (ApiException ex) { if(ex.getCode() == 404) { return ; } else { throw ex; } } } public ClientApiHistoricalPropertyResults getPropertyHistory (String graphVertexId, String propertyKey, String propertyName, Long startTime, Long endTime) throws ApiException { Object postBody = null; // verify required params are set if(graphVertexId == null || propertyKey == null || propertyName == null ) { throw new ApiException(400, "missing required params"); } // create path and map variables String path = "/vertex/property/history".replaceAll("\\{format\\}","json"); // query params Map<String, String> queryParams = new HashMap<String, String>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, String> formParams = new HashMap<String, String>(); if(!"null".equals(String.valueOf(graphVertexId))) queryParams.put("graphVertexId", String.valueOf(graphVertexId)); if(!"null".equals(String.valueOf(propertyKey))) queryParams.put("propertyKey", String.valueOf(propertyKey)); if(!"null".equals(String.valueOf(propertyName))) queryParams.put("propertyName", String.valueOf(propertyName)); if(!"null".equals(String.valueOf(startTime))) queryParams.put("startTime", String.valueOf(startTime)); if(!"null".equals(String.valueOf(endTime))) queryParams.put("endTime", String.valueOf(endTime)); String[] contentTypes = { "application/json"}; String contentType = contentTypes.length > 0 ? 
contentTypes[0] : "application/json"; if(contentType.startsWith("multipart/form-data")) { boolean hasFields = false; FormDataMultiPart mp = new FormDataMultiPart(); if(hasFields) postBody = mp; } else { } try { String response = apiInvoker.invokeAPI(basePath, path, "GET", queryParams, postBody, headerParams, formParams, contentType); if(response != null){ return (ClientApiHistoricalPropertyResults) ApiInvoker.deserialize(response, "", ClientApiHistoricalPropertyResults.class); } else { return null; } } catch (ApiException ex) { if(ex.getCode() == 404) { return null; } else { throw ex; } } } public void deleteEdge (String edgeId) throws ApiException { Object postBody = null; // verify required params are set if(edgeId == null ) { throw new ApiException(400, "missing required params"); } // create path and map variables String path = "/vertex/edge".replaceAll("\\{format\\}","json"); // query params Map<String, String> queryParams = new HashMap<String, String>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, String> formParams = new HashMap<String, String>(); if(!"null".equals(String.valueOf(edgeId))) queryParams.put("edgeId", String.valueOf(edgeId)); String[] contentTypes = { "application/json"}; String contentType = contentTypes.length > 0 ? 
contentTypes[0] : "application/json"; if(contentType.startsWith("multipart/form-data")) { boolean hasFields = false; FormDataMultiPart mp = new FormDataMultiPart(); if(hasFields) postBody = mp; } else { } try { String response = apiInvoker.invokeAPI(basePath, path, "DELETE", queryParams, postBody, headerParams, formParams, contentType); if(response != null){ return ; } else { return ; } } catch (ApiException ex) { if(ex.getCode() == 404) { return ; } else { throw ex; } } } public void deleteVertex (String graphVertexId) throws ApiException { Object postBody = null; // verify required params are set if(graphVertexId == null ) { throw new ApiException(400, "missing required params"); } // create path and map variables String path = "/vertex".replaceAll("\\{format\\}","json"); // query params Map<String, String> queryParams = new HashMap<String, String>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, String> formParams = new HashMap<String, String>(); if(!"null".equals(String.valueOf(graphVertexId))) queryParams.put("graphVertexId", String.valueOf(graphVertexId)); String[] contentTypes = { "application/json"}; String contentType = contentTypes.length > 0 ? 
contentTypes[0] : "application/json"; if(contentType.startsWith("multipart/form-data")) { boolean hasFields = false; FormDataMultiPart mp = new FormDataMultiPart(); if(hasFields) postBody = mp; } else { } try { String response = apiInvoker.invokeAPI(basePath, path, "DELETE", queryParams, postBody, headerParams, formParams, contentType); if(response != null){ return ; } else { return ; } } catch (ApiException ex) { if(ex.getCode() == 404) { return ; } else { throw ex; } } } public ClientApiTermMentionsResponse getTermMentions (String graphVertexId, String propertyKey, String propertyName) throws ApiException { Object postBody = null; // verify required params are set if(graphVertexId == null || propertyKey == null || propertyName == null ) { throw new ApiException(400, "missing required params"); } // create path and map variables String path = "/vertex/term-mentions".replaceAll("\\{format\\}","json"); // query params Map<String, String> queryParams = new HashMap<String, String>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, String> formParams = new HashMap<String, String>(); if(!"null".equals(String.valueOf(graphVertexId))) queryParams.put("graphVertexId", String.valueOf(graphVertexId)); if(!"null".equals(String.valueOf(propertyKey))) queryParams.put("propertyKey", String.valueOf(propertyKey)); if(!"null".equals(String.valueOf(propertyName))) queryParams.put("propertyName", String.valueOf(propertyName)); String[] contentTypes = { "application/json"}; String contentType = contentTypes.length > 0 ? 
contentTypes[0] : "application/json"; if(contentType.startsWith("multipart/form-data")) { boolean hasFields = false; FormDataMultiPart mp = new FormDataMultiPart(); if(hasFields) postBody = mp; } else { } try { String response = apiInvoker.invokeAPI(basePath, path, "GET", queryParams, postBody, headerParams, formParams, contentType); if(response != null){ return (ClientApiTermMentionsResponse) ApiInvoker.deserialize(response, "", ClientApiTermMentionsResponse.class); } else { return null; } } catch (ApiException ex) { if(ex.getCode() == 404) { return null; } else { throw ex; } } } public ClientApiDetectedObjects getDetectedObjects (String graphVertexId, String propertyName, String workspaceId) throws ApiException { Object postBody = null; // verify required params are set if(graphVertexId == null || propertyName == null || workspaceId == null ) { throw new ApiException(400, "missing required params"); } // create path and map variables String path = "/vertex/detected-objects".replaceAll("\\{format\\}","json"); // query params Map<String, String> queryParams = new HashMap<String, String>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, String> formParams = new HashMap<String, String>(); if(!"null".equals(String.valueOf(graphVertexId))) queryParams.put("graphVertexId", String.valueOf(graphVertexId)); if(!"null".equals(String.valueOf(propertyName))) queryParams.put("propertyName", String.valueOf(propertyName)); if(!"null".equals(String.valueOf(workspaceId))) queryParams.put("workspaceId", String.valueOf(workspaceId)); String[] contentTypes = { "application/json"}; String contentType = contentTypes.length > 0 ? 
contentTypes[0] : "application/json"; if(contentType.startsWith("multipart/form-data")) { boolean hasFields = false; FormDataMultiPart mp = new FormDataMultiPart(); if(hasFields) postBody = mp; } else { } try { String response = apiInvoker.invokeAPI(basePath, path, "GET", queryParams, postBody, headerParams, formParams, contentType); if(response != null){ return (ClientApiDetectedObjects) ApiInvoker.deserialize(response, "", ClientApiDetectedObjects.class); } else { return null; } } catch (ApiException ex) { if(ex.getCode() == 404) { return null; } else { throw ex; } } } //error info- code: 404 reason: "Vertex not found" model: <none> public ClientApiElement setVisibility (String graphVertexId, String visibilitySource) throws ApiException { Object postBody = null; // verify required params are set if(graphVertexId == null || visibilitySource == null ) { throw new ApiException(400, "missing required params"); } // create path and map variables String path = "/vertex/visibility".replaceAll("\\{format\\}","json"); // query params Map<String, String> queryParams = new HashMap<String, String>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, String> formParams = new HashMap<String, String>(); if(!"null".equals(String.valueOf(graphVertexId))) queryParams.put("graphVertexId", String.valueOf(graphVertexId)); if(!"null".equals(String.valueOf(visibilitySource))) queryParams.put("visibilitySource", String.valueOf(visibilitySource)); String[] contentTypes = { "application/json"}; String contentType = contentTypes.length > 0 ? 
contentTypes[0] : "application/json"; if(contentType.startsWith("multipart/form-data")) { boolean hasFields = false; FormDataMultiPart mp = new FormDataMultiPart(); if(hasFields) postBody = mp; } else { } try { String response = apiInvoker.invokeAPI(basePath, path, "POST", queryParams, postBody, headerParams, formParams, contentType); if(response != null){ return (ClientApiElement) ApiInvoker.deserialize(response, "", ClientApiElement.class); } else { return null; } } catch (ApiException ex) { if(ex.getCode() == 404) { return null; } else { throw ex; } } } //error info- code: 404 reason: "Artifact not found" model: <none> public String getHighlightedText (String graphVertexId, String propertyKey) throws ApiException { Object postBody = null; // verify required params are set if(graphVertexId == null || propertyKey == null ) { throw new ApiException(400, "missing required params"); } // create path and map variables String path = "/vertex/highlighted-text".replaceAll("\\{format\\}","json"); // query params Map<String, String> queryParams = new HashMap<String, String>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, String> formParams = new HashMap<String, String>(); if(!"null".equals(String.valueOf(graphVertexId))) queryParams.put("graphVertexId", String.valueOf(graphVertexId)); if(!"null".equals(String.valueOf(propertyKey))) queryParams.put("propertyKey", String.valueOf(propertyKey)); String[] contentTypes = { "application/json"}; String contentType = contentTypes.length > 0 ? 
contentTypes[0] : "application/json"; if(contentType.startsWith("multipart/form-data")) { boolean hasFields = false; FormDataMultiPart mp = new FormDataMultiPart(); if(hasFields) postBody = mp; } else { } try { String response = apiInvoker.invokeAPI(basePath, path, "GET", queryParams, postBody, headerParams, formParams, contentType); if(response != null){ return (String) ApiInvoker.deserialize(response, "", String.class); } else { return null; } } catch (ApiException ex) { if(ex.getCode() == 404) { return null; } else { throw ex; } } } public ClientApiArtifactImportResponse importFile (String visibilitySource, File file) throws ApiException { Object postBody = null; // verify required params are set if(visibilitySource == null || file == null ) { throw new ApiException(400, "missing required params"); } // create path and map variables String path = "/vertex/import".replaceAll("\\{format\\}","json"); // query params Map<String, String> queryParams = new HashMap<String, String>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, String> formParams = new HashMap<String, String>(); String[] contentTypes = { "multipart/form-data"}; String contentType = contentTypes.length > 0 ? 
contentTypes[0] : "application/json"; if(contentType.startsWith("multipart/form-data")) { boolean hasFields = false; FormDataMultiPart mp = new FormDataMultiPart(); hasFields = true; mp.field("visibilitySource", visibilitySource, MediaType.MULTIPART_FORM_DATA_TYPE); hasFields = true; com.sun.jersey.core.header.FormDataContentDisposition dispo = com.sun.jersey.core.header.FormDataContentDisposition .name("file") .fileName(file.getName()) .size(file.length()) .build(); com.sun.jersey.multipart.FormDataBodyPart bodyPart = new com.sun.jersey.multipart.FormDataBodyPart(dispo, file, MediaType.MULTIPART_FORM_DATA_TYPE); mp.bodyPart(bodyPart); if(hasFields) postBody = mp; } else { formParams.put("visibilitySource", visibilitySource);} try { String response = apiInvoker.invokeAPI(basePath, path, "POST", queryParams, postBody, headerParams, formParams, contentType); if(response != null){ return (ClientApiArtifactImportResponse) ApiInvoker.deserialize(response, "", ClientApiArtifactImportResponse.class); } else { return null; } } catch (ApiException ex) { if(ex.getCode() == 404) { return null; } else { throw ex; } } } public void resolveTerm (String artifactId, String propertyKey, String propertyName, Integer mentionStart, Integer mentionEnd, String sign, String conceptId, String visibilitySource, String resolvedVertexId, String justificationText, String sourceInfo) throws ApiException { Object postBody = null; // verify required params are set if(artifactId == null || propertyKey == null || propertyName == null || mentionStart == null || mentionEnd == null || sign == null || conceptId == null || visibilitySource == null ) { throw new ApiException(400, "missing required params"); } // create path and map variables String path = "/vertex/resolve-term".replaceAll("\\{format\\}","json"); // query params Map<String, String> queryParams = new HashMap<String, String>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, String> formParams = new 
// --- Generated Swagger/Jersey API client for /vertex/* endpoints. The source newlines appear to
// have been lost during extraction: inline "//" comments swallow the rest of each squashed
// physical line, so this text is not compilable as-is — TODO restore the original line breaks.
// Code below is preserved byte-identical; only these review comments were added.
// NOTE(review): the `!"null".equals(String.valueOf(x))` guard filters Java nulls but ALSO drops a
// legitimate literal string "null" parameter value — known generated-code quirk, confirm harmless.
// Continuation of a method that begins before this chunk: populates query params
// (artifactId..sourceInfo) and picks the request content type.
HashMap<String, String>(); if(!"null".equals(String.valueOf(artifactId))) queryParams.put("artifactId", String.valueOf(artifactId)); if(!"null".equals(String.valueOf(propertyKey))) queryParams.put("propertyKey", String.valueOf(propertyKey)); if(!"null".equals(String.valueOf(propertyName))) queryParams.put("propertyName", String.valueOf(propertyName)); if(!"null".equals(String.valueOf(mentionStart))) queryParams.put("mentionStart", String.valueOf(mentionStart)); if(!"null".equals(String.valueOf(mentionEnd))) queryParams.put("mentionEnd", String.valueOf(mentionEnd)); if(!"null".equals(String.valueOf(sign))) queryParams.put("sign", String.valueOf(sign)); if(!"null".equals(String.valueOf(conceptId))) queryParams.put("conceptId", String.valueOf(conceptId)); if(!"null".equals(String.valueOf(visibilitySource))) queryParams.put("visibilitySource", String.valueOf(visibilitySource)); if(!"null".equals(String.valueOf(resolvedVertexId))) queryParams.put("resolvedVertexId", String.valueOf(resolvedVertexId)); if(!"null".equals(String.valueOf(justificationText))) queryParams.put("justificationText", String.valueOf(justificationText)); if(!"null".equals(String.valueOf(sourceInfo))) queryParams.put("sourceInfo", String.valueOf(sourceInfo)); String[] contentTypes = { "application/json"}; String contentType = contentTypes.length > 0 ? 
// Finishes the previous POST invocation (void return; a 404 from the server is swallowed and the
// method returns normally), then unresolveTerm(termMentionId): POST /vertex/unresolve-term.
contentTypes[0] : "application/json"; if(contentType.startsWith("multipart/form-data")) { boolean hasFields = false; FormDataMultiPart mp = new FormDataMultiPart(); if(hasFields) postBody = mp; } else { } try { String response = apiInvoker.invokeAPI(basePath, path, "POST", queryParams, postBody, headerParams, formParams, contentType); if(response != null){ return ; } else { return ; } } catch (ApiException ex) { if(ex.getCode() == 404) { return ; } else { throw ex; } } } public void unresolveTerm (String termMentionId) throws ApiException { Object postBody = null; // verify required params are set if(termMentionId == null ) { throw new ApiException(400, "missing required params"); } // create path and map variables String path = "/vertex/unresolve-term".replaceAll("\\{format\\}","json"); // query params Map<String, String> queryParams = new HashMap<String, String>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, String> formParams = new HashMap<String, String>(); if(!"null".equals(String.valueOf(termMentionId))) queryParams.put("termMentionId", String.valueOf(termMentionId)); String[] contentTypes = { "application/json"}; String contentType = contentTypes.length > 0 ? 
// Finishes unresolveTerm, then resolveDetectedObject(...): POST /vertex/resolve-detected-object.
// Required: artifactId, title, conceptId, visibilitySource, x1, x2, y1, y2 (others optional).
contentTypes[0] : "application/json"; if(contentType.startsWith("multipart/form-data")) { boolean hasFields = false; FormDataMultiPart mp = new FormDataMultiPart(); if(hasFields) postBody = mp; } else { } try { String response = apiInvoker.invokeAPI(basePath, path, "POST", queryParams, postBody, headerParams, formParams, contentType); if(response != null){ return ; } else { return ; } } catch (ApiException ex) { if(ex.getCode() == 404) { return ; } else { throw ex; } } } public void resolveDetectedObject (String artifactId, String title, String conceptId, String visibilitySource, String graphVertexId, String justificationText, String sourceInfo, String originalPropertyKey, Double x1, Double x2, Double y1, Double y2) throws ApiException { Object postBody = null; // verify required params are set if(artifactId == null || title == null || conceptId == null || visibilitySource == null || x1 == null || x2 == null || y1 == null || y2 == null ) { throw new ApiException(400, "missing required params"); } // create path and map variables String path = "/vertex/resolve-detected-object".replaceAll("\\{format\\}","json"); // query params Map<String, String> queryParams = new HashMap<String, String>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, String> formParams = new HashMap<String, String>(); if(!"null".equals(String.valueOf(artifactId))) queryParams.put("artifactId", String.valueOf(artifactId)); if(!"null".equals(String.valueOf(title))) queryParams.put("title", String.valueOf(title)); if(!"null".equals(String.valueOf(conceptId))) queryParams.put("conceptId", String.valueOf(conceptId)); if(!"null".equals(String.valueOf(visibilitySource))) queryParams.put("visibilitySource", String.valueOf(visibilitySource)); if(!"null".equals(String.valueOf(graphVertexId))) queryParams.put("graphVertexId", String.valueOf(graphVertexId)); if(!"null".equals(String.valueOf(justificationText))) queryParams.put("justificationText", 
// Remaining resolveDetectedObject query params and its POST, then
// unresolveDetectedObject(vertexId, multiValueKey): POST /vertex/unresolve-detected-object.
String.valueOf(justificationText)); if(!"null".equals(String.valueOf(sourceInfo))) queryParams.put("sourceInfo", String.valueOf(sourceInfo)); if(!"null".equals(String.valueOf(originalPropertyKey))) queryParams.put("originalPropertyKey", String.valueOf(originalPropertyKey)); if(!"null".equals(String.valueOf(x1))) queryParams.put("x1", String.valueOf(x1)); if(!"null".equals(String.valueOf(x2))) queryParams.put("x2", String.valueOf(x2)); if(!"null".equals(String.valueOf(y1))) queryParams.put("y1", String.valueOf(y1)); if(!"null".equals(String.valueOf(y2))) queryParams.put("y2", String.valueOf(y2)); String[] contentTypes = { "application/json"}; String contentType = contentTypes.length > 0 ? contentTypes[0] : "application/json"; if(contentType.startsWith("multipart/form-data")) { boolean hasFields = false; FormDataMultiPart mp = new FormDataMultiPart(); if(hasFields) postBody = mp; } else { } try { String response = apiInvoker.invokeAPI(basePath, path, "POST", queryParams, postBody, headerParams, formParams, contentType); if(response != null){ return ; } else { return ; } } catch (ApiException ex) { if(ex.getCode() == 404) { return ; } else { throw ex; } } } public void unresolveDetectedObject (String vertexId, String multiValueKey) throws ApiException { Object postBody = null; // verify required params are set if(vertexId == null || multiValueKey == null ) { throw new ApiException(400, "missing required params"); } // create path and map variables String path = "/vertex/unresolve-detected-object".replaceAll("\\{format\\}","json"); // query params Map<String, String> queryParams = new HashMap<String, String>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, String> formParams = new HashMap<String, String>(); if(!"null".equals(String.valueOf(vertexId))) queryParams.put("vertexId", String.valueOf(vertexId)); if(!"null".equals(String.valueOf(multiValueKey))) queryParams.put("multiValueKey", String.valueOf(multiValueKey)); String[] 
// Finishes unresolveDetectedObject, then vertexSearch(...): multipart/form-data POST
// /vertex/search. Only `filter` is required; relatedToVertexIds are sent as repeated
// multipart fields ("relatedToVertexIds[]"), the scalar params go in the query string.
contentTypes = { "application/json"}; String contentType = contentTypes.length > 0 ? contentTypes[0] : "application/json"; if(contentType.startsWith("multipart/form-data")) { boolean hasFields = false; FormDataMultiPart mp = new FormDataMultiPart(); if(hasFields) postBody = mp; } else { } try { String response = apiInvoker.invokeAPI(basePath, path, "POST", queryParams, postBody, headerParams, formParams, contentType); if(response != null){ return ; } else { return ; } } catch (ApiException ex) { if(ex.getCode() == 404) { return ; } else { throw ex; } } } public ClientApiElementSearchResponse vertexSearch (String q, String filter, Integer offset, Integer size, String conceptType, Boolean includeChildNodes, List<String> relatedToVertexIds) throws ApiException { Object postBody = null; // verify required params are set if(filter == null ) { throw new ApiException(400, "missing required params"); } // create path and map variables String path = "/vertex/search".replaceAll("\\{format\\}","json"); // query params Map<String, String> queryParams = new HashMap<String, String>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, String> formParams = new HashMap<String, String>(); if(!"null".equals(String.valueOf(q))) queryParams.put("q", String.valueOf(q)); if(!"null".equals(String.valueOf(filter))) queryParams.put("filter", String.valueOf(filter)); if(!"null".equals(String.valueOf(offset))) queryParams.put("offset", String.valueOf(offset)); if(!"null".equals(String.valueOf(size))) queryParams.put("size", String.valueOf(size)); if(!"null".equals(String.valueOf(conceptType))) queryParams.put("conceptType", String.valueOf(conceptType)); if(!"null".equals(String.valueOf(includeChildNodes))) queryParams.put("includeChildNodes", String.valueOf(includeChildNodes)); String[] contentTypes = { "multipart/form-data"}; String contentType = contentTypes.length > 0 ? 
// vertexSearch multipart body build + response deserialization, then
// vertexGeoSearch(lat, lon, radius): GET /vertex/geo-search -> ClientApiElementSearchResponse.
contentTypes[0] : "application/json"; if(contentType.startsWith("multipart/form-data")) { boolean hasFields = false; FormDataMultiPart mp = new FormDataMultiPart(); hasFields = true; if(relatedToVertexIds != null) { for(String relatedToVertexId:relatedToVertexIds) { mp.field("relatedToVertexIds[]", relatedToVertexId, MediaType.MULTIPART_FORM_DATA_TYPE); } } if(hasFields && !mp.getFields().isEmpty()) postBody = mp; } else { throw new java.lang.RuntimeException("invalid content type");} try { String response = apiInvoker.invokeAPI(basePath, path, "POST", queryParams, postBody, headerParams, formParams, contentType); if(response != null){ return (ClientApiElementSearchResponse) ApiInvoker.deserialize(response, "", ClientApiElementSearchResponse.class); } else { return null; } } catch (ApiException ex) { if(ex.getCode() == 404) { return null; } else { throw ex; } } } public ClientApiElementSearchResponse vertexGeoSearch (Double lat, Double lon, Double radius) throws ApiException { Object postBody = null; // verify required params are set if(lat == null || lon == null || radius == null ) { throw new ApiException(400, "missing required params"); } // create path and map variables String path = "/vertex/geo-search".replaceAll("\\{format\\}","json"); // query params Map<String, String> queryParams = new HashMap<String, String>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, String> formParams = new HashMap<String, String>(); if(!"null".equals(String.valueOf(lat))) queryParams.put("lat", String.valueOf(lat)); if(!"null".equals(String.valueOf(lon))) queryParams.put("lon", String.valueOf(lon)); if(!"null".equals(String.valueOf(radius))) queryParams.put("radius", String.valueOf(radius)); String[] contentTypes = { "application/json"}; String contentType = contentTypes.length > 0 ? 
// Finishes vertexGeoSearch (GET, 404 -> null), then findPath(outVertexId, inVertexId, hops):
// GET /vertex/find-path -> ClientApiLongRunningProcessSubmitResponse; all three params required.
contentTypes[0] : "application/json"; if(contentType.startsWith("multipart/form-data")) { boolean hasFields = false; FormDataMultiPart mp = new FormDataMultiPart(); if(hasFields) postBody = mp; } else { } try { String response = apiInvoker.invokeAPI(basePath, path, "GET", queryParams, postBody, headerParams, formParams, contentType); if(response != null){ return (ClientApiElementSearchResponse) ApiInvoker.deserialize(response, "", ClientApiElementSearchResponse.class); } else { return null; } } catch (ApiException ex) { if(ex.getCode() == 404) { return null; } else { throw ex; } } } public ClientApiLongRunningProcessSubmitResponse findPath (String outVertexId, String inVertexId, Integer hops) throws ApiException { Object postBody = null; // verify required params are set if(outVertexId == null || inVertexId == null || hops == null ) { throw new ApiException(400, "missing required params"); } // create path and map variables String path = "/vertex/find-path".replaceAll("\\{format\\}","json"); // query params Map<String, String> queryParams = new HashMap<String, String>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, String> formParams = new HashMap<String, String>(); if(!"null".equals(String.valueOf(outVertexId))) queryParams.put("outVertexId", String.valueOf(outVertexId)); if(!"null".equals(String.valueOf(inVertexId))) queryParams.put("inVertexId", String.valueOf(inVertexId)); if(!"null".equals(String.valueOf(hops))) queryParams.put("hops", String.valueOf(hops)); String[] contentTypes = { "application/json"}; String contentType = contentTypes.length > 0 ? 
// Finishes findPath, then findRelated(graphVertexIds, ...): multipart POST /vertex/find-related;
// graphVertexIds required and sent as repeated multipart fields ("graphVertexIds[]").
contentTypes[0] : "application/json"; if(contentType.startsWith("multipart/form-data")) { boolean hasFields = false; FormDataMultiPart mp = new FormDataMultiPart(); if(hasFields) postBody = mp; } else { } try { String response = apiInvoker.invokeAPI(basePath, path, "GET", queryParams, postBody, headerParams, formParams, contentType); if(response != null){ return (ClientApiLongRunningProcessSubmitResponse) ApiInvoker.deserialize(response, "", ClientApiLongRunningProcessSubmitResponse.class); } else { return null; } } catch (ApiException ex) { if(ex.getCode() == 404) { return null; } else { throw ex; } } } public ClientApiElementFindRelatedResponse findRelated (List<String> graphVertexIds, String limitParentConceptId, String limitEdgeLabel, Integer maxVerticesToReturn) throws ApiException { Object postBody = null; // verify required params are set if(graphVertexIds == null ) { throw new ApiException(400, "missing required params"); } // create path and map variables String path = "/vertex/find-related".replaceAll("\\{format\\}","json"); // query params Map<String, String> queryParams = new HashMap<String, String>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, String> formParams = new HashMap<String, String>(); if(!"null".equals(String.valueOf(limitParentConceptId))) queryParams.put("limitParentConceptId", String.valueOf(limitParentConceptId)); if(!"null".equals(String.valueOf(limitEdgeLabel))) queryParams.put("limitEdgeLabel", String.valueOf(limitEdgeLabel)); if(!"null".equals(String.valueOf(maxVerticesToReturn))) queryParams.put("maxVerticesToReturn", String.valueOf(maxVerticesToReturn)); String[] contentTypes = { "multipart/form-data"}; String contentType = contentTypes.length > 0 ? 
// findRelated multipart body + deserialization, then findMultiple(vertexIds, fallbackToPublic):
// multipart POST /vertex/multiple -> ClientApiVertexMultipleResponse; both params required.
contentTypes[0] : "application/json"; if(contentType.startsWith("multipart/form-data")) { boolean hasFields = false; FormDataMultiPart mp = new FormDataMultiPart(); hasFields = true; if(graphVertexIds != null) { for(String graphVertexId:graphVertexIds) { mp.field("graphVertexIds[]", graphVertexId, MediaType.MULTIPART_FORM_DATA_TYPE); } } if(hasFields && !mp.getFields().isEmpty()) postBody = mp; } else { throw new java.lang.RuntimeException("invalid content type");} try { String response = apiInvoker.invokeAPI(basePath, path, "POST", queryParams, postBody, headerParams, formParams, contentType); if(response != null){ return (ClientApiElementFindRelatedResponse) ApiInvoker.deserialize(response, "", ClientApiElementFindRelatedResponse.class); } else { return null; } } catch (ApiException ex) { if(ex.getCode() == 404) { return null; } else { throw ex; } } } public ClientApiVertexMultipleResponse findMultiple (List<String> vertexIds, Boolean fallbackToPublic) throws ApiException { Object postBody = null; // verify required params are set if(vertexIds == null || fallbackToPublic == null ) { throw new ApiException(400, "missing required params"); } // create path and map variables String path = "/vertex/multiple".replaceAll("\\{format\\}","json"); // query params Map<String, String> queryParams = new HashMap<String, String>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, String> formParams = new HashMap<String, String>(); if(!"null".equals(String.valueOf(fallbackToPublic))) queryParams.put("fallbackToPublic", String.valueOf(fallbackToPublic)); String[] contentTypes = { "multipart/form-data"}; String contentType = contentTypes.length > 0 ? 
// findMultiple multipart body ("vertexIds[]") + deserialization, then doExist(vertexIds):
// multipart POST /vertex/exists -> ClientApiVerticesExistsResponse.
// NOTE(review): doExist never adds vertexIds to the multipart body here — the loop appears on the
// next line; confirm the generated template actually emits the vertexIds fields for this method.
contentTypes[0] : "application/json"; if(contentType.startsWith("multipart/form-data")) { boolean hasFields = false; FormDataMultiPart mp = new FormDataMultiPart(); hasFields = true; if(vertexIds != null) { for(String vertexId:vertexIds) { mp.field("vertexIds[]", vertexId, MediaType.MULTIPART_FORM_DATA_TYPE); } } if(hasFields && !mp.getFields().isEmpty()) postBody = mp; } else { throw new java.lang.RuntimeException("invalid content type");} try { String response = apiInvoker.invokeAPI(basePath, path, "POST", queryParams, postBody, headerParams, formParams, contentType); if(response != null){ return (ClientApiVertexMultipleResponse) ApiInvoker.deserialize(response, "", ClientApiVertexMultipleResponse.class); } else { return null; } } catch (ApiException ex) { if(ex.getCode() == 404) { return null; } else { throw ex; } } } public ClientApiVerticesExistsResponse doExist (List<String> vertexIds) throws ApiException { Object postBody = null; // verify required params are set if(vertexIds == null ) { throw new ApiException(400, "missing required params"); } // create path and map variables String path = "/vertex/exists".replaceAll("\\{format\\}","json"); // query params Map<String, String> queryParams = new HashMap<String, String>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, String> formParams = new HashMap<String, String>(); String[] contentTypes = { "multipart/form-data"}; String contentType = contentTypes.length > 0 ? 
// doExist multipart body + deserialization, then getVertexCountsByConceptType():
// GET /vertex/counts-by-concept-type, no parameters.
contentTypes[0] : "application/json"; if(contentType.startsWith("multipart/form-data")) { boolean hasFields = false; FormDataMultiPart mp = new FormDataMultiPart(); hasFields = true; if(vertexIds != null) { for(String vertexId:vertexIds) { mp.field("vertexIds[]", vertexId, MediaType.MULTIPART_FORM_DATA_TYPE); } } if(hasFields && !mp.getFields().isEmpty()) postBody = mp; } else { throw new java.lang.RuntimeException("invalid content type");} try { String response = apiInvoker.invokeAPI(basePath, path, "POST", queryParams, postBody, headerParams, formParams, contentType); if(response != null){ return (ClientApiVerticesExistsResponse) ApiInvoker.deserialize(response, "", ClientApiVerticesExistsResponse.class); } else { return null; } } catch (ApiException ex) { if(ex.getCode() == 404) { return null; } else { throw ex; } } } public ClientApiVertexCountsByConceptType getVertexCountsByConceptType () throws ApiException { Object postBody = null; // create path and map variables String path = "/vertex/counts-by-concept-type".replaceAll("\\{format\\}","json"); // query params Map<String, String> queryParams = new HashMap<String, String>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, String> formParams = new HashMap<String, String>(); String[] contentTypes = { "application/json"}; String contentType = contentTypes.length > 0 ? 
// Finishes getVertexCountsByConceptType, then getAcl(elementId): GET /vertex/acl ->
// ClientApiElementAcl; the final "}" closes the generated API client class.
contentTypes[0] : "application/json"; if(contentType.startsWith("multipart/form-data")) { boolean hasFields = false; FormDataMultiPart mp = new FormDataMultiPart(); if(hasFields) postBody = mp; } else { } try { String response = apiInvoker.invokeAPI(basePath, path, "GET", queryParams, postBody, headerParams, formParams, contentType); if(response != null){ return (ClientApiVertexCountsByConceptType) ApiInvoker.deserialize(response, "", ClientApiVertexCountsByConceptType.class); } else { return null; } } catch (ApiException ex) { if(ex.getCode() == 404) { return null; } else { throw ex; } } } public ClientApiElementAcl getAcl (String elementId) throws ApiException { Object postBody = null; // verify required params are set if(elementId == null ) { throw new ApiException(400, "missing required params"); } // create path and map variables String path = "/vertex/acl".replaceAll("\\{format\\}","json"); // query params Map<String, String> queryParams = new HashMap<String, String>(); Map<String, String> headerParams = new HashMap<String, String>(); Map<String, String> formParams = new HashMap<String, String>(); if(!"null".equals(String.valueOf(elementId))) queryParams.put("elementId", String.valueOf(elementId)); String[] contentTypes = { "application/json"}; String contentType = contentTypes.length > 0 ? contentTypes[0] : "application/json"; if(contentType.startsWith("multipart/form-data")) { boolean hasFields = false; FormDataMultiPart mp = new FormDataMultiPart(); if(hasFields) postBody = mp; } else { } try { String response = apiInvoker.invokeAPI(basePath, path, "GET", queryParams, postBody, headerParams, formParams, contentType); if(response != null){ return (ClientApiElementAcl) ApiInvoker.deserialize(response, "", ClientApiElementAcl.class); } else { return null; } } catch (ApiException ex) { if(ex.getCode() == 404) { return null; } else { throw ex; } } } }
// Copyright 2019 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.weblayer; import android.os.RemoteException; import android.view.View; import android.webkit.ValueCallback; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.fragment.app.Fragment; import org.chromium.weblayer_private.interfaces.APICallException; import org.chromium.weblayer_private.interfaces.IBrowser; import org.chromium.weblayer_private.interfaces.IBrowserClient; import org.chromium.weblayer_private.interfaces.IRemoteFragment; import org.chromium.weblayer_private.interfaces.ITab; import org.chromium.weblayer_private.interfaces.ObjectWrapper; import org.chromium.weblayer_private.interfaces.StrictModeWorkaround; import java.util.Set; /** * Browser contains any number of Tabs, with one active Tab. The active Tab is visible to the user, * all other Tabs are hidden. * * Newly created Browsers have a single active Tab. * * Browser provides for two distinct ways to save state, which impacts the state of the Browser at * various points in the lifecycle. * * Asynchronously to the file system. This is used if a {@link persistenceId} was supplied when the * Browser was created. The {@link persistenceId} uniquely identifies the Browser for saving the * set of tabs and navigations. This is intended for long term persistence. * * For Browsers created with a {@link persistenceId}, restore happens asynchronously. As a result, * the Browser will not have any tabs until restore completes (which may be after the Fragment has * started). * * If a {@link persistenceId} is not supplied, then a minimal amount of state is saved to the * fragment (instance state). During recreation, if instance state is available, the state is * restored in {@link onStart}. Restore happens during start so that callbacks can be attached. 
As * a result of this, the Browser has no tabs until the Fragment is started. */ public class Browser { // Set to null once destroyed (or for tests). private IBrowser mImpl; // The Fragment the Browser is associated with. The value of this may change. @Nullable private Fragment mFragment; private final ObserverList<TabListCallback> mTabListCallbacks; private final UrlBarController mUrlBarController; private final ObserverList<BrowserControlsOffsetCallback> mBrowserControlsOffsetCallbacks; private final ObserverList<BrowserRestoreCallback> mBrowserRestoreCallbacks; private static int sMaxNavigationsPerTabForInstanceState; /** * Sets the maximum number of navigations saved when persisting a Browsers instance state. The * max applies to each Tab in the Browser. For example, if a value of 6 is supplied and the * Browser has 4 tabs, then up to 24 navigation entries may be saved. The supplied value is * a recommendation, for various reasons it may not be honored. A value of 0 results in * using the default. * * @param value The maximum number of navigations to persist. * * @throws IllegalArgumentException If {@code value} is less than 0. * * @since 98 */ public static void setMaxNavigationsPerTabForInstanceState(int value) { ThreadCheck.ensureOnUiThread(); if (value < 0) throw new IllegalArgumentException("Max must be >= 0"); sMaxNavigationsPerTabForInstanceState = value; } static int getMaxNavigationsPerTabForInstanceState() { return sMaxNavigationsPerTabForInstanceState; } // Constructor for test mocking. 
protected Browser() { mImpl = null; mTabListCallbacks = null; mUrlBarController = null; mBrowserControlsOffsetCallbacks = null; mBrowserRestoreCallbacks = null; } Browser(IBrowser impl, Fragment fragment) { mImpl = impl; mFragment = fragment; mTabListCallbacks = new ObserverList<TabListCallback>(); mBrowserControlsOffsetCallbacks = new ObserverList<BrowserControlsOffsetCallback>(); mBrowserRestoreCallbacks = new ObserverList<BrowserRestoreCallback>(); try { mImpl.setClient(new BrowserClientImpl()); mUrlBarController = new UrlBarController(mImpl.getUrlBarController()); } catch (RemoteException e) { throw new APICallException(e); } } /** * Changes the fragment. During configuration changes the fragment may change. */ void setFragment(@Nullable BrowserFragment fragment) { mFragment = fragment; } /** * Returns the fragment this Browser is associated with. During configuration changes the * fragment may change, and be null for some amount of time. */ @Nullable public Fragment getFragment() { return mFragment; } private void throwIfDestroyed() { if (mImpl == null) { throw new IllegalStateException("Browser can not be used once destroyed"); } } IBrowser getIBrowser() { return mImpl; } /** * Returns the Browser for the supplied Fragment; null if * {@link fragment} was not created by WebLayer. * * @return the Browser */ @Nullable public static Browser fromFragment(@Nullable Fragment fragment) { return fragment instanceof BrowserFragment ? ((BrowserFragment) fragment).getBrowser() : null; } /** * Returns true if this Browser has been destroyed. */ public boolean isDestroyed() { ThreadCheck.ensureOnUiThread(); return mImpl == null; } // Called prior to notifying IBrowser of destroy(). void prepareForDestroy() { mFragment = null; for (TabListCallback callback : mTabListCallbacks) { callback.onWillDestroyBrowserAndAllTabs(); } } // Called after the browser was destroyed. void onDestroyed() { mImpl = null; } /** * Sets the active (visible) Tab. Only one Tab is visible at a time. 
* * @param tab The Tab to make active. * * @throws IllegalStateException if {@link tab} was not added to this * Browser. * * @see #addTab() */ public void setActiveTab(@NonNull Tab tab) { ThreadCheck.ensureOnUiThread(); throwIfDestroyed(); try { if (getActiveTab() != tab && !mImpl.setActiveTab(tab.getITab())) { throw new IllegalStateException("attachTab() must be called before " + "setActiveTab"); } } catch (RemoteException e) { throw new APICallException(e); } } /** * Adds a tab to this Browser. If {link tab} is the active Tab of another Browser, then the * other Browser's active tab is set to null. This does nothing if {@link tab} is already * contained in this Browser. * * @param tab The Tab to add. */ public void addTab(@NonNull Tab tab) { ThreadCheck.ensureOnUiThread(); throwIfDestroyed(); if (tab.getBrowser() == this) return; try { mImpl.addTab(tab.getITab()); } catch (RemoteException e) { throw new APICallException(e); } } /** * Returns the active (visible) Tab associated with this * Browser. * * @return The Tab. */ @Nullable public Tab getActiveTab() { ThreadCheck.ensureOnUiThread(); throwIfDestroyed(); try { Tab tab = Tab.getTabById(mImpl.getActiveTabId()); assert tab == null || tab.getBrowser() == this; return tab; } catch (RemoteException e) { throw new APICallException(e); } } /** * Returns the set of Tabs contained in this Browser. * * @return The Tabs */ @NonNull public Set<Tab> getTabs() { ThreadCheck.ensureOnUiThread(); return Tab.getTabsInBrowser(this); } /** * Disposes a Tab. If {@link tab} is the active Tab, no Tab is made active. After this call * {@link tab} should not be used. * * Note this will skip any beforeunload handlers. To run those first, use * {@link Tab#dispatchBeforeUnloadAndClose} instead. * * @param tab The Tab to dispose. * * @throws IllegalStateException is {@link tab} is not in this Browser. 
*/ public void destroyTab(@NonNull Tab tab) { ThreadCheck.ensureOnUiThread(); throwIfDestroyed(); if (tab.getBrowser() != this) { throw new IllegalStateException("destroyTab() must be called on a Tab in the Browser"); } try { mImpl.destroyTab(tab.getITab()); } catch (RemoteException e) { throw new APICallException(e); } } /** * Adds a TabListCallback. * * @param callback The TabListCallback. */ public void registerTabListCallback(@NonNull TabListCallback callback) { ThreadCheck.ensureOnUiThread(); mTabListCallbacks.addObserver(callback); } /** * Removes a TabListCallback. * * @param callback The TabListCallback. */ public void unregisterTabListCallback(@NonNull TabListCallback callback) { ThreadCheck.ensureOnUiThread(); mTabListCallbacks.removeObserver(callback); } /** * Returns true if this Browser is in the process of restoring the previous state. * * @param True if restoring previous state. */ public boolean isRestoringPreviousState() { ThreadCheck.ensureOnUiThread(); throwIfDestroyed(); try { return mImpl.isRestoringPreviousState(); } catch (RemoteException e) { throw new APICallException(e); } } /** * Adds a BrowserRestoreCallback. * * @param callback The BrowserRestoreCallback. */ public void registerBrowserRestoreCallback(@NonNull BrowserRestoreCallback callback) { ThreadCheck.ensureOnUiThread(); throwIfDestroyed(); mBrowserRestoreCallbacks.addObserver(callback); } /** * Removes a BrowserRestoreCallback. * * @param callback The BrowserRestoreCallback. */ public void unregisterBrowserRestoreCallback(@NonNull BrowserRestoreCallback callback) { ThreadCheck.ensureOnUiThread(); throwIfDestroyed(); mBrowserRestoreCallbacks.removeObserver(callback); } /** * Sets the View shown at the top of the browser. A value of null removes the view. The * top-view is typically used to show the uri. The top-view scrolls with the page. * * @param view The new top-view. 
*/ public void setTopView(@Nullable View view) { ThreadCheck.ensureOnUiThread(); throwIfDestroyed(); try { mImpl.setTopView(ObjectWrapper.wrap(view)); } catch (RemoteException e) { throw new APICallException(e); } } /** * Sets the View shown at the top of the browser. The top-view is typically used to show the * uri. This method also allows you to control the scrolling behavior of the top-view by setting * a minimum height it will scroll to, and pinning the top-view to the top of the web contents. * * @param view The new top-view, or null to remove the view. * @param minHeight The minimum height in pixels that the top controls can scoll up to. A value * of 0 means the top-view should scroll entirely off screen. * @param onlyExpandControlsAtPageTop Whether the top-view should only be expanded when the web * content is scrolled to the top. A true value makes the top-view behave as though it * were inserted into the top of the page content. If true, the top-view should NOT be * used to display the URL, as this will prevent it from expanding in security-sensitive * contexts where the URL should be visible to the user. * @param animate Whether or not any height/visibility changes that result from this call * should be animated. */ public void setTopView(@Nullable View view, int minHeight, boolean onlyExpandControlsAtPageTop, boolean animate) { ThreadCheck.ensureOnUiThread(); throwIfDestroyed(); try { mImpl.setTopViewAndScrollingBehavior( ObjectWrapper.wrap(view), minHeight, onlyExpandControlsAtPageTop, animate); } catch (RemoteException e) { throw new APICallException(e); } } /** * Sets the View shown at the bottom of the browser. A value of null removes the view. * * @param view The new bottom-view. 
*/ public void setBottomView(@Nullable View view) { ThreadCheck.ensureOnUiThread(); throwIfDestroyed(); try { mImpl.setBottomView(ObjectWrapper.wrap(view)); } catch (RemoteException e) { throw new APICallException(e); } } /** * Registers {@link callback} to be notified when the offset of the top or bottom view changes. * * @param callback The BrowserControlsOffsetCallback to notify * * @since 88 */ public void registerBrowserControlsOffsetCallback( @NonNull BrowserControlsOffsetCallback callback) { ThreadCheck.ensureOnUiThread(); throwIfDestroyed(); if (WebLayer.getSupportedMajorVersionInternal() < 88) { throw new UnsupportedOperationException(); } if (mBrowserControlsOffsetCallbacks.isEmpty()) { try { mImpl.setBrowserControlsOffsetsEnabled(true); } catch (RemoteException e) { throw new APICallException(e); } } mBrowserControlsOffsetCallbacks.addObserver(callback); } /** * Removes a BrowserControlsOffsetCallback that was added using {@link * registerBrowserControlsOffsetCallback}. * * @param callback The BrowserControlsOffsetCallback to remove. * * @since 88 */ public void unregisterBrowserControlsOffsetCallback( @NonNull BrowserControlsOffsetCallback callback) { ThreadCheck.ensureOnUiThread(); throwIfDestroyed(); if (WebLayer.getSupportedMajorVersionInternal() < 88) { throw new UnsupportedOperationException(); } mBrowserControlsOffsetCallbacks.removeObserver(callback); if (mBrowserControlsOffsetCallbacks.isEmpty()) { try { mImpl.setBrowserControlsOffsetsEnabled(false); } catch (RemoteException e) { throw new APICallException(e); } } } /** * Creates a new tab attached to this browser. This will call {@link TabListCallback#onTabAdded} * with the new tab. 
*/ public @NonNull Tab createTab() { ThreadCheck.ensureOnUiThread(); throwIfDestroyed(); try { ITab iTab = mImpl.createTab(); Tab tab = Tab.getTabById(iTab.getId()); assert tab != null; return tab; } catch (RemoteException e) { throw new APICallException(e); } } /** * Control support for embedding use cases such as animations. This should be enabled when the * container view of the fragment is animated in any way, needs to be rotated or blended, or * need to control z-order with other views or other BrowserFragmentImpls. Note embedder should * keep WebLayer in the default non-embedding mode when user is interacting with the web * content. Embedding mode does not support encrypted video. * Deprecated in 90. Use setEmbeddabilityMode instead. * * @param enable Whether to support embedding * @param callback {@link Callback} to be called with a boolean indicating whether request * succeeded. A request might fail if it is subsumed by a subsequent request, or if this object * is destroyed. */ @Deprecated public void setSupportsEmbedding(boolean enable, @NonNull Callback<Boolean> callback) { ThreadCheck.ensureOnUiThread(); throwIfDestroyed(); try { mImpl.setSupportsEmbedding( enable, ObjectWrapper.wrap((ValueCallback<Boolean>) callback::onResult)); } catch (RemoteException e) { throw new APICallException(e); } } /** * See BrowserEmbeddabilityMode for details. The default mode is UNSUPPORTED. * @param mode the requested embedding mode. * @param callback {@link Callback} to be called with a boolean indicating whether request * succeeded. A request might fail if it is subsumed by a subsequent request, or if this object * is destroyed. 
* @since 90 */ public void setEmbeddabilityMode( @BrowserEmbeddabilityMode int mode, @NonNull Callback<Boolean> callback) { ThreadCheck.ensureOnUiThread(); if (WebLayer.getSupportedMajorVersionInternal() < 90) { throw new UnsupportedOperationException(); } throwIfDestroyed(); try { mImpl.setEmbeddabilityMode( mode, ObjectWrapper.wrap((ValueCallback<Boolean>) callback::onResult)); } catch (RemoteException e) { throw new APICallException(e); } } /** * Set the minimum surface size of this Browser instance. * Setting this avoids expensive surface resize for a fragment view resize that is within the * minimum size. The trade off is the additional memory and power needed for the larger * surface. For example, for a browser use case, it's likely worthwhile to set the minimum * surface size to the screen size to avoid surface resize when entering and exiting fullscreen. * It is safe to call this before Views are initialized. * Note Android does have a max size limit on Surfaces which applies here as well; this * generally should not be larger than the device screen size. * Note the surface size is increased to the layout size only if both the width and height are * no larger than the minimum surface size. No adjustment is made if the surface size is larger * than the minimum size in one dimension and smaller in the other dimension. * @since 89 */ public void setMinimumSurfaceSize(int width, int height) { ThreadCheck.ensureOnUiThread(); if (WebLayer.getSupportedMajorVersionInternal() < 89) { throw new UnsupportedOperationException(); } throwIfDestroyed(); try { mImpl.setMinimumSurfaceSize(width, height); } catch (RemoteException e) { throw new APICallException(e); } } /** * Controls how sites are themed when WebLayer is in dark mode. WebLayer considers itself to be * in dark mode if the UI_MODE_NIGHT_YES flag of its Resources' Configuration's uiMode field is * set, which is typically controlled with AppCompatDelegate#setDefaultNightMode. 
By default * pages will only be rendered in dark mode if WebLayer is in dark mode and they provide a dark * theme in CSS. See DarkModeStrategy for other possible configurations. * * @see DarkModeStrategy * @param strategy See {@link DarkModeStrategy}. * * @since 90 */ public void setDarkModeStrategy(@DarkModeStrategy int strategy) { ThreadCheck.ensureOnUiThread(); if (WebLayer.getSupportedMajorVersionInternal() < 89) { throw new UnsupportedOperationException(); } throwIfDestroyed(); try { mImpl.setDarkModeStrategy(strategy); } catch (RemoteException e) { throw new APICallException(e); } } /** * Returns {@link Profile} associated with this Browser Fragment. Multiple fragments can share * the same Profile. */ @NonNull public Profile getProfile() { ThreadCheck.ensureOnUiThread(); throwIfDestroyed(); try { return Profile.of(mImpl.getProfile()); } catch (RemoteException e) { throw new APICallException(e); } } /** * Returns the UrlBarController. */ @NonNull public UrlBarController getUrlBarController() { ThreadCheck.ensureOnUiThread(); throwIfDestroyed(); return mUrlBarController; } /** * Normally when the Browser is detached the visibility of the page is set to hidden. When the * visibility is hidden video may stop, or other side effects may result. At certain times, * such as fullscreen or rotation, it may be necessary to transiently detach the Browser. * Calling this method with a value of false results in WebLayer not hiding the page on the next * detach. Once the Browser is reattached, the value is implicitly reset to true. Calling this * method when the Browser is already detached does nothing. * * @param changeVisibility Whether WebLayer should change visibility as the result of a detach. 
 *
 * @since 91
 */
public void setChangeVisibilityOnNextDetach(boolean changeVisibility) {
    ThreadCheck.ensureOnUiThread();
    // Gate on the implementation version: this API was added in 91.
    if (WebLayer.getSupportedMajorVersionInternal() < 91) {
        throw new UnsupportedOperationException();
    }
    throwIfDestroyed();
    try {
        mImpl.setChangeVisibilityOnNextDetach(changeVisibility);
    } catch (RemoteException e) {
        throw new APICallException(e);
    }
}

// Receives browser-side events over the AIDL boundary and fans them out to the
// registered client-side callbacks.
private final class BrowserClientImpl extends IBrowserClient.Stub {
    @Override
    public void onActiveTabChanged(int activeTabId) {
        StrictModeWorkaround.apply();
        // tab may be null here (e.g. no active tab); callbacks receive it as-is.
        Tab tab = Tab.getTabById(activeTabId);
        for (TabListCallback callback : mTabListCallbacks) {
            callback.onActiveTabChanged(tab);
        }
    }

    @Override
    public void onTabAdded(ITab iTab) {
        StrictModeWorkaround.apply();
        int id = 0;
        try {
            id = iTab.getId();
        } catch (RemoteException e) {
            throw new APICallException(e);
        }
        // Reuse an existing client-side wrapper when one is already known for this id,
        // otherwise create one now; either way the tab is bound to this Browser.
        Tab tab = Tab.getTabById(id);
        if (tab == null) {
            tab = new Tab(iTab, Browser.this);
        } else {
            tab.setBrowser(Browser.this);
        }
        for (TabListCallback callback : mTabListCallbacks) {
            callback.onTabAdded(tab);
        }
    }

    @Override
    public void onTabRemoved(int tabId) {
        StrictModeWorkaround.apply();
        Tab tab = Tab.getTabById(tabId);
        // This should only be called with a previously created tab.
        assert tab != null;
        // And this should only be called for tabs attached to this browser.
assert tab.getBrowser() == Browser.this;
        // Detach the wrapper from this Browser before notifying observers.
        tab.setBrowser(null);
        for (TabListCallback callback : mTabListCallbacks) {
            callback.onTabRemoved(tab);
        }
    }

    @Override
    public IRemoteFragment createMediaRouteDialogFragment() {
        StrictModeWorkaround.apply();
        return MediaRouteDialogFragment.create(mFragment);
    }

    @Override
    public void onBrowserControlsOffsetsChanged(boolean isTop, int offset) {
        // Route the offset to the matching top/bottom callback method.
        for (BrowserControlsOffsetCallback callback : mBrowserControlsOffsetCallbacks) {
            if (isTop) {
                callback.onTopViewOffsetChanged(offset);
            } else {
                callback.onBottomViewOffsetChanged(offset);
            }
        }
    }

    @Override
    public void onRestoreCompleted() {
        for (BrowserRestoreCallback callback : mBrowserRestoreCallbacks) {
            callback.onRestoreCompleted();
        }
    }
}
}
/* * * ***** BEGIN LICENSE BLOCK ***** * Version: MPL 1.1/GPL 2.0 * * The contents of this file are subject to the Mozilla Public License Version * 1.1 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * http://www.mozilla.org/MPL/ * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is Rhino code, released * May 6, 1999. * * The Initial Developer of the Original Code is * Netscape Communications Corporation. * Portions created by the Initial Developer are Copyright (C) 1997-1999 * the Initial Developer. All Rights Reserved. * * Contributor(s): * Nick Santos * * Alternatively, the contents of this file may be used under the terms of * the GNU General Public License Version 2 or later (the "GPL"), in which * case the provisions of the GPL are applicable instead of those above. If * you wish to allow use of your version of this file only under the terms of * the GPL and not to allow others to use your version of this file under the * MPL, indicate your decision by deleting the provisions above and replacing * them with the notice and other provisions required by the GPL. If you do * not delete the provisions above, a recipient may use your version of this * file under either the MPL or the GPL. 
 *
 * ***** END LICENSE BLOCK ***** */

package com.google.javascript.rhino.jstype;

import static com.google.javascript.rhino.testing.TypeSubject.assertType;

import com.google.javascript.rhino.jstype.JSType.Nullability;
import com.google.javascript.rhino.testing.Asserts;
import com.google.javascript.rhino.testing.BaseJSTypeTestCase;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

/** Tests for record types: printing, sup/inf computation, and structural subtyping. */
@RunWith(JUnit4.class)
public class RecordTypeTest extends BaseJSTypeTestCase {

  // A record that (indirectly) references itself must print the cycle as "{...}" in
  // toString() and as "?" in annotation form instead of recursing forever.
  @Test
  public void testRecursiveRecord() {
    ProxyObjectType loop = new ProxyObjectType(registry, NUMBER_TYPE);
    JSType record = new RecordTypeBuilder(registry)
        .addProperty("loop", loop, null)
        .addProperty("number", NUMBER_TYPE, null)
        .addProperty("string", STRING_TYPE, null)
        .build();
    assertEquals("{\n loop: number,\n number: number,\n string: string\n}",
        record.toString());
    loop.setReferencedType(record);
    assertEquals("{\n loop: {...},\n number: number,\n string: string\n}",
        record.toString());
    assertEquals("{loop: ?, number: number, string: string}",
        record.toAnnotationString(Nullability.EXPLICIT));
    Asserts.assertEquivalenceOperations(record, loop);
  }

  // toString() truncates after 10 properties ("a10: number, ..."); the annotation form
  // always prints every property.
  @Test
  public void testLongToString() {
    JSType record = new RecordTypeBuilder(registry)
        .addProperty("a01", NUMBER_TYPE, null)
        .addProperty("a02", NUMBER_TYPE, null)
        .addProperty("a03", NUMBER_TYPE, null)
        .addProperty("a04", NUMBER_TYPE, null)
        .addProperty("a05", NUMBER_TYPE, null)
        .addProperty("a06", NUMBER_TYPE, null)
        .addProperty("a07", NUMBER_TYPE, null)
        .addProperty("a08", NUMBER_TYPE, null)
        .addProperty("a09", NUMBER_TYPE, null)
        .addProperty("a10", NUMBER_TYPE, null)
        .addProperty("a11", NUMBER_TYPE, null)
        .build();
    assertEquals(
        LINE_JOINER.join(
            "{",
            " a01: number,",
            " a02: number,",
            " a03: number,",
            " a04: number,",
            " a05: number,",
            " a06: number,",
            " a07: number,",
            " a08: number,",
            " a09: number,",
            " a10: number, ...",
            "}"),
        record.toString());
    assertEquals(
        "{a01: number, a02: number, a03: number, a04: number, a05: number, a06: number,"
            + " a07: number, a08: number, a09: number, a10: number, a11: number}",
        record.toAnnotationString(Nullability.EXPLICIT));
  }

  // Greatest subtype merges the property sets; least supertype is the union of the two
  // records. Proxies must behave identically to the records they wrap.
  @Test
  public void testSupAndInf() {
    JSType recordA = new RecordTypeBuilder(registry)
        .addProperty("a", NUMBER_TYPE, null)
        .addProperty("b", NUMBER_TYPE, null)
        .build();
    JSType recordC = new RecordTypeBuilder(registry)
        .addProperty("b", NUMBER_TYPE, null)
        .addProperty("c", NUMBER_TYPE, null)
        .build();
    ProxyObjectType proxyRecordA = new ProxyObjectType(registry, recordA);
    ProxyObjectType proxyRecordC = new ProxyObjectType(registry, recordC);

    JSType aInfC = new RecordTypeBuilder(registry)
        .addProperty("a", NUMBER_TYPE, null)
        .addProperty("b", NUMBER_TYPE, null)
        .addProperty("c", NUMBER_TYPE, null)
        .build();

    JSType aSupC = registry.createUnionType(recordA, recordC);

    assertType(recordA.getGreatestSubtype(recordC)).isStructurallyEqualTo(aInfC);
    assertType(recordA.getLeastSupertype(recordC)).isStructurallyEqualTo(aSupC);

    assertType(proxyRecordA.getGreatestSubtype(proxyRecordC)).isStructurallyEqualTo(aInfC);
    assertType(proxyRecordA.getLeastSupertype(proxyRecordC)).isStructurallyEqualTo(aSupC);
  }

  // An unknown-typed property is mutually substitutable with a concretely-typed one.
  @Test
  public void testSubtypeWithUnknowns() {
    JSType recordA = new RecordTypeBuilder(registry)
        .addProperty("a", NUMBER_TYPE, null)
        .build();
    JSType recordB = new RecordTypeBuilder(registry)
        .addProperty("a", UNKNOWN_TYPE, null)
        .build();
    assertTrue(recordA.isSubtypeOf(recordB));
    assertTrue(recordB.isSubtypeOf(recordA));
  }

  // Same, one level deeper: unknown return types inside function-typed properties.
  @Test
  public void testSubtypeWithUnknowns2() {
    JSType recordA = new RecordTypeBuilder(registry)
        .addProperty("a", new FunctionBuilder(registry)
            .withReturnType(NUMBER_TYPE)
            .build(), null)
        .build();
    JSType recordB = new RecordTypeBuilder(registry)
        .addProperty("a", new FunctionBuilder(registry)
            .withReturnType(UNKNOWN_TYPE)
            .build(), null)
        .build();
    assertTrue(recordA.isSubtypeOf(recordB));
    assertTrue(recordB.isSubtypeOf(recordA));
  }

  // Function-typed properties with incompatible concrete return types make the records
  // unrelated in both directions.
  @Test
  public void testSubtypeWithFunctionProps() {
    JSType recordA = new RecordTypeBuilder(registry)
        .addProperty("a", new FunctionBuilder(registry)
            .withReturnType(NUMBER_TYPE)
            .build(), null)
        .build();
    JSType recordB = new RecordTypeBuilder(registry)
        .addProperty("a", new FunctionBuilder(registry)
            .withReturnType(STRING_TYPE)
            .build(), null)
        .build();
    assertFalse(recordA.isSubtypeOf(recordB));
    assertFalse(recordB.isSubtypeOf(recordA));
  }

  // Property-wise subtyping: {a:number, b:number} and {a:number, b:string} are both
  // subtypes of {a:number, b:(number|string)}, but not of each other.
  @Test
  public void testSubtypeWithManyProps() {
    JSType recordA = new RecordTypeBuilder(registry)
        .addProperty("a", NUMBER_TYPE, null)
        .addProperty("b", NUMBER_TYPE, null)
        .build();
    JSType recordB = new RecordTypeBuilder(registry)
        .addProperty("a", NUMBER_TYPE, null)
        .addProperty("b", STRING_TYPE, null)
        .build();
    JSType recordC = new RecordTypeBuilder(registry)
        .addProperty("a", NUMBER_TYPE, null)
        .addProperty("b", registry.createUnionType(NUMBER_TYPE, STRING_TYPE), null)
        .build();
    assertFalse(recordA.isSubtypeOf(recordB));
    assertFalse(recordB.isSubtypeOf(recordA));
    assertFalse(recordC.isSubtypeOf(recordB));
    assertTrue(recordB.isSubtypeOf(recordC));
    assertTrue(recordA.isSubtypeOf(recordC));
  }
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.refactoring.introduceField;

import com.intellij.codeInsight.CodeInsightUtil;
import com.intellij.codeInsight.TargetElementUtil;
import com.intellij.codeInsight.unwrap.ScopeHighlighter;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.RangeMarker;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Pass;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiExpressionTrimRenderer;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.refactoring.IntroduceTargetChooser;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.introduceVariable.IntroduceVariableBase;
import com.intellij.refactoring.util.CommonRefactoringUtil;

import java.util.Iterator;
import java.util.List;

/**
 * Holder for the element an "introduce" refactoring operates on: an expression, a local
 * variable, or both (when the refactoring is invoked on a reference to a local variable).
 *
 * @author dsl
 */
public class ElementToWorkOn {
  // User-data keys used to pass context between refactoring steps.
  public static final Key<PsiElement> PARENT = Key.create("PARENT");
  // Expression selected/at caret; null when the refactoring was invoked on a declaration.
  private final PsiExpression myExpression;
  // Local variable the refactoring targets; null when only an expression is involved.
  private final PsiLocalVariable myLocalVariable;
  public static final Key<String> PREFIX = Key.create("prefix");
  public static final Key<String> SUFFIX = Key.create("suffix");
  public static final Key<RangeMarker> TEXT_RANGE = Key.create("range");
  public static final Key<TextRange> EXPR_RANGE = Key.create("expr_range");
  public static final Key<Boolean> REPLACE_NON_PHYSICAL
= Key.create("replace_non_physical");
  public static final Key<Boolean> OUT_OF_CODE_BLOCK= Key.create("out_of_code_block");

  private ElementToWorkOn(PsiLocalVariable localVariable, PsiExpression expr) {
    myLocalVariable = localVariable;
    myExpression = expr;
  }

  /**
   * Derives the (local variable, expression) pair to work on: takes the local variable from
   * the anchor or from a resolved reference expression, and widens an array initializer to
   * its enclosing new-expression.
   */
  public static ElementToWorkOn adjustElements(PsiExpression expr, PsiElement anchorElement) {
    PsiLocalVariable localVariable = null;
    if (anchorElement instanceof PsiLocalVariable) {
      localVariable = (PsiLocalVariable)anchorElement;
    }
    else if (expr instanceof PsiReferenceExpression) {
      PsiElement ref = ((PsiReferenceExpression)expr).resolve();
      if (ref instanceof PsiLocalVariable) {
        localVariable = (PsiLocalVariable)ref;
      }
    }
    else if (expr instanceof PsiArrayInitializerExpression && expr.getParent() instanceof PsiNewExpression) {
      // An array initializer is only meaningful together with its new-expression.
      expr = (PsiExpression)expr.getParent();
    }
    return new ElementToWorkOn(localVariable, expr);
  }

  public PsiExpression getExpression() {
    return myExpression;
  }

  public PsiLocalVariable getLocalVariable() {
    return myLocalVariable;
  }

  /** True when the refactoring was invoked on a declaration rather than an expression. */
  public boolean isInvokedOnDeclaration() {
    return myExpression == null;
  }

  /**
   * Determines which element (expression and/or local variable) the refactoring should work
   * on, based on caret position or selection — possibly showing an expression chooser — and
   * feeds the result to {@code processor}.
   */
  public static void processElementToWorkOn(final Editor editor, final PsiFile file, final String refactoringName, final String helpId, final Project project, final ElementsProcessor<ElementToWorkOn> processor) {
    PsiLocalVariable localVar = null;
    PsiExpression expr = null;

    if (!editor.getSelectionModel().hasSelection()) {
      // No selection: try to resolve the element under the caret first.
      PsiElement element = TargetElementUtil.findTargetElement(editor, TargetElementUtil.ELEMENT_NAME_ACCEPTED | TargetElementUtil.REFERENCED_ELEMENT_ACCEPTED | TargetElementUtil.LOOKUP_ITEM_ACCEPTED);
      if (element instanceof PsiLocalVariable) {
        localVar = (PsiLocalVariable) element;
        // Caret on a reference to the variable: remember the reference expression too.
        PsiElement elementAt = file.findElementAt(editor.getCaretModel().getOffset());
        if (elementAt instanceof PsiIdentifier && elementAt.getParent() instanceof PsiReferenceExpression) {
          expr = (PsiExpression) elementAt.getParent();
        }
        else {
          final PsiReference reference = TargetElementUtil.findReference(editor);
          if
(reference != null) {
            final PsiElement refElement = reference.getElement();
            if (refElement instanceof PsiReferenceExpression) {
              expr = (PsiReferenceExpression)refElement;
            }
          }
        }
      }
      else {
        // Caret is not on a variable: collect candidate expressions around the caret.
        final PsiLocalVariable variable = PsiTreeUtil.getParentOfType(file.findElementAt(editor.getCaretModel().getOffset()), PsiLocalVariable.class);

        final int offset = editor.getCaretModel().getOffset();
        final PsiElement[] statementsInRange = IntroduceVariableBase.findStatementsAtOffset(editor, file, offset);
        if (statementsInRange.length == 1 && IntroduceVariableBase.selectLineAtCaret(offset, statementsInRange)) {
          // Try treating the whole caret line as the target; undo the selection if the
          // processor rejects the resulting element.
          editor.getSelectionModel().selectLineAtCaret();
          final ElementToWorkOn elementToWorkOn = getElementToWorkOn(editor, file, refactoringName, helpId, project, localVar, expr);
          if (elementToWorkOn == null || elementToWorkOn.getLocalVariable() == null && elementToWorkOn.getExpression() == null || !processor.accept(elementToWorkOn)) {
            editor.getSelectionModel().removeSelection();
          }
        }

        if (!editor.getSelectionModel().hasSelection()){
          // Offer the expressions under the caret, filtered by the processor.
          final List<PsiExpression> expressions = IntroduceVariableBase.collectExpressions(file, editor, offset);
          for (Iterator<PsiExpression> iterator = expressions.iterator(); iterator.hasNext(); ) {
            PsiExpression expression = iterator.next();
            if (!processor.accept(new ElementToWorkOn(null, expression))) {
              iterator.remove();
            }
          }
          if (expressions.isEmpty()) {
            editor.getSelectionModel().selectLineAtCaret();
          }
          else if (!IntroduceVariableBase.isChooserNeeded(expressions)) {
            expr = expressions.get(0);
          }
          else {
            // Multiple candidates: show the chooser popup and continue asynchronously.
            final int selection = IntroduceVariableBase.preferredSelection(statementsInRange, expressions);
            IntroduceTargetChooser.showChooser(editor, expressions, new Pass<PsiExpression>() {
              @Override
              public void pass(final PsiExpression selectedValue) {
                PsiLocalVariable var = null; //replace var if selected expression == var initializer
                if (variable != null && variable.getInitializer() == selectedValue) {
                  var = variable;
                }
                processor.pass(getElementToWorkOn(editor, file, refactoringName, helpId, project, var, selectedValue));
              }
            }, new PsiExpressionTrimRenderer.RenderFunction(), "Expressions", selection, ScopeHighlighter.NATURAL_RANGER);
            return;
          }
        }
      }
    }
    processor.pass(getElementToWorkOn(editor, file, refactoringName, helpId, project, localVar, expr));
  }

  /**
   * Resolves the final element to work on. When neither a variable nor an expression is
   * known yet, tries (in order): the expression covering the selection, an identifier of a
   * local variable, a single expression/declaration statement in the selection, and finally
   * the "selected expression" heuristic. Shows an error hint and returns null when the
   * chosen expression is not suitable for the refactoring.
   */
  private static ElementToWorkOn getElementToWorkOn(final Editor editor, final PsiFile file, final String refactoringName, final String helpId, final Project project, PsiLocalVariable localVar, PsiExpression expr) {
    int startOffset = 0;
    int endOffset = 0;
    if (localVar == null && expr == null) {
      startOffset = editor.getSelectionModel().getSelectionStart();
      endOffset = editor.getSelectionModel().getSelectionEnd();
      expr = CodeInsightUtil.findExpressionInRange(file, startOffset, endOffset);
      if (expr == null) {
        PsiIdentifier ident = CodeInsightUtil.findElementInRange(file, startOffset, endOffset, PsiIdentifier.class);
        if (ident != null) {
          localVar = PsiTreeUtil.getParentOfType(ident, PsiLocalVariable.class);
        }
      }
      else if (expr instanceof PsiArrayInitializerExpression && expr.getParent() instanceof PsiNewExpression) {
        // Same widening as adjustElements(): operate on the enclosing new-expression.
        expr = (PsiExpression)expr.getParent();
      }
    }

    if (expr == null && localVar == null) {
      PsiElement[] statements = CodeInsightUtil.findStatementsInRange(file, startOffset, endOffset);
      if (statements.length == 1 && statements[0] instanceof PsiExpressionStatement) {
        expr = ((PsiExpressionStatement)statements[0]).getExpression();
      }
      else if (statements.length == 1 && statements[0] instanceof PsiDeclarationStatement) {
        PsiDeclarationStatement decl = (PsiDeclarationStatement)statements[0];
        PsiElement[] declaredElements = decl.getDeclaredElements();
        if (declaredElements.length == 1 && declaredElements[0] instanceof PsiLocalVariable) {
          localVar = (PsiLocalVariable)declaredElements[0];
        }
      }
    }
    if (localVar == null && expr == null) {
      expr = IntroduceVariableBase.getSelectedExpression(project, file, startOffset, endOffset);
    }
    if (localVar == null && expr != null) {
      final String errorMessage =
IntroduceVariableBase.getErrorMessage(expr);
      if (errorMessage != null) {
        CommonRefactoringUtil.showErrorHint(project, editor, errorMessage, refactoringName, helpId);
        return null;
      }
    }
    return new ElementToWorkOn(localVar, expr);
  }

  /** Shows the standard "caret must be on a local variable or expression" error hint. */
  public static void showNothingSelectedErrorMessage(final Editor editor, final String refactoringName, final String helpId, final Project project) {
    String message = RefactoringBundle.getCannotRefactorMessage(RefactoringBundle.message("error.wrong.caret.position.local.or.expression.name"));
    CommonRefactoringUtil.showErrorHint(project, editor, message, refactoringName, helpId);
  }

  /**
   * Callback used by {@link #processElementToWorkOn}: {@code accept} filters candidate
   * elements, {@code pass} receives the final choice.
   */
  public interface ElementsProcessor<T> {
    boolean accept(ElementToWorkOn el);
    void pass(T t);
  }
}
/* * Copyright 2017 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jbpm.runtime.manager.impl; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import org.drools.core.impl.EnvironmentFactory; import org.jbpm.process.audit.AbstractAuditLogger; import org.jbpm.process.audit.AuditLoggerFactory; import org.jbpm.process.audit.event.AuditEventBuilder; import org.jbpm.runtime.manager.impl.jpa.EntityManagerFactoryManager; import org.jbpm.services.task.audit.JPATaskLifeCycleEventListener; import org.jbpm.services.task.wih.LocalHTWorkItemHandler; import org.kie.api.event.process.ProcessEventListener; import org.kie.api.event.rule.AgendaEventListener; import org.kie.api.event.rule.RuleRuntimeEventListener; import org.kie.api.runtime.Environment; import org.kie.api.runtime.EnvironmentName; import org.kie.api.runtime.manager.RuntimeEngine; import org.kie.api.runtime.manager.RuntimeManager; import org.kie.api.runtime.process.WorkItemHandler; import org.kie.api.task.TaskLifeCycleEventListener; import org.kie.internal.runtime.Cacheable; import org.kie.internal.runtime.Closeable; import org.kie.internal.runtime.conf.AuditMode; import org.kie.internal.runtime.conf.DeploymentDescriptor; import org.kie.internal.runtime.conf.NamedObjectModel; import org.kie.internal.runtime.conf.ObjectModel; import 
org.kie.internal.runtime.conf.ObjectModelResolver; import org.kie.internal.runtime.conf.ObjectModelResolverProvider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Default implementation of the <code>RegisterableItemsFactory</code> responsible for providing * a common set of WorkItemHandlers and EventListeners. This factory should not be used in CDI container. * <br/> * It will deliver fully configured instances of the following: * <ul> * <li>a WorkItemHandler for "Human Task" that is configured with local task service</li> * <li>a JPA audit logger - for history logging</li> * <li>a event listener to trigger rules automatically without a need of invoking fireAllRules</li> * </ul> * Moreover, it will invoke its super methods to define the rest of the registerable items, that might override defaults * when they are added to the resulting map at the end. * * @see InjectableRegisterableItemsFactory */ public class DefaultRegisterableItemsFactory extends SimpleRegisterableItemsFactory { private static final Logger logger = LoggerFactory.getLogger(DefaultRegisterableItemsFactory.class); private AuditEventBuilder auditBuilder = new ManagedAuditEventBuilderImpl(); @Override public Map<String, WorkItemHandler> getWorkItemHandlers(RuntimeEngine runtime) { Map<String, WorkItemHandler> defaultHandlers = new HashMap<String, WorkItemHandler>(); //HT handler WorkItemHandler handler = getHTWorkItemHandler(runtime); if (handler != null) { defaultHandlers.put("Human Task", handler); } // add any custom registered defaultHandlers.putAll(super.getWorkItemHandlers(runtime)); // add handlers from descriptor defaultHandlers.putAll(getWorkItemHandlersFromDescriptor(runtime)); return defaultHandlers; } @Override public List<ProcessEventListener> getProcessEventListeners(RuntimeEngine runtime) { List<ProcessEventListener> defaultListeners = new ArrayList<ProcessEventListener>(); DeploymentDescriptor descriptor = getRuntimeManager().getDeploymentDescriptor(); if (descriptor 
== null) { // register JPAWorkingMemoryDBLogger AbstractAuditLogger logger = AuditLoggerFactory.newJPAInstance(runtime.getKieSession().getEnvironment()); logger.setBuilder(getAuditBuilder(runtime)); defaultListeners.add(logger); } else if (descriptor.getAuditMode() == AuditMode.JPA) { // register JPAWorkingMemoryDBLogger AbstractAuditLogger logger = null; if (descriptor.getPersistenceUnit().equals(descriptor.getAuditPersistenceUnit())) { logger = AuditLoggerFactory.newJPAInstance(runtime.getKieSession().getEnvironment()); } else { Environment env = EnvironmentFactory.newEnvironment(); env.set(EnvironmentName.ENTITY_MANAGER_FACTORY, EntityManagerFactoryManager.get().getOrCreate(descriptor.getAuditPersistenceUnit())); logger = AuditLoggerFactory.newJPAInstance(env); } logger.setBuilder(getAuditBuilder(runtime)); defaultListeners.add(logger); } else if (descriptor.getAuditMode() == AuditMode.JMS) { try { Properties properties = new Properties(); InputStream input = getRuntimeManager().getEnvironment().getClassLoader().getResourceAsStream("/jbpm.audit.jms.properties"); properties.load(input); @SuppressWarnings({ "unchecked", "rawtypes" }) AbstractAuditLogger logger = AuditLoggerFactory.newJMSInstance((Map)properties); logger.setBuilder(getAuditBuilder(runtime)); defaultListeners.add(logger); } catch (IOException e) { logger.error("Unable to load jms audit properties from {}", "/jbpm.audit.jms.properties", e); } } // add any custom listeners defaultListeners.addAll(super.getProcessEventListeners(runtime)); // add listeners from descriptor defaultListeners.addAll(getEventListenerFromDescriptor(runtime, ProcessEventListener.class)); return defaultListeners; } @Override public List<AgendaEventListener> getAgendaEventListeners(RuntimeEngine runtime) { List<AgendaEventListener> defaultListeners = new ArrayList<AgendaEventListener>(); // add any custom listeners defaultListeners.addAll(super.getAgendaEventListeners(runtime)); // add listeners from descriptor 
defaultListeners.addAll(getEventListenerFromDescriptor(runtime, AgendaEventListener.class)); return defaultListeners; } @Override public List<RuleRuntimeEventListener> getRuleRuntimeEventListeners(RuntimeEngine runtime) { List<RuleRuntimeEventListener> defaultListeners = new ArrayList<RuleRuntimeEventListener>(); // add any custom listeners defaultListeners.addAll(super.getRuleRuntimeEventListeners(runtime)); // add listeners from descriptor defaultListeners.addAll(getEventListenerFromDescriptor(runtime, RuleRuntimeEventListener.class)); return defaultListeners; } @Override public List<TaskLifeCycleEventListener> getTaskListeners() { List<TaskLifeCycleEventListener> defaultListeners = new ArrayList<TaskLifeCycleEventListener>(); defaultListeners.add(new JPATaskLifeCycleEventListener(true)); // add any custom listeners defaultListeners.addAll(super.getTaskListeners()); // add listeners from deployment descriptor defaultListeners.addAll(getTaskListenersFromDescriptor()); return defaultListeners; } @Override public Map<String, Object> getGlobals(RuntimeEngine runtime) { Map<String, Object> defaultGlobals = new HashMap<String, Object>(); defaultGlobals.putAll(super.getGlobals(runtime)); // add globals from descriptor defaultGlobals.putAll(getGlobalsFromDescriptor(runtime)); return defaultGlobals; } protected WorkItemHandler getHTWorkItemHandler(RuntimeEngine runtime) { LocalHTWorkItemHandler humanTaskHandler = new LocalHTWorkItemHandler(); humanTaskHandler.setRuntimeManager(((RuntimeEngineImpl)runtime).getManager()); return humanTaskHandler; } public AuditEventBuilder getAuditBuilder() { return auditBuilder; } public AuditEventBuilder getAuditBuilder(RuntimeEngine engine) { if (this.auditBuilder != null && this.auditBuilder instanceof ManagedAuditEventBuilderImpl) { String identifier = ((RuntimeEngineImpl)engine).getManager().getIdentifier(); ((ManagedAuditEventBuilderImpl) this.auditBuilder).setOwnerId(identifier); } return this.auditBuilder; } public void 
setAuditBuilder(AuditEventBuilder auditBuilder) { this.auditBuilder = auditBuilder; } protected Object getInstanceFromModel(ObjectModel model, ClassLoader classloader, Map<String, Object> contaxtParams) { ObjectModelResolver resolver = ObjectModelResolverProvider.get(model.getResolver()); if (resolver == null) { throw new IllegalStateException("Unable to find ObjectModelResolver for " + model.getResolver()); } return resolver.getInstance(model, classloader, contaxtParams); } protected Map<String, Object> getParametersMap(RuntimeEngine runtime) { RuntimeManager manager = ((RuntimeEngineImpl)runtime).getManager(); Map<String, Object> parameters = new HashMap<String, Object>(); parameters.put("ksession", runtime.getKieSession()); try { parameters.put("taskService", runtime.getTaskService()); } catch (UnsupportedOperationException e) { // in case task service was not configured } parameters.put("runtimeManager", manager); parameters.put("classLoader", getRuntimeManager().getEnvironment().getClassLoader()); parameters.put("entityManagerFactory", runtime.getKieSession().getEnvironment().get(EnvironmentName.ENTITY_MANAGER_FACTORY)); parameters.put("kieContainer", getRuntimeManager().getKieContainer()); return parameters; } protected List<TaskLifeCycleEventListener> getTaskListenersFromDescriptor() { List<TaskLifeCycleEventListener> defaultListeners = new ArrayList<TaskLifeCycleEventListener>(); DeploymentDescriptor descriptor = getRuntimeManager().getDeploymentDescriptor(); if (descriptor != null) { Map<String, Object> params = new HashMap<String, Object>(); params.put("runtimeManager", getRuntimeManager()); params.put("classLoader", getRuntimeManager().getEnvironment().getClassLoader()); params.put("kieContainer", getRuntimeManager().getKieContainer()); for (ObjectModel model : descriptor.getTaskEventListeners()) { Object taskListener = getInstanceFromModel(model, getRuntimeManager().getEnvironment().getClassLoader(), params); if (taskListener != null) { 
defaultListeners.add((TaskLifeCycleEventListener) taskListener);
                }
            }
        }
        return defaultListeners;
    }

    /**
     * Instantiates work item handlers declared in the deployment descriptor, keyed by name.
     */
    protected Map<String, WorkItemHandler> getWorkItemHandlersFromDescriptor(RuntimeEngine runtime) {
        Map<String, WorkItemHandler> defaultHandlers = new HashMap<String, WorkItemHandler>();
        DeploymentDescriptor descriptor = getRuntimeManager().getDeploymentDescriptor();
        if (descriptor != null) {
            Map<String, Object> params = getParametersMap(runtime);
            for (NamedObjectModel model : descriptor.getWorkItemHandlers()) {
                Object hInstance = getInstanceFromModel(model, getRuntimeManager().getEnvironment().getClassLoader(), params);
                if (hInstance != null) {
                    defaultHandlers.put(model.getName(), (WorkItemHandler) hInstance);
                }
            }
        }
        return defaultHandlers;
    }

    /**
     * Instantiates descriptor-declared event listeners assignable to {@code type}. Instances
     * of other listener types are closed again immediately, unless they are cacheable.
     */
    @SuppressWarnings("unchecked")
    protected <T> List<T> getEventListenerFromDescriptor(RuntimeEngine runtime, Class<T> type) {
        List<T> listeners = new ArrayList<T>();
        DeploymentDescriptor descriptor = getRuntimeManager().getDeploymentDescriptor();
        if (descriptor != null) {
            Map<String, Object> params = getParametersMap(runtime);
            for (ObjectModel model : descriptor.getEventListeners()) {
                Object listenerInstance = getInstanceFromModel(model, getRuntimeManager().getEnvironment().getClassLoader(), params);
                if (listenerInstance != null && type.isAssignableFrom(listenerInstance.getClass())) {
                    listeners.add((T) listenerInstance);
                } else {
                    // close/cleanup instance as it is not going to be used at the moment, except these that are cacheable
                    if (listenerInstance instanceof Closeable && !(listenerInstance instanceof Cacheable)) {
                        ((Closeable) listenerInstance).close();
                    }
                }
            }
        }
        return listeners;
    }

    /**
     * Instantiates descriptor-declared globals, keyed by name.
     */
    protected Map<String, Object> getGlobalsFromDescriptor(RuntimeEngine runtime) {
        Map<String, Object> globals = new HashMap<String, Object>();
        DeploymentDescriptor descriptor = getRuntimeManager().getDeploymentDescriptor();
        if (descriptor != null) {
            Map<String, Object> params = getParametersMap(runtime);
            for (NamedObjectModel model : descriptor.getGlobals()) {
                Object gInstance = getInstanceFromModel(model, getRuntimeManager().getEnvironment().getClassLoader(), params);
                if (gInstance != null) {
                    globals.put(model.getName(), gInstance);
                }
            }
        }
        return globals;
    }
}
/**
 * Copyright 2018 Nikita Koksharov
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.redisson.command;

import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

import org.redisson.RedissonReference;
import org.redisson.RedissonShutdownException;
import org.redisson.ScanResult;
import org.redisson.SlotCallback;
import org.redisson.api.RFuture;
import org.redisson.api.RedissonClient;
import org.redisson.api.RedissonReactiveClient;
import org.redisson.client.RedisAskException;
import org.redisson.client.RedisClient;
import org.redisson.client.RedisConnection;
import org.redisson.client.RedisException;
import org.redisson.client.RedisLoadingException;
import org.redisson.client.RedisMovedException;
import org.redisson.client.RedisRedirectException;
import org.redisson.client.RedisTimeoutException;
import org.redisson.client.RedisTryAgainException;
import org.redisson.client.WriteRedisConnectionException;
import org.redisson.client.codec.Codec;
import org.redisson.client.protocol.CommandData;
import org.redisson.client.protocol.CommandsData;
import org.redisson.client.protocol.RedisCommand;
import org.redisson.client.protocol.RedisCommands;
import org.redisson.client.protocol.ScoredEntry;
import org.redisson.client.protocol.decoder.ListScanResult;
import org.redisson.client.protocol.decoder.MapScanResult;
import org.redisson.codec.ReferenceCodecProvider;
import org.redisson.config.Config;
import org.redisson.config.MasterSlaveServersConfig;
import org.redisson.connection.ConnectionManager;
import org.redisson.connection.MasterSlaveEntry;
import org.redisson.connection.NodeSource;
import org.redisson.connection.NodeSource.Redirect;
import org.redisson.misc.LogHelper;
import org.redisson.misc.RPromise;
import org.redisson.misc.RedissonObjectFactory;
import org.redisson.misc.RedissonPromise;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.util.ReferenceCountUtil;
import io.netty.util.Timeout;
import io.netty.util.TimerTask;
import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.FutureListener;

/**
 * Central asynchronous command executor. Routes each Redis command to a node
 * (by slot, entry, or explicit client), acquires a connection through the
 * {@link ConnectionManager}, writes the command, and drives retry, timeout,
 * cluster redirection (MOVED/ASK), and blocking-command handling through
 * Netty timers and promises. Per-invocation state lives in a pooled
 * {@code AsyncDetails} object.
 *
 * @author Nikita Koksharov
 *
 */
public class CommandAsyncService implements CommandAsyncExecutor {

    static final Logger log = LoggerFactory.getLogger(CommandAsyncService.class);

    final ConnectionManager connectionManager;
    // Exactly one of these two is non-null once reference support is enabled;
    // enabling one clears the other (see the enableRedissonReferenceSupport overloads).
    protected RedissonClient redisson;
    protected RedissonReactiveClient redissonReactive;

    public CommandAsyncService(ConnectionManager connectionManager) {
        this.connectionManager = connectionManager;
    }

    @Override
    public ConnectionManager getConnectionManager() {
        return connectionManager;
    }

    /**
     * Enables RedissonReference resolution against the given (synchronous) client.
     * A {@code null} argument is a no-op; otherwise the reactive client is cleared.
     */
    @Override
    public CommandAsyncExecutor enableRedissonReferenceSupport(RedissonClient redisson) {
        if (redisson != null) {
            this.redisson = redisson;
            enableRedissonReferenceSupport(redisson.getConfig());
            this.redissonReactive = null;
        }
        return this;
    }

    /**
     * Enables RedissonReference resolution against the given reactive client.
     * A {@code null} argument is a no-op; otherwise the synchronous client is cleared.
     */
    @Override
    public CommandAsyncExecutor enableRedissonReferenceSupport(RedissonReactiveClient redissonReactive) {
        if (redissonReactive != null) {
            this.redissonReactive = redissonReactive;
            enableRedissonReferenceSupport(redissonReactive.getConfig());
            this.redisson = null;
        }
        return this;
    }

    // Registers the configured codec with the reference codec provider so
    // referenced objects can be (de)serialized with the same codec instance.
    private void enableRedissonReferenceSupport(Config config) {
        Codec codec = config.getCodec();
        ReferenceCodecProvider codecProvider = config.getReferenceCodecProvider();
        codecProvider.registerCodec((Class<Codec>) codec.getClass(), codec);
    }

    @Override
    public boolean isRedissonReferenceSupportEnabled() {
        return redisson != null || redissonReactive != null;
    }

    /**
     * Blocks until the subscription future completes, bounded by
     * timeout + retryInterval * retryAttempts, then rethrows any failure.
     * The interrupt flag is restored if the wait is interrupted.
     */
    @Override
    public void syncSubscription(RFuture<?> future) {
        MasterSlaveServersConfig config = connectionManager.getConfig();
        try {
            int timeout = config.getTimeout() + config.getRetryInterval() * config.getRetryAttempts();
            if (!future.await(timeout)) {
                throw new RedisTimeoutException("Subscribe timeout: (" + timeout + "ms)");
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        future.syncUninterruptibly();
    }

    /**
     * Blocks until the future completes and returns its value, or throws the
     * failure converted to a {@link RedisException}. Waits on a latch signalled
     * by a listener rather than {@code awaitUninterruptibly} (see note below);
     * the interrupt flag is restored before returning.
     */
    @Override
    public <V> V get(RFuture<V> future) {
        if (!future.isDone()) {
            final CountDownLatch l = new CountDownLatch(1);
            future.addListener(new FutureListener<V>() {
                @Override
                public void operationComplete(Future<V> future) throws Exception {
                    l.countDown();
                }
            });

            boolean interrupted = false;
            // Loop guards against spurious wakeup relative to future completion.
            while (!future.isDone()) {
                try {
                    l.await();
                } catch (InterruptedException e) {
                    interrupted = true;
                    break;
                }
            }

            if (interrupted) {
                Thread.currentThread().interrupt();
            }
        }

        // commented out due to blocking issues up to 200 ms per minute for each thread
        // future.awaitUninterruptibly();
        if (future.isSuccess()) {
            return future.getNow();
        }

        throw convertException(future);
    }

    /**
     * Waits up to the given timeout for the future to complete.
     *
     * @return {@code true} if the future completed within the timeout
     * @throws InterruptedException if the calling thread is interrupted
     */
    @Override
    public boolean await(RFuture<?> future, long timeout, TimeUnit timeoutUnit) throws InterruptedException {
        final CountDownLatch l = new CountDownLatch(1);
        future.addListener(new FutureListener<Object>() {
            @Override
            public void operationComplete(Future<Object> future) throws Exception {
                l.countDown();
            }
        });
        return l.await(timeout, timeoutUnit);
    }

    // Extension point: subclasses may supply a different promise implementation.
    protected <R> RPromise<R> createPromise() {
        return new RedissonPromise<R>();
    }

    /** Read from a specific client on a known master/slave entry. */
    @Override
    public <T, R> RFuture<R> readAsync(RedisClient client, MasterSlaveEntry entry, Codec codec, RedisCommand<T> command, Object... params) {
        RPromise<R> mainPromise = createPromise();
        async(true, new NodeSource(entry, client), codec, command, params, mainPromise, 0, false, null);
        return mainPromise;
    }

    /** Read from a specific client; the slot is derived from the key name. */
    @Override
    public <T, R> RFuture<R> readAsync(RedisClient client, String name, Codec codec, RedisCommand<T> command, Object... params) {
        RPromise<R> mainPromise = createPromise();
        int slot = connectionManager.calcSlot(name);
        async(true, new NodeSource(slot, client), codec, command, params, mainPromise, 0, false, null);
        return mainPromise;
    }

    /** Read from a specific client; the slot is derived from the binary key. */
    public <T, R> RFuture<R> readAsync(RedisClient client, byte[] key, Codec codec, RedisCommand<T> command, Object... params) {
        RPromise<R> mainPromise = createPromise();
        int slot = connectionManager.calcSlot(key);
        async(true, new NodeSource(slot, client), codec, command, params, mainPromise, 0, false, null);
        return mainPromise;
    }

    /** Read from a specific client with no slot/entry routing information. */
    @Override
    public <T, R> RFuture<R> readAsync(RedisClient client, Codec codec, RedisCommand<T> command, Object... params) {
        RPromise<R> mainPromise = createPromise();
        async(true, new NodeSource(client), codec, command, params, mainPromise, 0, false, null);
        return mainPromise;
    }

    /** Read from every entry, collecting all results into a fresh list. */
    public <T, R> RFuture<Collection<R>> readAllAsync(RedisCommand<T> command, Object... params) {
        List<R> results = new ArrayList<R>();
        return readAllAsync(results, command, params);
    }

    /**
     * Executes a read on every master/slave entry and accumulates the results
     * into {@code results} (collection results are flattened). The returned
     * future completes once all per-node futures have completed; the first
     * non-redirect failure fails the whole operation.
     */
    @Override
    public <T, R> RFuture<Collection<R>> readAllAsync(final Collection<R> results, RedisCommand<T> command, Object... params) {
        final RPromise<Collection<R>> mainPromise = createPromise();
        final Collection<MasterSlaveEntry> nodes = connectionManager.getEntrySet();
        final AtomicInteger counter = new AtomicInteger(nodes.size());
        FutureListener<Object> listener = new FutureListener<Object>() {
            @Override
            public void operationComplete(Future<Object> future) throws Exception {
                // Redirect exceptions are tolerated here; any other failure aborts.
                if (!future.isSuccess() && !(future.cause() instanceof RedisRedirectException)) {
                    mainPromise.tryFailure(future.cause());
                    return;
                }

                Object result = future.getNow();
                if (result instanceof Collection) {
                    // synchronized: listeners for different nodes may fire concurrently
                    synchronized (results) {
                        results.addAll((Collection) result);
                    }
                } else {
                    synchronized (results) {
                        results.add((R) result);
                    }
                }

                if (counter.decrementAndGet() == 0 && !mainPromise.isDone()) {
                    mainPromise.trySuccess(results);
                }
            }
        };

        for (MasterSlaveEntry entry : nodes) {
            RPromise<R> promise = new RedissonPromise<R>();
            promise.addListener(listener);
            async(true, new NodeSource(entry), connectionManager.getCodec(), command, params, promise, 0, true, null);
        }
        return mainPromise;
    }

    /** Reads from entries in random order until one returns a non-null value. */
    @Override
    public <T, R> RFuture<R> readRandomAsync(Codec codec, RedisCommand<T> command, Object... params) {
        RPromise<R> mainPromise = createPromise();
        List<MasterSlaveEntry> nodes = new ArrayList<MasterSlaveEntry>(connectionManager.getEntrySet());
        Collections.shuffle(nodes);
        retryReadRandomAsync(codec, command, mainPromise, nodes, params);
        return mainPromise;
    }

    /** Same as above but restricted to a single entry. */
    @Override
    public <T, R> RFuture<R> readRandomAsync(MasterSlaveEntry entry, Codec codec, RedisCommand<T> command, Object... params) {
        RPromise<R> mainPromise = createPromise();
        retryReadRandomAsync(codec, command, mainPromise, Collections.singletonList(entry), params);
        return mainPromise;
    }

    /**
     * Tries one node (removed from the head of {@code nodes}); on a null result
     * recurses onto the remaining nodes, succeeding with null only when the
     * list is exhausted. NOTE(review): the passed list is mutated via remove(0),
     * so callers must supply a mutable list they own.
     */
    private <R, T> void retryReadRandomAsync(final Codec codec, final RedisCommand<T> command, final RPromise<R> mainPromise, final List<MasterSlaveEntry> nodes, final Object... params) {
        final RPromise<R> attemptPromise = new RedissonPromise<R>();
        attemptPromise.addListener(new FutureListener<R>() {
            @Override
            public void operationComplete(Future<R> future) throws Exception {
                if (future.isSuccess()) {
                    if (future.getNow() == null) {
                        if (nodes.isEmpty()) {
                            mainPromise.trySuccess(null);
                        } else {
                            retryReadRandomAsync(codec, command, mainPromise, nodes, params);
                        }
                    } else {
                        mainPromise.trySuccess(future.getNow());
                    }
                } else {
                    mainPromise.tryFailure(future.cause());
                }
            }
        });

        MasterSlaveEntry entry = nodes.remove(0);
        async(true, new NodeSource(entry), codec, command, params, attemptPromise, 0, false, null);
    }

    /** Writes the command to every entry, discarding per-node results. */
    @Override
    public <T> RFuture<Void> writeAllAsync(RedisCommand<T> command, Object... params) {
        return writeAllAsync(command, null, params);
    }

    @Override
    public <R, T> RFuture<R> writeAllAsync(RedisCommand<T> command, SlotCallback<T, R> callback, Object... params) {
        return allAsync(false, connectionManager.getCodec(), command, callback, params);
    }

    @Override
    public <R, T> RFuture<R> writeAllAsync(Codec codec, RedisCommand<T> command, SlotCallback<T, R> callback, Object... params) {
        return allAsync(false, codec, command, callback, params);
    }

    @Override
    public <R, T> RFuture<R> readAllAsync(RedisCommand<T> command, SlotCallback<T, R> callback, Object... params) {
        return allAsync(true, connectionManager.getCodec(), command, callback, params);
    }

    /**
     * Runs the command on every entry. Each per-slot result is fed to the
     * optional {@code callback}; when the last node completes, the future
     * resolves with {@code callback.onFinish()} (or null without a callback).
     */
    private <T, R> RFuture<R> allAsync(boolean readOnlyMode, Codec codec, final RedisCommand<T> command, final SlotCallback<T, R> callback, Object... params) {
        final RPromise<R> mainPromise = new RedissonPromise<R>();
        final Collection<MasterSlaveEntry> nodes = connectionManager.getEntrySet();
        final AtomicInteger counter = new AtomicInteger(nodes.size());
        FutureListener<T> listener = new FutureListener<T>() {
            @Override
            public void operationComplete(Future<T> future) throws Exception {
                if (!future.isSuccess() && !(future.cause() instanceof RedisRedirectException)) {
                    mainPromise.tryFailure(future.cause());
                    return;
                }

                T result = future.getNow();
                if (future.cause() instanceof RedisRedirectException) {
                    result = command.getConvertor().convert(result);
                }

                if (callback != null) {
                    callback.onSlotResult(result);
                }
                if (counter.decrementAndGet() == 0) {
                    if (callback != null) {
                        mainPromise.trySuccess(callback.onFinish());
                    } else {
                        mainPromise.trySuccess(null);
                    }
                }
            }
        };

        for (MasterSlaveEntry entry : nodes) {
            RPromise<T> promise = new RedissonPromise<T>();
            promise.addListener(listener);
            async(readOnlyMode, new NodeSource(entry), codec, command, params, promise, 0, true, null);
        }
        return mainPromise;
    }

    /** Wraps a non-Redis failure cause in a generic {@link RedisException}. */
    public <V> RedisException convertException(RFuture<V> future) {
        return future.cause() instanceof RedisException ? (RedisException) future.cause() : new RedisException("Unexpected exception while processing command", future.cause());
    }

    // Resolves the owning entry for a string key via its cluster slot.
    private NodeSource getNodeSource(String key) {
        int slot = connectionManager.calcSlot(key);
        MasterSlaveEntry entry = connectionManager.getEntry(slot);
        return new NodeSource(entry);
    }

    // Resolves the owning entry for a binary key via its cluster slot.
    private NodeSource getNodeSource(byte[] key) {
        int slot = connectionManager.calcSlot(key);
        MasterSlaveEntry entry = connectionManager.getEntry(slot);
        return new NodeSource(entry);
    }

    /** Key-routed read. */
    @Override
    public <T, R> RFuture<R> readAsync(String key, Codec codec, RedisCommand<T> command, Object... params) {
        RPromise<R> mainPromise = createPromise();
        NodeSource source = getNodeSource(key);
        async(true, source, codec, command, params, mainPromise, 0, false, null);
        return mainPromise;
    }

    /** Binary-key-routed read. */
    @Override
    public <T, R> RFuture<R> readAsync(byte[] key, Codec codec, RedisCommand<T> command, Object... params) {
        RPromise<R> mainPromise = createPromise();
        NodeSource source = getNodeSource(key);
        async(true, source, codec, command, params, mainPromise, 0, false, null);
        return mainPromise;
    }

    /** Entry-routed read. */
    public <T, R> RFuture<R> readAsync(MasterSlaveEntry entry, Codec codec, RedisCommand<T> command, Object... params) {
        RPromise<R> mainPromise = createPromise();
        async(true, new NodeSource(entry), codec, command, params, mainPromise, 0, false, null);
        return mainPromise;
    }

    /** Entry-routed write. */
    @Override
    public <T, R> RFuture<R> writeAsync(MasterSlaveEntry entry, Codec codec, RedisCommand<T> command, Object... params) {
        RPromise<R> mainPromise = createPromise();
        async(false, new NodeSource(entry), codec, command, params, mainPromise, 0, false, null);
        return mainPromise;
    }

    /** Key-routed read with the connection manager's default codec. */
    @Override
    public <T, R> RFuture<R> readAsync(String key, RedisCommand<T> command, Object... params) {
        return readAsync(key, connectionManager.getCodec(), command, params);
    }

    /** Key-routed Lua script evaluation (read-only). */
    @Override
    public <T, R> RFuture<R> evalReadAsync(String key, Codec codec, RedisCommand<T> evalCommandType, String script, List<Object> keys, Object... params) {
        NodeSource source = getNodeSource(key);
        return evalAsync(source, true, codec, evalCommandType, script, keys, params);
    }

    /** Entry-routed Lua script evaluation (read-only). */
    @Override
    public <T, R> RFuture<R> evalReadAsync(MasterSlaveEntry entry, Codec codec, RedisCommand<T> evalCommandType, String script, List<Object> keys, Object... params) {
        return evalAsync(new NodeSource(entry), true, codec, evalCommandType, script, keys, params);
    }

    /** Client+slot-routed Lua script evaluation (read-only). */
    @Override
    public <T, R> RFuture<R> evalReadAsync(RedisClient client, String name, Codec codec, RedisCommand<T> evalCommandType, String script, List<Object> keys, Object... params) {
        int slot = connectionManager.calcSlot(name);
        return evalAsync(new NodeSource(slot, client), true, codec, evalCommandType, script, keys, params);
    }

    /** Key-routed Lua script evaluation (write). */
    @Override
    public <T, R> RFuture<R> evalWriteAsync(String key, Codec codec, RedisCommand<T> evalCommandType, String script, List<Object> keys, Object... params) {
        NodeSource source = getNodeSource(key);
        return evalAsync(source, false, codec, evalCommandType, script, keys, params);
    }

    /** Entry-routed Lua script evaluation (write). */
    public <T, R> RFuture<R> evalWriteAsync(MasterSlaveEntry entry, Codec codec, RedisCommand<T> evalCommandType, String script, List<Object> keys, Object... params) {
        return evalAsync(new NodeSource(entry), false, codec, evalCommandType, script, keys, params);
    }

    @Override
    public <T, R> RFuture<R> evalWriteAllAsync(RedisCommand<T> command, SlotCallback<T, R> callback, String script, List<Object> keys, Object... params) {
        return evalAllAsync(false, command, callback, script, keys, params);
    }

    /**
     * Evaluates a script on every entry; per-slot results go through the
     * mandatory {@code callback}, and {@code callback.onFinish()} resolves
     * the returned future once all entries have reported.
     * Args are assembled in EVAL order: script, numkeys, keys..., params...
     */
    public <T, R> RFuture<R> evalAllAsync(boolean readOnlyMode, RedisCommand<T> command, final SlotCallback<T, R> callback, String script, List<Object> keys, Object... params) {
        final RPromise<R> mainPromise = new RedissonPromise<R>();
        final Collection<MasterSlaveEntry> entries = connectionManager.getEntrySet();
        final AtomicInteger counter = new AtomicInteger(entries.size());
        FutureListener<T> listener = new FutureListener<T>() {
            @Override
            public void operationComplete(Future<T> future) throws Exception {
                if (!future.isSuccess() && !(future.cause() instanceof RedisRedirectException)) {
                    mainPromise.tryFailure(future.cause());
                    return;
                }

                callback.onSlotResult(future.getNow());
                if (counter.decrementAndGet() == 0 && !mainPromise.isDone()) {
                    mainPromise.trySuccess(callback.onFinish());
                }
            }
        };

        List<Object> args = new ArrayList<Object>(2 + keys.size() + params.length);
        args.add(script);
        args.add(keys.size());
        args.addAll(keys);
        args.addAll(Arrays.asList(params));
        for (MasterSlaveEntry entry : entries) {
            RPromise<T> promise = new RedissonPromise<T>();
            promise.addListener(listener);
            async(readOnlyMode, new NodeSource(entry), connectionManager.getCodec(), command, args.toArray(), promise, 0, true, null);
        }
        return mainPromise;
    }

    /** Single-node script evaluation; builds the EVAL argument vector and dispatches. */
    private <T, R> RFuture<R> evalAsync(NodeSource nodeSource, boolean readOnlyMode, Codec codec, RedisCommand<T> evalCommandType, String script, List<Object> keys, Object... params) {
        RPromise<R> mainPromise = createPromise();
        List<Object> args = new ArrayList<Object>(2 + keys.size() + params.length);
        args.add(script);
        args.add(keys.size());
        args.addAll(keys);
        args.addAll(Arrays.asList(params));
        async(readOnlyMode, nodeSource, codec, evalCommandType, args.toArray(), mainPromise, 0, false, null);
        return mainPromise;
    }

    /** Key-routed write with the connection manager's default codec. */
    @Override
    public <T, R> RFuture<R> writeAsync(String key, RedisCommand<T> command, Object... params) {
        return writeAsync(key, connectionManager.getCodec(), command, params);
    }

    /** Key-routed write. */
    @Override
    public <T, R> RFuture<R> writeAsync(String key, Codec codec, RedisCommand<T> command, Object... params) {
        RPromise<R> mainPromise = createPromise();
        NodeSource source = getNodeSource(key);
        async(false, source, codec, command, params, mainPromise, 0, false, null);
        return mainPromise;
    }

    /** Binary-key-routed write. */
    public <T, R> RFuture<R> writeAsync(byte[] key, Codec codec, RedisCommand<T> command, Object... params) {
        RPromise<R> mainPromise = createPromise();
        NodeSource source = getNodeSource(key);
        async(false, source, codec, command, params, mainPromise, 0, false, null);
        return mainPromise;
    }

    /**
     * Core dispatch routine: one invocation represents one attempt of one
     * command. Acquires the shutdown latch and a connection, arms a retry
     * timer, wires cancellation from the caller-facing {@code mainPromise}
     * into the connection/attempt futures, and re-enters itself (with an
     * incremented attempt counter) on retry. Completion is funnelled through
     * {@link #checkAttemptFuture} via {@code attemptPromise}.
     *
     * @param readOnlyMode   true to acquire a read connection, false for write
     * @param source         node routing information (slot/entry/client/redirect)
     * @param params         command arguments; released via {@link #free} when
     *                       the command will never be sent
     * @param mainPromise    promise visible to the caller
     * @param attempt        current attempt number, starting at 0
     * @param ignoreRedirect when true, MOVED/ASK causes are surfaced instead of followed
     * @param connFuture     connection future carried across retries (may be null)
     */
    protected <V, R> void async(final boolean readOnlyMode, final NodeSource source, final Codec codec,
            final RedisCommand<V> command, final Object[] params, final RPromise<R> mainPromise,
            final int attempt, final boolean ignoreRedirect, final RFuture<RedisConnection> connFuture) {
        if (mainPromise.isCancelled()) {
            free(params);
            return;
        }

        // The shutdown latch blocks new commands once Redisson begins shutdown.
        if (!connectionManager.getShutdownLatch().acquire()) {
            free(params);
            mainPromise.tryFailure(new RedissonShutdownException("Redisson is shutdown"));
            return;
        }

        final AsyncDetails<V, R> details = AsyncDetails.acquire();
        final RFuture<RedisConnection> connectionFuture = getConnection(readOnlyMode, source, command);

        final RPromise<R> attemptPromise = new RedissonPromise<R>();
        details.init(connectionFuture, attemptPromise,
                readOnlyMode, source, codec, command, params, mainPromise, attempt);

        // Propagates caller-side cancellation into the in-flight connection request.
        FutureListener<R> mainPromiseListener = new FutureListener<R>() {
            @Override
            public void operationComplete(Future<R> future) throws Exception {
                if (future.isCancelled() && connectionFuture.cancel(false)) {
                    log.debug("Connection obtaining canceled for {}", command);
                    details.getTimeout().cancel();
                    if (details.getAttemptPromise().cancel(false)) {
                        free(params);
                    }
                }
            }
        };

        // Fires every retryInterval ms until the attempt settles; decides whether
        // to fail (attempts exhausted), wait another interval, or retry the command.
        final TimerTask retryTimerTask = new TimerTask() {

            @Override
            public void run(Timeout t) throws Exception {
                if (details.getAttemptPromise().isDone()) {
                    return;
                }

                if (details.getConnectionFuture().cancel(false)) {
                    // Connection was never obtained within the interval.
                    if (details.getException() == null) {
                        details.setException(new RedisTimeoutException("Unable to get connection! " + "Node source: " + source
                                + ", command: " + command + ", command params: " + LogHelper.toString(details.getParams())
                                + " after " + details.getAttempt() + " retry attempts"));
                    }
                    connectionManager.getShutdownLatch().release();
                } else {
                    if (details.getConnectionFuture().isSuccess()) {
                        if (details.getWriteFuture() == null || !details.getWriteFuture().isDone()) {
                            // Write still pending: either give up (last attempt) or re-arm the timer.
                            if (details.getAttempt() == connectionManager.getConfig().getRetryAttempts()) {
                                if (details.getWriteFuture() != null && details.getWriteFuture().cancel(false)) {
                                    if (details.getException() == null) {
                                        details.setException(new RedisTimeoutException("Unable to send command! "
                                                + "Node source: " + source + ", connection: " + details.getConnectionFuture().getNow()
                                                + ", command: " + command + ", command params: " + LogHelper.toString(details.getParams())
                                                + " after " + connectionManager.getConfig().getRetryAttempts() + " retry attempts"));
                                    }
                                    details.getAttemptPromise().tryFailure(details.getException());
                                }
                                return;
                            }
                            details.incAttempt();
                            Timeout timeout = connectionManager.newTimeout(this, connectionManager.getConfig().getRetryInterval(), TimeUnit.MILLISECONDS);
                            details.setTimeout(timeout);
                            return;
                        }

                        // Command was written successfully; response timeout is handled elsewhere.
                        if (details.getWriteFuture().isDone() && details.getWriteFuture().isSuccess()) {
                            return;
                        }
                    }
                }

                if (details.getMainPromise().isCancelled()) {
                    if (details.getAttemptPromise().cancel(false)) {
                        free(details.getParams());
                        AsyncDetails.release(details);
                    }
                    return;
                }

                if (details.getAttempt() == connectionManager.getConfig().getRetryAttempts()) {
                    if (details.getException() == null) {
                        details.setException(new RedisTimeoutException("Unable to send command! Node source: " + source
                                + ", command: " + command + ", command params: " + LogHelper.toString(details.getParams())
                                + " after " + connectionManager.getConfig().getRetryAttempts() + " retry attempts"));
                    }
                    details.getAttemptPromise().tryFailure(details.getException());
                    return;
                }
                if (!details.getAttemptPromise().cancel(false)) {
                    return;
                }

                // Retry: re-enter async() with the next attempt number.
                int count = details.getAttempt() + 1;
                if (log.isDebugEnabled()) {
                    log.debug("attempt {} for command {} and params {}",
                            count, details.getCommand(), Arrays.toString(details.getParams()));
                }
                details.removeMainPromiseListener();
                async(details.isReadOnlyMode(), details.getSource(), details.getCodec(), details.getCommand(), details.getParams(), details.getMainPromise(), count, ignoreRedirect, connFuture);
                AsyncDetails.release(details);
            }

        };

        Timeout timeout = connectionManager.newTimeout(retryTimerTask, connectionManager.getConfig().getRetryInterval(), TimeUnit.MILLISECONDS);
        details.setTimeout(timeout);
        details.setupMainPromiseListener(mainPromiseListener);

        connectionFuture.addListener(new FutureListener<RedisConnection>() {
            @Override
            public void operationComplete(Future<RedisConnection> connFuture) throws Exception {
                if (connFuture.isCancelled()) {
                    return;
                }

                if (!connFuture.isSuccess()) {
                    connectionManager.getShutdownLatch().release();
                    details.setException(convertException(connectionFuture));
                    return;
                }

                if (details.getAttemptPromise().isDone() || details.getMainPromise().isDone()) {
                    releaseConnection(source, connectionFuture, details.isReadOnlyMode(), details.getAttemptPromise(), details);
                    return;
                }

                final RedisConnection connection = connFuture.getNow();
                sendCommand(details, connection);

                details.getWriteFuture().addListener(new ChannelFutureListener() {
                    @Override
                    public void operationComplete(ChannelFuture future) throws Exception {
                        checkWriteFuture(details, connection);
                    }
                });

                releaseConnection(source, connectionFuture, details.isReadOnlyMode(), details.getAttemptPromise(), details);
            }
        });

        attemptPromise.addListener(new FutureListener<R>() {
            @Override
            public void operationComplete(Future<R> future) throws Exception {
                checkAttemptFuture(source, details, future, ignoreRedirect);
            }
        });
    }

    // Picks a read or write connection from the connection manager.
    protected <V> RFuture<RedisConnection> getConnection(final boolean readOnlyMode, final NodeSource source,
            final RedisCommand<V> command) {
        final RFuture<RedisConnection> connectionFuture;
        if (readOnlyMode) {
            connectionFuture = connectionManager.connectionReadOp(source, command);
        } else {
            connectionFuture = connectionManager.connectionWriteOp(source, command);
        }
        return connectionFuture;
    }

    // Releases Netty reference-counted payloads for a command that will not be sent.
    protected void free(final Object[] params) {
        for (Object obj : params) {
            ReferenceCountUtil.safeRelease(obj);
        }
    }

    /**
     * Invoked when the channel write of the command completes. On write failure
     * records a {@link WriteRedisConnectionException} (failing the attempt only
     * when retries are exhausted). On success cancels the retry timer and arms
     * the response-timeout timer instead; for blocking commands the timeout is
     * extended by the command's own pop/block timeout.
     */
    private <V, R> void checkWriteFuture(final AsyncDetails<V, R> details, final RedisConnection connection) {
        ChannelFuture future = details.getWriteFuture();
        if (future.isCancelled() || details.getAttemptPromise().isDone()) {
            return;
        }

        if (!future.isSuccess()) {
            details.setException(new WriteRedisConnectionException(
                    "Unable to send command! Node source: " + details.getSource() + ", connection: " + future.channel() +
                    ", command: " + details.getCommand() + ", params: " + LogHelper.toString(details.getParams()), future.cause()));
            if (details.getAttempt() == connectionManager.getConfig().getRetryAttempts()) {
                if (!details.getAttemptPromise().tryFailure(details.getException())) {
                    log.error(details.getException().getMessage());
                }
            }
            return;
        }

        details.getTimeout().cancel();

        long timeoutTime = connectionManager.getConfig().getTimeout();
        if (RedisCommands.BLOCKING_COMMANDS.contains(details.getCommand().getName())
                || RedisCommands.XREAD_BLOCKING_SINGLE == details.getCommand()
                    || RedisCommands.XREAD_BLOCKING == details.getCommand()) {
            Long popTimeout = null;
            if (RedisCommands.XREAD_BLOCKING_SINGLE == details.getCommand()
                    || RedisCommands.XREAD_BLOCKING == details.getCommand()) {
                // XREAD: the BLOCK value (ms) follows the first String param; convert to seconds.
                boolean found = false;
                for (Object param : details.getParams()) {
                    if (found) {
                        popTimeout = Long.valueOf(param.toString()) / 1000;
                        break;
                    }
                    if (param instanceof String) {
                        found = true;
                    }
                }
            } else {
                // BLPOP-style commands carry the timeout (seconds) as the last param.
                popTimeout = Long.valueOf(details.getParams()[details.getParams().length - 1].toString());
            }

            handleBlockingOperations(details, connection, popTimeout);
            if (popTimeout == 0) {
                // Timeout 0 means "block forever": no response-timeout timer.
                return;
            }
            timeoutTime += popTimeout * 1000;
            // add 1 second due to issue https://github.com/antirez/redis/issues/874
            timeoutTime += 1000;
        }

        final long timeoutAmount = timeoutTime;
        TimerTask timeoutTask = new TimerTask() {
            @Override
            public void run(Timeout timeout) throws Exception {
                details.getAttemptPromise().tryFailure(
                        new RedisTimeoutException("Redis server response timeout (" + timeoutAmount + " ms) occured for command: " + details.getCommand()
                                + " with params: " + LogHelper.toString(details.getParams()) + " channel: " + connection.getChannel()));
            }
        };

        Timeout timeout = connectionManager.newTimeout(timeoutTask, timeoutTime, TimeUnit.MILLISECONDS);
        details.setTimeout(timeout);
    }

    /**
     * Extra lifecycle handling for blocking commands: fails the caller on
     * Redisson shutdown, schedules a reconnect when the pop timeout elapses
     * (to unblock a connection that may have silently died), and converts a
     * caller-side cancel into a forced reconnect plus attempt cancellation.
     *
     * @param popTimeout blocking timeout in seconds; 0 means block indefinitely
     */
    private <R, V> void handleBlockingOperations(final AsyncDetails<V, R> details, final RedisConnection connection, Long popTimeout) {
        final FutureListener<Boolean> listener = new FutureListener<Boolean>() {
            @Override
            public void operationComplete(Future<Boolean> future) throws Exception {
                details.getMainPromise().tryFailure(new RedissonShutdownException("Redisson is shutdown"));
            }
        };

        final Timeout scheduledFuture;
        if (popTimeout != 0) {
            // handling cases when connection has been lost
            final Channel orignalChannel = connection.getChannel();
            scheduledFuture = connectionManager.newTimeout(new TimerTask() {
                @Override
                public void run(Timeout timeout) throws Exception {
                    // re-connection hasn't been made
                    // and connection is still active
//                    if (orignalChannel == connection.getChannel()
//                            && connection.isActive()) {
//                        return;
//                    }

                    if (details.getAttemptPromise().trySuccess(null)) {
                        connection.forceFastReconnectAsync();
                    }
                }
            }, popTimeout, TimeUnit.SECONDS);
        } else {
            scheduledFuture = null;
        }

        details.getMainPromise().addListener(new FutureListener<R>() {
            @Override
            public void operationComplete(Future<R> future) throws Exception {
                if (scheduledFuture != null) {
                    scheduledFuture.cancel();
                }

                // synchronized pairs with the add below so the shutdown listener
                // isn't registered after the main promise already completed.
                synchronized (listener) {
                    connectionManager.getShutdownPromise().removeListener(listener);
                }

                // handling cancel operation for blocking commands
                if (future.isCancelled() && !details.getAttemptPromise().isDone()) {
                    log.debug("Canceled blocking operation {} used {}", details.getCommand(), connection);
                    connection.forceFastReconnectAsync().addListener(new FutureListener<Void>() {
                        @Override
                        public void operationComplete(Future<Void> future) throws Exception {
                            details.getAttemptPromise().cancel(true);
                        }
                    });
                    return;
                }

                if (future.cause() instanceof RedissonShutdownException) {
                    details.getAttemptPromise().tryFailure(future.cause());
                }
            }
        });

        synchronized (listener) {
            if (!details.getMainPromise().isDone()) {
                connectionManager.getShutdownPromise().addListener(listener);
            }
        }
    }

    /**
     * Returns the connection to its pool (read or write side) once the attempt
     * completes, and releases the shutdown latch acquired in {@link #async}.
     */
    protected <V, R> void releaseConnection(final NodeSource source, final RFuture<RedisConnection> connectionFuture,
            final boolean isReadOnly, RPromise<R> attemptPromise, final AsyncDetails<V, R> details) {
        attemptPromise.addListener(new FutureListener<R>() {
            @Override
            public void operationComplete(Future<R> future) throws Exception {
                if (!connectionFuture.isSuccess()) {
                    return;
                }

                RedisConnection connection = connectionFuture.getNow();
                connectionManager.getShutdownLatch().release();
                if (isReadOnly) {
                    connectionManager.releaseRead(source, connection);
                } else {
                    connectionManager.releaseWrite(source, connection);
                }

                if (log.isDebugEnabled()) {
                    log.debug("connection released for command {} and params {} from slot {} using connection {}",
                            details.getCommand(), Arrays.toString(details.getParams()), details.getSource(), connection);
                }
            }
        });
    }

    /**
     * Finalizes one attempt: follows MOVED/ASK redirects (unless
     * {@code ignoreRedirect}), retries on LOADING immediately and on TRYAGAIN
     * after a 1s delay, otherwise frees the params and completes the caller's
     * promise with the result or failure. A MOVED received while already on a
     * MOVED redirect is reported as a redirection loop.
     */
    protected <R, V> void checkAttemptFuture(final NodeSource source, final AsyncDetails<V, R> details,
            Future<R> future, final boolean ignoreRedirect) {
        details.getTimeout().cancel();
        if (future.isCancelled()) {
            return;
        }

        try {
            details.removeMainPromiseListener();

            if (future.cause() instanceof RedisMovedException && !ignoreRedirect) {
                RedisMovedException ex = (RedisMovedException) future.cause();
                if (source.getRedirect() == Redirect.MOVED) {
                    details.getMainPromise().tryFailure(new RedisException("MOVED redirection loop detected. Node " + source.getAddr() + " has further redirect to " + ex.getUrl()));
                    return;
                }

                async(details.isReadOnlyMode(), new NodeSource(ex.getSlot(), ex.getUrl(), Redirect.MOVED), details.getCodec(),
                        details.getCommand(), details.getParams(), details.getMainPromise(), details.getAttempt(), ignoreRedirect, details.getConnectionFuture());
                AsyncDetails.release(details);
                return;
            }

            if (future.cause() instanceof RedisAskException && !ignoreRedirect) {
                RedisAskException ex = (RedisAskException) future.cause();
                async(details.isReadOnlyMode(), new NodeSource(ex.getSlot(), ex.getUrl(), Redirect.ASK), details.getCodec(),
                        details.getCommand(), details.getParams(), details.getMainPromise(), details.getAttempt(), ignoreRedirect, details.getConnectionFuture());
                AsyncDetails.release(details);
                return;
            }

            if (future.cause() instanceof RedisLoadingException) {
                async(details.isReadOnlyMode(), source, details.getCodec(),
                        details.getCommand(), details.getParams(), details.getMainPromise(), details.getAttempt(), ignoreRedirect, details.getConnectionFuture());
                AsyncDetails.release(details);
                return;
            }

            if (future.cause() instanceof RedisTryAgainException) {
                connectionManager.newTimeout(new TimerTask() {
                    @Override
                    public void run(Timeout timeout) throws Exception {
                        async(details.isReadOnlyMode(), source, details.getCodec(),
                                details.getCommand(), details.getParams(), details.getMainPromise(), details.getAttempt(), ignoreRedirect, details.getConnectionFuture());
                    }
                }, 1, TimeUnit.SECONDS);
                AsyncDetails.release(details);
                return;
            }

            free(details.getParams());

            if (future.isSuccess()) {
                R res = future.getNow();
                if (res instanceof ScanResult) {
                    // SCAN cursors must continue on the same node that produced them.
                    ((ScanResult) res).setRedisClient(details.getConnectionFuture().getNow().getRedisClient());
                }

                handleSuccess(details, details.getMainPromise(), details.getCommand(), res);
            } else {
                handleError(details, details.getMainPromise(), future.cause());
            }

            AsyncDetails.release(details);
        } catch (RuntimeException e) {
            handleError(details, details.getMainPromise(), e);
            throw e;
        }
    }

    protected <V, R> void handleError(AsyncDetails<V, R> details, RPromise<R> mainPromise, Throwable cause) {
        mainPromise.tryFailure(cause);
    }

    // Resolves RedissonReference placeholders in the result when reference
    // support is enabled; otherwise completes the promise with the raw value.
    protected <V, R> void handleSuccess(AsyncDetails<V, R> details, RPromise<R> promise, RedisCommand<?> command, R res) {
        if (isRedissonReferenceSupportEnabled()) {
            handleReference(promise, res);
        } else {
            promise.trySuccess(res);
        }
    }

    private <R, V> void handleReference(RPromise<R> mainPromise, R res) {
        try {
            mainPromise.trySuccess(tryHandleReference(res));
        } catch (Exception e) {
            //fall back and let other part of the code handle the type conversion.
            mainPromise.trySuccess(res);
        }
    }

    /**
     * Recursively replaces {@link RedissonReference} placeholders inside common
     * container shapes (List, Set, Map, scan results) with live Redisson
     * objects, mutating the container in place where possible and falling back
     * to a replacement container (e.g. for non-mutable Set views) otherwise.
     * Non-container values are delegated to {@link #tryHandleReference0}.
     */
    protected <T> T tryHandleReference(T o) {
        boolean hasConversion = false;
        if (o instanceof List) {
            List<Object> r = (List<Object>) o;
            for (int i = 0; i < r.size(); i++) {
                Object ref = tryHandleReference0(r.get(i));
                if (ref != r.get(i)) {
                    r.set(i, ref);
                }
            }
            return o;
        } else if (o instanceof Set) {
            Set set, r = (Set) o;
            boolean useNewSet = o instanceof LinkedHashSet;
            try {
                // Try to mirror the original set's concrete type for the converted copy.
                set = (Set) o.getClass().getConstructor().newInstance();
            } catch (Exception exception) {
                set = new LinkedHashSet();
            }
            for (Object i : r) {
                Object ref = tryHandleReference0(i);
                //Not testing for ref changes because r.add(ref) below needs to
                //fail on the first iteration to be able to perform fall back
                //if failure happens.
                //
                //Assuming the failure reason is systematic such as put method
                //is not supported or implemented, and not an occasional issue
                //like only one element fails.
                if (useNewSet) {
                    set.add(ref);
                } else {
                    try {
                        r.add(ref);
                        set.add(i);
                    } catch (Exception e) {
                        //r is not supporting add operation, like
                        //LinkedHashMap$LinkedEntrySet and others.
                        //fall back to use a new set.
                        useNewSet = true;
                        set.add(ref);
                    }
                }
                hasConversion |= ref != i;
            }

            if (!hasConversion) {
                return o;
            } else if (useNewSet) {
                return (T) set;
            } else if (!set.isEmpty()) {
                // In-place mode: 'set' holds the originals that were replaced; drop them.
                r.removeAll(set);
            }
            return o;
        } else if (o instanceof Map) {
            Map<Object, Object> r = (Map<Object, Object>) o;
            for (Map.Entry<Object, Object> e : r.entrySet()) {
                if (e.getKey() instanceof RedissonReference || e.getValue() instanceof RedissonReference) {
                    Object key = e.getKey();
                    Object value = e.getValue();
                    if (e.getKey() instanceof RedissonReference) {
                        key = fromReference(e.getKey());
                        r.remove(e.getKey());
                    }
                    if (e.getValue() instanceof RedissonReference) {
                        value = fromReference(e.getValue());
                    }
                    r.put(key, value);
                }
            }

            return o;
        } else if (o instanceof ListScanResult) {
            tryHandleReference(((ListScanResult) o).getValues());
            return o;
        } else if (o instanceof MapScanResult) {
            MapScanResult scanResult = (MapScanResult) o;
            Map oldMap = ((MapScanResult) o).getMap();
            Map map = tryHandleReference(oldMap);
            if (map != oldMap) {
                MapScanResult<Object, Object> newScanResult
                        = new MapScanResult<Object, Object>(scanResult.getPos(), map);
                newScanResult.setRedisClient(scanResult.getRedisClient());
                return (T) newScanResult;
            } else {
                return o;
            }
        } else {
            return tryHandleReference0(o);
        }
    }

    // Converts a single value: RedissonReference directly, ScoredEntry and
    // Map.Entry wrappers by rebuilding them around the converted member(s);
    // anything else is returned untouched.
    private <T> T tryHandleReference0(T o) {
        if (o instanceof RedissonReference) {
            return fromReference(o);
        } else if (o instanceof ScoredEntry && ((ScoredEntry) o).getValue() instanceof RedissonReference) {
            ScoredEntry<?> se = ((ScoredEntry<?>) o);
            return (T) new ScoredEntry(se.getScore(), fromReference(se.getValue()));
        } else if (o instanceof Map.Entry) {
            Map.Entry old = (Map.Entry) o;
            Object key = tryHandleReference0(old.getKey());
            Object value = tryHandleReference0(old.getValue());
            return value != old.getValue() || key != old.getKey()
                    ? (T) new AbstractMap.SimpleEntry(key, value)
                    : o;
        } else {
            return o;
        }
    }

    // Materializes a RedissonReference through whichever client is active;
    // on any failure the raw reference object is returned unchanged.
    private <R> R fromReference(Object res) {
        try {
            return redisson != null
                    ? RedissonObjectFactory.<R>fromReference(redisson, (RedissonReference) res)
                    : RedissonObjectFactory.<R>fromReference(redissonReactive, (RedissonReference) res);
        } catch (Exception exception) {
            return (R) res;
        }
    }

    /**
     * Writes the command to the channel and stores the resulting write future
     * on {@code details}. After an ASK redirect the command is prefixed with
     * ASKING and both are sent as a single batch, per the cluster protocol.
     */
    protected <R, V> void sendCommand(final AsyncDetails<V, R> details, final RedisConnection connection) {
        if (details.getSource().getRedirect() == Redirect.ASK) {
            List<CommandData<?, ?>> list = new ArrayList<CommandData<?, ?>>(2);
            RPromise<Void> promise = new RedissonPromise<Void>();
            list.add(new CommandData<Void, Void>(promise, details.getCodec(), RedisCommands.ASKING, new Object[]{}));
            list.add(new CommandData<V, R>(details.getAttemptPromise(), details.getCodec(), details.getCommand(), details.getParams()));
            RPromise<Void> main = new RedissonPromise<Void>();
            ChannelFuture future = connection.send(new CommandsData(main, list, false));
            details.setWriteFuture(future);
        } else {
            if (log.isDebugEnabled()) {
                log.debug("acquired connection for command {} and params {} from slot {} using node {}... {}",
                        details.getCommand(), Arrays.toString(details.getParams()), details.getSource(), connection.getRedisClient().getAddr(), connection);
            }
            ChannelFuture future = connection.send(new CommandData<V, R>(details.getAttemptPromise(), details.getCodec(), details.getCommand(), details.getParams()));
            details.setWriteFuture(future);
        }
    }

}
/* * Copyright 1997-2015 Optimatika (www.optimatika.se) * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/
package org.ojalgo.optimisation;

import static org.ojalgo.constant.BigMath.*;

import java.math.BigDecimal;
import java.math.RoundingMode;

import org.ojalgo.netio.BasicLogger;
import org.ojalgo.type.TypeUtils;
import org.ojalgo.type.context.NumberContext;

/**
 * A model variable. Holds the integrality flag and the current/solution value;
 * lower/upper limits and the contribution weight come from {@link ModelEntity}.
 *
 * @author apete
 */
public final class Variable extends ModelEntity<Variable> {

    /**
     * Creates a plain (continuous, unconstrained) variable with the given name.
     */
    public static Variable make(final String name) {
        return new Variable(name);
    }

    /**
     * Creates a binary (0/1 integer) variable with the given name.
     */
    public static Variable makeBinary(final String name) {
        return Variable.make(name).binary();
    }

    // Position of this variable within its model; assigned when added to a model.
    private Expression.Index myIndex = null;
    // true when this variable must take an integer value.
    private boolean myInteger = false;
    // Current/solution value; null until set.
    private BigDecimal myValue = null;

    public Variable(final String name) {
        super(name);
    }

    protected Variable(final Variable entityToCopy) {

        super(entityToCopy);

        // The copy is deliberately detached from any model.
        myIndex = null;
        myInteger = entityToCopy.isInteger();
        myValue = entityToCopy.getValue();
    }

    /**
     * Constrains this variable to {0, 1}: lower limit 0, upper limit 1, integer.
     *
     * @return this (for chaining)
     */
    public Variable binary() {
        return this.lower(ZERO).upper(ONE).integer(true);
    }

    /**
     * @return a copy of this variable, detached from any model
     */
    public Variable copy() {
        return new Variable(this);
    }

    /**
     * @return (lower limit - value), rounded up to an integer when this is an integer
     *         variable; the lower limit itself when no value is set; null when there is
     *         no lower limit
     */
    public BigDecimal getLowerSlack() {

        BigDecimal retVal = null;

        if (this.getLowerLimit() != null) {
            if (myValue != null) {
                retVal = this.getLowerLimit().subtract(myValue);
            } else {
                retVal = this.getLowerLimit();
            }
        }

        if ((retVal != null) && this.isInteger()) {
            // RoundingMode.CEILING replaces the deprecated BigDecimal.ROUND_CEILING constant.
            retVal = retVal.setScale(0, RoundingMode.CEILING);
        }

        return retVal;
    }

    /**
     * @return (upper limit - value), rounded down to an integer when this is an integer
     *         variable; the upper limit itself when no value is set; null when there is
     *         no upper limit
     */
    public BigDecimal getUpperSlack() {

        BigDecimal retVal = null;

        if (this.getUpperLimit() != null) {
            if (myValue != null) {
                retVal = this.getUpperLimit().subtract(myValue);
            } else {
                retVal = this.getUpperLimit();
            }
        }

        if ((retVal != null) && this.isInteger()) {
            // RoundingMode.FLOOR replaces the deprecated BigDecimal.ROUND_FLOOR constant.
            retVal = retVal.setScale(0, RoundingMode.FLOOR);
        }

        return retVal;
    }

    /**
     * @return the current/solution value, or null when none has been set
     */
    public BigDecimal getValue() {
        return myValue;
    }

    /**
     * Sets the integrality flag.
     *
     * @return this (for chaining)
     */
    public Variable integer(final boolean integer) {
        this.setInteger(integer);
        return this;
    }

    /**
     * @return true when this variable is integer with lower limit exactly 0 and upper
     *         limit exactly 1
     */
    public boolean isBinary() {
        // Short-circuits; compareTo(..) == 0 ignores BigDecimal scale differences.
        return this.isInteger()
                && this.isLowerConstraint() && (this.getLowerLimit().compareTo(ZERO) == 0)
                && this.isUpperConstraint() && (this.getUpperLimit().compareTo(ONE) == 0);
    }

    public boolean isInteger() {
        return myInteger;
    }

    public boolean isValueSet() {
        return myValue != null;
    }

    /**
     * @return (contribution weight * value), or 0 when either is unset
     */
    public BigDecimal quantifyContribution() {

        BigDecimal retVal = ZERO;

        final BigDecimal tmpContributionWeight = this.getContributionWeight();
        if ((tmpContributionWeight != null) && (myValue != null)) {
            retVal = tmpContributionWeight.multiply(myValue);
        }

        return retVal;
    }

    /**
     * Drops the integrality requirement (the LP relaxation of this variable).
     *
     * @return this (for chaining)
     */
    public Variable relax() {
        return this.integer(false);
    }

    public void setInteger(final boolean integer) {
        myInteger = integer;
    }

    public void setValue(final Number value) {
        myValue = TypeUtils.toBigDecimal(value);
    }

    @Override
    protected void appendMiddlePart(final StringBuilder aStringBuilder) {

        aStringBuilder.append(this.getName());

        if (myValue != null) {
            aStringBuilder.append(": ");
            aStringBuilder.append(OptimisationUtils.DISPLAY.enforce(myValue).toPlainString());
        }

        if (this.isObjective()) {
            aStringBuilder.append(" (");
            aStringBuilder.append(OptimisationUtils.DISPLAY.enforce(this.getContributionWeight()).toPlainString());
            aStringBuilder.append(")");
        }
    }

    @Override
    protected void destroy() {
        super.destroy();
        myIndex = null;
        myValue = null;
    }

    @Override
    protected boolean validate(final BigDecimal value, final NumberContext context, final BasicLogger.Appender appender) {

        boolean retVal = super.validate(value, context, appender);

        if (retVal && myInteger) {
            try {
                // longValueExact() throws ArithmeticException when the enforced value
                // has a non-zero fractional part - i.e. it is not an integer.
                context.enforce(value).longValueExact();
            } catch (final ArithmeticException ex) {
                appender.println(value + " ! Integer: " + this.getName());
                retVal = false;
            }
        }

        return retVal;
    }

    /**
     * Validates the currently set value; false when no value has been set.
     */
    protected boolean validate(final NumberContext context, final BasicLogger.Appender appender) {
        if (myValue != null) {
            return this.validate(myValue, context, appender);
        } else {
            return false;
        }
    }

    Expression.Index getIndex() {
        return myIndex;
    }

    /**
     * Assigns this variable's model index. May be called repeatedly with the same
     * index, but never with null and never with a different index.
     *
     * @throws IllegalArgumentException when index is null
     * @throws IllegalStateException    when a different index was already assigned
     */
    void setIndex(final Expression.Index index) {
        if (index == null) {
            throw new IllegalArgumentException("The index cannot be null!");
        } else if ((myIndex != null) && (myIndex.index != index.index)) {
            throw new IllegalStateException("Cannot change a variable's index, or add it to more than one model!");
        }
        myIndex = index;
    }

}
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/v2/entity_type.proto package com.google.cloud.dialogflow.v2; /** * <pre> * The request message for [EntityTypes.ListEntityTypes][google.cloud.dialogflow.v2.EntityTypes.ListEntityTypes]. * </pre> * * Protobuf type {@code google.cloud.dialogflow.v2.ListEntityTypesRequest} */ public final class ListEntityTypesRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2.ListEntityTypesRequest) ListEntityTypesRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListEntityTypesRequest.newBuilder() to construct. private ListEntityTypesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListEntityTypesRequest() { parent_ = ""; languageCode_ = ""; pageSize_ = 0; pageToken_ = ""; } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ListEntityTypesRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownFieldProto3( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { java.lang.String s = input.readStringRequireUtf8(); parent_ = s; break; } case 18: { java.lang.String s = input.readStringRequireUtf8(); languageCode_ = s; break; } case 24: { pageSize_ = input.readInt32(); break; } case 34: { java.lang.String s = input.readStringRequireUtf8(); pageToken_ = s; 
break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.v2.EntityTypeProto.internal_static_google_cloud_dialogflow_v2_ListEntityTypesRequest_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.v2.EntityTypeProto.internal_static_google_cloud_dialogflow_v2_ListEntityTypesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.v2.ListEntityTypesRequest.class, com.google.cloud.dialogflow.v2.ListEntityTypesRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; private volatile java.lang.Object parent_; /** * <pre> * Required. The agent to list all entity types from. * Format: `projects/&lt;Project ID&gt;/agent`. * </pre> * * <code>string parent = 1;</code> */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * <pre> * Required. The agent to list all entity types from. * Format: `projects/&lt;Project ID&gt;/agent`. 
* </pre> * * <code>string parent = 1;</code> */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int LANGUAGE_CODE_FIELD_NUMBER = 2; private volatile java.lang.Object languageCode_; /** * <pre> * Optional. The language to list entity synonyms for. If not specified, * the agent's default language is used. * [More than a dozen * languages](https://dialogflow.com/docs/reference/language) are supported. * Note: languages must be enabled in the agent, before they can be used. * </pre> * * <code>string language_code = 2;</code> */ public java.lang.String getLanguageCode() { java.lang.Object ref = languageCode_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); languageCode_ = s; return s; } } /** * <pre> * Optional. The language to list entity synonyms for. If not specified, * the agent's default language is used. * [More than a dozen * languages](https://dialogflow.com/docs/reference/language) are supported. * Note: languages must be enabled in the agent, before they can be used. * </pre> * * <code>string language_code = 2;</code> */ public com.google.protobuf.ByteString getLanguageCodeBytes() { java.lang.Object ref = languageCode_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); languageCode_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 3; private int pageSize_; /** * <pre> * Optional. The maximum number of items to return in a single page. By * default 100 and at most 1000. 
* </pre> * * <code>int32 page_size = 3;</code> */ public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 4; private volatile java.lang.Object pageToken_; /** * <pre> * Optional. The next_page_token value returned from a previous list request. * </pre> * * <code>string page_token = 4;</code> */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * <pre> * Optional. The next_page_token value returned from a previous list request. * </pre> * * <code>string page_token = 4;</code> */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!getParentBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (!getLanguageCodeBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, languageCode_); } if (pageSize_ != 0) { output.writeInt32(3, pageSize_); } if (!getPageTokenBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if 
(!getParentBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (!getLanguageCodeBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, languageCode_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(3, pageSize_); } if (!getPageTokenBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dialogflow.v2.ListEntityTypesRequest)) { return super.equals(obj); } com.google.cloud.dialogflow.v2.ListEntityTypesRequest other = (com.google.cloud.dialogflow.v2.ListEntityTypesRequest) obj; boolean result = true; result = result && getParent() .equals(other.getParent()); result = result && getLanguageCode() .equals(other.getLanguageCode()); result = result && (getPageSize() == other.getPageSize()); result = result && getPageToken() .equals(other.getPageToken()); result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + LANGUAGE_CODE_FIELD_NUMBER; hash = (53 * hash) + getLanguageCode().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dialogflow.v2.ListEntityTypesRequest parseFrom( java.nio.ByteBuffer data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2.ListEntityTypesRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2.ListEntityTypesRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2.ListEntityTypesRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2.ListEntityTypesRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2.ListEntityTypesRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2.ListEntityTypesRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2.ListEntityTypesRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.v2.ListEntityTypesRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2.ListEntityTypesRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.v2.ListEntityTypesRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2.ListEntityTypesRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.dialogflow.v2.ListEntityTypesRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * The request message for [EntityTypes.ListEntityTypes][google.cloud.dialogflow.v2.EntityTypes.ListEntityTypes]. 
* </pre> * * Protobuf type {@code google.cloud.dialogflow.v2.ListEntityTypesRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2.ListEntityTypesRequest) com.google.cloud.dialogflow.v2.ListEntityTypesRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.v2.EntityTypeProto.internal_static_google_cloud_dialogflow_v2_ListEntityTypesRequest_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.v2.EntityTypeProto.internal_static_google_cloud_dialogflow_v2_ListEntityTypesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.v2.ListEntityTypesRequest.class, com.google.cloud.dialogflow.v2.ListEntityTypesRequest.Builder.class); } // Construct using com.google.cloud.dialogflow.v2.ListEntityTypesRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); parent_ = ""; languageCode_ = ""; pageSize_ = 0; pageToken_ = ""; return this; } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dialogflow.v2.EntityTypeProto.internal_static_google_cloud_dialogflow_v2_ListEntityTypesRequest_descriptor; } public com.google.cloud.dialogflow.v2.ListEntityTypesRequest getDefaultInstanceForType() { return com.google.cloud.dialogflow.v2.ListEntityTypesRequest.getDefaultInstance(); } public com.google.cloud.dialogflow.v2.ListEntityTypesRequest build() { 
com.google.cloud.dialogflow.v2.ListEntityTypesRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public com.google.cloud.dialogflow.v2.ListEntityTypesRequest buildPartial() { com.google.cloud.dialogflow.v2.ListEntityTypesRequest result = new com.google.cloud.dialogflow.v2.ListEntityTypesRequest(this); result.parent_ = parent_; result.languageCode_ = languageCode_; result.pageSize_ = pageSize_; result.pageToken_ = pageToken_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return (Builder) super.setField(field, value); } public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dialogflow.v2.ListEntityTypesRequest) { return mergeFrom((com.google.cloud.dialogflow.v2.ListEntityTypesRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dialogflow.v2.ListEntityTypesRequest other) { if (other == com.google.cloud.dialogflow.v2.ListEntityTypesRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; onChanged(); } if (!other.getLanguageCode().isEmpty()) { languageCode_ = other.languageCode_; onChanged(); } if 
(other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.dialogflow.v2.ListEntityTypesRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.dialogflow.v2.ListEntityTypesRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private java.lang.Object parent_ = ""; /** * <pre> * Required. The agent to list all entity types from. * Format: `projects/&lt;Project ID&gt;/agent`. * </pre> * * <code>string parent = 1;</code> */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Required. The agent to list all entity types from. * Format: `projects/&lt;Project ID&gt;/agent`. * </pre> * * <code>string parent = 1;</code> */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Required. The agent to list all entity types from. * Format: `projects/&lt;Project ID&gt;/agent`. 
* </pre> * * <code>string parent = 1;</code> */ public Builder setParent( java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; onChanged(); return this; } /** * <pre> * Required. The agent to list all entity types from. * Format: `projects/&lt;Project ID&gt;/agent`. * </pre> * * <code>string parent = 1;</code> */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); onChanged(); return this; } /** * <pre> * Required. The agent to list all entity types from. * Format: `projects/&lt;Project ID&gt;/agent`. * </pre> * * <code>string parent = 1;</code> */ public Builder setParentBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; onChanged(); return this; } private java.lang.Object languageCode_ = ""; /** * <pre> * Optional. The language to list entity synonyms for. If not specified, * the agent's default language is used. * [More than a dozen * languages](https://dialogflow.com/docs/reference/language) are supported. * Note: languages must be enabled in the agent, before they can be used. * </pre> * * <code>string language_code = 2;</code> */ public java.lang.String getLanguageCode() { java.lang.Object ref = languageCode_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); languageCode_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Optional. The language to list entity synonyms for. If not specified, * the agent's default language is used. * [More than a dozen * languages](https://dialogflow.com/docs/reference/language) are supported. * Note: languages must be enabled in the agent, before they can be used. 
* </pre> * * <code>string language_code = 2;</code> */ public com.google.protobuf.ByteString getLanguageCodeBytes() { java.lang.Object ref = languageCode_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); languageCode_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Optional. The language to list entity synonyms for. If not specified, * the agent's default language is used. * [More than a dozen * languages](https://dialogflow.com/docs/reference/language) are supported. * Note: languages must be enabled in the agent, before they can be used. * </pre> * * <code>string language_code = 2;</code> */ public Builder setLanguageCode( java.lang.String value) { if (value == null) { throw new NullPointerException(); } languageCode_ = value; onChanged(); return this; } /** * <pre> * Optional. The language to list entity synonyms for. If not specified, * the agent's default language is used. * [More than a dozen * languages](https://dialogflow.com/docs/reference/language) are supported. * Note: languages must be enabled in the agent, before they can be used. * </pre> * * <code>string language_code = 2;</code> */ public Builder clearLanguageCode() { languageCode_ = getDefaultInstance().getLanguageCode(); onChanged(); return this; } /** * <pre> * Optional. The language to list entity synonyms for. If not specified, * the agent's default language is used. * [More than a dozen * languages](https://dialogflow.com/docs/reference/language) are supported. * Note: languages must be enabled in the agent, before they can be used. * </pre> * * <code>string language_code = 2;</code> */ public Builder setLanguageCodeBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); languageCode_ = value; onChanged(); return this; } private int pageSize_ ; /** * <pre> * Optional. 
The maximum number of items to return in a single page. By * default 100 and at most 1000. * </pre> * * <code>int32 page_size = 3;</code> */ public int getPageSize() { return pageSize_; } /** * <pre> * Optional. The maximum number of items to return in a single page. By * default 100 and at most 1000. * </pre> * * <code>int32 page_size = 3;</code> */ public Builder setPageSize(int value) { pageSize_ = value; onChanged(); return this; } /** * <pre> * Optional. The maximum number of items to return in a single page. By * default 100 and at most 1000. * </pre> * * <code>int32 page_size = 3;</code> */ public Builder clearPageSize() { pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * <pre> * Optional. The next_page_token value returned from a previous list request. * </pre> * * <code>string page_token = 4;</code> */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Optional. The next_page_token value returned from a previous list request. * </pre> * * <code>string page_token = 4;</code> */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Optional. The next_page_token value returned from a previous list request. * </pre> * * <code>string page_token = 4;</code> */ public Builder setPageToken( java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; onChanged(); return this; } /** * <pre> * Optional. 
The next_page_token value returned from a previous list request. * </pre> * * <code>string page_token = 4;</code> */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); onChanged(); return this; } /** * <pre> * Optional. The next_page_token value returned from a previous list request. * </pre> * * <code>string page_token = 4;</code> */ public Builder setPageTokenBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; onChanged(); return this; } public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFieldsProto3(unknownFields); } public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2.ListEntityTypesRequest) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2.ListEntityTypesRequest) private static final com.google.cloud.dialogflow.v2.ListEntityTypesRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2.ListEntityTypesRequest(); } public static com.google.cloud.dialogflow.v2.ListEntityTypesRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListEntityTypesRequest> PARSER = new com.google.protobuf.AbstractParser<ListEntityTypesRequest>() { public ListEntityTypesRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ListEntityTypesRequest(input, extensionRegistry); } }; public static com.google.protobuf.Parser<ListEntityTypesRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListEntityTypesRequest> getParserForType() { return 
PARSER; } public com.google.cloud.dialogflow.v2.ListEntityTypesRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
package uk.gov.gds.performance.collector;

import org.joda.time.DateTimeUtils;
import org.joda.time.Instant;
import org.joda.time.LocalDate;
import org.junit.*;
import org.kohsuke.args4j.CmdLineException;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.PrintStream;
import java.util.UUID;

import static org.junit.Assert.*;

/**
 * Tests for {@link CommandLineArguments}: default values, custom argument parsing,
 * validation failures, and usage/help output written to stderr.
 */
public class CommandLineArgumentsTest {

    /** Fixed "now" so the default date-range assertions are deterministic. */
    public static final Instant RIGHT_NOW_ACCORDING_TO_THIS_TEST = new Instant("1993-02-02T06:00:00Z");

    //region hard code joda-time system clock for this test
    @BeforeClass
    public static void setJodaTimeToFixedTime() {
        DateTimeUtils.setCurrentMillisFixed(RIGHT_NOW_ACCORDING_TO_THIS_TEST.getMillis());
    }

    @AfterClass
    public static void resetJodaTime() {
        DateTimeUtils.setCurrentMillisSystem();
    }
    //endregion

    //region capture system out
    private PrintStream originalSystemOut;
    ByteArrayOutputStream capturedSystemOut = new ByteArrayOutputStream();

    @Before
    public void setFakeSystemOut() throws Exception {
        originalSystemOut = System.out;
        System.setOut(new PrintStream(capturedSystemOut, true));
    }

    @After
    public void restoreOriginalSystemOut() throws Exception {
        System.setOut(originalSystemOut);
    }
    //endregion

    //region capture system error
    private PrintStream originalSystemErr;
    ByteArrayOutputStream capturedSystemErr = new ByteArrayOutputStream();

    @Before
    public void setFakeSystemErr() throws Exception {
        originalSystemErr = System.err;
        System.setErr(new PrintStream(capturedSystemErr, true));
    }

    @After
    public void restoreOriginalSystemErr() throws Exception {
        System.setErr(originalSystemErr);
    }
    //endregion

    @Test
    public void getConfigurationFile_shouldReturnASensibleDefault_whenNoArgumentsSpecified() throws Exception {
        CommandLineArguments arguments = CommandLineArguments.parse();
        assertEquals(new File("configuration.properties"), arguments.getConfigurationFile());
    }

    @Test
    public void getConfigurationFile_shouldReturnACustomFile_whenTheFileArgumentIsSpecified() throws Exception {
        File tempFile = buildRandomTempFileThatDoesNotExist();
        CommandLineArguments arguments = CommandLineArguments.parse("--config", tempFile.getAbsolutePath());
        assertEquals(tempFile, arguments.getConfigurationFile());
    }

    @Test
    public void getDateRange_shouldReturnFromThreeDaysAgoToToday_whenNoArgumentsAreSpecified() throws Exception {
        CommandLineArguments arguments = CommandLineArguments.parse();
        LocalDateRange range = arguments.getDateRange();
        assertEquals(new LocalDate(RIGHT_NOW_ACCORDING_TO_THIS_TEST).minusDays(3), range.getStartDate());
        assertEquals(new LocalDate(RIGHT_NOW_ACCORDING_TO_THIS_TEST), range.getEndDate());
    }

    @Test
    public void getDateRange_shouldReturnACustomStartDate_whenTheFromArgumentIsSpecified() throws Exception {
        CommandLineArguments arguments = CommandLineArguments.parse("--from", "1993-01-01");
        LocalDateRange range = arguments.getDateRange();
        assertEquals(new LocalDate("1993-01-01"), range.getStartDate());
    }

    @Test(expected = CmdLineException.class)
    public void parsingCommandLineArguments_shouldBlowUp_givenAnInvalidFromDate() throws Exception {
        // A full timestamp is not a valid LocalDate argument.
        String invalidDate = "1993-01-01T00:00:00.000Z";
        CommandLineArguments.parse("--from", invalidDate);
    }

    @Test
    public void getDateRange_shouldReturnACustomEndDate_whenTheToArgumentIsSpecified() throws Exception {
        CommandLineArguments arguments = CommandLineArguments.parse("--to", "1993-12-31");
        LocalDateRange range = arguments.getDateRange();
        assertEquals(new LocalDate("1993-12-31"), range.getEndDate());
    }

    @Test(expected = CmdLineException.class)
    public void parsingCommandLineArguments_shouldBlowUp_givenAnInvalidToDate() throws Exception {
        String invalidDate = "not even remotely a valid date";
        CommandLineArguments.parse("--to", invalidDate);
    }

    @Test
    public void getDateRange_shouldReturnACustomRange_whenTheFromAndToArgumentsAreBothSpecified() throws Exception {
        CommandLineArguments arguments = CommandLineArguments.parse("--from", "2014-01-01", "--to", "2014-12-31");
        LocalDateRange range = arguments.getDateRange();
        assertEquals(new LocalDate("2014-01-01"), range.getStartDate());
        assertEquals(new LocalDate("2014-12-31"), range.getEndDate());
    }

    @Test
    public void isDryRun_shouldReturnFalse_whenTheDryRunFlagIsNotSpecified() throws Exception {
        CommandLineArguments arguments = CommandLineArguments.parse();
        assertFalse(arguments.isDryRun());
    }

    @Test
    public void isDryRun_shouldReturnTrue_whenTheDryRunFlagIsSpecified() throws Exception {
        CommandLineArguments arguments = CommandLineArguments.parse("--dry-run");
        assertTrue(arguments.isDryRun());
    }

    @Test
    public void parse_shouldPrintUsageToSystemErrAndRethrowTheException_givenABadCommandLineOption() throws Exception {
        try {
            CommandLineArguments.parse("--foo");
            fail("expected an exception");
        } catch (CmdLineException e) {
            // Bad options print the error message followed by the usage text.
            ByteArrayOutputStream expectedMessage = new ByteArrayOutputStream();
            try (PrintStream out = new PrintStream(expectedMessage, true)) {
                out.println(e.getMessage());
                e.getParser().printUsage(out);
            }
            assertEquals(new String(expectedMessage.toByteArray()), new String(capturedSystemErr.toByteArray()));
            assertEquals(0, capturedSystemOut.toByteArray().length);
        }
    }

    @Test
    public void parse_shouldPrintUsageToSystemErrAndThrowACmdLineException_givenAHelpFlag() throws Exception {
        try {
            CommandLineArguments.parse("--help");
            fail("expected an exception");
        } catch (CmdLineException e) {
            // Help requests print only the usage text, without an error message.
            ByteArrayOutputStream expectedMessage = new ByteArrayOutputStream();
            try (PrintStream out = new PrintStream(expectedMessage, true)) {
                e.getParser().printUsage(out);
            }
            assertEquals(new String(expectedMessage.toByteArray()), new String(capturedSystemErr.toByteArray()));
            assertEquals(0, capturedSystemOut.toByteArray().length);
        }
    }

    @Test
    public void parse_shouldPrintUsageToSystemErrAndThrowACmdLineException_givenAnHFlag() throws Exception {
        try {
            CommandLineArguments.parse("-h");
            fail("expected an exception");
        } catch (CmdLineException e) {
            ByteArrayOutputStream expectedMessage = new ByteArrayOutputStream();
            try (PrintStream out = new PrintStream(expectedMessage, true)) {
                e.getParser().printUsage(out);
            }
            assertEquals(new String(expectedMessage.toByteArray()), new String(capturedSystemErr.toByteArray()));
            assertEquals(0, capturedSystemOut.toByteArray().length);
        }
    }

    @Test
    public void parse_shouldPrintUsageToSystemErrAndThrowACmdLineException_givenAQuestionMarkFlag() throws Exception {
        try {
            // FIX: this test previously passed "-h", duplicating the test above and
            // leaving the "-?" flag untested. It now exercises the question-mark flag.
            CommandLineArguments.parse("-?");
            fail("expected an exception");
        } catch (CmdLineException e) {
            ByteArrayOutputStream expectedMessage = new ByteArrayOutputStream();
            try (PrintStream out = new PrintStream(expectedMessage, true)) {
                e.getParser().printUsage(out);
            }
            assertEquals(new String(expectedMessage.toByteArray()), new String(capturedSystemErr.toByteArray()));
            assertEquals(0, capturedSystemOut.toByteArray().length);
        }
    }

    /** Builds a unique path under the temp dir; the file itself is never created. */
    private File buildRandomTempFileThatDoesNotExist() {
        return new File(System.getProperty("java.io.tmpdir"), UUID.randomUUID().toString() + ".tmp");
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.streaming.connectors.kafka.table;

import org.apache.flink.annotation.Internal;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.serialization.SerializationSchema;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.connector.kafka.sink.KafkaSinkBuilder;
import org.apache.flink.streaming.api.datastream.DataStreamSink;
import org.apache.flink.streaming.connectors.kafka.partitioner.FlinkKafkaPartitioner;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.connector.ChangelogMode;
import org.apache.flink.table.connector.Projection;
import org.apache.flink.table.connector.format.EncodingFormat;
import org.apache.flink.table.connector.sink.DataStreamSinkProvider;
import org.apache.flink.table.connector.sink.DynamicTableSink;
import org.apache.flink.table.connector.sink.SinkV2Provider;
import org.apache.flink.table.connector.sink.abilities.SupportsWritingMetadata;
import org.apache.flink.table.data.ArrayData;
import org.apache.flink.table.data.MapData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.utils.DataTypeUtils;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.header.Header;

import javax.annotation.Nullable;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.stream.Stream;

import static org.apache.flink.util.Preconditions.checkNotNull;

/** A version-agnostic Kafka {@link DynamicTableSink}. */
@Internal
public class KafkaDynamicSink implements DynamicTableSink, SupportsWritingMetadata {

    // --------------------------------------------------------------------------------------------
    // Mutable attributes
    // --------------------------------------------------------------------------------------------

    /**
     * Metadata that is appended at the end of a physical sink row.
     *
     * <p>Mutable: updated by {@link #applyWritableMetadata(List, DataType)} during planning and
     * carried over in {@link #copy()}.
     */
    protected List<String> metadataKeys;

    // --------------------------------------------------------------------------------------------
    // Format attributes
    // --------------------------------------------------------------------------------------------

    /** Data type of consumed data type. */
    protected DataType consumedDataType;

    /** Data type to configure the formats. */
    protected final DataType physicalDataType;

    /** Optional format for encoding keys to Kafka. */
    protected final @Nullable EncodingFormat<SerializationSchema<RowData>> keyEncodingFormat;

    /** Format for encoding values to Kafka. */
    protected final EncodingFormat<SerializationSchema<RowData>> valueEncodingFormat;

    /** Indices that determine the key fields and the source position in the consumed row. */
    protected final int[] keyProjection;

    /** Indices that determine the value fields and the source position in the consumed row. */
    protected final int[] valueProjection;

    /** Prefix that needs to be removed from fields when constructing the physical data type. */
    protected final @Nullable String keyPrefix;

    // --------------------------------------------------------------------------------------------
    // Kafka-specific attributes
    // --------------------------------------------------------------------------------------------

    /** The defined delivery guarantee. */
    private final DeliveryGuarantee deliveryGuarantee;

    /**
     * If the {@link #deliveryGuarantee} is {@link DeliveryGuarantee#EXACTLY_ONCE} the value is the
     * prefix for all ids of opened Kafka transactions.
     */
    @Nullable private final String transactionalIdPrefix;

    /** The Kafka topic to write to. */
    protected final String topic;

    /** Properties for the Kafka producer. */
    protected final Properties properties;

    /** Partitioner to select Kafka partition for each item. */
    protected final @Nullable FlinkKafkaPartitioner<RowData> partitioner;

    /**
     * Flag to determine sink mode. In upsert mode sink transforms the delete/update-before message
     * to tombstone message.
     */
    protected final boolean upsertMode;

    /** Sink buffer flush config which only supported in upsert mode now. */
    protected final SinkBufferFlushMode flushMode;

    /** Parallelism of the physical Kafka producer. */
    protected final @Nullable Integer parallelism;

    /**
     * Creates the sink.
     *
     * <p>Validates all required attributes; rejects an enabled buffer-flush configuration when
     * upsert mode is off, since flushing is only supported by upsert-kafka.
     */
    public KafkaDynamicSink(
            DataType consumedDataType,
            DataType physicalDataType,
            @Nullable EncodingFormat<SerializationSchema<RowData>> keyEncodingFormat,
            EncodingFormat<SerializationSchema<RowData>> valueEncodingFormat,
            int[] keyProjection,
            int[] valueProjection,
            @Nullable String keyPrefix,
            String topic,
            Properties properties,
            @Nullable FlinkKafkaPartitioner<RowData> partitioner,
            DeliveryGuarantee deliveryGuarantee,
            boolean upsertMode,
            SinkBufferFlushMode flushMode,
            @Nullable Integer parallelism,
            @Nullable String transactionalIdPrefix) {
        // Format attributes
        this.consumedDataType =
                checkNotNull(consumedDataType, "Consumed data type must not be null.");
        this.physicalDataType =
                checkNotNull(physicalDataType, "Physical data type must not be null.");
        this.keyEncodingFormat = keyEncodingFormat;
        this.valueEncodingFormat =
                checkNotNull(valueEncodingFormat, "Value encoding format must not be null.");
        this.keyProjection = checkNotNull(keyProjection, "Key projection must not be null.");
        this.valueProjection = checkNotNull(valueProjection, "Value projection must not be null.");
        this.keyPrefix = keyPrefix;
        this.transactionalIdPrefix = transactionalIdPrefix;
        // Mutable attributes
        this.metadataKeys = Collections.emptyList();
        // Kafka-specific attributes
        this.topic = checkNotNull(topic, "Topic must not be null.");
        this.properties = checkNotNull(properties, "Properties must not be null.");
        this.partitioner = partitioner;
        this.deliveryGuarantee =
                checkNotNull(deliveryGuarantee, "DeliveryGuarantee must not be null.");
        this.upsertMode = upsertMode;
        this.flushMode = checkNotNull(flushMode);
        if (flushMode.isEnabled() && !upsertMode) {
            throw new IllegalArgumentException(
                    "Sink buffer flush is only supported in upsert-kafka.");
        }
        this.parallelism = parallelism;
    }

    @Override
    public ChangelogMode getChangelogMode(ChangelogMode requestedMode) {
        // The changelog mode is fully determined by the value format; the requested mode is ignored.
        return valueEncodingFormat.getChangelogMode();
    }

    @Override
    public SinkRuntimeProvider getSinkRuntimeProvider(Context context) {
        // Key serialization is optional (null when no key format is configured).
        final SerializationSchema<RowData> keySerialization =
                createSerialization(context, keyEncodingFormat, keyProjection, keyPrefix);
        final SerializationSchema<RowData> valueSerialization =
                createSerialization(context, valueEncodingFormat, valueProjection, null);
        final KafkaSinkBuilder<RowData> sinkBuilder = KafkaSink.builder();
        final List<LogicalType> physicalChildren = physicalDataType.getLogicalType().getChildren();
        if (transactionalIdPrefix != null) {
            sinkBuilder.setTransactionalIdPrefix(transactionalIdPrefix);
        }
        final KafkaSink<RowData> kafkaSink =
                sinkBuilder
                        .setDeliverGuarantee(deliveryGuarantee)
                        .setBootstrapServers(
                                properties.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG).toString())
                        .setKafkaProducerConfig(properties)
                        .setRecordSerializer(
                                new DynamicKafkaRecordSerializationSchema(
                                        topic,
                                        partitioner,
                                        keySerialization,
                                        valueSerialization,
                                        getFieldGetters(physicalChildren, keyProjection),
                                        getFieldGetters(physicalChildren, valueProjection),
                                        hasMetadata(),
                                        getMetadataPositions(physicalChildren),
                                        upsertMode))
                        .build();
        if (flushMode.isEnabled() && upsertMode) {
            // Buffered upsert mode: wrap the Kafka sink so updates for the same key can be
            // reduced/deduplicated before being flushed to Kafka.
            return (DataStreamSinkProvider)
                    dataStream -> {
                        final boolean objectReuse =
                                dataStream
                                        .getExecutionEnvironment()
                                        .getConfig()
                                        .isObjectReuseEnabled();
                        // When object reuse is enabled, rows must be copied before buffering;
                        // otherwise the identity function is sufficient.
                        final ReducingUpsertSink<?> sink =
                                new ReducingUpsertSink<>(
                                        kafkaSink,
                                        physicalDataType,
                                        keyProjection,
                                        flushMode,
                                        objectReuse
                                                ? createRowDataTypeSerializer(
                                                                context,
                                                                dataStream.getExecutionConfig())
                                                        ::copy
                                                : rowData -> rowData);
                        final DataStreamSink<RowData> end = dataStream.sinkTo(sink);
                        if (parallelism != null) {
                            end.setParallelism(parallelism);
                        }
                        return end;
                    };
        }
        return SinkV2Provider.of(kafkaSink, parallelism);
    }

    @Override
    public Map<String, DataType> listWritableMetadata() {
        // LinkedHashMap + forEachOrdered keeps the declared enum order of the metadata keys.
        final Map<String, DataType> metadataMap = new LinkedHashMap<>();
        Stream.of(WritableMetadata.values())
                .forEachOrdered(m -> metadataMap.put(m.key, m.dataType));
        return metadataMap;
    }

    @Override
    public void applyWritableMetadata(List<String> metadataKeys, DataType consumedDataType) {
        this.metadataKeys = metadataKeys;
        this.consumedDataType = consumedDataType;
    }

    @Override
    public DynamicTableSink copy() {
        final KafkaDynamicSink copy =
                new KafkaDynamicSink(
                        consumedDataType,
                        physicalDataType,
                        keyEncodingFormat,
                        valueEncodingFormat,
                        keyProjection,
                        valueProjection,
                        keyPrefix,
                        topic,
                        properties,
                        partitioner,
                        deliveryGuarantee,
                        upsertMode,
                        flushMode,
                        parallelism,
                        transactionalIdPrefix);
        // metadataKeys is mutable state and must be transferred explicitly.
        copy.metadataKeys = metadataKeys;
        return copy;
    }

    @Override
    public String asSummaryString() {
        return "Kafka table sink";
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final KafkaDynamicSink that = (KafkaDynamicSink) o;
        return Objects.equals(metadataKeys, that.metadataKeys)
                && Objects.equals(consumedDataType, that.consumedDataType)
                && Objects.equals(physicalDataType, that.physicalDataType)
                && Objects.equals(keyEncodingFormat, that.keyEncodingFormat)
                && Objects.equals(valueEncodingFormat, that.valueEncodingFormat)
                && Arrays.equals(keyProjection, that.keyProjection)
                && Arrays.equals(valueProjection, that.valueProjection)
                && Objects.equals(keyPrefix, that.keyPrefix)
                && Objects.equals(topic, that.topic)
                && Objects.equals(properties, that.properties)
                && Objects.equals(partitioner, that.partitioner)
                && Objects.equals(deliveryGuarantee, that.deliveryGuarantee)
                && Objects.equals(upsertMode, that.upsertMode)
                && Objects.equals(flushMode, that.flushMode)
                && Objects.equals(transactionalIdPrefix, that.transactionalIdPrefix)
                && Objects.equals(parallelism, that.parallelism);
    }

    @Override
    public int hashCode() {
        return Objects.hash(
                metadataKeys,
                consumedDataType,
                physicalDataType,
                keyEncodingFormat,
                valueEncodingFormat,
                keyProjection,
                valueProjection,
                keyPrefix,
                topic,
                properties,
                partitioner,
                deliveryGuarantee,
                upsertMode,
                flushMode,
                transactionalIdPrefix,
                parallelism);
    }

    // --------------------------------------------------------------------------------------------

    /** Creates a serializer for buffering rows, derived from the consumed data type. */
    private TypeSerializer<RowData> createRowDataTypeSerializer(
            Context context, ExecutionConfig executionConfig) {
        final TypeInformation<RowData> typeInformation =
                context.createTypeInformation(consumedDataType);
        return typeInformation.createSerializer(executionConfig);
    }

    /**
     * Maps each writable metadata column to its position in the consumed row, or -1 when that
     * metadata key was not requested. Metadata columns are appended after all physical columns,
     * hence the {@code physicalChildren.size() + pos} offset.
     */
    private int[] getMetadataPositions(List<LogicalType> physicalChildren) {
        return Stream.of(WritableMetadata.values())
                .mapToInt(
                        m -> {
                            final int pos = metadataKeys.indexOf(m.key);
                            if (pos < 0) {
                                return -1;
                            }
                            return physicalChildren.size() + pos;
                        })
                .toArray();
    }

    /** Whether any writable metadata columns were applied to this sink. */
    private boolean hasMetadata() {
        return metadataKeys.size() > 0;
    }

    /** Builds one field getter per projected physical field, in projection order. */
    private RowData.FieldGetter[] getFieldGetters(
            List<LogicalType> physicalChildren, int[] keyProjection) {
        return Arrays.stream(keyProjection)
                .mapToObj(
                        targetField ->
                                RowData.createFieldGetter(
                                        physicalChildren.get(targetField), targetField))
                .toArray(RowData.FieldGetter[]::new);
    }

    /**
     * Creates the runtime encoder for the given format over the projected physical data type,
     * stripping the key prefix from field names when one is configured. Returns {@code null} when
     * no format is given (i.e. no key format configured).
     */
    private @Nullable SerializationSchema<RowData> createSerialization(
            DynamicTableSink.Context context,
            @Nullable EncodingFormat<SerializationSchema<RowData>> format,
            int[] projection,
            @Nullable String prefix) {
        if (format == null) {
            return null;
        }
        DataType physicalFormatDataType = Projection.of(projection).project(this.physicalDataType);
        if (prefix != null) {
            physicalFormatDataType = DataTypeUtils.stripRowPrefix(physicalFormatDataType, prefix);
        }
        return format.createRuntimeEncoder(context, physicalFormatDataType);
    }

    // --------------------------------------------------------------------------------------------
    // Metadata handling
    // --------------------------------------------------------------------------------------------

    /** Writable metadata columns supported by this sink, read from the end of the consumed row. */
    enum WritableMetadata {
        HEADERS(
                "headers",
                // key and value of the map are nullable to make handling easier in queries
                DataTypes.MAP(DataTypes.STRING().nullable(), DataTypes.BYTES().nullable())
                        .nullable(),
                new MetadataConverter() {
                    private static final long serialVersionUID = 1L;

                    @Override
                    public Object read(RowData row, int pos) {
                        if (row.isNullAt(pos)) {
                            return null;
                        }
                        final MapData map = row.getMap(pos);
                        final ArrayData keyArray = map.keyArray();
                        final ArrayData valueArray = map.valueArray();
                        final List<Header> headers = new ArrayList<>();
                        for (int i = 0; i < keyArray.size(); i++) {
                            // entries with a null key or null value are silently skipped
                            if (!keyArray.isNullAt(i) && !valueArray.isNullAt(i)) {
                                final String key = keyArray.getString(i).toString();
                                final byte[] value = valueArray.getBinary(i);
                                headers.add(new KafkaHeader(key, value));
                            }
                        }
                        return headers;
                    }
                }),

        TIMESTAMP(
                "timestamp",
                DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3).nullable(),
                new MetadataConverter() {
                    private static final long serialVersionUID = 1L;

                    @Override
                    public Object read(RowData row, int pos) {
                        if (row.isNullAt(pos)) {
                            return null;
                        }
                        // millisecond precision (3) matches the declared data type
                        return row.getTimestamp(pos, 3).getMillisecond();
                    }
                });

        final String key;

        final DataType dataType;

        final MetadataConverter converter;

        WritableMetadata(String key, DataType dataType, MetadataConverter converter) {
            this.key = key;
            this.dataType = dataType;
            this.converter = converter;
        }
    }

    /** Reads a metadata value out of a consumed row at the given position. */
    interface MetadataConverter extends Serializable {
        Object read(RowData consumedRow, int pos);
    }

    // --------------------------------------------------------------------------------------------

    /** Simple immutable {@link Header} implementation for Kafka record headers. */
    private static class KafkaHeader implements Header {

        private final String key;

        private final byte[] value;

        KafkaHeader(String key, byte[] value) {
            this.key = key;
            this.value = value;
        }

        @Override
        public String key() {
            return key;
        }

        @Override
        public byte[] value() {
            return value;
        }
    }
}
package org.togglz.appengine.repository;

import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.google.appengine.api.datastore.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import org.togglz.core.Feature;
import org.togglz.core.repository.FeatureState;

import com.google.appengine.tools.development.testing.LocalDatastoreServiceTestConfig;
import com.google.appengine.tools.development.testing.LocalServiceTestHelper;

/**
 * Tests for {@link DatastoreStateRepository} against the local App Engine datastore,
 * covering persistence, reads, updates, and transaction behaviour.
 */
public class DatastoreStateRepositoryTest {

    /** Datastore cross-group (XG) transactions are limited to 25 entity groups. */
    public static final int MAX_ENTITY_GROUPS = 25;

    private final LocalServiceTestHelper helper =
            new LocalServiceTestHelper(new LocalDatastoreServiceTestConfig().setApplyAllHighRepJobPolicy());

    private DatastoreStateRepository repository;
    private DatastoreService datastoreService;

    @Before
    public void setup() {
        helper.setUp();
        datastoreService = DatastoreServiceFactory.getDatastoreService();
        repository = new DatastoreStateRepository(datastoreService);
    }

    @After
    public void tearDown() {
        helper.tearDown();
    }

    @Test
    public void customKindName() throws EntityNotFoundException {
        final String kind = "CustomKind";
        repository = new DatastoreStateRepository(kind, datastoreService);
        assertEquals(kind, repository.kind());
    }

    @Test
    public void shouldNotAddNewEntityGroupToCurrentCrossGroupTransaction() {
        update("F", false, null, null, null);
        final Transaction txn = datastoreService.beginTransaction(TransactionOptions.Builder.withXG(true));
        // Fill the XG transaction up to its entity-group limit; the repository read
        // must not enlist another group or the transaction would fail.
        for (int i = 0; i < MAX_ENTITY_GROUPS - 1; i++) {
            update("F" + i, false, null, null, txn);
        }
        update("F", false, null, null, txn);
        repository.getFeatureState(TestFeature.F1);
        txn.commit();
    }

    @Test
    public void shouldNotStartNewTransaction() {
        update("F1", false, null, null, null);
        DatastoreService spyDatastoreService =
                Mockito.spy(DelegateDatastoreService.getInstance(datastoreService));
        repository = new DatastoreStateRepository(spyDatastoreService);
        repository.getFeatureState(TestFeature.F1);
        Mockito.verify(spyDatastoreService, Mockito.never()).beginTransaction();
    }

    @Test
    public void shouldWorkInsideRunningTransaction() {
        update("F1", false, null, null, null);
        final Transaction txn = datastoreService.beginTransaction();
        update("F3", false, null, null, txn);
        repository.getFeatureState(TestFeature.F1);
        txn.commit();
    }

    @Test
    public void testShouldSaveStateWithoutStrategyOrParameters() throws EntityNotFoundException {
        /*
         * WHEN a feature without strategy is persisted
         */
        final FeatureState state = new FeatureState(TestFeature.F1).disable();
        repository.setFeatureState(state);

        /*
         * THEN there should be a corresponding entry in the database
         */
        final Key key = KeyFactory.createKey(repository.kind(), TestFeature.F1.name());
        final Entity featureEntity = datastoreService.get(key);
        assertEquals(false, featureEntity.getProperty(DatastoreStateRepository.ENABLED));
        assertNull(featureEntity.getProperty(DatastoreStateRepository.STRATEGY_ID));
        assertNull(featureEntity.getProperty(DatastoreStateRepository.STRATEGY_PARAMS_NAMES));
        assertNull(featureEntity.getProperty(DatastoreStateRepository.STRATEGY_PARAMS_VALUES));
    }

    @SuppressWarnings("unchecked")
    @Test
    public void testShouldSaveStateStrategyAndParameters() throws EntityNotFoundException {
        /*
         * WHEN a feature without strategy is persisted
         */
        final FeatureState state =
                new FeatureState(TestFeature.F1).enable().setStrategyId("someId").setParameter("param", "foo");
        repository.setFeatureState(state);

        /*
         * THEN there should be a corresponding entry in the database
         */
        final Key key = KeyFactory.createKey(repository.kind(), TestFeature.F1.name());
        final Entity featureEntity = datastoreService.get(key);
        assertEquals(true, featureEntity.getProperty(DatastoreStateRepository.ENABLED));
        assertEquals("someId", featureEntity.getProperty(DatastoreStateRepository.STRATEGY_ID));
        assertThat((List<String>) featureEntity.getProperty(DatastoreStateRepository.STRATEGY_PARAMS_NAMES),
                is(Arrays.asList("param")));
        assertThat((List<String>) featureEntity.getProperty(DatastoreStateRepository.STRATEGY_PARAMS_VALUES),
                is(Arrays.asList("foo")));
    }

    @Test
    public void shouldReturnNullWhenStateDoesntExist() {
        /*
         * GIVEN there is no feature state in the datastore WHEN the repository reads the state
         */
        final FeatureState state = repository.getFeatureState(TestFeature.F1);

        /*
         * THEN the properties should be set like expected
         */
        assertNull(state);
    }

    @Test
    public void testShouldReadStateWithoutStrategyAndParameters() {
        /*
         * GIVEN a database row containing a simple feature state
         */
        update("F1", false, null, null, null);

        /*
         * WHEN the repository reads the state
         */
        final FeatureState state = repository.getFeatureState(TestFeature.F1);

        /*
         * THEN the properties should be set like expected
         */
        assertNotNull(state);
        assertEquals(TestFeature.F1, state.getFeature());
        assertEquals(false, state.isEnabled());
        assertEquals(null, state.getStrategyId());
        assertEquals(0, state.getParameterNames().size());
    }

    @Test
    public void testShouldReadStateWithStrategyAndParameters() {
        /*
         * GIVEN a database row containing a simple feature state
         */
        // singletonMap avoids the double-brace-initialization anti-pattern
        // (anonymous HashMap subclass needing @SuppressWarnings("serial")).
        final Map<String, String> map = Collections.singletonMap("param23", "foobar");
        update("F1", true, "myStrategy", map, null);

        /*
         * WHEN the repository reads the state
         */
        final FeatureState state = repository.getFeatureState(TestFeature.F1);

        /*
         * THEN the properties should be set like expected
         */
        assertNotNull(state);
        assertEquals(TestFeature.F1, state.getFeature());
        assertEquals(true, state.isEnabled());
        assertEquals("myStrategy", state.getStrategyId());
        assertEquals(1, state.getParameterNames().size());
        assertEquals("foobar", state.getParameter("param23"));
    }

    @SuppressWarnings("unchecked")
    @Test
    public void testShouldUpdateExistingDatabaseEntry() throws EntityNotFoundException {
        /*
         * GIVEN a database row containing a simple feature state
         */
        final Map<String, String> map = Collections.singletonMap("param23", "foobar");
        update("F1", true, "myStrategy", map, null);

        /*
         * AND the database entries are like expected
         */
        final Key key = KeyFactory.createKey(repository.kind(), TestFeature.F1.name());
        Entity featureEntity = datastoreService.get(key);
        assertEquals(true, featureEntity.getProperty(DatastoreStateRepository.ENABLED));
        assertEquals("myStrategy", featureEntity.getProperty(DatastoreStateRepository.STRATEGY_ID));
        assertThat((List<String>) featureEntity.getProperty(DatastoreStateRepository.STRATEGY_PARAMS_NAMES),
                is(Arrays.asList("param23")));
        assertThat((List<String>) featureEntity.getProperty(DatastoreStateRepository.STRATEGY_PARAMS_VALUES),
                is(Arrays.asList("foobar")));

        /*
         * WHEN the repository writes new state
         */
        final FeatureState state =
                new FeatureState(TestFeature.F1).disable().setStrategyId("someId").setParameter("param", "foo");
        repository.setFeatureState(state);

        /*
         * THEN the properties should be set like expected
         */
        featureEntity = datastoreService.get(key);
        assertEquals(false, featureEntity.getProperty(DatastoreStateRepository.ENABLED));
        assertEquals("someId", featureEntity.getProperty(DatastoreStateRepository.STRATEGY_ID));
        assertThat((List<String>) featureEntity.getProperty(DatastoreStateRepository.STRATEGY_PARAMS_NAMES),
                is(Arrays.asList("param")));
        assertThat((List<String>) featureEntity.getProperty(DatastoreStateRepository.STRATEGY_PARAMS_VALUES),
                is(Arrays.asList("foo")));
    }

    /**
     * Writes a feature entity directly to the datastore, bypassing the repository.
     *
     * @param name       feature/entity name (used as the key name)
     * @param enabled    enabled flag to store
     * @param strategyId strategy id to store, may be {@code null}
     * @param params     strategy parameters to store, may be {@code null} to omit them
     * @param txn        transaction to put within, or {@code null} for a transactionless put
     */
    private void update(final String name, final boolean enabled, final String strategyId,
            final Map<String, String> params, final Transaction txn) {
        final Entity featureEntity = new Entity(repository.kind(), name);
        featureEntity.setUnindexedProperty(DatastoreStateRepository.ENABLED, enabled);
        featureEntity.setUnindexedProperty(DatastoreStateRepository.STRATEGY_ID, strategyId);
        if (params != null) {
            final List<String> strategyParamsNames = new ArrayList<String>();
            final List<String> strategyParamsValues = new ArrayList<String>();
            // entrySet iteration avoids a second lookup per key (vs. keySet + get)
            for (final Map.Entry<String, String> param : params.entrySet()) {
                strategyParamsNames.add(param.getKey());
                strategyParamsValues.add(param.getValue());
            }
            featureEntity.setUnindexedProperty(DatastoreStateRepository.STRATEGY_PARAMS_NAMES, strategyParamsNames);
            featureEntity.setUnindexedProperty(DatastoreStateRepository.STRATEGY_PARAMS_VALUES, strategyParamsValues);
        }
        if (txn == null) {
            datastoreService.put(featureEntity);
        } else {
            datastoreService.put(txn, featureEntity);
        }
    }

    // nested enums are implicitly static, so the explicit modifier was dropped
    private enum TestFeature implements Feature {
        F1
    }
}
package io.takari.bpm;

import io.takari.bpm.api.ExecutionContext;
import io.takari.bpm.api.JavaDelegate;
import io.takari.bpm.model.*;
import org.junit.Test;
import org.mockito.ArgumentCaptor;

import java.util.*;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.*;

/**
 * Engine tests for {@link ScriptTask} handling: inline (CONTENT) and external
 * (REFERENCE) scripts, IN/OUT variable mappings, copy-all-variables mode,
 * BPMN-error wrapping and access to registered tasks from script code.
 */
public class ScriptTaskTest extends AbstractEngineTest {

    /**
     * start --> t1 --> t2 --> end
     */
    @Test
    public void testJs() throws Exception {
        double a = System.currentTimeMillis();
        double b = 1234;
        // The script reads 'a' and 'b' from the process variables and stores their sum as 'c'.
        String script = "execution.setVariable('c', a + b)";

        // ---

        // t2 runs after the script task and asserts that 'c' was set by the script.
        JavaDelegate t2 = spy(new JavaDelegate() {
            @Override
            public void execute(ExecutionContext ctx) throws Exception {
                Double c = (Double) ctx.getVariable("c");
                assertNotNull(c);
                assertEquals(c, (Double) (a + b));
            }
        });
        getServiceTaskRegistry().register("t2", t2);

        // --

        String processId = "test";
        deploy(new ProcessDefinition(processId, Arrays.asList(
                new StartEvent("start"),
                new SequenceFlow("f1", "start", "t1"),
                new ScriptTask("t1", ScriptTask.Type.CONTENT, "javascript", script),
                new SequenceFlow("f2", "t1", "t2"),
                new ServiceTask("t2", ExpressionType.DELEGATE, "${t2}"),
                new SequenceFlow("f3", "t2", "end"),
                new EndEvent("end")
        )));

        // ---

        String key = UUID.randomUUID().toString();
        Map<String, Object> args = new HashMap<>();
        args.put("a", a);
        args.put("b", b);
        getEngine().start(key, processId, args);

        // ---

        assertActivations(key, processId,
                "start", "f1", "t1", "f2", "t2", "f3", "end");
        assertNoMoreActivations();

        verify(t2, times(1)).execute(any(ExecutionContext.class));
    }

    /**
     * start --> t1 --> t2 --> end
     */
    @Test
    public void testJsInOut() throws Exception {
        double outerA = System.currentTimeMillis();
        double innerB = 2345;
        // The script works entirely with the task-local ("inner") variables created
        // by the IN mappings; the OUT mapping copies the result back to "outerC".
        String script = "execution.setVariable('innerC', execution.getVariable('innerA') + execution.getVariable('innerB'))";

        // ---

        JavaDelegate t2 = spy(new JavaDelegate() {
            @Override
            public void execute(ExecutionContext ctx) throws Exception {
                Double c = (Double) ctx.getVariable("outerC");
                assertNotNull(c);
                assertEquals(c, (Double) (outerA + innerB));
            }
        });
        getServiceTaskRegistry().register("t2", t2);

        // --

        // IN: copy an existing process variable and set a literal value.
        Set<VariableMapping> in = new HashSet<>();
        in.add(VariableMapping.copy("outerA", "innerA"));
        in.add(VariableMapping.set(innerB, "innerB"));

        // OUT: expose the script's result to the parent scope.
        Set<VariableMapping> out = Collections.singleton(VariableMapping.copy("innerC", "outerC"));

        String processId = "test";
        deploy(new ProcessDefinition(processId, Arrays.asList(
                new StartEvent("start"),
                new SequenceFlow("f1", "start", "t1"),
                new ScriptTask("t1", ScriptTask.Type.CONTENT, "javascript", script, in, out),
                new SequenceFlow("f2", "t1", "t2"),
                new ServiceTask("t2", ExpressionType.DELEGATE, "${t2}"),
                new SequenceFlow("f3", "t2", "end"),
                new EndEvent("end")
        )));

        // ---

        String key = UUID.randomUUID().toString();
        Map<String, Object> args = new HashMap<>();
        args.put("outerA", outerA);
        getEngine().start(key, processId, args);

        // ---

        assertActivations(key, processId,
                "start", "f1", "t1", "f2", "t2", "f3", "end");
        assertNoMoreActivations();

        verify(t2, times(1)).execute(any(ExecutionContext.class));
    }

    /**
     * start --> t1 --> t2 --> end
     */
    @Test
    public void testJsWithCopyAllVariables() throws Exception {
        // With copy-all-variables enabled the script can read 'main' directly,
        // without an explicit IN mapping for it.
        String script = "execution.setVariable('inner', execution.getVariable('main') + 10)";

        // --

        Set<VariableMapping> in = new HashSet<>();
        in.add(VariableMapping.set("whatever", "invar1"));

        Set<VariableMapping> out = Collections.singleton(VariableMapping.copy("inner", "outer"));

        String processId = "test";
        deploy(new ProcessDefinition(processId, Arrays.asList(
                new StartEvent("start"),
                new SequenceFlow("f1", "start", "t1"),
                // last constructor argument enables the copy-all-variables mode
                new ScriptTask("t1", ScriptTask.Type.CONTENT, "javascript", script, in, out, true),
                new SequenceFlow("f2", "t1", "t2"),
                new ServiceTask("t2", ExpressionType.DELEGATE, "${t2}"),
                new SequenceFlow("f3", "t2", "end"),
                new EndEvent("end")
        )));

        // ---

        JavaDelegate t2 = spy(new JavaDelegate() {
            @Override
            public void execute(ExecutionContext ctx) throws Exception {
                Double c = (Double) ctx.getVariable("outer");
                assertNotNull(c);
                assertEquals((Double) Double.sum(5, 10), c);
            }
        });
        getServiceTaskRegistry().register("t2", t2);

        // ---

        String key = UUID.randomUUID().toString();
        Map<String, Object> args = new HashMap<>();
        args.put("main", 5.0);
        getEngine().start(key, processId, args);

        verify(t2, times(1)).execute(any(ExecutionContext.class));
    }

    /**
     * start --> t1 --> end
     */
    @Test
    public void testExternalJs() throws Exception {
        String processId = "test";
        deploy(new ProcessDefinition(processId, Arrays.asList(
                new StartEvent("start"),
                new SequenceFlow("f1", "start", "t1"),
                // REFERENCE type: the script is resolved by name ("test.js"),
                // the language argument is null and presumably derived elsewhere.
                new ScriptTask("t1", ScriptTask.Type.REFERENCE, null, "test.js"),
                new SequenceFlow("f2", "t1", "end"),
                new EndEvent("end")
        )));

        // ---

        String key = UUID.randomUUID().toString();
        getEngine().start(key, processId, null);

        // ---

        assertActivations(key, processId,
                "start", "f1", "t1", "f2", "end");
        assertNoMoreActivations();
    }

    /**
     * start --> t1 --> end
     */
    @Test
    public void testNameExpression() throws Exception {
        String processId = "test";
        deploy(new ProcessDefinition(processId, Arrays.asList(
                new StartEvent("start"),
                new SequenceFlow("f1", "start", "t1"),
                // The script reference itself is an expression, resolved from
                // the process variable 'myScript' at runtime.
                new ScriptTask("t1", ScriptTask.Type.REFERENCE, null, "${myScript}"),
                new SequenceFlow("f2", "t1", "end"),
                new EndEvent("end")
        )));

        // ---

        String key = UUID.randomUUID().toString();
        getEngine().start(key, processId, Collections.singletonMap("myScript", "test.js"));

        // ---

        assertActivations(key, processId,
                "start", "f1", "t1", "f2", "end");
        assertNoMoreActivations();
    }

    /**
     * start --> sub1 --------> end
     *              \        /
     *               bev1 --
     */
    @Test
    public void testWrappedException() throws Exception {
        // With wrapping enabled, the BpmnError thrown by the script should be
        // routed to the boundary event instead of failing the process.
        getConfiguration().setWrapAllExceptionsAsBpmnErrors(true);

        // ---

        String script = "throw new io.takari.bpm.api.BpmnError('boom')";

        String processId = "test";
        deploy(new ProcessDefinition(processId, Arrays.asList(
                new StartEvent("start"),
                new SequenceFlow("f1", "start", "sub1"),
                new SubProcess("sub1",
                        new StartEvent("substart"),
                        new SequenceFlow("f2", "substart", "t1"),
                        new ScriptTask("t1", ScriptTask.Type.CONTENT, "groovy", script),
                        new SequenceFlow("f3", "t1", "subend"),
                        new EndEvent("subend")),
                new BoundaryEvent("bev1", "sub1", null),
                new SequenceFlow("f4", "bev1", "end"),
                new SequenceFlow("f5", "sub1", "end"),
                new EndEvent("end")
        )));

        // ---

        String key = UUID.randomUUID().toString();
        getEngine().start(key, processId, null);

        // ---

        // The error path through bev1/f4 is taken; the normal path f5 is not.
        assertActivations(key, processId,
                "start", "f1", "sub1", "substart", "f2", "t1", "bev1", "f4", "end");
        assertNoMoreActivations();
    }

    /**
     * start --> t1 --> t2 --> end
     */
    @Test
    public void testTasks() throws Exception {
        double a = System.currentTimeMillis();
        // Scripts can reach registered service tasks through the 'tasks' accessor.
        String script = "tasks.get('t1').doSomething(a)";

        // ---

        TestTask t1 = spy(new TestTask() {
            @Override
            public void doSomething(Object o) {
                assertEquals(a, o);
            }
        });
        getServiceTaskRegistry().register("t1", t1);

        // --

        String processId = "test";
        deploy(new ProcessDefinition(processId, Arrays.asList(
                new StartEvent("start"),
                new SequenceFlow("f1", "start", "t1"),
                new ScriptTask("t1", ScriptTask.Type.CONTENT, "javascript", script),
                new SequenceFlow("f2", "t1", "end"),
                new EndEvent("end")
        )));

        // ---

        String key = UUID.randomUUID().toString();
        Map<String, Object> args = new HashMap<>();
        args.put("a", a);
        getEngine().start(key, processId, args);

        // ---

        assertActivations(key, processId,
                "start", "f1", "t1", "f2", "end");
        assertNoMoreActivations();

        verify(t1, times(1)).doSomething(anyObject());
    }

    // Simple task contract used by testTasks to verify script-to-task calls.
    public interface TestTask {

        void doSomething(Object o);
    }
}
package name.euleule.processing;

import name.euleule.processing.elements.One;
import processing.core.PApplet;
import processing.core.PVector;

import java.util.ArrayList;
import java.util.List;

/**
 * - Create a number of One, random in size, limited to the parameters for their maximum and minimum diameter.
 * <p/>
 * - Distribute the elements evenly on the canvas.
 * <p/>
 * - Set a random color.
 * <p/>
 * - If two elements intersect each other, start drawing a line.
 * <p/>
 * - Increase lightness if the elements distance grows. Decrease lightness if the elements distance decreases.
 * <p/>
 * - When an element reaches the border of the canvas, remove the element.
 * <p/>
 * - Repeat until desired number of iterations is reached.
 */
public class ElementOneVariationTwo extends PApplet {

    // Number of elements used for rendering
    final int NUM_OBJECTS = 60;
    // Minimum size of elements
    final int D_MIN = 100;
    // Maximum size of elements
    final int D_MAX = 200;
    // Border width in px
    final int BORDER = 200;
    // Number of iterations
    final int MAX_ITERATIONS = 6;
    // Canvas size (square), in px
    final int SIZE = 1600;

    // Elements currently alive on the canvas
    List<One> objects;
    // Pairs of intersecting elements; each inner list holds exactly two elements
    List<List<One>> groups;
    // Base RGB color shared by all elements of a run (random unit vector)
    PVector color;
    int iterations = 0;

    @Override
    public void settings() {
        size(SIZE, SIZE);
    }

    /**
     * Set up scene.
     */
    @Override
    public void setup() {
        background(255, 250, 240);
        // Sprinkle light-grey noise points over the background (~1/3 of pixels).
        stroke(245);
        for (int i = 0; i < width; i++) {
            for (int j = 0; j < height; j++) {
                if (random(0, 3) > 2) {
                    point(i, j);
                }
            }
        }
        color = PVector.random3D();
        strokeWeight(1);
        reset();
    }

    /**
     * Re-initialize the elements.
     */
    private void reset() {
        objects = new ArrayList<>();
        groups = new ArrayList<>();
        for (int i = 0; i < NUM_OBJECTS; i++) {
            float d = random(D_MIN, D_MAX);
            // Positions are relative to the canvas center (draw() translates there),
            // kept BORDER px away from the edges.
            float x = random(-width / 2 + BORDER, width / 2 - BORDER);
            float y = random(-height / 2 + BORDER, height / 2 - BORDER);
            One one = new One(x, y, d, color);
            objects.add(one);
        }
        iterations++;
    }

    /**
     * Draw scene.
     */
    @Override
    public void draw() {
        update();
        // Move the origin to the canvas center; element positions are center-relative.
        translate(width / 2, height / 2);
        for (List<One> group : groups) {
            One o1 = group.get(0);
            One o2 = group.get(1);
            PVector a = o1.getPos();
            PVector b = o2.getPos();
            // Closer pairs draw more opaque lines (alpha capped at 9).
            float alpha = min((D_MIN + D_MAX) * 2 / a.dist(b), 9);
            // NOTE: this local intentionally shadows the 'color' field; it scales the
            // element color by distance and run progress before stroking.
            PVector color = o1.getColor().copy();
            color.mult(a.dist(b) * iterations / MAX_ITERATIONS);
            stroke(color.x, color.y, color.z, alpha);
            line(a.x, a.y, b.x, b.y);
        }
    }

    /**
     * Update the element in the scene.
     */
    private void update() {
        for (One o : objects) {
            o.update();
        }
        checkIntersections();
        checkOutOfScreen();
        if (iterations > MAX_ITERATIONS) {
            exit();
        }
        // All elements left the canvas: start the next iteration.
        if (objects.size() == 0) {
            reset();
        }
    }

    /**
     * Check if any elements are out of screen. If they are, remove them from the scene.
     */
    private void checkOutOfScreen() {
        // check for items out of screen
        List<One> remove = new ArrayList<>();
        for (One o : objects) {
            if (isOutOfScreen(o)) {
                remove.add(o);
            }
        }
        // get groups for elements out of screen
        List<List<One>> removeGroup = new ArrayList<>();
        for (One o : remove) {
            for (List<One> group : groups) {
                if (group.get(0).equals(o) || group.get(1).equals(o)) {
                    removeGroup.add(group);
                }
            }
            objects.remove(o);
        }
        groups.removeAll(removeGroup);
    }

    /**
     * Check if the element is out of screen.
     *
     * @param one Element that needs to be checked.
     * @return boolean
     */
    private boolean isOutOfScreen(One one) {
        // Distance from the center plus the border margin vs. half the canvas width.
        return (one.getPos().dist(new PVector(0, 0)) + BORDER > width / 2);
    }

    /**
     * Check all elements for intersections.
     */
    private void checkIntersections() {
        // Snapshot of the list so each element is compared against every other;
        // each unordered pair is visited twice, addGroup() filters duplicates.
        List<One> list = new ArrayList<>();
        list.addAll(objects);
        for (One o : objects) {
            for (One l : list) {
                if (o.equals(l)) {
                    continue;
                }
                if (o.intersect(l)) {
                    addGroup(o, l);
                }
            }
        }
    }

    /**
     * Add a group of two intersecting elements.
     *
     * @param o1 One
     * @param o2 One
     */
    private void addGroup(One o1, One o2) {
        // check if there a group for the elements exists (in either order)
        for (List<One> group : groups) {
            if (group.get(0).equals(o1) && group.get(1).equals(o2) ||
                    group.get(0).equals(o2) && group.get(1).equals(o1)) {
                // a group exists, do nothing
                return;
            }
        }
        // create new group
        List<One> group = new ArrayList<>();
        group.add(o1);
        group.add(o2);
        groups.add(group);
    }

    public static void main(String args[]) {
        PApplet.main(new String[]{"--present", "name.euleule.processing.ElementOneVariationTwo"});
    }
}
/*
 * Licensed to ObjectStyle LLC under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ObjectStyle LLC licenses
 * this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.bootique.config.jackson;

import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.bootique.resource.ResourceFactory;
import io.bootique.type.TypeRef;
import org.junit.jupiter.api.Test;

import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Scanner;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;

/**
 * Tests {@link JsonConfigurationFactory}: binding YAML-sourced JSON trees to
 * beans, collections, maps, polymorphic types and Bootique resource factories,
 * including sub-path ("prefix") lookups.
 */
public class JsonConfigurationFactoryTest {

    // Builds a factory over the parsed YAML; each test supplies its own config text.
    private JsonConfigurationFactory factory(String yaml) {
        JsonNode rootConfig = YamlReader.read(yaml);
        return new JsonConfigurationFactory(rootConfig, new ObjectMapper());
    }

    @Test
    public void testConfig() {
        Bean1 b1 = factory("s: SS\ni: 55").config(Bean1.class, "");

        assertNotNull(b1);
        assertEquals("SS", b1.s);
        assertEquals(55, b1.i);
    }

    @Test
    public void testConfig_Nested() {
        Bean2 b2 = factory("b1:\n  s: SS\n  i: 55").config(Bean2.class, "");

        assertNotNull(b2);
        assertNotNull(b2.getB1());
        assertEquals("SS", b2.getB1().getS());
        assertEquals(55, b2.getB1().getI());
    }

    @Test
    public void testConfig_Subconfig() {
        // A non-empty prefix selects a subtree of the config for binding.
        Bean1 b1 = factory("b1:\n  s: SS\n  i: 55").config(Bean1.class, "b1");

        assertNotNull(b1);
        assertEquals("SS", b1.getS());
        assertEquals(55, b1.getI());
    }

    @Test
    public void testConfig_Subconfig_MultiLevel() {
        Bean1 b1 = factory("b0:\n  b1:\n    s: SS\n    i: 55").config(Bean1.class, "b0.b1");

        assertNotNull(b1);
        assertEquals("SS", b1.getS());
        assertEquals(55, b1.getI());
    }

    @Test
    public void testConfig_Subconfig_Missing() {
        // A missing path still yields a bean, with default (unset) field values.
        Bean1 b1 = factory("b1:\n  s: SS\n  i: 55").config(Bean1.class, "no.such.path");

        assertNotNull(b1);
        assertEquals(null, b1.getS());
        assertEquals(0, b1.getI());
    }

    @Test
    public void testList_SingleLevel() {
        List<Object> l = factory("- SS\n- 55").config(new TypeRef<List<Object>>() {
        }, "");

        assertNotNull(l);
        assertEquals("SS", l.get(0));
        assertEquals(55, l.get(1));
    }

    @Test
    public void testList_MultiLevel() {
        List<List<Object>> l = factory("-\n  - SS\n  - 55\n-\n  - X")
                .config(new TypeRef<List<List<Object>>>() {
                }, "");

        assertNotNull(l);
        assertEquals(2, l.size());

        List<Object> sl1 = l.get(0);
        assertEquals(2, sl1.size());
        assertEquals("SS", sl1.get(0));
        assertEquals(55, sl1.get(1));

        List<Object> sl2 = l.get(1);
        assertEquals(1, sl2.size());
        assertEquals("X", sl2.get(0));
    }

    @Test
    public void testMap_SingleLevel() {
        Map<String, Object> m = factory("b1: SS\ni: 55").config(new TypeRef<Map<String, Object>>() {
        }, "");

        assertNotNull(m);
        assertEquals("SS", m.get("b1"));
        assertEquals(55, m.get("i"));
    }

    @Test
    public void testMap_MultiLevel() {
        Map<String, Map<String, Object>> m = factory("b1:\n  k1: SS\n  i: 55")
                .config(new TypeRef<Map<String, Map<String, Object>>>() {
                }, "");

        assertNotNull(m);

        Map<String, Object> subM = m.get("b1");
        assertNotNull(subM);

        assertEquals("SS", subM.get("k1"));
        assertEquals(55, subM.get("i"));
    }

    @Test
    public void testConfig_Polimorphic_Super() {
        // 'type: sup1' maps to the base type via @JsonTypeName on BeanSuper.
        BeanSuper b1 = factory("type: sup1").config(BeanSuper.class, "");
        assertEquals(BeanSuper.class, b1.getClass());
    }

    @Test
    public void testConfig_Polimorphic_Sub1() {
        BeanSuper b1 = factory("type: sub1\np1: p111").config(BeanSuper.class, "");
        assertEquals(BeanSub1.class, b1.getClass());
        assertEquals("p111", ((BeanSub1) b1).getP1());
    }

    @Test
    public void testConfig_Polimorphic_Sub2() {
        BeanSuper b1 = factory("type: sub2\np2: p222").config(BeanSuper.class, "");
        assertEquals(BeanSub2.class, b1.getClass());
        assertEquals("p222", ((BeanSub2) b1).getP2());
    }

    @Test
    public void testConfig_ResourceFactory() throws IOException {
        ResourceFactoryHolder rfh = factory("resourceFactory: classpath:io/bootique/config/resourcefactory.txt")
                .config(ResourceFactoryHolder.class, "");

        assertNotNull(rfh);
        assertNotNull(rfh.resourceFactory);

        // Verify the bound resource is actually readable and has the expected content.
        try (Scanner scanner = new Scanner(rfh.resourceFactory.getUrl().openStream(), "UTF-8")) {
            assertEquals("resource factory worked!", scanner.useDelimiter("\\Z").nextLine());
        }
    }

    // Simple bean bound by field; 'l' is never set in tests and stays 0.
    public static class Bean1 {

        private String s;
        private int i;
        private long l;

        public String getS() {
            return s;
        }

        public int getI() {
            return i;
        }

        public long getL() {
            return l;
        }
    }

    // Bean nesting Bean1, for multi-level binding tests.
    public static class Bean2 {

        private Bean1 b1;

        public Bean1 getB1() {
            return b1;
        }
    }

    // Polymorphic hierarchy keyed on the 'type' property.
    @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
    @JsonTypeName("sup1")
    @JsonSubTypes(value = {@JsonSubTypes.Type(value = BeanSub1.class), @JsonSubTypes.Type(value = BeanSub2.class)})
    public static class BeanSuper {
    }

    @JsonTypeName("sub1")
    public static class BeanSub1 extends BeanSuper {

        private String p1;

        public String getP1() {
            return p1;
        }
    }

    @JsonTypeName("sub2")
    public static class BeanSub2 extends BeanSuper {

        private String p2;

        public String getP2() {
            return p2;
        }
    }

    // Holder bound via setter, exercising ResourceFactory deserialization.
    public static class ResourceFactoryHolder {

        private ResourceFactory resourceFactory;

        public void setResourceFactory(ResourceFactory resourceFactory) {
            this.resourceFactory = resourceFactory;
        }
    }
}
/**
 * Copyright (c) 2015-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 */

package com.facebook.react.flat;

import javax.annotation.Nullable;

import java.util.ArrayList;
import java.util.List;

import android.util.SparseArray;
import android.util.SparseIntArray;
import android.view.View;
import android.view.View.MeasureSpec;
import android.view.ViewGroup;

import com.facebook.react.uimanager.NativeViewHierarchyManager;
import com.facebook.react.uimanager.SizeMonitoringFrameLayout;
import com.facebook.react.uimanager.ThemedReactContext;
import com.facebook.react.uimanager.ViewGroupManager;
import com.facebook.react.uimanager.ViewManagerRegistry;

/**
 * FlatNativeViewHierarchyManager is the only class that performs View manipulations. All of this
 * class methods can only be called from UI thread by {@link FlatUIViewOperationQueue}.
 */
/* package */ final class FlatNativeViewHierarchyManager extends NativeViewHierarchyManager
    implements ViewResolver {

  /* package */ FlatNativeViewHierarchyManager(ViewManagerRegistry viewManagers) {
    super(viewManagers, new FlatRootViewManager());
  }

  /**
   * {@link ViewResolver} implementation: exposes the protected resolveView() lookup by react tag.
   */
  @Override
  public View getView(int reactTag) {
    return super.resolveView(reactTag);
  }

  /**
   * Wraps a new FlatViewGroup root inside the given SizeMonitoringFrameLayout and registers it
   * under the given tag.
   */
  @Override
  public void addRootView(
      int tag,
      SizeMonitoringFrameLayout view,
      ThemedReactContext themedContext) {
    FlatViewGroup root = new FlatViewGroup(themedContext);
    view.addView(root);

    // When unmounting, ReactInstanceManager.detachViewFromInstance() will check id of the
    // top-level View (SizeMonitoringFrameLayout) and pass it back to JS. We want that View's id to
    // be set, otherwise NativeViewHierarchyManager will not be able to cleanup properly.
    view.setId(tag);

    addRootViewGroup(tag, root, themedContext);
  }

  /**
   * Updates DrawCommands and AttachDetachListeners of a FlatViewGroup specified by a reactTag.
   *
   * @param reactTag reactTag to lookup FlatViewGroup by
   * @param drawCommands if non-null, new draw commands to execute during the drawing.
   * @param listeners if non-null, new attach-detach listeners.
   */
  /* package */ void updateMountState(
      int reactTag,
      @Nullable DrawCommand[] drawCommands,
      @Nullable AttachDetachListener[] listeners,
      @Nullable NodeRegion[] nodeRegions) {
    FlatViewGroup view = (FlatViewGroup) resolveView(reactTag);
    if (drawCommands != null) {
      view.mountDrawCommands(drawCommands);
    }
    if (listeners != null) {
      view.mountAttachDetachListeners(listeners);
    }
    if (nodeRegions != null) {
      view.mountNodeRegions(nodeRegions);
    }
  }

  /**
   * Updates DrawCommands and AttachDetachListeners of a clipping FlatViewGroup specified by a
   * reactTag.
   *
   * @param reactTag The react tag to lookup FlatViewGroup by.
   * @param drawCommands If non-null, new draw commands to execute during the drawing.
   * @param drawViewIndexMap Mapping of react tags to the index of the corresponding DrawView
   *   command in the draw command array.
   * @param commandMaxBot At each index i, the maximum bottom value (or right value in the case of
   *   horizontal clipping) value of all draw commands at or below i.
   * @param commandMinTop At each index i, the minimum top value (or left value in the case of
   *   horizontal clipping) value of all draw commands at or below i.
   * @param listeners If non-null, new attach-detach listeners.
   * @param nodeRegions Node regions to mount.
   * @param regionMaxBot At each index i, the maximum bottom value (or right value in the case of
   *   horizontal clipping) value of all node regions at or below i.
   * @param regionMinTop At each index i, the minimum top value (or left value in the case of
   *   horizontal clipping) value of all draw commands at or below i.
   * @param willMountViews Whether we are going to also send a mountViews command in this state
   *   cycle.
   */
  /* package */ void updateClippingMountState(
      int reactTag,
      @Nullable DrawCommand[] drawCommands,
      SparseIntArray drawViewIndexMap,
      float[] commandMaxBot,
      float[] commandMinTop,
      @Nullable AttachDetachListener[] listeners,
      @Nullable NodeRegion[] nodeRegions,
      float[] regionMaxBot,
      float[] regionMinTop,
      boolean willMountViews) {
    FlatViewGroup view = (FlatViewGroup) resolveView(reactTag);
    if (drawCommands != null) {
      view.mountClippingDrawCommands(
          drawCommands,
          drawViewIndexMap,
          commandMaxBot,
          commandMinTop,
          willMountViews);
    }
    if (listeners != null) {
      view.mountAttachDetachListeners(listeners);
    }
    if (nodeRegions != null) {
      view.mountClippingNodeRegions(nodeRegions, regionMaxBot, regionMinTop);
    }
  }

  /**
   * Adds/detaches children of the ViewGroup identified by reactTag. FlatViewGroups handle both
   * operations themselves; for regular ViewGroups only additions are performed here, delegated to
   * the ViewGroupManager (viewsToDetach is not applied in that branch).
   */
  /* package */ void updateViewGroup(int reactTag, int[] viewsToAdd, int[] viewsToDetach) {
    View view = resolveView(reactTag);
    if (view instanceof FlatViewGroup) {
      ((FlatViewGroup) view).mountViews(this, viewsToAdd, viewsToDetach);
      return;
    }

    ViewGroup viewGroup = (ViewGroup) view;
    ViewGroupManager viewManager = (ViewGroupManager) resolveViewManager(reactTag);

    List<View> listOfViews = new ArrayList<>(viewsToAdd.length);

    // batch the set of additions - some view managers can take advantage of the batching to
    // decrease operations, etc.
    for (int viewIdToAdd : viewsToAdd) {
      // viewsToAdd entries may be negative; abs() recovers the react tag.
      int tag = Math.abs(viewIdToAdd);
      listOfViews.add(resolveView(tag));
    }
    viewManager.addViews(viewGroup, listOfViews);
  }

  /**
   * Updates View bounds, possibly re-measuring and re-layouting it if the size changed.
   *
   * @param reactTag reactTag to lookup a View by
   * @param left left coordinate relative to parent
   * @param top top coordinate relative to parent
   * @param right right coordinate relative to parent
   * @param bottom bottom coordinate relative to parent
   */
  /* package */ void updateViewBounds(int reactTag, int left, int top, int right, int bottom) {
    View view = resolveView(reactTag);
    int width = right - left;
    int height = bottom - top;
    if (view.getWidth() != width || view.getHeight() != height) {
      // size changed, we need to measure and layout the View
      view.measure(
          MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY),
          MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY));
      view.layout(left, top, right, bottom);
    } else {
      // same size, only location changed, there is a faster route.
      view.offsetLeftAndRight(left - view.getLeft());
      view.offsetTopAndBottom(top - view.getTop());
    }
  }

  /** Applies padding to the View identified by reactTag. */
  /* package */ void setPadding(
      int reactTag,
      int paddingLeft,
      int paddingTop,
      int paddingRight,
      int paddingBottom) {
    resolveView(reactTag).setPadding(paddingLeft, paddingTop, paddingRight, paddingBottom);
  }

  /**
   * Drops the given views. Keys are tags (negative for root views); values are parent tags, used
   * to notify a clipping parent when a detached child is dropped.
   */
  /* package */ void dropViews(SparseIntArray viewsToDrop) {
    for (int i = 0, count = viewsToDrop.size(); i < count; i++) {
      int viewToDrop = viewsToDrop.keyAt(i);
      View view = null;
      if (viewToDrop > 0) {
        try {
          view = resolveView(viewToDrop);
          dropView(view);
        } catch (Exception e) {
          // the view is already dropped, nothing we can do
        }
      } else {
        // Root views are noted with a negative tag from StateBuilder.
        removeRootView(-viewToDrop);
      }

      int parentTag = viewsToDrop.valueAt(i);
      // this only happens for clipped, non-root views - clipped because there is no parent, and
      // not a root view (because we explicitly pass -1 for root views).
      if (parentTag > 0 && view != null && view.getParent() == null) {
        // this can only happen if the parent exists (if the parent were removed first, it'd also
        // remove the child, so trying to explicitly remove the child afterwards would crash at
        // the resolveView call above) - we also explicitly check for a null parent, implying that
        // we are either clipped (or that we already removed the child from its parent, in which
        // case this will essentially be a no-op).
        View parent = resolveView(parentTag);
        if (parent instanceof FlatViewGroup) {
          ((FlatViewGroup) parent).onViewDropped(view);
        }
      }
    }
  }

  @Override
  protected void dropView(View view) {
    super.dropView(view);

    // As a result of removeClippedSubviews, some views have strong references but are not attached
    // to a parent. consequently, when the parent gets removed, these Views don't get cleaned up,
    // because they aren't children (they also aren't removed from mTagsToViews, thus causing a
    // leak). To solve this, we ask for said detached views and explicitly drop them.
    if (view instanceof FlatViewGroup) {
      FlatViewGroup flatViewGroup = (FlatViewGroup) view;
      if (flatViewGroup.getRemoveClippedSubviews()) {
        SparseArray<View> detachedViews = flatViewGroup.getDetachedViews();
        for (int i = 0, size = detachedViews.size(); i < size; i++) {
          View detachedChild = detachedViews.valueAt(i);
          try {
            dropView(detachedChild);
          } catch (Exception e) {
            // if the view is already dropped, ignore any exceptions
            // in reality, we should find out the edge cases that cause
            // this to happen and properly fix them.
          }
          // trigger onDetachedFromWindow and clean up this detached/clipped view
          flatViewGroup.removeDetachedView(detachedChild);
        }
      }
    }
  }

  /**
   * Detaches all children from each of the given ViewGroups. FlatViewGroups detach in place;
   * regular ViewGroups delegate removal to their ViewGroupManager.
   */
  /* package */ void detachAllChildrenFromViews(int[] viewsToDetachAllChildrenFrom) {
    for (int viewTag : viewsToDetachAllChildrenFrom) {
      View view = resolveView(viewTag);
      if (view instanceof FlatViewGroup) {
        ((FlatViewGroup) view).detachAllViewsFromParent();
        continue;
      }

      ViewGroup viewGroup = (ViewGroup) view;
      ViewGroupManager viewManager = (ViewGroupManager) resolveViewManager(viewTag);
      viewManager.removeAllViews(viewGroup);
    }
  }
}
/*
 * Copyright 2016-present Open Networking Laboratory
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.pcepio.types;

import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Objects;

import org.jboss.netty.buffer.ChannelBuffer;
import org.onosproject.pcepio.exceptions.PcepParseException;
import org.onosproject.pcepio.protocol.PcepVersion;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.MoreObjects;

/**
 * Provides LinkAttributesTlv.
 */
public class LinkAttributesTlv implements PcepValueType {

    /*
     * Reference :draft-dhodylee-pce-pcep-ls-01, section 9.2.8.2.
     *  0                   1                   2                   3
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |           Type=[TBD27]        |             Length            |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |                                                               |
     * //              Link Attributes Sub-TLVs (variable)            //
     * |                                                               |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     */

    protected static final Logger log = LoggerFactory.getLogger(LinkAttributesTlv.class);

    public static final short TYPE = (short) 65286;
    // Total TLV length (header + padded value); computed during write().
    short hLength;

    public static final int TLV_HEADER_LENGTH = 4;

    // LinkDescriptors Sub-TLVs (variable)
    private List<PcepValueType> llLinkAttributesSubTLVs;

    /**
     * Constructor to initialize Link Attributes Sub TLVs.
     *
     * @param llLinkAttributesSubTLVs linked list of PcepValueType
     */
    public LinkAttributesTlv(List<PcepValueType> llLinkAttributesSubTLVs) {
        this.llLinkAttributesSubTLVs = llLinkAttributesSubTLVs;
    }

    /**
     * Returns object of TE Link Attributes TLV.
     *
     * @param llLinkAttributesSubTLVs linked list of Link Attribute of Sub TLV
     * @return object of LinkAttributesTlv
     */
    public static LinkAttributesTlv of(final List<PcepValueType> llLinkAttributesSubTLVs) {
        return new LinkAttributesTlv(llLinkAttributesSubTLVs);
    }

    /**
     * Returns linked list of Link Attribute of Sub TLV.
     *
     * @return llLinkAttributesSubTLVs linked list of Link Attribute of Sub TLV
     */
    public List<PcepValueType> getllLinkAttributesSubTLVs() {
        return llLinkAttributesSubTLVs;
    }

    @Override
    public PcepVersion getVersion() {
        return PcepVersion.PCEP_1;
    }

    @Override
    public short getType() {
        return TYPE;
    }

    @Override
    public short getLength() {
        return hLength;
    }

    /**
     * Order-insensitive hash (sum of sub-TLV hashes) so it stays consistent with
     * {@link #equals(Object)}, which compares the sub-TLV lists without regard to order.
     */
    @Override
    public int hashCode() {
        int hash = 0;
        for (PcepValueType subTlv : llLinkAttributesSubTLVs) {
            hash += Objects.hashCode(subTlv);
        }
        return hash;
    }

    /**
     * Compares two LinkAttributesTlv objects by their sub-TLV lists, ignoring order:
     * the lists must have the same size and every sub-TLV of this list must be
     * contained in the other list.
     *
     * BUG FIX: the previous implementation computed BOTH sizes from {@code obj}
     * (never from {@code this}), making the size check vacuous, and compared
     * {@code obj}'s elements against {@code obj}'s own list, so the element check
     * was always true — equals() could report true for TLVs with different sub-TLVs.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj instanceof LinkAttributesTlv) {
            LinkAttributesTlv other = (LinkAttributesTlv) obj;
            // it may be possible that the size of 2 lists is not equal so we have to first check
            // the size, if both are same then we should check for the subtlv objects otherwise
            // we should return false.
            if (llLinkAttributesSubTLVs.size() != other.llLinkAttributesSubTLVs.size()) {
                return false;
            }
            // With equal sizes, containment of each of THIS list's sub-TLVs in the
            // other list establishes (order-insensitive) equality.
            Iterator<PcepValueType> objListIterator = llLinkAttributesSubTLVs.iterator();
            while (objListIterator.hasNext()) {
                PcepValueType subTlv = objListIterator.next();
                if (!other.llLinkAttributesSubTLVs.contains(subTlv)) {
                    return false;
                }
            }
            return true;
        }
        return false;
    }

    /**
     * Serializes this TLV (type, length, then each sub-TLV padded to a 4-byte
     * boundary) into the buffer and back-patches the length field.
     *
     * @param c output channel buffer
     * @return number of bytes written
     */
    @Override
    public int write(ChannelBuffer c) {
        int tlvStartIndex = c.writerIndex();
        c.writeShort(TYPE);
        // Reserve the length slot; the real value is patched in after the body is written.
        int tlvLenIndex = c.writerIndex();
        hLength = 0;
        c.writeShort(hLength);

        ListIterator<PcepValueType> listIterator = llLinkAttributesSubTLVs.listIterator();

        while (listIterator.hasNext()) {
            PcepValueType tlv = listIterator.next();
            if (tlv == null) {
                log.debug("TLV is null from subTlv list");
                continue;
            }
            tlv.write(c);

            // need to take care of padding
            int pad = tlv.getLength() % 4;
            if (0 != pad) {
                pad = 4 - pad;
                for (int i = 0; i < pad; ++i) {
                    c.writeByte((byte) 0);
                }
            }
        }

        hLength = (short) (c.writerIndex() - tlvStartIndex);
        c.setShort(tlvLenIndex, (hLength - TLV_HEADER_LENGTH));

        return c.writerIndex() - tlvStartIndex;
    }

    /**
     * Reads channel buffer and returns object of TE Link Attributes TLV.
     *
     * @param c input channel buffer
     * @param hLength length
     * @return object of LinkAttributesTlv
     * @throws PcepParseException if mandatory fields are missing
     */
    public static PcepValueType read(ChannelBuffer c, short hLength) throws PcepParseException {

        // Node Descriptor Sub-TLVs (variable)
        List<PcepValueType> llLinkAttributesSubTLVs = new LinkedList<>();

        ChannelBuffer tempCb = c.readBytes(hLength);

        while (TLV_HEADER_LENGTH <= tempCb.readableBytes()) {

            PcepValueType tlv;
            short hType = tempCb.readShort();
            int iValue = 0;
            short length = tempCb.readShort();
            switch (hType) {

            case IPv4RouterIdOfLocalNodeSubTlv.TYPE:
                iValue = tempCb.readInt();
                tlv = new IPv4RouterIdOfLocalNodeSubTlv(iValue);
                break;
            case IPv6RouterIdofLocalNodeSubTlv.TYPE:
                byte[] ipv6LValue = new byte[IPv6RouterIdofLocalNodeSubTlv.VALUE_LENGTH];
                tempCb.readBytes(ipv6LValue, 0, IPv6RouterIdofLocalNodeSubTlv.VALUE_LENGTH);
                tlv = new IPv6RouterIdofLocalNodeSubTlv(ipv6LValue);
                break;
            case IPv4RouterIdOfRemoteNodeSubTlv.TYPE:
                iValue = tempCb.readInt();
                tlv = new IPv4RouterIdOfRemoteNodeSubTlv(iValue);
                break;
            case IPv6RouterIdofRemoteNodeSubTlv.TYPE:
                byte[] ipv6RValue = new byte[IPv6RouterIdofRemoteNodeSubTlv.VALUE_LENGTH];
                tempCb.readBytes(ipv6RValue, 0, IPv6RouterIdofRemoteNodeSubTlv.VALUE_LENGTH);
                tlv = new IPv6RouterIdofRemoteNodeSubTlv(ipv6RValue);
                break;
            case LinkLocalRemoteIdentifiersSubTlv.TYPE:
                tlv = LinkLocalRemoteIdentifiersSubTlv.read(tempCb);
                break;
            case AdministrativeGroupSubTlv.TYPE:
                iValue = tempCb.readInt();
                tlv = new AdministrativeGroupSubTlv(iValue);
                break;
            case MaximumLinkBandwidthSubTlv.TYPE:
                iValue = tempCb.readInt();
                tlv = new MaximumLinkBandwidthSubTlv(iValue);
                break;
            case MaximumReservableLinkBandwidthSubTlv.TYPE:
                iValue = tempCb.readInt();
                tlv = new MaximumReservableLinkBandwidthSubTlv(iValue);
                break;
            case UnreservedBandwidthSubTlv.TYPE:
                iValue = tempCb.readInt();
                tlv = new UnreservedBandwidthSubTlv(iValue);
                break;
            case TEDefaultMetricSubTlv.TYPE:
                iValue = tempCb.readInt();
                tlv = new TEDefaultMetricSubTlv(iValue);
                break;
            case LinkProtectionTypeSubTlv.TYPE:
                tlv = LinkProtectionTypeSubTlv.read(tempCb);
                break;
            case MplsProtocolMaskSubTlv.TYPE:
                byte cValue = tempCb.readByte();
                tlv = new MplsProtocolMaskSubTlv(cValue);
                break;
            case IgpMetricSubTlv.TYPE:
                tlv = IgpMetricSubTlv.read(tempCb, length);
                break;
            case SharedRiskLinkGroupSubTlv.TYPE:
                tlv = SharedRiskLinkGroupSubTlv.read(tempCb, length);
                break;
            case OpaqueLinkAttributeSubTlv.TYPE:
                tlv = OpaqueLinkAttributeSubTlv.read(tempCb, length);
                break;
            case LinkNameAttributeSubTlv.TYPE:
                tlv = LinkNameAttributeSubTlv.read(tempCb, length);
                break;
            default:
                throw new PcepParseException("Unsupported Sub TLV type :" + hType);
            }

            // Check for the padding
            int pad = length % 4;
            if (0 < pad) {
                pad = 4 - pad;
                if (pad <= tempCb.readableBytes()) {
                    tempCb.skipBytes(pad);
                }
            }

            llLinkAttributesSubTLVs.add(tlv);
        }

        if (0 < tempCb.readableBytes()) {
            throw new PcepParseException("Sub Tlv parsing error. Extra bytes received.");
        }
        return new LinkAttributesTlv(llLinkAttributesSubTLVs);
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(getClass())
                .add("Type", TYPE)
                .add("Length", hLength)
                .add("LinkAttributesSubTLVs", llLinkAttributesSubTLVs)
                .toString();
    }
}
/**
 * Copyright [2014] Gaurav Gupta
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package org.netbeans.jpa.source;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.lang.model.element.AnnotationMirror;
import javax.lang.model.element.AnnotationValue;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Name;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.TypeKind;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.util.ElementFilter;
import javax.lang.model.util.Types;
import org.netbeans.api.java.source.CompilationController;
import org.netbeans.api.java.source.WorkingCopy;
import org.openide.filesystems.FileLock;
import org.openide.filesystems.FileObject;
import org.openide.util.Utilities;

/**
 * Static helpers for inspecting JPA entity classes through the
 * {@code javax.lang.model} API (annotations, access type, id detection,
 * relationship navigation) plus small file/stream utilities.
 *
 * @author Gaurav Gupta
 */
public class JavaSourceParserUtil {

    /** Returns the simple (unqualified) name of the given fully-qualified class name. */
    public static String simpleClassName(String fqn) {
        int lastDot = fqn.lastIndexOf('.');
        return lastDot > 0 ? fqn.substring(lastDot + 1) : fqn;
    }

    /**
     * Reads the whole stream into a string using the given encoding; each input
     * line is re-terminated with the platform line separator.
     *
     * @param is stream to read (closed on success)
     * @param encoding character encoding of the stream
     * @return the stream contents as a string
     * @throws IOException on read failure
     */
    public static String readResource(InputStream is, String encoding) throws IOException {
        // read the config from resource first
        StringBuilder sbuffer = new StringBuilder();
        String lineSep = System.getProperty("line.separator");//NOI18N
        BufferedReader br = new BufferedReader(new InputStreamReader(is, encoding));
        String line = br.readLine();
        while (line != null) {
            sbuffer.append(line);
            sbuffer.append(lineSep);
            line = br.readLine();
        }
        br.close();
        return sbuffer.toString();
    }

    /**
     * Writes {@code content} to {@code target} in the given encoding, holding a
     * file lock for the duration of the write.
     */
    public static void createFile(FileObject target, String content, String encoding) throws IOException {
        FileLock lock = target.lock();
        try {
            BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(target.getOutputStream(lock), encoding));
            bw.write(content);
            bw.close();
        } finally {
            lock.releaseLock();
        }
    }

    /**
     * Detects whether the entity hierarchy uses field access: walks up the
     * superclass chain of @Entity/@MappedSuperclass types and inspects the first
     * persistence-annotated member found; FIELD kind means field access.
     * Logs a warning and returns false when no annotated member is found.
     */
    public static boolean isFieldAccess(TypeElement clazz) {
        boolean fieldAccess = false;
        boolean accessTypeDetected = false;
        TypeElement typeElement = clazz;
        Name qualifiedName = typeElement.getQualifiedName();
        whileloop:
        while (typeElement != null) {
            if (isAnnotatedWith(typeElement, "javax.persistence.Entity") || isAnnotatedWith(typeElement, "javax.persistence.MappedSuperclass")) { // NOI18N
                for (Element element : typeElement.getEnclosedElements()) {
                    if (isAnnotatedWith(element, "javax.persistence.Id") || isAnnotatedWith(element, "javax.persistence.EmbeddedId")
                            || isAnnotatedWith(element, "javax.persistence.Embedded") || isAnnotatedWith(element, "javax.persistence.Basic")
                            || isAnnotatedWith(element, "javax.persistence.Transient") || isAnnotatedWith(element, "javax.persistence.Version")
                            || isAnnotatedWith(element, "javax.persistence.ElementCollection") || isAnnotatedWith(element, "javax.persistence.OneToMany")
                            || isAnnotatedWith(element, "javax.persistence.ManyToMany") || isAnnotatedWith(element, "javax.persistence.OneToOne")
                            || isAnnotatedWith(element, "javax.persistence.ManyToOne")) {
                        // Annotation found on a FIELD member => field access; otherwise property access.
                        if (ElementKind.FIELD == element.getKind()) {
                            fieldAccess = true;
                        }
                        accessTypeDetected = true;
                        break whileloop;
                    }
                }
            }
            typeElement = getSuperclassTypeElement(typeElement);
        }
        if (!accessTypeDetected) {
            Logger.getLogger(JavaSourceParserUtil.class.getName()).log(Level.WARNING, "Failed to detect correct access type for class: {0}", qualifiedName); // NOI18N
        }
        return fieldAccess;
    }

    /** Returns true when the element carries the annotation with the given FQN. */
    public static boolean isAnnotatedWith(Element element, String annotationFqn) {
        return findAnnotation(element, annotationFqn) != null;
    }

    public static AnnotationMirror getAnnotation(Element element, String annotationFqn) {//temp replica
        return findAnnotation(element, annotationFqn);
    }

    /** Returns the mirror of the annotation with the given FQN, or null if absent. */
    public static AnnotationMirror findAnnotation(Element element, String annotationFqn) {
        for (AnnotationMirror annotationMirror : element.getAnnotationMirrors()) {
            String annotationQualifiedName = getAnnotationQualifiedName(annotationMirror);
            if (annotationQualifiedName.equals(annotationFqn)) {
                return annotationMirror;
            }
        }
        return null;
    }

    /** Returns the fully-qualified name of the annotation's declaring type. */
    public static String getAnnotationQualifiedName(AnnotationMirror annotationMirror) {
        // Iterator<Entry<? extends ExecutableElement, ? extends AnnotationValue>> elementValuesItr = annotationMirror.getElementValues().entrySet().iterator();
        DeclaredType annotationDeclaredType = annotationMirror.getAnnotationType();
        TypeElement annotationTypeElement = (TypeElement) annotationDeclaredType.asElement();
        Name name = annotationTypeElement.getQualifiedName();
        return name.toString();
    }

    /** Returns the superclass as a TypeElement, or null for none/non-class supertypes. */
    public static TypeElement getSuperclassTypeElement(TypeElement typeElement) {
        TypeElement superclass = null;
        TypeMirror superclassMirror = typeElement.getSuperclass();
        if (superclassMirror.getKind() == TypeKind.DECLARED) {
            DeclaredType superclassDeclaredType = (DeclaredType) superclassMirror;
            Element superclassElement = superclassDeclaredType.asElement();
            if (superclassElement.getKind() == ElementKind.CLASS && (superclassElement instanceof TypeElement)) {
                superclass = (TypeElement) superclassElement;
            }
        }
        return superclass;
    }

    /** Returns the declared class type of the variable as a TypeElement, or null. */
    public static TypeElement getAttributeTypeElement(VariableElement variableElement) {
        TypeElement attribute = null;
        TypeMirror attributeMirror = variableElement.asType();
        if (attributeMirror.getKind() == TypeKind.DECLARED) {
            DeclaredType attributeDeclaredType = (DeclaredType) attributeMirror;
            Element attributeElement = attributeDeclaredType.asElement();
            if (attributeElement.getKind() == ElementKind.CLASS && (attributeElement instanceof TypeElement)) {
                attribute = (TypeElement) attributeElement;
            }
        }
        return attribute;
    }

    /**
     * Returns the value of the named annotation attribute as a string, or null
     * when the attribute is not explicitly set (defaults are not reported by
     * getElementValues()).
     */
    public static String findAnnotationValueAsString(AnnotationMirror annotation, String annotationKey) {
        String value = null;
        Map<? extends ExecutableElement, ? extends AnnotationValue> annotationMap = annotation.getElementValues();
        for (ExecutableElement key : annotationMap.keySet()) {
            if (annotationKey.equals(key.getSimpleName().toString())) {
                AnnotationValue annotationValue = annotationMap.get(key);
                value = annotationValue.getValue().toString();
                break;
            }
        }
        return value;
    }

    /** Returns the raw value object of the named annotation attribute, or null. */
    public static Object findAnnotationValue(AnnotationMirror annotation, String annotationKey) {
        Object value = null;
        Map<? extends ExecutableElement, ? extends AnnotationValue> annotationMap = annotation.getElementValues();
        for (ExecutableElement key : annotationMap.keySet()) {
            if (annotationKey.equals(key.getSimpleName().toString())) {
                AnnotationValue annotationValue = annotationMap.get(key);
                value = annotationValue.getValue();
                break;
            }
        }
        return value;
    }

    /** Collects every annotation with the given FQN nested anywhere inside the mirror. */
    public static List<AnnotationMirror> findNestedAnnotations(AnnotationMirror annotationMirror, String annotationFqn) {
        List<AnnotationMirror> result = new ArrayList<AnnotationMirror>();
        findNestedAnnotationsInternal(annotationMirror, annotationFqn, result);
        return result;
    }

    // Recursive helper: "object" is either an AnnotationMirror or a List of AnnotationValues.
    private static void findNestedAnnotationsInternal(Object object, String annotationFqn, List<AnnotationMirror> result) {
        Collection<? extends AnnotationValue> annotationValueCollection = null;
        if (object instanceof AnnotationMirror) {
            AnnotationMirror annotationMirror = (AnnotationMirror) object;
            String annotationQualifiedName = getAnnotationQualifiedName(annotationMirror);
            if (annotationQualifiedName.equals(annotationFqn)) {
                result.add(annotationMirror);
            } else {
                //prepare to recurse
                Map<? extends ExecutableElement, ? extends AnnotationValue> annotationMap = annotationMirror.getElementValues();
                annotationValueCollection = annotationMap.values();
            }
        } else if (object instanceof List) {
            //prepare to recurse
            annotationValueCollection = (Collection<? extends AnnotationValue>) object;
        }
        //recurse
        if (annotationValueCollection != null) {
            for (AnnotationValue annotationValue : annotationValueCollection) {
                Object value = annotationValue.getValue();
                findNestedAnnotationsInternal(value, annotationFqn, result);
            }
        }
    }

    /**
     * Derives a field-name candidate from a class name: lowercases the first
     * letter unless the second character is also uppercase; appends "1" when
     * the result is not a valid Java identifier (e.g. a keyword).
     */
    public static String fieldFromClassName(String className) {
        boolean makeFirstLower = className.length() == 1 || (!Character.isUpperCase(className.charAt(1)));
        String candidate = makeFirstLower ? className.substring(0, 1).toLowerCase() + className.substring(1) : className;
        if (!Utilities.isJavaIdentifier(candidate)) {
            candidate += "1"; //NOI18N
        }
        return candidate;
    }

    /** Derives the JavaBeans property name from a getter/setter method name. */
    public static String getPropNameFromMethod(String name) {
        //getABcd should be converted to ABcd, getFooBar should become fooBar
        //getA1 is "a1", getA_ is a_, getAB is AB
        //in case method doesn't start with "get" return name with brackets
        if (!name.startsWith("get") && !name.startsWith("set")) { //NOI18N
            return name + "()"; //NOI18n
        }
        boolean makeFirstLower = name.length() < 5 || (!Character.isUpperCase(name.charAt(4)));
        return makeFirstLower ? name.substring(3, 4).toLowerCase() + name.substring(4) : name.substring(3);
    }

    /** Returns true when the element is annotated @Embeddable. */
    public static boolean isEmbeddableClass(Element typeElement) {//TypeElement
        if (JavaSourceParserUtil.isAnnotatedWith(typeElement, "javax.persistence.Embeddable")) {
            return true;
        }
        return false;
    }

    /** Returns true when the element is annotated @MappedSuperclass. */
    public static boolean isMappedSuperClass(Element typeElement) {//TypeElement
        if (JavaSourceParserUtil.isAnnotatedWith(typeElement, "javax.persistence.MappedSuperclass")) {
            return true;
        }
        return false;
    }

    /** Returns true when the element is annotated @Entity. */
    public static boolean isEntityClass(Element typeElement) {//TypeElement
        if (JavaSourceParserUtil.isAnnotatedWith(typeElement, "javax.persistence.Entity")) {
            return true;
        }
        return false;
    }

    /** Returns true when the type is neither an entity, mapped superclass nor embeddable. */
    public static boolean isNonEntityClass(TypeElement typeElement) {
        if (!isEntityClass(typeElement) && !isMappedSuperClass(typeElement) && !isEmbeddableClass(typeElement)) {
            return true;
        }
        return false;
    }

    /**
     * Classifies the relationship of a getter as REL_TO_ONE, REL_TO_MANY or
     * REL_NONE based on the JPA relationship annotation present on the backing
     * field (field access) or the method itself (property access).
     */
    public static int isRelationship(ExecutableElement method, boolean isFieldAccess) {
        Element element = isFieldAccess ? JavaSourceParserUtil.guessField(method) : method;
        if (element != null) {
            if (JavaSourceParserUtil.isAnnotatedWith(element, "javax.persistence.OneToOne") || JavaSourceParserUtil.isAnnotatedWith(element, "javax.persistence.ManyToOne")) {
                return REL_TO_ONE;
            }
            if (JavaSourceParserUtil.isAnnotatedWith(element, "javax.persistence.OneToMany") || JavaSourceParserUtil.isAnnotatedWith(element, "javax.persistence.ManyToMany")) {
                return REL_TO_MANY;
            }
        }
        return REL_NONE;
    }

    /**
     * Finds the getter on the other side of a bidirectional JPA relationship:
     * via the mappedBy attribute when set, otherwise the first getter on the
     * target type whose (collection-stripped) return type matches the declaring
     * class of the passed getter. Returns null when no counterpart is found.
     */
    public static ExecutableElement getOtherSideOfRelation(CompilationController controller, ExecutableElement executableElement, boolean isFieldAccess) {
        TypeMirror passedReturnType = executableElement.getReturnType();
        if (TypeKind.DECLARED != passedReturnType.getKind() || !(passedReturnType instanceof DeclaredType)) {
            return null;
        }
        Types types = controller.getTypes();
        TypeMirror passedReturnTypeStripped = stripCollection((DeclaredType) passedReturnType, types);
        if (passedReturnTypeStripped == null) {
            return null;
        }
        TypeElement passedReturnTypeStrippedElement = (TypeElement) types.asElement(passedReturnTypeStripped);
        //try to find a mappedBy annotation element on the possiblyAnnotatedElement
        Element possiblyAnnotatedElement = isFieldAccess ? JavaSourceParserUtil.guessField(executableElement) : executableElement;
        String mappedBy = null;
        AnnotationMirror persistenceAnnotation = JavaSourceParserUtil.findAnnotation(possiblyAnnotatedElement, "javax.persistence.OneToOne"); //NOI18N"
        if (persistenceAnnotation == null) {
            persistenceAnnotation = JavaSourceParserUtil.findAnnotation(possiblyAnnotatedElement, "javax.persistence.OneToMany"); //NOI18N"
        }
        if (persistenceAnnotation == null) {
            persistenceAnnotation = JavaSourceParserUtil.findAnnotation(possiblyAnnotatedElement, "javax.persistence.ManyToOne"); //NOI18N"
        }
        if (persistenceAnnotation == null) {
            persistenceAnnotation = JavaSourceParserUtil.findAnnotation(possiblyAnnotatedElement, "javax.persistence.ManyToMany"); //NOI18N"
        }
        if (persistenceAnnotation != null) {
            mappedBy = JavaSourceParserUtil.findAnnotationValueAsString(persistenceAnnotation, "mappedBy"); //NOI18N
        }
        for (ExecutableElement method : JavaSourceParserUtil.getMethods(passedReturnTypeStrippedElement)) {
            if (mappedBy != null && mappedBy.length() > 0) {
                // mappedBy="foo" => look for getFoo on the other side.
                String tail = mappedBy.length() > 1 ? mappedBy.substring(1) : "";
                String getterName = "get" + mappedBy.substring(0, 1).toUpperCase() + tail;
                if (getterName.equals(method.getSimpleName().toString())) {
                    return method;
                }
            } else {
                TypeMirror iteratedReturnType = method.getReturnType();
                iteratedReturnType = stripCollection(iteratedReturnType, types);
                TypeMirror executableElementEnclosingType = executableElement.getEnclosingElement().asType();
                if (types.isSameType(executableElementEnclosingType, iteratedReturnType)) {
                    return method;
                }
            }
        }
        return null;
    }

    // Relationship classification results for isRelationship().
    public static final int REL_NONE = 0;
    public static final int REL_TO_ONE = 1;
    public static final int REL_TO_MANY = 2;

    /**
     * If the passed type is a java.util Collection (resolvable in this JVM via
     * Class.forName), returns its first type argument; otherwise returns the
     * type unchanged.
     */
    public static TypeMirror stripCollection(TypeMirror passedType, Types types) {
        if (TypeKind.DECLARED != passedType.getKind() || !(passedType instanceof DeclaredType)) {
            return passedType;
        }
        TypeElement passedTypeElement = (TypeElement) types.asElement(passedType);
        String passedTypeQualifiedName = passedTypeElement.getQualifiedName().toString(); //does not include type parameter info
        Class passedTypeClass = null;
        try {
            passedTypeClass = Class.forName(passedTypeQualifiedName);
        } catch (ClassNotFoundException e) {
            //just let passedTypeClass be null
        }
        if (passedTypeClass != null && Collection.class.isAssignableFrom(passedTypeClass)) {
            List<? extends TypeMirror> passedTypeArgs = ((DeclaredType) passedType).getTypeArguments();
            if (passedTypeArgs.isEmpty()) {
                return passedType;
            }
            return passedTypeArgs.get(0);
        }
        return passedType;
    }

    /**
     * Determines whether a field is optional AND nullable: consults optional=
     * on ManyToOne/OneToOne/Basic, then nullable= on Column/JoinColumn, then
     * the JoinColumn entries inside a JoinColumns annotation. Defaults to true.
     */
    public static boolean isFieldOptionalAndNullable(ExecutableElement method, boolean fieldAccess) {
        boolean isFieldOptional = true;
        Boolean isFieldNullable;
        Element fieldElement = fieldAccess ? JavaSourceParserUtil.guessField(method) : method;
        if (fieldElement == null) {
            fieldElement = method;
        }
        String[] fieldAnnotationFqns = {"javax.persistence.ManyToOne", "javax.persistence.OneToOne", "javax.persistence.Basic"};
        Boolean isFieldOptionalBoolean = findAnnotationValueAsBoolean(fieldElement, fieldAnnotationFqns, "optional");
        if (isFieldOptionalBoolean != null) {
            isFieldOptional = isFieldOptionalBoolean.booleanValue();
        }
        if (!isFieldOptional) {
            return false;
        }
        //field is optional
        fieldAnnotationFqns = new String[]{"javax.persistence.Column", "javax.persistence.JoinColumn"};
        isFieldNullable = findAnnotationValueAsBoolean(fieldElement, fieldAnnotationFqns, "nullable");
        if (isFieldNullable != null) {
            return isFieldNullable.booleanValue();
        }
        //new ballgame
        boolean result = true;
        AnnotationMirror fieldAnnotation = JavaSourceParserUtil.findAnnotation(fieldElement, "javax.persistence.JoinColumns"); //NOI18N
        if (fieldAnnotation != null) {
            //all joinColumn annotations must indicate nullable = false to return a false result
            List<AnnotationMirror> joinColumnAnnotations = JavaSourceParserUtil.findNestedAnnotations(fieldAnnotation, "javax.persistence.JoinColumn");
            for (AnnotationMirror joinColumnAnnotation : joinColumnAnnotations) {
                String columnNullableValue = JavaSourceParserUtil.findAnnotationValueAsString(joinColumnAnnotation, "nullable"); //NOI18N
                if (columnNullableValue != null) {
                    result = Boolean.parseBoolean(columnNullableValue);
                    if (result) {
                        break; //one of the joinColumn annotations is nullable, so return true
                    }
                } else {
                    result = true;
                    break; //one of the joinColumn annotations is nullable, so return true
                }
            }
        }
        return result;
    }

    // Returns the boolean value of annotationKey on the first annotation present
    // from fieldAnnotationFqns; TRUE when the annotation is present without the
    // attribute; null when none of the annotations are present.
    private static Boolean findAnnotationValueAsBoolean(Element fieldElement, String[] fieldAnnotationFqns, String annotationKey) {
        Boolean isFieldXable = null;
        for (int i = 0; i < fieldAnnotationFqns.length; i++) {
            String fieldAnnotationFqn = fieldAnnotationFqns[i];
            AnnotationMirror fieldAnnotation = JavaSourceParserUtil.findAnnotation(fieldElement, fieldAnnotationFqn); //NOI18N
            if (fieldAnnotation != null) {
                String annotationValueString = JavaSourceParserUtil.findAnnotationValueAsString(fieldAnnotation, annotationKey); //NOI18N
                if (annotationValueString != null) {
                    isFieldXable = Boolean.valueOf(annotationValueString);
                } else {
                    isFieldXable = Boolean.TRUE;
                }
                break;
            }
        }
        return isFieldXable;
    }

    /**
     * check if there is id in the entity
     *
     * @param clazz the entity type to inspect (superclasses included)
     * @return true if id is present
     */
    public static boolean haveId(final TypeElement clazz) {
        boolean idDetected = false;
        TypeElement typeElement = clazz;
        while (typeElement != null && !idDetected) {
            if (isAnnotatedWith(typeElement, "javax.persistence.Entity") || isAnnotatedWith(typeElement, "javax.persistence.MappedSuperclass")) { // NOI18N
                for (Element element : typeElement.getEnclosedElements()) {
                    if (isAnnotatedWith(element, "javax.persistence.Id") || isAnnotatedWith(element, "javax.persistence.EmbeddedId")) {
                        idDetected = true;
                    }
                }
            }
            typeElement = getSuperclassTypeElement(typeElement);
        }
        if (!idDetected) {
            return false;//
        } else {
            return true;
        }
    }

    /**
     * Returns the getter whose backing element carries @Id/@EmbeddedId, or null
     * (with a warning logged) when none is found.
     */
    public static ExecutableElement getIdGetter(final boolean isFieldAccess, final TypeElement typeElement) {
        ExecutableElement[] methods = JavaSourceParserUtil.getMethods(typeElement);
        for (ExecutableElement method : methods) {
            String methodName = method.getSimpleName().toString();
            if (methodName.startsWith("get")) {
                Element element = isFieldAccess ? JavaSourceParserUtil.guessField(method) : method;
                if (element != null) {
                    if (JavaSourceParserUtil.isAnnotatedWith(element, "javax.persistence.Id") || JavaSourceParserUtil.isAnnotatedWith(element, "javax.persistence.EmbeddedId")) {
                        return method;
                    }
                }
            }
        }
        Logger.getLogger(JavaSourceParserUtil.class.getName()).log(Level.WARNING, "Cannot find ID getter in class: {0}", typeElement.getQualifiedName());
        return null;
    }

    /** Returns true when the getter's backing element carries @GeneratedValue. */
    public static boolean isGenerated(ExecutableElement method, boolean isFieldAccess) {
        Element element = isFieldAccess ? JavaSourceParserUtil.guessField(method) : method;
        if (element != null) {
            if (JavaSourceParserUtil.isAnnotatedWith(element, "javax.persistence.GeneratedValue")) { // NOI18N
                return true;
            }
        }
        return false;
    }

    /** Returns true when the named method declares the given exception type. */
    public static boolean exceptionsThrownIncludes(WorkingCopy workingCopy, String fqClass, String methodName, List<String> formalParamFqTypes, String exceptionFqClassMaybeIncluded) {
        List<String> exceptionsThrown = getExceptionsThrown(workingCopy, fqClass, methodName, formalParamFqTypes);
        for (String exception : exceptionsThrown) {
            if (exceptionFqClassMaybeIncluded.equals(exception)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Returns the FQNs of the exception types declared by the named method
     * (null entries for non-declared thrown types). The method is located by
     * name and parameter COUNT only, walking up the superclass chain.
     *
     * @throws IllegalArgumentException when no matching method is found
     */
    public static List<String> getExceptionsThrown(WorkingCopy workingCopy, String fqClass, String methodName, List<String> formalParamFqTypes) {
        if (formalParamFqTypes == null) {
            formalParamFqTypes = Collections.<String>emptyList();
        }
        ExecutableElement desiredMethodElement = null;
        TypeElement suppliedTypeElement = workingCopy.getElements().getTypeElement(fqClass);
        TypeElement typeElement = suppliedTypeElement;
        whileloop:
        while (typeElement != null) {
            for (ExecutableElement methodElement : ElementFilter.methodsIn(typeElement.getEnclosedElements())) {
                if (methodElement.getSimpleName().contentEquals(methodName)) {
                    List<? extends VariableElement> formalParamElements = methodElement.getParameters();
                    //for now, just check sizes
                    if (formalParamElements.size() == formalParamFqTypes.size()) {
                        desiredMethodElement = methodElement;
                        break whileloop;
                    }
                }
            }
            typeElement = getSuperclassTypeElement(typeElement);
        }
        if (desiredMethodElement == null) {
            throw new IllegalArgumentException("Could not find " + methodName + " in " + fqClass);
        }
        List<String> result = new ArrayList<String>();
        List<? extends TypeMirror> thrownTypes = desiredMethodElement.getThrownTypes();
        for (TypeMirror thrownType : thrownTypes) {
            if (thrownType.getKind() == TypeKind.DECLARED) {
                DeclaredType thrownDeclaredType = (DeclaredType) thrownType;
                TypeElement thrownElement = (TypeElement) thrownDeclaredType.asElement();
                String thrownFqClass = thrownElement.getQualifiedName().toString();
                result.add(thrownFqClass);
            } else {
                result.add(null);
            }
        }
        return result;
    }

    /**
     * Returns all methods in class and its super classes which are entity
     * classes or mapped superclasses.
     */
//    public static ExecutableElement[] getEntityMethods(TypeElement entityTypeElement) {
//        List<ExecutableElement> result = new LinkedList<ExecutableElement>();
//        TypeElement typeElement = entityTypeElement;
//        while (typeElement != null) {
//            if (isAnnotatedWith(typeElement, "javax.persistence.Entity") || isAnnotatedWith(typeElement, "javax.persistence.MappedSuperclass")) { // NOI18N
//                result.addAll(ElementFilter.methodsIn(typeElement.getEnclosedElements()));
//            }
//            typeElement = getSuperclassTypeElement(typeElement);
//        }
//        return result.toArray(new ExecutableElement[result.size()]);
//    }

    /** Returns the methods declared directly on the given type (no superclasses). */
    public static ExecutableElement[] getMethods(TypeElement typeElement) {
        List<ExecutableElement> result = new LinkedList<ExecutableElement>();
        result.addAll(ElementFilter.methodsIn(typeElement.getEnclosedElements()));
        return result.toArray(new ExecutableElement[result.size()]);
    }

    /** Returns the fields declared directly on the given type (no superclasses). */
    public static VariableElement[] getFields(TypeElement typeElement) {
        List<VariableElement> result = new LinkedList<VariableElement>();
        result.addAll(ElementFilter.fieldsIn(typeElement.getEnclosedElements()));
        return result.toArray(new VariableElement[result.size()]);
    }

    /**
     * Guesses the backing field of a getter by naming convention
     * (getFoo -> foo); returns null (with a warning) when no such field exists
     * on the getter's enclosing type.
     */
    public static VariableElement guessField(ExecutableElement getter) {
        String name = getter.getSimpleName().toString().substring(3);
        String guessFieldName = name.substring(0, 1).toLowerCase() + name.substring(1);
        TypeElement typeElement = (TypeElement) getter.getEnclosingElement();
        for (VariableElement variableElement : ElementFilter.fieldsIn(typeElement.getEnclosedElements())) {
            if (variableElement.getSimpleName().contentEquals(guessFieldName)) {
                return variableElement;
            }
        }
        Logger.getLogger(JavaSourceParserUtil.class.getName()).log(Level.WARNING, "Cannot detect the field associated with property: {0}", guessFieldName);
        return null;
    }

    /**
     * TODO: actually it's guess setter from setter, need to review if it's a
     * problem of expected
     *
     * NOTE(review): this searches the enclosing type's FIELDS for a name with a
     * "set" prefix (e.g. "setFoo"), which ordinary fields never carry — so it
     * appears to always fall through to the log-and-return-null path. Confirm
     * the intended lookup before relying on a non-null result.
     *
     * @param setter
     * @return
     */
    public static VariableElement guessGetter(ExecutableElement setter) {
        String name = setter.getSimpleName().toString().substring(3);
        String guessGetterName = "set" + name;
        TypeElement typeElement = (TypeElement) setter.getEnclosingElement();
        for (VariableElement variableElement : ElementFilter.fieldsIn(typeElement.getEnclosedElements())) {
            if (variableElement.getSimpleName().contentEquals(guessGetterName)) {
                return variableElement;
            }
        }
        Logger.getLogger(JavaSourceParserUtil.class.getName()).log(Level.INFO, "Cannot detect setter associated with getter: {0}", guessGetterName);
        return null;
    }

    /** Holder for a raw type name plus optional declared type parameters. */
    public static class TypeInfo {

        // Fully-qualified (or simple) raw type name; never null.
        private String rawType;
        // Type arguments, or null when the type is not parameterized.
        private TypeInfo[] declaredTypeParameters;

        public String getRawType() {
            return rawType;
        }

        public TypeInfo[] getDeclaredTypeParameters() {
            return declaredTypeParameters;
        }

        public TypeInfo(String rawType) {
            if (rawType == null) {
                throw new IllegalArgumentException();
            }
            this.rawType = rawType;
        }

        public TypeInfo(String rawType, TypeInfo[] declaredTypeParameters) {
            if (rawType == null) {
                throw new IllegalArgumentException();
            }
            this.rawType = rawType;
            if (declaredTypeParameters == null || declaredTypeParameters.length == 0) {
                return;
            }
            this.declaredTypeParameters = declaredTypeParameters;
        }

        public TypeInfo(String rawType, String[] declaredTypeParamStrings) {
            if (rawType == null) {
                throw new IllegalArgumentException();
            }
            this.rawType = rawType;
            if (declaredTypeParamStrings == null || declaredTypeParamStrings.length == 0) {
                return;
            }
            this.declaredTypeParameters = TypeInfo.fromStrings(declaredTypeParamStrings);
        }

        /** Wraps each string into a parameterless TypeInfo; null for empty input. */
        public static TypeInfo[] fromStrings(String[] strings) {
            if (strings == null || strings.length == 0) {
                return null;
            }
            TypeInfo[] typeInfos = new TypeInfo[strings.length];
            for (int i = 0; i < strings.length; i++) {
                typeInfos[i] = new TypeInfo(strings[i]);
            }
            return typeInfos;
        }
    }

//    private static String getPersistenceVersion(Project project) throws IOException {
//        String version = Persistence.VERSION_1_0;
//        PersistenceScope persistenceScopes[] = PersistenceUtils.getPersistenceScopes(project);
//        if (persistenceScopes.length > 0) {
//            FileObject persXml = persistenceScopes[0].getPersistenceXml();
//            if (persXml != null) {
//                Persistence persistence = PersistenceMetadata.getDefault().getRoot(persXml);
//                version = persistence.getVersion();
//            }
//        }
//        return version;
//    }
//
//    static boolean isId(ExecutableElement method, boolean isFieldAccess) {
//        Element element = isFieldAccess ? JavaSourceParserUtil.guessField(method) : method;
//        if (element != null) {
//            if (JavaSourceParserUtil.isAnnotatedWith(element, "javax.persistence.Id") || JavaSourceParserUtil.isAnnotatedWith(element, "javax.persistence.EmbeddedId")) { // NOI18N
//                return true;
//            }
//        }
//        return false;
//    }
}
package com.bagri.core; /** * Connection (client-side) and configuration (server-side) properties and standard values. * * @author Denis Sukhoroslov * */ public class Constants { // XDM Client configuration constants public static final String pn_schema_address = "bdb.schema.address"; public static final String pn_schema_host = "bdb.schema.host"; public static final String pn_schema_port = "bdb.schema.port"; public static final String pn_schema_name = "bdb.schema.name"; public static final String pn_schema_user = "bdb.schema.user"; public static final String pn_schema_password = "bdb.schema.password"; public static final String pn_client_id = "bdb.client.id"; public static final String pn_client_txId = "bdb.client.txId"; public static final String pn_client_txLevel = "bdb.client.txLevel"; public static final String pn_client_txTimeout = "bdb.client.txTimeout"; public static final String pv_client_txLevel_skip = "skip"; public static final String pn_client_smart = "bdb.client.smart"; public static final String pn_client_bufferSize = "bdb.client.bufferSize"; public static final String pn_client_connectAttempts = "bdb.client.connectAttempts"; public static final String pn_client_poolSize = "bdb.client.poolSize"; public static final String pn_client_connectedAt = "bdb.client.connectedAt"; public static final String pn_client_memberId = "bdb.client.member"; public static final String pn_client_sharedConnection = "bdb.client.sharedConnection"; public static final String pn_client_fetchAsynch = "bdb.client.fetchAsynch"; public static final String pn_client_fetchSize = "bdb.client.fetchSize"; public static final String pn_client_fetchType = "bdb.client.fetchType"; public static final String pv_client_fetchType_asynch = "asynch"; public static final String pv_client_fetchType_batch = "batch"; public static final String pv_client_fetchType_fixed = "fixed"; public static final String pv_client_fetchType_queue = "queue"; public static final String pn_client_healthCheck = 
"bdb.client.healthCheck"; public static final String pn_client_loginTimeout = "bdb.client.loginTimeout"; public static final String pn_client_dataFactory = "bdb.client.dataFactory"; public static final String pn_client_pageSize = "bdb.client.pageSize"; public static final String pn_client_idCount = "bdb.client.idCount"; public static final String pn_client_customAuth = "bdb.client.customAuth"; public static final String pn_client_queryCache = "bdb.client.queryCache"; public static final String pn_client_storeMode = "bdb.client.storeMode"; public static final String pn_client_submitTo = "bdb.client.submitTo"; public static final String pn_client_ownerParam = "bdb.client.ownerParam"; public static final String pv_client_storeMode_insert = "insert"; public static final String pv_client_storeMode_update = "update"; public static final String pv_client_storeMode_merge = "merge"; public static final String pv_client_submitTo_all = "all"; public static final String pv_client_submitTo_any = "any"; public static final String pv_client_submitTo_query_key_owner = "query-key-owner"; public static final String pv_client_submitTo_param_hash_owner = "param-hash-owner"; public static final String pv_client_submitTo_param_value_owner = "param-value-owner"; public static final String pn_client_contentSerializer = "bdb.client.contentSerializer"; public static final String pn_client_contentSerializers = "bdb.client.contentSerializers"; public static final String pv_client_defaultSerializers = "MAP BMAP SMAP JSON XML"; public static final String pn_document_collections = "bdb.document.collections"; public static final String pn_document_compress = "bdb.document.compress"; public static final String pn_document_content = "bdb.document.content"; public static final String pn_document_data_format = "bdb.document.data.format"; public static final String pn_document_distribution = "bdb.document.distribution"; public static final String pn_document_headers = "bdb.document.headers"; public 
static final String pn_document_data_source = "bdb.document.data.source"; public static final String pn_document_map_merge = "bdb.document.map.merge"; public static final String pv_document_data_source_file = "FILE"; public static final String pv_document_data_source_map = "MAP"; public static final String pv_document_data_source_json = "JSON"; public static final String pv_document_data_source_xml = "XML"; public static final String pn_query_command = "bdb.query.command"; public static final String pn_query_customPaths = "bdb.query.customPaths"; public static final String pn_query_customQuery = "bdb.query.customQuery"; public static final String pn_query_splitBy = "bdb.query.splitBy"; public static final String pn_query_invalidate = "bdb.query.invalidate"; public static final String pn_query_updateable = "bdb.query.updateable"; public static final String pv_query_invalidate_none = "none"; public static final String pv_query_invalidate_docs = "docs"; public static final String pv_query_invalidate_paths = "paths"; public static final String pv_query_invalidate_values = "values"; public static final String pv_query_invalidate_all = "all"; // XDM Server configuration constants public static final String pn_cluster_login = "bdb.cluster.login"; public static final String pn_cluster_node_name = "bdb.cluster.node.name"; public static final String pn_cluster_node_role = "bdb.cluster.node.role"; public static final String pn_cluster_node_size = "bdb.cluster.node.size"; public static final String pn_cluster_admin_port = "bdb.cluster.admin.port"; public static final String pn_cluster_node_schemas = "bdb.cluster.node.schemas"; public static final String pn_cluster_node_home = "bdb.cluster.node.home"; public static final String pn_cluster_node_start = "bdb.cluster.node.start"; public static final String pn_cluster_node_ssh = "bdb.cluster.node.ssh"; public static final String pn_cluster_node_user = "bdb.cluster.node.user"; // REST Server configuration constants public static 
final String pn_rest_enabled = "bdb.rest.enabled"; public static final String pn_rest_jmx = "bdb.rest.jmx"; public static final String pn_rest_port = "bdb.rest.port"; public static final String pn_rest_auth_port = "bdb.rest.auth.port"; public static final String pn_rest_accept_pool = "bdb.rest.accept.pool"; public static final String pn_rest_thread_pool = "bdb.rest.thread.pool"; public static final String pn_access_filename = "bdb.access.filename"; public static final String pn_config_filename = "bdb.config.filename"; public static final String pn_config_path = "bdb.config.path"; public static final String pn_config_context_file = "bdb.config.context.file"; public static final String pn_config_properties_file = "bdb.config.properties.file"; public static final String pn_log_level = "bdb.log.level"; public static final String pn_node_instance = "bdb.node.instance"; public static final String pn_spring_context = "bdb.spring.context"; public static final String pn_schema_data_distribution = "bdb.schema.data.distribution"; public static final String pv_distribution_first_dot = "first-dot"; public static final String pn_schema_members = "bdb.schema.members"; public static final String pn_schema_ports_first = "bdb.schema.ports.first"; public static final String pn_schema_ports_last = "bdb.schema.ports.last"; public static final String pn_schema_format_default = "bdb.schema.format.default"; public static final String pn_schema_store_type = "bdb.schema.store.type"; public static final String pn_schema_store_enabled = "bdb.schema.store.enabled"; public static final String pn_schema_store_data_path = "bdb.schema.store.data.path"; public static final String pn_schema_store_tx_buffer_size = "bdb.schema.store.tx.buffer.size"; public static final String pn_schema_store_read_only = "bdb.schema.store.read-only"; public static final String pn_schema_store_load_count = "bdb.schema.store.load.count"; public static final String pn_schema_store_load_percent = 
"bdb.schema.store.load.percent"; public static final String pn_schema_population_pool = "bdb.schema.population.pool"; public static final String pn_schema_population_size = "bdb.schema.population.size"; public static final String pn_schema_population_chunk_size = "bdb.schema.population.chunk.size"; public static final String pn_schema_population_buffer_size = "bdb.schema.population.buffer.size"; public static final String pn_schema_population_use_catalog = "bdb.schema.population.use.catalog"; public static final String pn_schema_fetch_size = "bdb.schema.fetch.size"; public static final String pn_schema_content_compress = "bdb.schema.content.compress"; public static final String pn_schema_publish_counters = "bdb.schema.publish.counters"; public static final String pn_schema_query_cache = "bdb.schema.query.cache"; public static final String pn_schema_tx_level = "bdb.schema.transaction.level"; public static final String pn_schema_tx_timeout = "bdb.schema.transaction.timeout"; public static final String pn_schema_cache_content = "bdb.schema.cache.content"; public static final String pn_schema_cache_elements = "bdb.schema.cache.elements"; public static final String pn_schema_cache_resources = "bdb.schema.cache.resources"; public static final String pn_schema_index_asynch = "bdb.schema.index.asynch"; public static final String pn_schema_health_check = "bdb.schema.health.management.enabled"; public static final String pn_schema_health_th_low = "bdb.schema.health.threshold.low"; public static final String pn_schema_health_th_high = "bdb.schema.health.threshold.high"; public static final String pn_schema_builder_pretty = "bdb.schema.builder.pretty"; public static final String pn_schema_builder_ident = "bdb.schema.builder.ident"; public static final String pn_schema_parser_schemas = "bdb.schema.parser.schemas"; // XQJ configuration constants public static final String pn_xqj_baseURI = "xqj.schema.baseUri"; public static final String pn_xqj_constructionMode = 
"xqj.schema.constructionMode"; public static final String pn_xqj_defaultCollationUri = "xqj.schema.defaultCollationUri"; public static final String pn_xqj_defaultElementTypeNamespace = "xqj.schema.defaultElementTypeNamespace"; public static final String pn_xqj_defaultFunctionNamespace = "xqj.schema.defaultFunctionNamespace"; public static final String pn_xqj_orderingMode = "xqj.schema.orderingMode"; public static final String pn_xqj_defaultOrderForEmptySequences = "xqj.schema.defaultOrderForEmptySequences"; public static final String pn_xqj_boundarySpacePolicy = "xqj.schema.boundarySpacePolicy"; public static final String pn_xqj_copyNamespacesModePreserve = "xqj.schema.copyNamespacesModePreserve"; public static final String pn_xqj_copyNamespacesModeInherit = "xqj.schema.copyNamespacesModeInherit"; public static final String pn_xqj_bindingMode = "xqj.schema.bindingMode"; public static final String pn_xqj_queryLanguageTypeAndVersion = "xqj.schema.queryLanguageTypeAndVersion"; public static final String pn_xqj_holdability = "xqj.schema.holdability"; public static final String pn_xqj_scrollability = "xqj.schema.scrollability"; public static final String pn_xqj_queryTimeout = "xqj.schema.queryTimeout"; public static final String pn_xqj_defaultNamespaces = "xqj.schema.defaultNamespaces"; // schemas, namespaces public static final String xs_prefix = "xs"; public static final String xs_ns = "http://www.w3.org/2001/XMLSchema"; public static final String bg_schema = "bgdb"; public static final String bg_prefix = bg_schema + ":"; public static final String bg_ns = "http://bagridb.com/bdb"; public static final String bg_version = "2.0.0-SNAPSHOT"; public static final String dc_ns = "http://www.w3.org/2005/xpath-functions/collation/codepoint"; public static final String df_ns = "http://www.w3.org/2005/xpath-functions"; public static final String mt_xml = "application/xml"; public static final String mt_json = "application/json"; // DocumentStore initialization context constants 
public static final String ctx_repo = "xdmRepository"; public static final String ctx_cache = "cacheInstance"; public static final String ctx_context = "userContext"; public static final String ctx_popService = "popManager"; // XQJ MetaData feature constants public static final int xqf_Update = 1; public static final int xqf_XQueryX = 2; public static final int xqf_Transaction = 3; public static final int xqf_Full_Axis = 4; public static final int xqf_Schema_Import = 5; public static final int xqf_Schema_Validation = 6; public static final int xqf_Module = 7; public static final int xqf_Serialization = 8; public static final int xqf_Static_Typing = 9; public static final int xqf_Static_Typing_Extensions = 10; public static final int xqf_XQuery_Encoding_Decl = 11; public static final int xqf_User_Defined_XML_Schema_Type = 12; // public int getMaxExpressionLength() throws XQException; // public int getMaxUserNameLength() throws XQException; // public boolean wasCreatedFromJDBCConnection() throws XQException; // public java.util.Set getSupportedXQueryEncodings() throws XQException; // public boolean isXQueryEncodingSupported(String encoding) throws XQException; // XQJ2 MetaData feature constants public static final int xqf_XQuery_Update_Facility = 13; public static final int xqf_XQuery_Full_Text = 14; public static final int xqf_XQuery_30 = 15; public static final int xqf_XA = 16; // direct commands public static final String cmd_get_document = "get-document-content"; public static final String cmd_get_document_uris = "get-document-uris"; public static final String cmd_query_document_uris = "query-document-uris"; public static final String cmd_remove_document = "remove-document"; public static final String cmd_remove_cln_documents = "remove-collection-documents"; public static final String cmd_store_document = "store-document"; // updating functions public static final String bg_remove_document = bg_prefix + cmd_remove_document; public static final String 
bg_remove_cln_documents = bg_prefix + cmd_remove_cln_documents; public static final String bg_store_document = bg_prefix + cmd_store_document; public static int propToInt(String property) { switch (property) { case pn_schema_address: return 1; case pn_schema_host: return 2; case pn_schema_port: return 3; case pn_schema_name: return 4; case pn_schema_user: return 5; case pn_schema_password: return 6; case pn_client_id: return 7; case pn_client_txId: return 8; case pn_client_txLevel: return 9; case pn_client_txTimeout: return 10; case pn_client_smart: return 11; case pn_client_bufferSize: return 12; case pn_client_connectAttempts: return 13; case pn_client_poolSize: return 14; case pn_client_connectedAt: return 15; case pn_client_memberId: return 16; case pn_client_fetchAsynch: return 17; case pn_client_fetchSize: return 18; case pn_client_fetchType: return 19; case pn_client_healthCheck: return 20; case pn_client_loginTimeout: return 21; case pn_client_dataFactory: return 22; case pn_client_pageSize: return 23; case pn_client_customAuth: return 24; case pn_client_queryCache: return 25; case pn_client_storeMode: return 26; case pn_client_submitTo: return 27; case pn_client_ownerParam: return 28; case pn_client_sharedConnection: return 29; case pn_query_command: return 40; case pn_query_customPaths: return 41; case pn_query_customQuery: return 42; case pn_query_splitBy: return 43; case pn_query_invalidate: return 44; case pn_query_updateable: return 45; case pn_document_collections: return 50; case pn_document_data_format: return 51; case pn_document_headers: return 52; case pn_document_data_source: return 53; case pn_document_map_merge: return 54; case pn_document_compress: return 55; case pn_xqj_baseURI: return 100; case pn_xqj_constructionMode: return 101; case pn_xqj_defaultCollationUri: return 102; case pn_xqj_defaultElementTypeNamespace: return 103; case pn_xqj_defaultFunctionNamespace: return 104; case pn_xqj_orderingMode: return 105; case 
pn_xqj_defaultOrderForEmptySequences: return 106; case pn_xqj_boundarySpacePolicy: return 107; case pn_xqj_copyNamespacesModePreserve: return 108; case pn_xqj_copyNamespacesModeInherit: return 109; case pn_xqj_bindingMode: return 110; case pn_xqj_queryLanguageTypeAndVersion: return 111; case pn_xqj_holdability: return 112; case pn_xqj_scrollability: return 113; case pn_xqj_queryTimeout: return 114; case pn_xqj_defaultNamespaces: return 115; } return 0; } public static String intToProp(int idx) { switch (idx) { case 1: return pn_schema_address; case 2: return pn_schema_host; case 3: return pn_schema_port; case 4: return pn_schema_name; case 5: return pn_schema_user; case 6: return pn_schema_password; case 7: return pn_client_id; case 8: return pn_client_txId; case 9: return pn_client_txLevel; case 10: return pn_client_txTimeout; case 11: return pn_client_smart; case 12: return pn_client_bufferSize; case 13: return pn_client_connectAttempts; case 14: return pn_client_poolSize; case 15: return pn_client_connectedAt; case 16: return pn_client_memberId; case 17: return pn_client_fetchAsynch; case 18: return pn_client_fetchSize; case 19: return pn_client_fetchType; case 20: return pn_client_healthCheck; case 21: return pn_client_loginTimeout; case 22: return pn_client_dataFactory; case 23: return pn_client_pageSize; case 24: return pn_client_customAuth; case 25: return pn_client_queryCache; case 26: return pn_client_storeMode; case 27: return pn_client_submitTo; case 28: return pn_client_ownerParam; case 29: return pn_client_sharedConnection; case 40: return pn_query_command; case 41: return pn_query_customPaths; case 42: return pn_query_customQuery; case 43: return pn_query_splitBy; case 44: return pn_query_invalidate; case 45: return pn_query_updateable; case 50: return pn_document_collections; case 51: return pn_document_data_format; case 52: return pn_document_headers; case 53: return pn_document_data_source; case 54: return pn_document_map_merge; case 55: return 
pn_document_compress; case 100: return pn_xqj_baseURI; case 101: return pn_xqj_constructionMode; case 102: return pn_xqj_defaultCollationUri; case 103: return pn_xqj_defaultElementTypeNamespace; case 104: return pn_xqj_defaultFunctionNamespace; case 105: return pn_xqj_orderingMode; case 106: return pn_xqj_defaultOrderForEmptySequences; case 107: return pn_xqj_boundarySpacePolicy; case 108: return pn_xqj_copyNamespacesModePreserve; case 109: return pn_xqj_copyNamespacesModeInherit; case 110: return pn_xqj_bindingMode; case 111: return pn_xqj_queryLanguageTypeAndVersion; case 112: return pn_xqj_holdability; case 113: return pn_xqj_scrollability; case 114: return pn_xqj_queryTimeout; case 115: return pn_xqj_defaultNamespaces; } return null; } }
package com.newsblur.activity;

import android.content.Context;
import android.text.TextUtils;
import android.text.format.DateUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseExpandableListAdapter;
import android.widget.CheckBox;
import android.widget.ExpandableListView;
import android.widget.ImageView;
import android.widget.TextView;

import com.newsblur.R;
import com.newsblur.domain.Feed;
import com.newsblur.util.AppConstants;
import com.newsblur.util.FeedOrderFilter;
import com.newsblur.util.FeedUtils;
import com.newsblur.util.FolderViewFilter;
import com.newsblur.util.ListOrderFilter;
import com.newsblur.util.PrefsUtils;

import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;
import java.util.TimeZone;

/**
 * Expandable-list adapter showing folders (groups) of feeds (children) with a per-feed
 * checkbox, used to pick a set of feeds. Folder/ordering preferences are read from
 * {@link PrefsUtils} at construction time and can be swapped at runtime via the
 * {@code replace*} methods, which re-sort the children and refresh the list.
 */
public class FeedChooserAdapter extends BaseExpandableListAdapter {

    // Base text sizes (presumably sp) that get multiplied by the user's text-size
    // preference factor below.
    protected final static int defaultTextSizeChild = 14;
    protected final static int defaultTextSizeGroup = 13;

    // IDs of the currently checked feeds; drives the child-row checkbox state.
    protected Set<String> feedIds = new HashSet<>();
    // Parallel lists: folderNames.get(i) owns the feeds in folderChildren.get(i).
    protected ArrayList<String> folderNames = new ArrayList<>();
    protected ArrayList<ArrayList<Feed>> folderChildren = new ArrayList<>();
    protected FolderViewFilter folderViewFilter;
    protected ListOrderFilter listOrderFilter;
    protected FeedOrderFilter feedOrderFilter;
    // User text-size preference, used as a multiplier on the default sizes above.
    protected float textSize;

    FeedChooserAdapter(Context context) {
        folderViewFilter = PrefsUtils.getFeedChooserFolderView(context);
        listOrderFilter = PrefsUtils.getFeedChooserListOrder(context);
        feedOrderFilter = PrefsUtils.getFeedChooserFeedOrder(context);
        textSize = PrefsUtils.getListTextSize(context);
    }

    @Override
    public int getGroupCount() {
        return folderNames.size();
    }

    @Override
    public int getChildrenCount(int groupPosition) {
        return folderChildren.get(groupPosition).size();
    }

    @Override
    public String getGroup(int groupPosition) {
        return folderNames.get(groupPosition);
    }

    @Override
    public Feed getChild(int groupPosition, int childPosition) {
        return folderChildren.get(groupPosition).get(childPosition);
    }

    // IDs are derived from hashCode(); hasStableIds() below relies on these being
    // stable for the lifetime of the data set.
    @Override
    public long getGroupId(int groupPosition) {
        return folderNames.get(groupPosition).hashCode();
    }

    @Override
    public long getChildId(int groupPosition, int childPosition) {
        return folderChildren.get(groupPosition).get(childPosition).hashCode();
    }

    @Override
    public boolean hasStableIds() {
        return true;
    }

    @Override
    public View getGroupView(final int groupPosition, boolean isExpanded, View convertView, final ViewGroup parent) {
        // NOTE(review): convertView is ignored here, so a group row is re-inflated on
        // every bind. Presumably acceptable for the small number of folders — confirm.
        String folderName = folderNames.get(groupPosition);
        if (folderName.equals(AppConstants.ROOT_FOLDER)) {
            // The root folder uses a dedicated layout and sets no text on it here.
            convertView = LayoutInflater.from(parent.getContext()).inflate(R.layout.row_widget_config_root_folder, parent, false);
        } else {
            convertView = LayoutInflater.from(parent.getContext()).inflate(R.layout.row_widget_config_folder, parent, false);
            TextView textName = convertView.findViewById(R.id.text_folder_name);
            textName.setTextSize(textSize * defaultTextSizeGroup);
            textName.setText(folderName);
        }
        // Force every group expanded whenever it is drawn, so the list always shows all feeds.
        ((ExpandableListView) parent).expandGroup(groupPosition);
        return convertView;
    }

    @Override
    public View getChildView(int groupPosition, int childPosition, boolean isLastChild, View convertView, final ViewGroup parent) {
        if (convertView == null) {
            convertView = LayoutInflater.from(parent.getContext()).inflate(R.layout.row_widget_config_feed, parent, false);
        }
        final Feed feed = folderChildren.get(groupPosition).get(childPosition);
        TextView textTitle = convertView.findViewById(R.id.text_title);
        TextView textDetails = convertView.findViewById(R.id.text_details);
        final CheckBox checkBox = convertView.findViewById(R.id.check_box);
        ImageView img = convertView.findViewById(R.id.img);
        textTitle.setTextSize(textSize * defaultTextSizeChild);
        textDetails.setTextSize(textSize * defaultTextSizeChild);
        textTitle.setText(feed.title);
        checkBox.setChecked(feedIds.contains(feed.feedId));
        // The details line mirrors the active sort criterion. Note: NAME shares the
        // "opens" detail text with OPENS.
        if (feedOrderFilter == FeedOrderFilter.NAME || feedOrderFilter == FeedOrderFilter.OPENS) {
            textDetails.setText(parent.getContext().getString(R.string.feed_opens, feed.feedOpens));
        } else if (feedOrderFilter == FeedOrderFilter.SUBSCRIBERS) {
            textDetails.setText(parent.getContext().getString(R.string.feed_subscribers, feed.subscribers));
        } else if (feedOrderFilter == FeedOrderFilter.STORIES_MONTH) {
            textDetails.setText(parent.getContext().getString(R.string.feed_stories_per_month, feed.storiesPerMonth));
        } else {
            // FeedOrderFilter.RECENT_STORY
            try {
                // Server timestamps are parsed as UTC and rendered relative to now.
                // NOTE(review): a new SimpleDateFormat per bind; could be cached on the
                // adapter since binding happens on a single (UI) thread — confirm.
                DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.getDefault());
                dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
                Date dateTime = dateFormat.parse(feed.lastStoryDate);
                CharSequence relativeTimeString = DateUtils.getRelativeTimeSpanString(dateTime.getTime(), System.currentTimeMillis(), DateUtils.MINUTE_IN_MILLIS);
                textDetails.setText(relativeTimeString);
            } catch (Exception e) {
                // Unparseable (or null) date: fall back to showing the raw value.
                textDetails.setText(feed.lastStoryDate);
            }
        }
        FeedUtils.iconLoader.displayImage(feed.faviconUrl, img, img.getHeight(), true);
        return convertView;
    }

    @Override
    public boolean isChildSelectable(int groupPosition, int childPosition) {
        return true;
    }

    /**
     * Installs a new data set. In NESTED view the supplied folder structure is used as-is;
     * in flat view everything is collapsed into a single synthetic root folder holding
     * the flat {@code feeds} list (the passed-in list reference is kept, not copied).
     */
    protected void setData(ArrayList<String> activeFoldersNames, ArrayList<ArrayList<Feed>> activeFolderChildren, ArrayList<Feed> feeds) {
        if (folderViewFilter == FolderViewFilter.NESTED) {
            this.folderNames = activeFoldersNames;
            this.folderChildren = activeFolderChildren;
        } else {
            this.folderNames = new ArrayList<>(1);
            this.folderNames.add(AppConstants.ROOT_FOLDER);
            this.folderChildren = new ArrayList<>();
            this.folderChildren.add(feeds);
        }
        this.notifyDataChanged();
    }

    protected void replaceFeedOrder(FeedOrderFilter feedOrderFilter) {
        this.feedOrderFilter = feedOrderFilter;
        notifyDataChanged();
    }

    protected void replaceListOrder(ListOrderFilter listOrderFilter) {
        this.listOrderFilter = listOrderFilter;
        notifyDataChanged();
    }

    // NOTE(review): unlike the other replace* methods this does not notify; presumably
    // the caller follows up with setData() to rebuild the folder structure — confirm.
    protected void replaceFolderView(FolderViewFilter folderViewFilter) {
        this.folderViewFilter = folderViewFilter;
    }

    /** Re-sorts every folder's children by the current filters and refreshes the list. */
    protected void notifyDataChanged() {
        for (ArrayList<Feed> feedList : this.folderChildren) {
            Collections.sort(feedList, getListComparator());
        }
        this.notifyDataSetChanged();
    }

    /** Replaces the checked-feed set without refreshing the views. */
    protected void setFeedIds(Set<String> feedIds) {
        this.feedIds.clear();
        this.feedIds.addAll(feedIds);
    }

    /** Replaces the checked-feed set and refreshes the views. */
    protected void replaceFeedIds(Set<String> feedIds) {
        setFeedIds(feedIds);
        this.notifyDataSetChanged();
    }

    /**
     * Builds a comparator for the current feedOrderFilter/listOrderFilter combination.
     * Falls through to an ascending title comparison when a criterion's data is missing
     * or no branch matches.
     */
    private Comparator<Feed> getListComparator() {
        return (o1, o2) -> {
            // some feeds have missing data
            if (o1.title == null) o1.title = "";
            if (o2.title == null) o2.title = "";
            if (feedOrderFilter == FeedOrderFilter.NAME && listOrderFilter == ListOrderFilter.ASCENDING) {
                return o1.title.compareTo(o2.title);
            } else if (feedOrderFilter == FeedOrderFilter.NAME && listOrderFilter == ListOrderFilter.DESCENDING) {
                return o2.title.compareTo(o1.title);
            } else if (o1.subscribers != null && o2.subscribers != null && feedOrderFilter == FeedOrderFilter.SUBSCRIBERS && listOrderFilter == ListOrderFilter.ASCENDING) {
                // NOTE(review): subscribers is a String; Integer.valueOf throws
                // NumberFormatException on non-numeric values — confirm server format.
                return Integer.valueOf(o1.subscribers).compareTo(Integer.valueOf(o2.subscribers));
            } else if (o1.subscribers != null && o2.subscribers != null && feedOrderFilter == FeedOrderFilter.SUBSCRIBERS && listOrderFilter == ListOrderFilter.DESCENDING) {
                return Integer.valueOf(o2.subscribers).compareTo(Integer.valueOf(o1.subscribers));
            } else if (feedOrderFilter == FeedOrderFilter.OPENS && listOrderFilter == ListOrderFilter.ASCENDING) {
                return Integer.compare(o1.feedOpens, o2.feedOpens);
            } else if (feedOrderFilter == FeedOrderFilter.OPENS && listOrderFilter == ListOrderFilter.DESCENDING) {
                return Integer.compare(o2.feedOpens, o1.feedOpens);
            } else if (o1.lastStoryDate != null && o2.lastStoryDate != null && feedOrderFilter == FeedOrderFilter.RECENT_STORY && listOrderFilter == ListOrderFilter.ASCENDING) {
                // Both RECENT_STORY branches delegate; direction is decided inside
                // compareLastStoryDateTimes via listOrderFilter.
                return compareLastStoryDateTimes(o1.lastStoryDate, o2.lastStoryDate, listOrderFilter);
            } else if (o1.lastStoryDate != null && o2.lastStoryDate != null && feedOrderFilter == FeedOrderFilter.RECENT_STORY && listOrderFilter == ListOrderFilter.DESCENDING) {
                return compareLastStoryDateTimes(o1.lastStoryDate, o2.lastStoryDate, listOrderFilter);
            } else if (feedOrderFilter == FeedOrderFilter.STORIES_MONTH && listOrderFilter == ListOrderFilter.ASCENDING) {
                return Integer.compare(o1.storiesPerMonth, o2.storiesPerMonth);
            } else if (feedOrderFilter == FeedOrderFilter.STORIES_MONTH && listOrderFilter == ListOrderFilter.DESCENDING) {
                return Integer.compare(o2.storiesPerMonth, o1.storiesPerMonth);
            }
            return o1.title.compareTo(o2.title);
        };
    }

    /**
     * Compares two "yyyy-MM-dd HH:mm:ss" timestamps in the requested direction.
     * Empty/null timestamps are substituted with a fixed epoch-2000 sentinel so they
     * sort as oldest. Returns 0 (equal) when either timestamp fails to parse.
     */
    private int compareLastStoryDateTimes(String firstDateTime, String secondDateTime, ListOrderFilter listOrderFilter) {
        try {
            // NOTE(review): allocated per comparison during a sort; caching would avoid
            // the churn but must respect SimpleDateFormat's lack of thread-safety.
            DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.getDefault());
            // found null last story date times on feeds
            if (TextUtils.isEmpty(firstDateTime)) {
                firstDateTime = "2000-01-01 00:00:00";
            }
            if (TextUtils.isEmpty(secondDateTime)) {
                secondDateTime = "2000-01-01 00:00:00";
            }
            Date firstDate = dateFormat.parse(firstDateTime);
            Date secondDate = dateFormat.parse(secondDateTime);
            if (listOrderFilter == ListOrderFilter.ASCENDING) {
                return firstDate.compareTo(secondDate);
            } else {
                return secondDate.compareTo(firstDate);
            }
        } catch (ParseException e) {
            e.printStackTrace();
            return 0;
        }
    }
}
package dagger.reactions;

import dagger.Reaction;
import dagger.http.Formats;
import dagger.http.HttpHeader;
import dagger.http.Request;
import dagger.http.StatusCode;
import dagger.lang.DelegateClassLoader;
import dagger.lang.mime.MimeTypeGuesser;
import dagger.mime.MimeType;
import dagger.mock.MockResponse;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.exporter.ZipExporter;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.junit.Before;
import org.junit.Test;

import java.io.File;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.Date;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

import static dagger.http.HttpHeader.IF_MODIFIED_SINCE;
import static dagger.http.HttpHeader.LAST_MODIFIED;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Tests for {@link ResourceFile}: serving static classpath resources, both from the
 * filesystem and from inside a jar, including Last-Modified / If-Modified-Since handling.
 */
public class ResourceFileTest {

    private static final String FILE_PATH = "/foo/bar/static-file-test.html";
    private static final String RESOURCE_NAME = "/view/static" + FILE_PATH;
    public static final String FILE_CONTENTS = "<html>lorem ipsum</html>";

    private Request request;
    private MockResponse response;
    private MimeTypeGuesser mimeTypeGuesser;

    @Before
    public void setUp() throws Exception {
        request = mock(Request.class);
        response = new MockResponse();
        mimeTypeGuesser = mock(MimeTypeGuesser.class);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testFilePathMustStartWithSlash() {
        new ResourceFile("relative/file/path/not.allowed", mimeTypeGuesser);
    }

    @Test
    public void testExistingFile() throws Exception {
        URL fileUrl = getClass().getResource(RESOURCE_NAME);
        when(mimeTypeGuesser.guessMimeType(fileUrl)).thenReturn(MimeType.TEXT_HTML);
        Reaction reaction = new ResourceFile(FILE_PATH, mimeTypeGuesser);
        reaction.execute(request, response);
        assertOk();
    }

    @Test
    public void testExistingFileInsideJar() throws Exception {
        ClassLoader classLoader = createClassLoaderFor(createJar());
        // ClassLoader.getResource takes names without the leading slash.
        URL fileUrl = classLoader.getResource(withoutLeadingSlash(RESOURCE_NAME));
        when(mimeTypeGuesser.guessMimeType(fileUrl)).thenReturn(MimeType.TEXT_HTML);
        Reaction reaction = createReactionInstanceFrom(classLoader);
        reaction.execute(request, response);
        assertOk();
    }

    @Test
    public void testLastModifiedHeader() throws Exception {
        Date modificationDate = getFileModificationDate();
        String expectedLastModifiedValue = Formats.timestamp().format(modificationDate);
        Reaction reaction = new ResourceFile(FILE_PATH, mimeTypeGuesser);
        reaction.execute(request, response);
        assertEquals(expectedLastModifiedValue, response.getHeader("Last-Modified"));
    }

    @Test
    public void testLastModifiedHeaderForFilesInsideJar() throws Exception {
        // Build the jar once and read the entry timestamp from it; the original built a
        // second identical jar for the classloader and leaked the unclosed ZipFile.
        File jarFile = createJar();
        String expectedLastModifiedValue;
        try (ZipFile jar = new ZipFile(jarFile)) {
            ZipEntry fileInsideJar = jar.getEntry(withoutLeadingSlash(RESOURCE_NAME));
            Date fileDateInsideJar = new Date(fileInsideJar.getTime());
            expectedLastModifiedValue = Formats.timestamp().format(fileDateInsideJar);
        }
        ClassLoader classLoader = createClassLoaderFor(jarFile);
        Reaction reaction = createReactionInstanceFrom(classLoader);
        reaction.execute(request, response);
        assertEquals(expectedLastModifiedValue, response.getHeader(LAST_MODIFIED));
    }

    @Test
    public void testFileNotFound() throws Exception {
        Reaction reaction = new ResourceFile("/bogus.png", mimeTypeGuesser);
        reaction.execute(request, response);
        assertNotFound();
    }

    @Test
    public void testDoNotMistakeDirectoryForFile() throws Exception {
        Reaction reaction = new ResourceFile("/", mimeTypeGuesser);
        reaction.execute(request, response);
        assertNotFound();
    }

    @Test
    public void testReturnStatusNotModifiedIfNotModifiedSinceFileModificationDate() throws Exception {
        Date modificationDate = getFileModificationDate();
        Date ifModifiedSince = new Date(modificationDate.getTime() + 1000);
        when(request.getHeader(IF_MODIFIED_SINCE)).thenReturn(Formats.timestamp().format(ifModifiedSince));
        Reaction reaction = new ResourceFile(FILE_PATH, mimeTypeGuesser);
        reaction.execute(request, response);
        assertEquals(StatusCode.NOT_MODIFIED, response.getStatusCode());
    }

    @Test
    public void testReturnStatusNotModifiedIfModificationDateEqualsToIfModifiedSinceHeader() throws Exception {
        Date modificationDate = getFileModificationDate();
        Date ifModifiedSince = new Date(modificationDate.getTime());
        when(request.getHeader(IF_MODIFIED_SINCE)).thenReturn(Formats.timestamp().format(ifModifiedSince));
        Reaction reaction = new ResourceFile(FILE_PATH, mimeTypeGuesser);
        reaction.execute(request, response);
        assertEquals(StatusCode.NOT_MODIFIED, response.getStatusCode());
    }

    @Test
    public void testReturnOkIfModificationDateIsAfterIfModifiedSinceHeader() throws Exception {
        Date modificationDate = getFileModificationDate();
        Date ifModifiedSince = new Date(modificationDate.getTime() - 1000);
        when(request.getHeader(IF_MODIFIED_SINCE)).thenReturn(Formats.timestamp().format(ifModifiedSince));
        Reaction reaction = new ResourceFile(FILE_PATH, mimeTypeGuesser);
        reaction.execute(request, response);
        assertEquals(StatusCode.OK, response.getStatusCode());
    }

    @Test
    public void testIgnoreIncorrectFormatOfIfModifiedSinceHeader() throws Exception {
        when(request.getHeader(IF_MODIFIED_SINCE)).thenReturn("bogus date");
        Reaction reaction = new ResourceFile(FILE_PATH, mimeTypeGuesser);
        reaction.execute(request, response);
        assertEquals(StatusCode.OK, response.getStatusCode());
    }

    /** Modification date of the resource when served straight from the filesystem. */
    private Date getFileModificationDate() throws URISyntaxException {
        File file = new File(getClass().getResource(RESOURCE_NAME).toURI());
        return new Date(file.lastModified());
    }

    /** Asserts a successful HTML response carrying the expected file contents. */
    private void assertOk() {
        assertEquals(StatusCode.OK, response.getStatusCode());
        assertEquals(MimeType.TEXT_HTML, response.getHeader(HttpHeader.CONTENT_TYPE));
        assertEquals(FILE_CONTENTS, response.getOutputAsString());
        assertTrue("Output stream should be closed", response.isOutputStreamClosed());
    }

    /** Asserts the standard plain-text 404 response. */
    private void assertNotFound() {
        assertEquals(StatusCode.NOT_FOUND, response.getStatusCode());
        assertEquals(MimeType.TEXT_PLAIN, response.getHeader(HttpHeader.CONTENT_TYPE));
        assertEquals("Not found.", response.getOutputAsString());
        assertTrue("Output stream should be closed", response.isOutputStreamClosed());
    }

    /** Packs {@link ResourceFile} and the test resource into a temporary jar. */
    private File createJar() throws IOException {
        JavaArchive archive = ShrinkWrap.create(JavaArchive.class, "archive.jar")
                .addClasses(ResourceFile.class)
                .addAsResource(getClass().getResource(RESOURCE_NAME), RESOURCE_NAME);
        File jarFile = File.createTempFile("test", ".jar");
        archive.as(ZipExporter.class).exportTo(jarFile, true);
        return jarFile;
    }

    /**
     * Loads ResourceFile through the given classloader (so the jar-backed copy is used)
     * and instantiates it reflectively.
     */
    private Reaction createReactionInstanceFrom(ClassLoader classLoader) throws Exception {
        Class<?> clazz = classLoader.loadClass(ResourceFile.class.getName());
        Constructor<?> constructor = clazz.getConstructor(String.class, MimeTypeGuesser.class);
        return (Reaction) constructor.newInstance(FILE_PATH, mimeTypeGuesser);
    }

    /** Classloader that resolves ResourceFile and the test resource from the jar only. */
    private ClassLoader createClassLoaderFor(File jarFile) throws MalformedURLException {
        DelegateClassLoader parentClassLoader = new DelegateClassLoader();
        parentClassLoader.delegateClassToChildrenClassLoaders(ResourceFile.class.getName());
        parentClassLoader.delegateResourceToChildrenClassLoaders(withoutLeadingSlash(RESOURCE_NAME));
        return new URLClassLoader(new URL[] { jarFile.toURI().toURL() }, parentClassLoader);
    }

    /**
     * Strips the leading '/' so the name is usable with ClassLoader/ZipFile lookups.
     * Bug fix: the original helper (misnamed "withoutTrailingSlash") ignored its
     * parameter and always operated on the RESOURCE_NAME constant.
     */
    private String withoutLeadingSlash(String resourceName) {
        return resourceName.substring(1);
    }
}
package jobs; import gnu.trove.set.TLongSet; import gnu.trove.set.hash.TLongHashSet; import java.io.File; import java.math.BigInteger; import java.util.ArrayList; import java.util.Calendar; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.zip.ZipFile; import javax.persistence.EntityManager; import javax.persistence.Query; import org.apache.commons.io.FileUtils; import org.apache.commons.lang.StringUtils; import org.joda.time.DateTimeZone; import org.joda.time.LocalDate; import org.mapdb.Fun.Tuple2; import org.onebusaway.gtfs.impl.GtfsDaoImpl; import org.onebusaway.gtfs.model.AgencyAndId; import org.onebusaway.gtfs.model.calendar.ServiceDate; import org.onebusaway.gtfs.serialization.GtfsReader; import org.onebusaway.gtfs.serialization.GtfsWriter; import com.conveyal.gtfs.GTFSFeed; import com.conveyal.gtfs.model.CalendarDate; import com.conveyal.gtfs.model.Entity; import com.conveyal.gtfs.model.Frequency; import com.conveyal.gtfs.model.Service; import com.conveyal.gtfs.model.Shape; import com.google.common.collect.HashMultimap; import com.google.common.collect.Multimap; import com.mchange.v2.c3p0.impl.DbAuth; import com.vividsolutions.jts.geom.Coordinate; import datastore.VersionedDataStore; import datastore.AgencyTx; import datastore.GlobalTx; import models.transit.Agency; import models.transit.AttributeAvailabilityType; import models.transit.Route; import models.transit.ScheduleException; import models.transit.ServiceCalendar; import models.transit.Stop; import models.transit.StopTime; import models.transit.TripDirection; import models.transit.TripPattern; import models.transit.TripPatternStop; import models.transit.Trip; import play.Logger; import play.Play; import play.jobs.Job; import play.jobs.OnApplicationStart; import utils.DirectoryZip; import utils.GeoUtils; public 
class ProcessGtfsSnapshotExport implements Runnable { private Collection<String> agencies; private File output; private LocalDate startDate; private LocalDate endDate; public ProcessGtfsSnapshotExport(Collection<String> agencies, File output, LocalDate startDate, LocalDate endDate) { this.agencies = agencies; this.output = output; this.startDate = startDate; this.endDate = endDate; } @Override public void run() { GTFSFeed feed = new GTFSFeed(); GlobalTx gtx = VersionedDataStore.getGlobalTx(); AgencyTx atx = null; try { for (String agencyId : agencies) { Agency agency = gtx.agencies.get(agencyId); com.conveyal.gtfs.model.Agency gtfsAgency = agency.toGtfs(); Logger.info("Exporting agency %s", gtfsAgency); atx = VersionedDataStore.getAgencyTx(agencyId); // write the agencies.txt entry feed.agency.put(agencyId, agency.toGtfs()); // write all of the calendars and calendar dates for (ServiceCalendar cal : atx.calendars.values()) { com.conveyal.gtfs.model.Service gtfsService = cal.toGtfs(toGtfsDate(startDate), toGtfsDate(endDate)); // note: not using user-specified IDs // add calendar dates for (ScheduleException ex : atx.exceptions.values()) { for (LocalDate date : ex.dates) { if (date.isBefore(startDate) || date.isAfter(endDate)) // no need to write dates that do not apply continue; CalendarDate cd = new CalendarDate(); cd.date = date; cd.service = gtfsService; cd.exception_type = ex.serviceRunsOn(cal) ? 
1 : 2; if (gtfsService.calendar_dates.containsKey(date)) throw new IllegalArgumentException("Duplicate schedule exceptions on " + date.toString()); gtfsService.calendar_dates.put(date, cd); } } feed.services.put(gtfsService.service_id, gtfsService); } // write the routes for (Route route : atx.routes.values()) { com.conveyal.gtfs.model.Route gtfsRoute = route.toGtfs(gtfsAgency, gtx); feed.routes.put(route.getGtfsId(), gtfsRoute); // write the trips on those routes for (Trip trip : atx.getTripsByRoute(route.id)) { com.conveyal.gtfs.model.Trip gtfsTrip = new com.conveyal.gtfs.model.Trip(); gtfsTrip.block_id = trip.blockId; gtfsTrip.route = gtfsRoute; gtfsTrip.trip_id = trip.getGtfsId(); // not using custom ids gtfsTrip.service = feed.services.get(trip.calendarId); gtfsTrip.trip_headsign = trip.tripHeadsign; gtfsTrip.trip_short_name = trip.tripShortName; gtfsTrip.direction_id = trip.tripDirection == TripDirection.A ? 0 : 1; TripPattern pattern = atx.tripPatterns.get(trip.patternId); Tuple2<String, Integer> nextKey = feed.shapePoints.ceilingKey(new Tuple2(pattern.id, null)); if ((nextKey == null || !pattern.id.equals(nextKey.a)) && pattern.shape != null && !pattern.useStraightLineDistances) { // this shape has not yet been saved double[] coordDistances = GeoUtils.getCoordDistances(pattern.shape); for (int i = 0; i < coordDistances.length; i++) { Coordinate coord = pattern.shape.getCoordinateN(i); Shape shape = new Shape(pattern.id, coord.y, coord.x, i + 1, coordDistances[i]); feed.shapePoints.put(new Tuple2(pattern.id, shape.shape_pt_sequence), shape); } } if (pattern.shape != null && !pattern.useStraightLineDistances) gtfsTrip.shape_id = pattern.id; if (trip.wheelchairBoarding != null) { if (trip.wheelchairBoarding.equals(AttributeAvailabilityType.AVAILABLE)) gtfsTrip.wheelchair_accessible = 1; else if (trip.wheelchairBoarding.equals(AttributeAvailabilityType.UNAVAILABLE)) gtfsTrip.wheelchair_accessible = 2; else gtfsTrip.wheelchair_accessible = 0; } else if 
(route.wheelchairBoarding != null) { if (route.wheelchairBoarding.equals(AttributeAvailabilityType.AVAILABLE)) gtfsTrip.wheelchair_accessible = 1; else if (route.wheelchairBoarding.equals(AttributeAvailabilityType.UNAVAILABLE)) gtfsTrip.wheelchair_accessible = 2; else gtfsTrip.wheelchair_accessible = 0; } feed.trips.put(gtfsTrip.trip_id, gtfsTrip); TripPattern patt = atx.tripPatterns.get(trip.patternId); Iterator<TripPatternStop> psi = patt.patternStops.iterator(); int stopSequence = 1; // write the stop times for (StopTime st : trip.stopTimes) { TripPatternStop ps = psi.next(); if (st == null) continue; Stop stop = atx.stops.get(st.stopId); if (!st.stopId.equals(ps.stopId)) { throw new IllegalStateException("Trip " + trip.id + " does not match its pattern!"); } com.conveyal.gtfs.model.StopTime gst = new com.conveyal.gtfs.model.StopTime(); gst.arrival_time = st.arrivalTime != null ? st.arrivalTime : Entity.INT_MISSING; gst.departure_time = st.departureTime != null ? st.departureTime : Entity.INT_MISSING; if (st.dropOffType != null) gst.drop_off_type = st.dropOffType.toGtfsValue(); else if (stop.dropOffType != null) gst.drop_off_type = stop.dropOffType.toGtfsValue(); if (st.pickupType != null) gst.pickup_type = st.pickupType.toGtfsValue(); else if (stop.dropOffType != null) gst.drop_off_type = stop.dropOffType.toGtfsValue(); gst.shape_dist_traveled = ps.shapeDistTraveled; gst.stop_headsign = st.stopHeadsign; gst.stop_id = stop.getGtfsId(); // write the stop as needed if (!feed.stops.containsKey(gst.stop_id)) { feed.stops.put(gst.stop_id, stop.toGtfs()); } gst.stop_sequence = stopSequence++; if (ps.timepoint != null) gst.timepoint = ps.timepoint ? 
1 : 0; else gst.timepoint = Entity.INT_MISSING; gst.trip_id = gtfsTrip.trip_id; feed.stop_times.put(new Tuple2(gtfsTrip.trip_id, gst.stop_sequence), gst); } // create frequencies as needed if (trip.useFrequency != null && trip.useFrequency) { Frequency f = new Frequency(); f.trip = gtfsTrip; f.start_time = trip.startTime; f.end_time = trip.endTime; f.exact_times = 0; f.headway_secs = trip.headway; feed.frequencies.put(gtfsTrip.trip_id, f); } } } } feed.toFile(output.getAbsolutePath()); } finally { gtx.rollbackIfOpen(); if (atx != null) atx.rollbackIfOpen(); } } public static int toGtfsDate (LocalDate date) { return date.getYear() * 10000 + date.getMonthOfYear() * 100 + date.getDayOfMonth(); } }
/*
 * Copyright 2014 MovingBlocks
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.terasology.rendering.nui.widgets;

import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.terasology.asset.Assets;
import org.terasology.input.Keyboard;
import org.terasology.input.Keyboard.KeyId;
import org.terasology.input.MouseInput;
import org.terasology.input.device.KeyboardDevice;
import org.terasology.math.geom.Rect2i;
import org.terasology.math.TeraMath;
import org.terasology.math.geom.Vector2i;
import org.terasology.rendering.FontColor;
import org.terasology.rendering.FontUnderline;
import org.terasology.rendering.assets.font.Font;
import org.terasology.rendering.assets.texture.TextureRegion;
import org.terasology.rendering.nui.BaseInteractionListener;
import org.terasology.rendering.nui.Canvas;
import org.terasology.rendering.nui.Color;
import org.terasology.rendering.nui.CoreWidget;
import org.terasology.rendering.nui.InteractionListener;
import org.terasology.rendering.nui.LayoutConfig;
import org.terasology.rendering.nui.SubRegion;
import org.terasology.rendering.nui.TextLineBuilder;
import org.terasology.rendering.nui.databinding.Binding;
import org.terasology.rendering.nui.databinding.DefaultBinding;
import org.terasology.rendering.nui.events.NUIKeyEvent;
import org.terasology.rendering.nui.events.NUIMouseClickEvent;
import org.terasology.rendering.nui.events.NUIMouseDragEvent;
import org.terasology.rendering.nui.events.NUIMouseReleaseEvent;

import java.awt.*;
import java.awt.datatransfer.DataFlavor;
import java.awt.datatransfer.StringSelection;
import java.awt.datatransfer.Transferable;
import java.awt.datatransfer.UnsupportedFlavorException;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;

/**
 * An editable text widget supporting a blinking cursor, mouse and keyboard
 * cursor movement, shift-selection, and clipboard cut/copy/paste via the AWT
 * system clipboard. The displayed text is backed by a {@link Binding} so it can
 * be bound to external state. Supports single-line (with horizontal scrolling
 * via {@code offset}) and multi-line (word-wrapped) modes.
 *
 * @author Immortius
 */
public class UIText extends CoreWidget {

    private static final Logger logger = LoggerFactory.getLogger(UIText.class);

    // Half-period of the cursor blink, in seconds: the cursor is visible while
    // blinkCounter < BLINK_RATE and hidden for the rest of the 2*BLINK_RATE cycle.
    private static final float BLINK_RATE = 0.25f;

    // Time accumulator for the blink cycle; wrapped in update().
    private float blinkCounter;

    // Solid texture used to draw both the cursor bar and the selection highlight.
    private TextureRegion cursorTexture;
    private Binding<String> text = new DefaultBinding<>("");

    @LayoutConfig
    private boolean multiline;

    @LayoutConfig
    private boolean readOnly;

    // Caret index into the text. A selection exists when this differs from
    // selectionStart; the selection covers [min, max) of the two.
    private int cursorPosition;
    private int selectionStart;

    // Canvas width and font captured during the last onDraw; used by key and
    // mouse handlers, which therefore only work after the widget has drawn once.
    private int lastWidth;
    private Font lastFont;

    private List<ActivateEventListener> activationListeners = Lists.newArrayList();
    private List<CursorUpdateEventListener> cursorUpdateListeners = Lists.newArrayList();

    // Horizontal scroll offset in pixels (single-line mode only).
    private int offset;

    // Left-click places the cursor; dragging extends a selection from it.
    private InteractionListener interactionListener = new BaseInteractionListener() {
        boolean dragging;

        @Override
        public boolean onMouseClick(NUIMouseClickEvent event) {
            if (event.getMouseButton() == MouseInput.MOUSE_LEFT) {
                moveCursor(event.getRelativeMousePosition(), false, event.getKeyboard());
                dragging = true;
                return true;
            }
            return false;
        }

        @Override
        public void onMouseDrag(NUIMouseDragEvent event) {
            if (dragging) {
                moveCursor(event.getRelativeMousePosition(), true, event.getKeyboard());
            }
        }

        @Override
        public void onMouseRelease(NUIMouseReleaseEvent event) {
            if (event.getMouseButton() == MouseInput.MOUSE_LEFT) {
                dragging = false;
            }
        }
    };

    public UIText() {
        cursorTexture = Assets.getTexture("engine:white").get();
    }

    public UIText(String id) {
        super(id);
        cursorTexture = Assets.getTexture("engine:white").get();
    }

    /**
     * Renders the text, plus either the selection highlight or the blinking
     * cursor when focused. Also records the font and width used, which the
     * input handlers depend on.
     */
    @Override
    public void onDraw(Canvas canvas) {
        if (text.get() == null) {
            text.set("");
        }
        lastFont = canvas.getCurrentStyle().getFont();
        lastWidth = canvas.size().x;
        canvas.addInteractionRegion(interactionListener, canvas.getRegion());
        correctCursor();

        // Single-line mode draws at full text width inside a scrolled sub-region
        // (shifted left by `offset`); multiline just uses the canvas width.
        int widthForDraw = (multiline) ? canvas.size().x : lastFont.getWidth(getText());

        try (SubRegion ignored = canvas.subRegion(canvas.getRegion(), true);
             SubRegion ignored2 = canvas.subRegion(Rect2i.createFromMinAndSize(-offset, 0, widthForDraw + 1, Integer.MAX_VALUE), false)) {
            canvas.drawText(text.get(), canvas.getRegion());
            if (isFocused()) {
                if (hasSelection()) {
                    drawSelection(canvas);
                } else {
                    drawCursor(canvas);
                }
            }
        }
    }

    /**
     * Draws the selection highlight: walks the text line by line (and, in
     * multiline mode, wrapped sub-line by sub-line), drawing an inverted-color
     * rectangle behind each piece that overlaps [start, end).
     */
    private void drawSelection(Canvas canvas) {
        Font font = canvas.getCurrentStyle().getFont();
        String currentText = getText();

        int start = Math.min(getCursorPosition(), selectionStart);
        int end = Math.max(getCursorPosition(), selectionStart);

        Color textColor = canvas.getCurrentStyle().getTextColor();
        int canvasWidth = (multiline) ? canvas.size().x : Integer.MAX_VALUE;

        // TODO: Support different text alignments
        List<String> rawLinesAfterCursor = TextLineBuilder.getLines(font, currentText, Integer.MAX_VALUE);
        // currentChar tracks the text index of the first char of the current
        // (sub-)line; lineOffset is the on-screen line number being drawn.
        int currentChar = 0;
        int lineOffset = 0;
        for (int lineIndex = 0; lineIndex < rawLinesAfterCursor.size() && currentChar <= end; ++lineIndex) {
            String line = rawLinesAfterCursor.get(lineIndex);
            List<String> innerLines = TextLineBuilder.getLines(font, line, canvasWidth);

            for (int innerLineIndex = 0; innerLineIndex < innerLines.size() && currentChar <= end; ++innerLineIndex) {
                String innerLine = innerLines.get(innerLineIndex);
                String selectionString;
                int offsetX = 0;
                if (currentChar + innerLine.length() < start) {
                    // selection starts after this sub-line: nothing to highlight
                    selectionString = "";
                } else if (currentChar < start) {
                    // selection starts inside this sub-line
                    offsetX = font.getWidth(innerLine.substring(0, start - currentChar));
                    selectionString = innerLine.substring(start - currentChar, Math.min(end - currentChar, innerLine.length()));
                } else if (currentChar + innerLine.length() >= end) {
                    // selection ends inside this sub-line
                    selectionString = innerLine.substring(0, end - currentChar);
                } else {
                    // sub-line fully inside the selection
                    selectionString = innerLine;
                }

                if (!selectionString.isEmpty()) {
                    int selectionWidth = font.getWidth(selectionString);
                    Vector2i selectionTopLeft = new Vector2i(offsetX, (lineOffset) * font.getLineHeight());
                    Rect2i region = Rect2i.createFromMinAndSize(selectionTopLeft.x, selectionTopLeft.y, selectionWidth, font.getLineHeight());

                    canvas.drawTexture(cursorTexture, region, textColor);
                    // draw the selected text in the inverse color on top of the block
                    canvas.drawTextRaw(FontUnderline.strip(FontColor.stripColor(selectionString)), font, textColor.inverse(), region);
                }

                currentChar += innerLine.length();
                lineOffset++;
            }
            // +1 for the newline separating raw lines
            currentChar++;
        }
    }

    /**
     * Draws the 1px-wide cursor bar at the caret position, only during the
     * visible half of the blink cycle.
     */
    private void drawCursor(Canvas canvas) {
        if (blinkCounter < BLINK_RATE) {
            Font font = canvas.getCurrentStyle().getFont();
            String beforeCursor = text.get();
            if (getCursorPosition() < text.get().length()) {
                beforeCursor = beforeCursor.substring(0, getCursorPosition());
            }
            List<String> lines = TextLineBuilder.getLines(font, beforeCursor, canvas.size().x);

            // TODO: Support different alignments
            int lastLineWidth = font.getWidth(lines.get(lines.size() - 1));
            Rect2i region = Rect2i.createFromMinAndSize(lastLineWidth, (lines.size() - 1) * font.getLineHeight(),
                    1, font.getLineHeight());
            canvas.drawTexture(cursorTexture, region, canvas.getCurrentStyle().getTextColor());
        }
    }

    @Override
    public Vector2i getPreferredContentSize(Canvas canvas, Vector2i areaHint) {
        Font font = canvas.getCurrentStyle().getFont();
        if (isMultiline()) {
            List<String> lines = TextLineBuilder.getLines(font, text.get(), areaHint.x);
            return font.getSize(lines);
        } else {
            return new Vector2i(font.getWidth(getText()), font.getLineHeight());
        }
    }

    @Override
    public Vector2i getMaxContentSize(Canvas canvas) {
        Font font = canvas.getCurrentStyle().getFont();
        if (isMultiline()) {
            return new Vector2i(Integer.MAX_VALUE, Integer.MAX_VALUE);
        } else {
            return new Vector2i(Integer.MAX_VALUE, font.getLineHeight());
        }
    }

    /**
     * Keyboard handling. The first switch covers navigation and copy (allowed
     * even when read-only); the second switch covers mutations (backspace,
     * delete, enter-activation, paste/cut, character insertion) and is skipped
     * when {@code readOnly} is set. Does nothing until the widget has drawn
     * once (lastFont is null before that).
     */
    @Override
    public boolean onKeyEvent(NUIKeyEvent event) {
        correctCursor();
        boolean eventHandled = false;
        if (event.isDown() && lastFont != null) {
            String fullText = text.get();

            switch (event.getKey().getId()) {
                case KeyId.LEFT: {
                    if (hasSelection() && !isSelectionModifierActive(event.getKeyboard())) {
                        // collapse selection to its left edge
                        setCursorPosition(Math.min(getCursorPosition(), selectionStart));
                    } else if (getCursorPosition() > 0) {
                        decreaseCursorPosition(1, !isSelectionModifierActive(event.getKeyboard()));
                    }
                    eventHandled = true;
                    break;
                }
                case KeyId.RIGHT: {
                    if (hasSelection() && !isSelectionModifierActive(event.getKeyboard())) {
                        // collapse selection to its right edge
                        setCursorPosition(Math.max(getCursorPosition(), selectionStart));
                    } else if (getCursorPosition() < fullText.length()) {
                        increaseCursorPosition(1, !isSelectionModifierActive(event.getKeyboard()));
                    }
                    eventHandled = true;
                    break;
                }
                case KeyId.HOME: {
                    setCursorPosition(0, !isSelectionModifierActive(event.getKeyboard()));
                    offset = 0;
                    eventHandled = true;
                    break;
                }
                case KeyId.END: {
                    setCursorPosition(fullText.length(), !isSelectionModifierActive(event.getKeyboard()));
                    eventHandled = true;
                    break;
                }
                default: {
                    // Ctrl+C works even in read-only mode
                    if (event.getKeyboard().isKeyDown(KeyId.LEFT_CTRL) || event.getKeyboard().isKeyDown(KeyId.RIGHT_CTRL)) {
                        if (event.getKey() == Keyboard.Key.C) {
                            copySelection();
                            eventHandled = true;
                            break;
                        }
                    }
                }
            }

            if (!readOnly) {
                switch (event.getKey().getId()) {
                    case KeyId.BACKSPACE: {
                        if (hasSelection()) {
                            removeSelection();
                        } else if (getCursorPosition() > 0) {
                            String before = fullText.substring(0, getCursorPosition() - 1);
                            String after = fullText.substring(getCursorPosition());
                            // cursor at end-of-text is re-clamped by setText/correctCursor,
                            // so only move it explicitly when it is mid-text
                            if (getCursorPosition() < fullText.length()) {
                                decreaseCursorPosition(1);
                            }
                            setText(before + after);
                        }
                        eventHandled = true;
                        break;
                    }
                    case KeyId.DELETE: {
                        if (hasSelection()) {
                            removeSelection();
                        } else if (getCursorPosition() < fullText.length()) {
                            String before = fullText.substring(0, getCursorPosition());
                            String after = fullText.substring(getCursorPosition() + 1);
                            setText(before + after);
                        }
                        eventHandled = true;
                        break;
                    }
                    case KeyId.ENTER:
                    case KeyId.NUMPAD_ENTER: {
                        // Enter fires activation listeners; note it does not insert a
                        // newline even in multiline mode
                        for (ActivateEventListener listener : activationListeners) {
                            listener.onActivated(this);
                        }
                        eventHandled = true;
                        break;
                    }
                    default: {
                        if (event.getKeyboard().isKeyDown(KeyId.LEFT_CTRL) || event.getKeyboard().isKeyDown(KeyId.RIGHT_CTRL)) {
                            if (event.getKey() == Keyboard.Key.V) {
                                // paste replaces the current selection, if any
                                removeSelection();
                                paste();
                                eventHandled = true;
                                break;
                            } else if (event.getKey() == Keyboard.Key.X) {
                                copySelection();
                                removeSelection();
                                eventHandled = true;
                                break;
                            }
                        }
                        // printable character: replace the selection (or insert at the
                        // caret when selection is empty)
                        if (event.getKeyCharacter() != 0 && lastFont.hasCharacter(event.getKeyCharacter())) {
                            String before = fullText.substring(0, Math.min(getCursorPosition(), selectionStart));
                            String after = fullText.substring(Math.max(getCursorPosition(), selectionStart));
                            setText(before + event.getKeyCharacter() + after);
                            setCursorPosition(Math.min(getCursorPosition(), selectionStart) + 1);
                            eventHandled = true;
                        }
                        break;
                    }
                }
            }
        }
        updateOffset();
        return eventHandled;
    }

    /**
     * Keeps the caret visible in single-line mode by adjusting the horizontal
     * scroll offset so that the caret's pixel position lies within the widget.
     */
    private void updateOffset() {
        if (lastFont != null && !multiline) {
            String before = getText().substring(0, getCursorPosition());
            int cursorDist = lastFont.getWidth(before);
            if (cursorDist < offset) {
                offset = cursorDist;
            }
            if (cursorDist > offset + lastWidth) {
                offset = cursorDist - lastWidth + 1;
            }
        }
    }

    // True while either shift key is held (extends selection instead of moving it).
    private boolean isSelectionModifierActive(KeyboardDevice keyboard) {
        return keyboard.isKeyDown(KeyId.LEFT_SHIFT) || keyboard.isKeyDown(KeyId.RIGHT_SHIFT);
    }

    private boolean hasSelection() {
        return getCursorPosition() != selectionStart;
    }

    /** Deletes the selected range and collapses the caret to its left edge. */
    private void removeSelection() {
        if (hasSelection()) {
            String before = getText().substring(0, Math.min(getCursorPosition(), selectionStart));
            String after = getText().substring(Math.max(getCursorPosition(), selectionStart));
            setText(before + after);
            setCursorPosition(Math.min(getCursorPosition(), selectionStart));
        }
    }

    /** Copies the selected text (stripped of color/underline markup) to the clipboard. */
    private void copySelection() {
        if (hasSelection()) {
            String fullText = getText();
            String selection = fullText.substring(Math.min(selectionStart, getCursorPosition()), Math.max(selectionStart, getCursorPosition()));
            setClipboardContents(FontUnderline.strip(FontColor.stripColor(selection)));
        }
    }

    /** Inserts the clipboard contents at the caret and advances the caret past them. */
    private void paste() {
        String fullText = getText();
        String before = fullText.substring(0, getCursorPosition());
        String after = fullText.substring(getCursorPosition());
        String pasted = getClipboardContents();
        setText(before + pasted + after);
        increaseCursorPosition(pasted.length());
    }

    /**
     * Reads a string from the AWT system clipboard; returns "" when the
     * clipboard is empty, non-textual, or unreadable.
     */
    private String getClipboardContents() {
        Transferable t = Toolkit.getDefaultToolkit().getSystemClipboard().getContents(null);

        try {
            if (t != null && t.isDataFlavorSupported(DataFlavor.stringFlavor)) {
                return (String) t.getTransferData(DataFlavor.stringFlavor);
            }
        } catch (UnsupportedFlavorException | IOException e) {
            logger.warn("Failed to get data from clipboard", e);
        }

        return "";
    }

    private void setClipboardContents(String str) {
        Toolkit.getDefaultToolkit().getSystemClipboard().setContents(new StringSelection(str), null);
    }

    /**
     * Maps a mouse position (widget-relative pixels) to a text index and moves
     * the caret there. Finds the target display line from pos.y, then walks
     * characters on that line, snapping to the nearer side of each glyph
     * (half-width threshold). Selection is kept when shift is held or when
     * {@code selecting} is true (mouse drag).
     */
    private void moveCursor(Vector2i pos, boolean selecting, KeyboardDevice keyboard) {
        if (lastFont != null) {
            // NOTE(review): mutates the caller's event position vector in place
            pos.x += offset;
            String rawText = getText();
            List<String> lines = TextLineBuilder.getLines(lastFont, rawText, Integer.MAX_VALUE);
            int targetLineIndex = pos.y / lastFont.getLineHeight();
            int passedLines = 0;
            int newCursorPos = 0;
            for (int lineIndex = 0; lineIndex < lines.size() && passedLines <= targetLineIndex; lineIndex++) {
                List<String> subLines;
                if (multiline) {
                    subLines = TextLineBuilder.getLines(lastFont, lines.get(lineIndex), lastWidth);
                } else {
                    subLines = Arrays.asList(lines.get(lineIndex));
                }
                if (subLines.size() + passedLines > targetLineIndex) {
                    // target display line is within this raw line's wrapped sub-lines
                    for (String subLine : subLines) {
                        if (passedLines == targetLineIndex) {
                            int totalWidth = 0;
                            for (char c : subLine.toCharArray()) {
                                int charWidth = lastFont.getWidth(c);
                                // snap to whichever side of the glyph is closer
                                if (totalWidth + charWidth / 2 >= pos.x) {
                                    break;
                                }
                                newCursorPos++;
                                totalWidth += charWidth;
                            }
                            passedLines++;
                            break;
                        } else {
                            newCursorPos += subLine.length();
                            passedLines++;
                        }
                    }
                } else {
                    // skip this whole raw line (+1 for its trailing newline)
                    passedLines += subLines.size();
                    newCursorPos += lines.get(lineIndex).length() + 1;
                }
            }
            setCursorPosition(Math.min(newCursorPos, rawText.length()), !isSelectionModifierActive(keyboard) && !selecting);
            updateOffset();
        }
    }

    public void bindText(Binding<String> binding) {
        text = binding;
    }

    public String getText() {
        return text.get();
    }

    /** Sets the text (null becomes "") and re-clamps cursor and selection. */
    public void setText(String val) {
        text.set(val != null ? val : "");
        correctCursor();
    }

    public boolean isMultiline() {
        return multiline;
    }

    public boolean isReadOnly() {
        return readOnly;
    }

    public void setReadOnly(boolean readOnly) {
        this.readOnly = readOnly;
    }

    public void setMultiline(boolean multiline) {
        this.multiline = multiline;
    }

    public void subscribe(ActivateEventListener listener) {
        Preconditions.checkNotNull(listener);
        activationListeners.add(listener);
    }

    public void unsubscribe(ActivateEventListener listener) {
        Preconditions.checkNotNull(listener);
        activationListeners.remove(listener);
    }

    public void subscribe(CursorUpdateEventListener listener) {
        Preconditions.checkNotNull(listener);
        cursorUpdateListeners.add(listener);
    }

    public void unsubscribe(CursorUpdateEventListener listener) {
        Preconditions.checkNotNull(listener);
        cursorUpdateListeners.remove(listener);
    }

    /** Advances the blink timer, wrapping it every full blink cycle (2 * BLINK_RATE). */
    @Override
    public void update(float delta) {
        super.update(delta);
        blinkCounter += delta;
        while (blinkCounter > 2 * BLINK_RATE) {
            blinkCounter -= 2 * BLINK_RATE;
        }
    }

    /**
     * Moves the caret by {@code delta} (clamped by correctCursor inside
     * setCursorPosition) and returns the unclamped target position.
     */
    public int increaseCursorPosition(int delta, boolean moveSelectionStart) {
        int newPosition = getCursorPosition() + delta;

        setCursorPosition(newPosition, moveSelectionStart);

        return newPosition;
    }

    public int increaseCursorPosition(int delta) {
        return increaseCursorPosition(delta, true);
    }

    public int decreaseCursorPosition(int delta, boolean moveSelectionStart) {
        return increaseCursorPosition(-delta, moveSelectionStart);
    }

    public int decreaseCursorPosition(int delta) {
        return decreaseCursorPosition(delta, true);
    }

    public int getCursorPosition() {
        return cursorPosition;
    }

    /**
     * Moves the caret to {@code position} (clamped to the text bounds). When
     * {@code moveSelectionStart} is true the selection collapses; when false an
     * existing selection is extended. Notifies cursor-update listeners unless
     * {@code callEvent} is false.
     */
    public void setCursorPosition(int position, boolean moveSelectionStart, boolean callEvent) {
        int previousPosition = cursorPosition;
        cursorPosition = position;
        if (moveSelectionStart) {
            selectionStart = position;
        }
        correctCursor();
        if (callEvent) {
            for (CursorUpdateEventListener listener : cursorUpdateListeners) {
                listener.onCursorUpdated(previousPosition, cursorPosition);
            }
        }
    }

    public void setCursorPosition(int position, boolean moveSelectionStart) {
        setCursorPosition(position, moveSelectionStart, true);
    }

    public void setCursorPosition(int position) {
        setCursorPosition(position, true, true);
    }

    /** Clamps both caret and selection anchor into [0, text length]. */
    private void correctCursor() {
        cursorPosition = TeraMath.clamp(cursorPosition, 0, getText().length());
        selectionStart = TeraMath.clamp(selectionStart, 0, getText().length());
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dubbo.config.spring.beans.factory.annotation; import org.apache.dubbo.common.logger.Logger; import org.apache.dubbo.common.logger.LoggerFactory; import org.apache.dubbo.common.utils.ArrayUtils; import org.apache.dubbo.config.MethodConfig; import org.apache.dubbo.config.annotation.Method; import org.apache.dubbo.config.annotation.Service; import org.apache.dubbo.config.spring.ServiceBean; import org.apache.dubbo.config.spring.context.annotation.DubboClassPathBeanDefinitionScanner; import org.springframework.beans.BeansException; import org.springframework.beans.MutablePropertyValues; import org.springframework.beans.factory.BeanClassLoaderAware; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.BeanDefinitionHolder; import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; import org.springframework.beans.factory.config.RuntimeBeanReference; import org.springframework.beans.factory.config.SingletonBeanRegistry; import org.springframework.beans.factory.support.AbstractBeanDefinition; import org.springframework.beans.factory.support.BeanDefinitionBuilder; import 
org.springframework.beans.factory.support.BeanDefinitionRegistry; import org.springframework.beans.factory.support.BeanDefinitionRegistryPostProcessor; import org.springframework.beans.factory.support.BeanNameGenerator; import org.springframework.beans.factory.support.ManagedList; import org.springframework.context.EnvironmentAware; import org.springframework.context.ResourceLoaderAware; import org.springframework.context.annotation.AnnotationBeanNameGenerator; import org.springframework.context.annotation.AnnotationConfigUtils; import org.springframework.context.annotation.ClassPathBeanDefinitionScanner; import org.springframework.context.annotation.ConfigurationClassPostProcessor; import org.springframework.core.annotation.AnnotationAttributes; import org.springframework.core.env.Environment; import org.springframework.core.io.ResourceLoader; import org.springframework.core.type.filter.AnnotationTypeFilter; import org.springframework.util.CollectionUtils; import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; import java.lang.annotation.Annotation; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import static com.alibaba.spring.util.ObjectUtils.of; import static org.apache.dubbo.config.spring.beans.factory.annotation.ServiceBeanNameBuilder.create; import static org.apache.dubbo.config.spring.util.DubboAnnotationUtils.resolveServiceInterfaceClass; import static org.springframework.beans.factory.support.BeanDefinitionBuilder.rootBeanDefinition; import static org.springframework.context.annotation.AnnotationConfigUtils.CONFIGURATION_BEAN_NAME_GENERATOR; import static org.springframework.core.annotation.AnnotatedElementUtils.findMergedAnnotation; import static org.springframework.core.annotation.AnnotationUtils.getAnnotationAttributes; import static 
org.springframework.util.ClassUtils.resolveClassName; /** * {@link Service} Annotation * {@link BeanDefinitionRegistryPostProcessor Bean Definition Registry Post Processor} * * @since 2.5.8 */ public class ServiceAnnotationBeanPostProcessor implements BeanDefinitionRegistryPostProcessor, EnvironmentAware, ResourceLoaderAware, BeanClassLoaderAware { private final Logger logger = LoggerFactory.getLogger(getClass()); private final Set<String> packagesToScan; private Environment environment; private ResourceLoader resourceLoader; private ClassLoader classLoader; public ServiceAnnotationBeanPostProcessor(String... packagesToScan) { this(Arrays.asList(packagesToScan)); } public ServiceAnnotationBeanPostProcessor(Collection<String> packagesToScan) { this(new LinkedHashSet<>(packagesToScan)); } public ServiceAnnotationBeanPostProcessor(Set<String> packagesToScan) { this.packagesToScan = packagesToScan; } @Override public void postProcessBeanDefinitionRegistry(BeanDefinitionRegistry registry) throws BeansException { Set<String> resolvedPackagesToScan = resolvePackagesToScan(packagesToScan); if (!CollectionUtils.isEmpty(resolvedPackagesToScan)) { registerServiceBeans(resolvedPackagesToScan, registry); } else { if (logger.isWarnEnabled()) { logger.warn("packagesToScan is empty , ServiceBean registry will be ignored!"); } } } /** * Registers Beans whose classes was annotated {@link Service} * * @param packagesToScan The base packages to scan * @param registry {@link BeanDefinitionRegistry} */ private void registerServiceBeans(Set<String> packagesToScan, BeanDefinitionRegistry registry) { DubboClassPathBeanDefinitionScanner scanner = new DubboClassPathBeanDefinitionScanner(registry, environment, resourceLoader); BeanNameGenerator beanNameGenerator = resolveBeanNameGenerator(registry); scanner.setBeanNameGenerator(beanNameGenerator); scanner.addIncludeFilter(new AnnotationTypeFilter(Service.class)); /** * Add the compatibility for legacy Dubbo's @Service * * The issue : 
https://github.com/apache/dubbo/issues/4330 * @since 2.7.3 */ scanner.addIncludeFilter(new AnnotationTypeFilter(com.alibaba.dubbo.config.annotation.Service.class)); for (String packageToScan : packagesToScan) { // Registers @Service Bean first scanner.scan(packageToScan); // Finds all BeanDefinitionHolders of @Service whether @ComponentScan scans or not. Set<BeanDefinitionHolder> beanDefinitionHolders = findServiceBeanDefinitionHolders(scanner, packageToScan, registry, beanNameGenerator); if (!CollectionUtils.isEmpty(beanDefinitionHolders)) { for (BeanDefinitionHolder beanDefinitionHolder : beanDefinitionHolders) { registerServiceBean(beanDefinitionHolder, registry, scanner); } if (logger.isInfoEnabled()) { logger.info(beanDefinitionHolders.size() + " annotated Dubbo's @Service Components { " + beanDefinitionHolders + " } were scanned under package[" + packageToScan + "]"); } } else { if (logger.isWarnEnabled()) { logger.warn("No Spring Bean annotating Dubbo's @Service was found under package[" + packageToScan + "]"); } } } } /** * It'd better to use BeanNameGenerator instance that should reference * {@link ConfigurationClassPostProcessor#componentScanBeanNameGenerator}, * thus it maybe a potential problem on bean name generation. 
* * @param registry {@link BeanDefinitionRegistry} * @return {@link BeanNameGenerator} instance * @see SingletonBeanRegistry * @see AnnotationConfigUtils#CONFIGURATION_BEAN_NAME_GENERATOR * @see ConfigurationClassPostProcessor#processConfigBeanDefinitions * @since 2.5.8 */ private BeanNameGenerator resolveBeanNameGenerator(BeanDefinitionRegistry registry) { BeanNameGenerator beanNameGenerator = null; if (registry instanceof SingletonBeanRegistry) { SingletonBeanRegistry singletonBeanRegistry = SingletonBeanRegistry.class.cast(registry); beanNameGenerator = (BeanNameGenerator) singletonBeanRegistry.getSingleton(CONFIGURATION_BEAN_NAME_GENERATOR); } if (beanNameGenerator == null) { if (logger.isInfoEnabled()) { logger.info("BeanNameGenerator bean can't be found in BeanFactory with name [" + CONFIGURATION_BEAN_NAME_GENERATOR + "]"); logger.info("BeanNameGenerator will be a instance of " + AnnotationBeanNameGenerator.class.getName() + " , it maybe a potential problem on bean name generation."); } beanNameGenerator = new AnnotationBeanNameGenerator(); } return beanNameGenerator; } /** * Finds a {@link Set} of {@link BeanDefinitionHolder BeanDefinitionHolders} whose bean type annotated * {@link Service} Annotation. 
* * @param scanner {@link ClassPathBeanDefinitionScanner} * @param packageToScan pachage to scan * @param registry {@link BeanDefinitionRegistry} * @return non-null * @since 2.5.8 */ private Set<BeanDefinitionHolder> findServiceBeanDefinitionHolders( ClassPathBeanDefinitionScanner scanner, String packageToScan, BeanDefinitionRegistry registry, BeanNameGenerator beanNameGenerator) { Set<BeanDefinition> beanDefinitions = scanner.findCandidateComponents(packageToScan); Set<BeanDefinitionHolder> beanDefinitionHolders = new LinkedHashSet<>(beanDefinitions.size()); for (BeanDefinition beanDefinition : beanDefinitions) { String beanName = beanNameGenerator.generateBeanName(beanDefinition, registry); BeanDefinitionHolder beanDefinitionHolder = new BeanDefinitionHolder(beanDefinition, beanName); beanDefinitionHolders.add(beanDefinitionHolder); } return beanDefinitionHolders; } /** * Registers {@link ServiceBean} from new annotated {@link Service} {@link BeanDefinition} * * @param beanDefinitionHolder * @param registry * @param scanner * @see ServiceBean * @see BeanDefinition */ private void registerServiceBean(BeanDefinitionHolder beanDefinitionHolder, BeanDefinitionRegistry registry, DubboClassPathBeanDefinitionScanner scanner) { Class<?> beanClass = resolveClass(beanDefinitionHolder); Annotation service = findServiceAnnotation(beanClass); /** * The {@link AnnotationAttributes} of @Service annotation */ AnnotationAttributes serviceAnnotationAttributes = getAnnotationAttributes(service, false, false); Class<?> interfaceClass = resolveServiceInterfaceClass(serviceAnnotationAttributes, beanClass); String annotatedServiceBeanName = beanDefinitionHolder.getBeanName(); AbstractBeanDefinition serviceBeanDefinition = buildServiceBeanDefinition(service, serviceAnnotationAttributes, interfaceClass, annotatedServiceBeanName); // ServiceBean Bean name String beanName = generateServiceBeanName(serviceAnnotationAttributes, interfaceClass); if (scanner.checkCandidate(beanName, 
serviceBeanDefinition)) { // check duplicated candidate bean registry.registerBeanDefinition(beanName, serviceBeanDefinition); if (logger.isInfoEnabled()) { logger.info("The BeanDefinition[" + serviceBeanDefinition + "] of ServiceBean has been registered with name : " + beanName); } } else { if (logger.isWarnEnabled()) { logger.warn("The Duplicated BeanDefinition[" + serviceBeanDefinition + "] of ServiceBean[ bean name : " + beanName + "] was be found , Did @DubboComponentScan scan to same package in many times?"); } } } /** * Find the {@link Annotation annotation} of @Service * * @param beanClass the {@link Class class} of Bean * @return <code>null</code> if not found * @since 2.7.3 */ private Annotation findServiceAnnotation(Class<?> beanClass) { Annotation service = findMergedAnnotation(beanClass, Service.class); if (service == null) { service = findMergedAnnotation(beanClass, com.alibaba.dubbo.config.annotation.Service.class); } return service; } /** * Generates the bean name of {@link ServiceBean} * * @param serviceAnnotationAttributes * @param interfaceClass the class of interface annotated {@link Service} * @return ServiceBean@interfaceClassName#annotatedServiceBeanName * @since 2.7.3 */ private String generateServiceBeanName(AnnotationAttributes serviceAnnotationAttributes, Class<?> interfaceClass) { ServiceBeanNameBuilder builder = create(interfaceClass, environment) .group(serviceAnnotationAttributes.getString("group")) .version(serviceAnnotationAttributes.getString("version")); return builder.build(); } private Class<?> resolveClass(BeanDefinitionHolder beanDefinitionHolder) { BeanDefinition beanDefinition = beanDefinitionHolder.getBeanDefinition(); return resolveClass(beanDefinition); } private Class<?> resolveClass(BeanDefinition beanDefinition) { String beanClassName = beanDefinition.getBeanClassName(); return resolveClassName(beanClassName, classLoader); } private Set<String> resolvePackagesToScan(Set<String> packagesToScan) { Set<String> 
resolvedPackagesToScan = new LinkedHashSet<String>(packagesToScan.size()); for (String packageToScan : packagesToScan) { if (StringUtils.hasText(packageToScan)) { String resolvedPackageToScan = environment.resolvePlaceholders(packageToScan.trim()); resolvedPackagesToScan.add(resolvedPackageToScan); } } return resolvedPackagesToScan; } /** * Build the {@link AbstractBeanDefinition Bean Definition} * * @param serviceAnnotation * @param serviceAnnotationAttributes * @param interfaceClass * @param annotatedServiceBeanName * @return * @since 2.7.3 */ private AbstractBeanDefinition buildServiceBeanDefinition(Annotation serviceAnnotation, AnnotationAttributes serviceAnnotationAttributes, Class<?> interfaceClass, String annotatedServiceBeanName) { BeanDefinitionBuilder builder = rootBeanDefinition(ServiceBean.class); AbstractBeanDefinition beanDefinition = builder.getBeanDefinition(); MutablePropertyValues propertyValues = beanDefinition.getPropertyValues(); String[] ignoreAttributeNames = of("provider", "monitor", "application", "module", "registry", "protocol", "interface", "interfaceName", "parameters"); propertyValues.addPropertyValues(new AnnotationPropertyValuesAdapter(serviceAnnotation, environment, ignoreAttributeNames)); // References "ref" property to annotated-@Service Bean addPropertyReference(builder, "ref", annotatedServiceBeanName); // Set interface builder.addPropertyValue("interface", interfaceClass.getName()); // Convert parameters into map builder.addPropertyValue("parameters", convertParameters(serviceAnnotationAttributes.getStringArray("parameters"))); // Add methods parameters List<MethodConfig> methodConfigs = convertMethodConfigs(serviceAnnotationAttributes.get("methods")); if (!methodConfigs.isEmpty()) { builder.addPropertyValue("methods", methodConfigs); } /** * Add {@link org.apache.dubbo.config.ProviderConfig} Bean reference */ String providerConfigBeanName = serviceAnnotationAttributes.getString("provider"); if 
(StringUtils.hasText(providerConfigBeanName)) { addPropertyReference(builder, "provider", providerConfigBeanName); } /** * Add {@link org.apache.dubbo.config.MonitorConfig} Bean reference */ String monitorConfigBeanName = serviceAnnotationAttributes.getString("monitor"); if (StringUtils.hasText(monitorConfigBeanName)) { addPropertyReference(builder, "monitor", monitorConfigBeanName); } /** * Add {@link org.apache.dubbo.config.ApplicationConfig} Bean reference */ String applicationConfigBeanName = serviceAnnotationAttributes.getString("application"); if (StringUtils.hasText(applicationConfigBeanName)) { addPropertyReference(builder, "application", applicationConfigBeanName); } /** * Add {@link org.apache.dubbo.config.ModuleConfig} Bean reference */ String moduleConfigBeanName = serviceAnnotationAttributes.getString("module"); if (StringUtils.hasText(moduleConfigBeanName)) { addPropertyReference(builder, "module", moduleConfigBeanName); } /** * Add {@link org.apache.dubbo.config.RegistryConfig} Bean reference */ String[] registryConfigBeanNames = serviceAnnotationAttributes.getStringArray("registry"); List<RuntimeBeanReference> registryRuntimeBeanReferences = toRuntimeBeanReferences(registryConfigBeanNames); if (!registryRuntimeBeanReferences.isEmpty()) { builder.addPropertyValue("registries", registryRuntimeBeanReferences); } /** * Add {@link org.apache.dubbo.config.ProtocolConfig} Bean reference */ String[] protocolConfigBeanNames = serviceAnnotationAttributes.getStringArray("protocol"); List<RuntimeBeanReference> protocolRuntimeBeanReferences = toRuntimeBeanReferences(protocolConfigBeanNames); if (!protocolRuntimeBeanReferences.isEmpty()) { builder.addPropertyValue("protocols", protocolRuntimeBeanReferences); } return builder.getBeanDefinition(); } private List convertMethodConfigs(Object methodsAnnotation) { if (methodsAnnotation == null){ return Collections.EMPTY_LIST; } return MethodConfig.constructMethodConfig((Method[])methodsAnnotation); } private 
ManagedList<RuntimeBeanReference> toRuntimeBeanReferences(String... beanNames) { ManagedList<RuntimeBeanReference> runtimeBeanReferences = new ManagedList<>(); if (!ObjectUtils.isEmpty(beanNames)) { for (String beanName : beanNames) { String resolvedBeanName = environment.resolvePlaceholders(beanName); runtimeBeanReferences.add(new RuntimeBeanReference(resolvedBeanName)); } } return runtimeBeanReferences; } private void addPropertyReference(BeanDefinitionBuilder builder, String propertyName, String beanName) { String resolvedBeanName = environment.resolvePlaceholders(beanName); builder.addPropertyReference(propertyName, resolvedBeanName); } private Map<String, String> convertParameters(String[] parameters) { if (ArrayUtils.isEmpty(parameters)) { return null; } if (parameters.length % 2 != 0) { throw new IllegalArgumentException("parameter attribute must be paired with key followed by value"); } Map<String, String> map = new HashMap<>(); for (int i = 0; i < parameters.length; i += 2) { map.put(parameters[i], parameters[i + 1]); } return map; } @Override public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException { } @Override public void setEnvironment(Environment environment) { this.environment = environment; } @Override public void setResourceLoader(ResourceLoader resourceLoader) { this.resourceLoader = resourceLoader; } @Override public void setBeanClassLoader(ClassLoader classLoader) { this.classLoader = classLoader; } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.fs.s3a; import com.amazonaws.ClientConfiguration; import com.amazonaws.Protocol; import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3Client; import com.amazonaws.services.s3.AmazonS3EncryptionClient; import com.amazonaws.services.s3.S3ClientOptions; import com.amazonaws.services.s3.model.*; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.VersionInfo; import org.slf4j.Logger; import java.io.IOException; import java.net.URI; import static org.apache.hadoop.fs.s3a.Constants.*; import static org.apache.hadoop.fs.s3a.S3AUtils.createAWSCredentialProviderSet; import static org.apache.hadoop.fs.s3a.S3AUtils.intOption; /** * Factory for creation of S3 client instances to be used by {@link S3Store}. */ @InterfaceAudience.Private @InterfaceStability.Unstable interface S3ClientFactory { /** * Creates a new {@link AmazonS3} client. 
This method accepts the S3A file * system URI both in raw input form and validated form as separate arguments, * because both values may be useful in logging. * * @param name raw input S3A file system URI * @return S3 client * @throws IOException IO problem */ AmazonS3 createS3Client(URI name) throws IOException; /** * The default factory implementation, which calls the AWS SDK to configure * and create an {@link AmazonS3Client} that communicates with the S3 service. */ static class DefaultS3ClientFactory extends Configured implements S3ClientFactory { private static final Logger LOG = S3AFileSystem.LOG; @Override public AmazonS3 createS3Client(URI name) throws IOException { Configuration conf = getConf(); AWSCredentialsProvider credentials = createAWSCredentialProviderSet(name, conf); ClientConfiguration awsConf = new ClientConfiguration(); initConnectionSettings(conf, awsConf); initProxySupport(conf, awsConf); initUserAgent(conf, awsConf); return createAmazonS3Client(conf, credentials, awsConf); } /** * Initializes all AWS SDK settings related to connection management. * * @param conf Hadoop configuration * @param awsConf AWS SDK configuration */ private static void initConnectionSettings(Configuration conf, ClientConfiguration awsConf) { awsConf.setMaxConnections(intOption(conf, MAXIMUM_CONNECTIONS, DEFAULT_MAXIMUM_CONNECTIONS, 1)); boolean secureConnections = conf.getBoolean(SECURE_CONNECTIONS, DEFAULT_SECURE_CONNECTIONS); awsConf.setProtocol(secureConnections ? 
Protocol.HTTPS : Protocol.HTTP); awsConf.setMaxErrorRetry(intOption(conf, MAX_ERROR_RETRIES, DEFAULT_MAX_ERROR_RETRIES, 0)); awsConf.setConnectionTimeout(intOption(conf, ESTABLISH_TIMEOUT, DEFAULT_ESTABLISH_TIMEOUT, 0)); awsConf.setSocketTimeout(intOption(conf, SOCKET_TIMEOUT, DEFAULT_SOCKET_TIMEOUT, 0)); int sockSendBuffer = intOption(conf, SOCKET_SEND_BUFFER, DEFAULT_SOCKET_SEND_BUFFER, 2048); int sockRecvBuffer = intOption(conf, SOCKET_RECV_BUFFER, DEFAULT_SOCKET_RECV_BUFFER, 2048); awsConf.setSocketBufferSizeHints(sockSendBuffer, sockRecvBuffer); String signerOverride = conf.getTrimmed(SIGNING_ALGORITHM, ""); if (!signerOverride.isEmpty()) { LOG.debug("Signer override = {}", signerOverride); awsConf.setSignerOverride(signerOverride); } } /** * Initializes AWS SDK proxy support if configured. * * @param conf Hadoop configuration * @param awsConf AWS SDK configuration * @throws IllegalArgumentException if misconfigured */ private static void initProxySupport(Configuration conf, ClientConfiguration awsConf) throws IllegalArgumentException { String proxyHost = conf.getTrimmed(PROXY_HOST, ""); int proxyPort = conf.getInt(PROXY_PORT, -1); if (!proxyHost.isEmpty()) { awsConf.setProxyHost(proxyHost); if (proxyPort >= 0) { awsConf.setProxyPort(proxyPort); } else { if (conf.getBoolean(SECURE_CONNECTIONS, DEFAULT_SECURE_CONNECTIONS)) { LOG.warn("Proxy host set without port. Using HTTPS default 443"); awsConf.setProxyPort(443); } else { LOG.warn("Proxy host set without port. 
Using HTTP default 80"); awsConf.setProxyPort(80); } } String proxyUsername = conf.getTrimmed(PROXY_USERNAME); String proxyPassword = conf.getTrimmed(PROXY_PASSWORD); if ((proxyUsername == null) != (proxyPassword == null)) { String msg = "Proxy error: " + PROXY_USERNAME + " or " + PROXY_PASSWORD + " set without the other."; LOG.error(msg); throw new IllegalArgumentException(msg); } awsConf.setProxyUsername(proxyUsername); awsConf.setProxyPassword(proxyPassword); awsConf.setProxyDomain(conf.getTrimmed(PROXY_DOMAIN)); awsConf.setProxyWorkstation(conf.getTrimmed(PROXY_WORKSTATION)); if (LOG.isDebugEnabled()) { LOG.debug("Using proxy server {}:{} as user {} with password {} on " + "domain {} as workstation {}", awsConf.getProxyHost(), awsConf.getProxyPort(), String.valueOf(awsConf.getProxyUsername()), awsConf.getProxyPassword(), awsConf.getProxyDomain(), awsConf.getProxyWorkstation()); } } else if (proxyPort >= 0) { String msg = "Proxy error: " + PROXY_PORT + " set without " + PROXY_HOST; LOG.error(msg); throw new IllegalArgumentException(msg); } } /** * Initializes the User-Agent header to send in HTTP requests to the S3 * back-end. We always include the Hadoop version number. The user also * may set an optional custom prefix to put in front of the Hadoop version * number. The AWS SDK interally appends its own information, which seems * to include the AWS SDK version, OS and JVM version. * * @param conf Hadoop configuration * @param awsConf AWS SDK configuration */ private static void initUserAgent(Configuration conf, ClientConfiguration awsConf) { String userAgent = "Hadoop " + VersionInfo.getVersion(); String userAgentPrefix = conf.getTrimmed(USER_AGENT_PREFIX, ""); if (!userAgentPrefix.isEmpty()) { userAgent = userAgentPrefix + ", " + userAgent; } LOG.debug("Using User-Agent: {}", userAgent); awsConf.setUserAgentPrefix(userAgent); } /** * Creates an {@link AmazonS3Client} from the established configuration. 
* * @param conf Hadoop configuration * @param credentials AWS credentials * @param awsConf AWS SDK configuration * @return S3 client * @throws IllegalArgumentException if misconfigured */ private static AmazonS3 createAmazonS3Client(Configuration conf, AWSCredentialsProvider credentials, ClientConfiguration awsConf) throws IllegalArgumentException { AmazonS3 s3; if (conf.get(CLIENT_SIDE_ENCRYPTION_METHOD) == null) { s3 = new AmazonS3Client(credentials, awsConf); } else { s3 = getAmazonS3EncryptionClient(conf, credentials, awsConf); } String endPoint = conf.getTrimmed(ENDPOINT, ""); if (!endPoint.isEmpty()) { try { s3.setEndpoint(endPoint); } catch (IllegalArgumentException e) { String msg = "Incorrect endpoint: " + e.getMessage(); LOG.error(msg); throw new IllegalArgumentException(msg, e); } } enablePathStyleAccessIfRequired(s3, conf); return s3; } /** * Build {@link AmazonS3EncryptionClient}. * * @param conf Hadoop configuration * @param credentials AWS credentials * @param awsConf AWS SDK configuration * @return S3 client * @throws IllegalArgumentException if misconfigured */ private static AmazonS3 getAmazonS3EncryptionClient( Configuration conf, AWSCredentialsProvider credentials, ClientConfiguration awsConf) { AmazonS3 s3; try { S3AClientEncryptionMethods method = S3AClientEncryptionMethods.getMethod( conf.get(CLIENT_SIDE_ENCRYPTION_METHOD)); if (method == S3AClientEncryptionMethods.KMS) { EncryptionMaterialsProvider materialProvider; String kmsKeyId = conf.get(CLIENT_SIDE_ENCRYPTION_KMS_KEY_ID); if (kmsKeyId != null) { materialProvider = new KMSEncryptionMaterialsProvider(kmsKeyId); } else { String msg = CLIENT_SIDE_ENCRYPTION_KMS_KEY_ID + " has to be set for client encryption"; LOG.error(msg); throw new IllegalArgumentException(msg); } s3 = new AmazonS3EncryptionClient(credentials, materialProvider, awsConf, new CryptoConfiguration(CryptoMode.AuthenticatedEncryption)); } else { Class<? 
extends S3ACSEMaterialProviderConfig> materialProviderClass = conf.getClass( Constants.CLIENT_SIDE_ENCRYPTION_MATERIALS_PROVIDER, null, S3ACSEMaterialProviderConfig.class); if (materialProviderClass == null) { String msg = CLIENT_SIDE_ENCRYPTION_MATERIALS_PROVIDER + " has to be set for client encryption"; LOG.error(msg); throw new IllegalArgumentException(msg); } S3ACSEMaterialProviderConfig materialProviderConfig = ReflectionUtils.newInstance(materialProviderClass, null); s3 = new AmazonS3EncryptionClient(credentials, materialProviderConfig.buildMaterialsProvider(), awsConf, new CryptoConfiguration(CryptoMode.AuthenticatedEncryption)); } } catch (Exception e) { String msg = "Error during initialization of AmazonS3EncryptionClient"; LOG.error(msg); throw new IllegalArgumentException(msg, e); } return s3; } /** * Enables path-style access to S3 buckets if configured. By default, the * behavior is to use virtual hosted-style access with URIs of the form * http://bucketname.s3.amazonaws.com. Enabling path-style access and a * region-specific endpoint switches the behavior to use URIs of the form * http://s3-eu-west-1.amazonaws.com/bucketname. * * @param s3 S3 client * @param conf Hadoop configuration */ private static void enablePathStyleAccessIfRequired(AmazonS3 s3, Configuration conf) { final boolean pathStyleAccess = conf.getBoolean(PATH_STYLE_ACCESS, false); if (pathStyleAccess) { LOG.debug("Enabling path style access!"); s3.setS3ClientOptions(S3ClientOptions.builder() .setPathStyleAccess(true) .build()); } } } }
/*
 * Copyright 2016 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.whz.synapse.transition;

import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.AnimatorSet;
import android.animation.ObjectAnimator;
import android.content.Intent;
import android.graphics.Color;
import android.graphics.Outline;
import android.graphics.Rect;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.support.annotation.ColorInt;
import android.support.annotation.DrawableRes;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.transition.Transition;
import android.transition.TransitionValues;
import android.view.View;
import android.view.ViewAnimationUtils;
import android.view.ViewGroup;
import android.view.ViewOutlineProvider;
import android.view.animation.AnimationUtils;
import android.view.animation.Interpolator;

import java.util.ArrayList;
import java.util.List;

import static android.view.View.MeasureSpec.makeMeasureSpec;

/**
 * Thanks https://github.com/nickbutcher/plaid
 * A transition between a FAB & another surface using a circular reveal moving along an arc.
 * <p>
 * See: https://www.google.com/design/spec/motion/transforming-material.html#transforming-material-radial-transformation
 */
public class FabTransform extends Transition {
    // Intent extras carrying the FAB's color and icon into the launched activity.
    private static final String EXTRA_FAB_COLOR = "EXTRA_FAB_COLOR";
    private static final String EXTRA_FAB_ICON_RES_ID = "EXTRA_FAB_ICON_RES_ID";

    private static final long DEFAULT_DURATION = 240L;

    // Transition-values key under which the shared element's screen bounds are captured.
    private static final String PROP_BOUNDS = "plaid:fabTransform:bounds";
    private static final String[] TRANSITION_PROPERTIES = {
            PROP_BOUNDS
    };

    private final int color;
    private final int icon;

    // Private: instances are created via setup() from the intent extras added by addExtras().
    private FabTransform(@ColorInt int fabColor, @DrawableRes int fabIconResId) {
        color = fabColor;
        icon = fabIconResId;
        setPathMotion(new GravityArcMotion());
        setDuration(DEFAULT_DURATION);
    }

    /**
     * Configure {@code intent} with the extras needed to initialize this transition.
     */
    public static void addExtras(@NonNull Intent intent, @ColorInt int fabColor,
                                 @DrawableRes int fabIconResId) {
        intent.putExtra(EXTRA_FAB_COLOR, fabColor);
        intent.putExtra(EXTRA_FAB_ICON_RES_ID, fabIconResId);
    }

    /**
     * Create a {@link FabTransform} from the supplied {@code activity} extras and set as its
     * shared element enter/return transition.
     * Returns false (doing nothing) when the launching side never called
     * {@link #addExtras(Intent, int, int)}.
     */
    public static boolean setup(@NonNull AppCompatActivity activity, @Nullable View target) {
        final Intent intent = activity.getIntent();
        if (!intent.hasExtra(EXTRA_FAB_COLOR) || !intent.hasExtra(EXTRA_FAB_ICON_RES_ID)) {
            return false;
        }
        final int color = intent.getIntExtra(EXTRA_FAB_COLOR, Color.TRANSPARENT);
        final int icon = intent.getIntExtra(EXTRA_FAB_ICON_RES_ID, -1);
        final FabTransform sharedEnter = new FabTransform(color, icon);
        if (target != null) {
            sharedEnter.addTarget(target);
        }
        activity.getWindow().setSharedElementEnterTransition(sharedEnter);
        return true;
    }

    @Override
    public String[] getTransitionProperties() {
        return TRANSITION_PROPERTIES;
    }

    @Override
    public void captureStartValues(TransitionValues transitionValues) {
        captureValues(transitionValues);
    }

    @Override
    public void captureEndValues(TransitionValues transitionValues) {
        captureValues(transitionValues);
    }

    @Override
    public Animator createAnimator(final ViewGroup sceneRoot,
                                   final TransitionValues startValues,
                                   final TransitionValues endValues) {
        if (startValues == null || endValues == null) return null;

        final Rect startBounds = (Rect) startValues.values.get(PROP_BOUNDS);
        final Rect endBounds = (Rect) endValues.values.get(PROP_BOUNDS);

        // Direction heuristic: if the view grows, we're expanding FAB -> dialog; shrinking is the return.
        final boolean fromFab = endBounds.width() > startBounds.width();
        final View view = endValues.view;
        final Rect dialogBounds = fromFab ? endBounds : startBounds;
        final Rect fabBounds = fromFab ? startBounds : endBounds;
        final Interpolator fastOutSlowInInterpolator =
                AnimationUtils.loadInterpolator(sceneRoot.getContext(),
                        android.R.interpolator.fast_out_linear_in);
        final long duration = getDuration();
        final long halfDuration = duration / 2;
        final long twoThirdsDuration = duration * 2 / 3;

        if (!fromFab) {
            // Force measure / layout the dialog back to it's original bounds
            view.measure(
                    makeMeasureSpec(startBounds.width(), View.MeasureSpec.EXACTLY),
                    makeMeasureSpec(startBounds.height(), View.MeasureSpec.EXACTLY));
            view.layout(startBounds.left, startBounds.top, startBounds.right, startBounds.bottom);
        }

        final int translationX = startBounds.centerX() - endBounds.centerX();
        final int translationY = startBounds.centerY() - endBounds.centerY();
        if (fromFab) {
            // Start the dialog offset at the FAB's position; the translate animator moves it home.
            view.setTranslationX(translationX);
            view.setTranslationY(translationY);
        }

        // Add a color overlay to fake appearance of the FAB
        final ColorDrawable fabColor = new ColorDrawable(color);
        fabColor.setBounds(0, 0, dialogBounds.width(), dialogBounds.height());
        if (!fromFab) fabColor.setAlpha(0);
        view.getOverlay().add(fabColor);

        // Add an icon overlay again to fake the appearance of the FAB
        final Drawable fabIcon =
                ContextCompat.getDrawable(sceneRoot.getContext(), icon).mutate();
        final int iconLeft = (dialogBounds.width() - fabIcon.getIntrinsicWidth()) / 2;
        final int iconTop = (dialogBounds.height() - fabIcon.getIntrinsicHeight()) / 2;
        fabIcon.setBounds(iconLeft, iconTop,
                iconLeft + fabIcon.getIntrinsicWidth(),
                iconTop + fabIcon.getIntrinsicHeight());
        if (!fromFab) fabIcon.setAlpha(0);
        view.getOverlay().add(fabIcon);

        // Circular clip from/to the FAB size
        final Animator circularReveal;
        if (fromFab) {
            circularReveal = ViewAnimationUtils.createCircularReveal(view,
                    view.getWidth() / 2,
                    view.getHeight() / 2,
                    startBounds.width() / 2,
                    (float) Math.hypot(endBounds.width() / 2, endBounds.height() / 2));
            circularReveal.setInterpolator(fastOutSlowInInterpolator);
        } else {
            circularReveal = ViewAnimationUtils.createCircularReveal(view,
                    view.getWidth() / 2,
                    view.getHeight() / 2,
                    (float) Math.hypot(startBounds.width() / 2, startBounds.height() / 2),
                    endBounds.width() / 2);
            circularReveal.setInterpolator(
                    AnimationUtils.loadInterpolator(sceneRoot.getContext(),
                            android.R.interpolator.linear_out_slow_in));

            // Persist the end clip i.e. stay at FAB size after the reveal has run
            circularReveal.addListener(new AnimatorListenerAdapter() {
                @Override
                public void onAnimationEnd(Animator animation) {
                    view.setOutlineProvider(new ViewOutlineProvider() {
                        @Override
                        public void getOutline(View view, Outline outline) {
                            final int left = (view.getWidth() - fabBounds.width()) / 2;
                            final int top = (view.getHeight() - fabBounds.height()) / 2;
                            outline.setOval(
                                    left, top, left + fabBounds.width(), top + fabBounds.height());
                            view.setClipToOutline(true);
                        }
                    });
                }
            });
        }
        circularReveal.setDuration(duration);

        // Translate to end position along an arc
        final Animator translate = ObjectAnimator.ofFloat(
                view,
                View.TRANSLATION_X,
                View.TRANSLATION_Y,
                fromFab ? getPathMotion().getPath(translationX, translationY, 0, 0)
                        : getPathMotion().getPath(0, 0, -translationX, -translationY));
        translate.setDuration(duration);
        translate.setInterpolator(fastOutSlowInInterpolator);

        // Fade contents of non-FAB view in/out
        List<Animator> fadeContents = null;
        if (view instanceof ViewGroup) {
            final ViewGroup vg = ((ViewGroup) view);
            fadeContents = new ArrayList<>(vg.getChildCount());
            for (int i = vg.getChildCount() - 1; i >= 0; i--) {
                final View child = vg.getChildAt(i);
                final Animator fade =
                        ObjectAnimator.ofFloat(child, View.ALPHA, fromFab ? 1f : 0f);
                if (fromFab) {
                    child.setAlpha(0f);
                }
                fade.setDuration(twoThirdsDuration);
                fade.setInterpolator(fastOutSlowInInterpolator);
                fadeContents.add(fade);
            }
        }

        // Fade in/out the fab color & icon overlays
        final Animator colorFade = ObjectAnimator.ofInt(fabColor, "alpha", fromFab ? 0 : 255);
        final Animator iconFade = ObjectAnimator.ofInt(fabIcon, "alpha", fromFab ? 0 : 255);
        if (!fromFab) {
            // On return, keep the dialog visible for the first half before revealing the FAB look.
            colorFade.setStartDelay(halfDuration);
            iconFade.setStartDelay(halfDuration);
        }
        colorFade.setDuration(halfDuration);
        iconFade.setDuration(halfDuration);
        colorFade.setInterpolator(fastOutSlowInInterpolator);
        iconFade.setInterpolator(fastOutSlowInInterpolator);

        // Work around issue with elevation shadows. At the end of the return transition the shared
        // element's shadow is drawn twice (by each activity) which is jarring. This workaround
        // still causes the shadow to snap, but it's better than seeing it double drawn.
        Animator elevation = null;
        if (!fromFab) {
            elevation = ObjectAnimator.ofFloat(view, View.TRANSLATION_Z, -view.getElevation());
            elevation.setDuration(duration);
            elevation.setInterpolator(fastOutSlowInInterpolator);
        }

        // Run all animations together
        final AnimatorSet transition = new AnimatorSet();
        transition.playTogether(circularReveal, translate, colorFade, iconFade);
        transition.playTogether(fadeContents);
        if (elevation != null) transition.play(elevation);
        if (fromFab) {
            // Overlays are only cleared on the enter direction; on return they persist so the
            // FAB look remains until the activity finishes.
            transition.addListener(new AnimatorListenerAdapter() {
                @Override
                public void onAnimationEnd(Animator animation) {
                    // Clean up
                    view.getOverlay().clear();
                }
            });
        }
        return transition;
    }

    // Records the view's layout bounds; skipped for unlaid-out views so no animator is created.
    private void captureValues(TransitionValues transitionValues) {
        final View view = transitionValues.view;
        if (view == null || view.getWidth() <= 0 || view.getHeight() <= 0) return;

        transitionValues.values.put(PROP_BOUNDS, new Rect(view.getLeft(), view.getTop(),
                view.getRight(), view.getBottom()));
    }
}
/* * Copyright 2000-2013 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.psi.codeStyle; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.util.text.StringUtil; import com.intellij.util.containers.FList; import com.intellij.util.io.IOUtil; import com.intellij.util.text.Matcher; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.BitSet; import java.util.Iterator; /** * Tells whether a string matches a specific pattern. Allows for lowercase camel-hump matching. * Used in navigation, code completion, speed search etc. 
* * @see NameUtil#buildMatcher(String) * * @author peter */ public class MinusculeMatcher implements Matcher { private final ThreadLocal<MatchingState> myMatchingState = new ThreadLocal<MatchingState>() { @Override protected MatchingState initialValue() { return new MatchingState(); } }; private final char[] myPattern; private final String myHardSeparators; private final NameUtil.MatchingCaseSensitivity myOptions; private final boolean myHasHumps; private final boolean myHasSeparators; private final boolean myHasDots; private final boolean[] isLowerCase; private final boolean[] isUpperCase; private final boolean[] isWordSeparator; private final char[] toUpperCase; private final char[] toLowerCase; private final char[] myMeaningfulCharacters; private final int myMinNameLength; /** * Constructs a matcher by a given pattern. * @param pattern the pattern * @param options case sensitivity settings * @param hardSeparators A string of characters (empty by default). Lowercase humps don't work for parts separated by any of these characters. 
* Need either an explicit uppercase letter or the same separator character in prefix */
MinusculeMatcher(@NotNull String pattern, @NotNull NameUtil.MatchingCaseSensitivity options, @NotNull String hardSeparators) {
  myOptions = options;
  // a trailing "* " is redundant (pattern already matches anything after it), so strip it
  myPattern = StringUtil.trimEnd(pattern, "* ").toCharArray();
  myHardSeparators = hardSeparators;
  // Precompute per-pattern-character case/separator classification and case foldings
  // so the hot matching loops avoid repeated Character.* calls.
  isLowerCase = new boolean[myPattern.length];
  isUpperCase = new boolean[myPattern.length];
  isWordSeparator = new boolean[myPattern.length];
  toUpperCase = new char[myPattern.length];
  toLowerCase = new char[myPattern.length];
  StringBuilder meaningful = new StringBuilder();
  for (int k = 0; k < myPattern.length; k++) {
    char c = myPattern[k];
    isLowerCase[k] = Character.isLowerCase(c);
    isUpperCase[k] = Character.isUpperCase(c);
    isWordSeparator[k] = isWordSeparator(c);
    toUpperCase[k] = StringUtil.toUpperCase(c);
    toLowerCase[k] = StringUtil.toLowerCase(c);
    if (!isWildcard(k)) {
      // store both case variants of every non-wildcard char; used by matchingFragments
      // for a cheap pre-check before the expensive recursive match
      meaningful.append(toLowerCase[k]);
      meaningful.append(toUpperCase[k]);
    }
  }

  // classification flags are computed ignoring any leading wildcards
  int i = 0;
  while (isWildcard(i)) i++;
  myHasHumps = hasFlag(i + 1, isUpperCase) && hasFlag(i, isLowerCase);
  myHasSeparators = hasFlag(i, isWordSeparator);
  myHasDots = hasDots(i);
  myMeaningfulCharacters = meaningful.toString().toCharArray();
  // meaningful holds two chars per pattern char, so half its length is the minimum
  // number of name characters any match must consume
  myMinNameLength = myMeaningfulCharacters.length / 2;
}

/** Returns true for characters treated as word separators by this matcher. */
private static boolean isWordSeparator(char c) {
  return Character.isWhitespace(c) || c == '_' || c == '-' || c == ':' || c == '+' || c == '.';
}

/**
 * Returns true if position {@code i} in {@code text} starts a word:
 * an uppercase letter that is not inside an all-caps run, a digit,
 * or a letter preceded by a non-letter/digit (or at position 0).
 */
private static boolean isWordStart(String text, int i) {
  char c = text.charAt(i);
  if (Character.isUpperCase(c)) {
    if (i > 0 && Character.isUpperCase(text.charAt(i - 1))) {
      // check that we're not in the middle of an all-caps word
      return i + 1 < text.length() && Character.isLowerCase(text.charAt(i + 1));
    }
    return true;
  }
  if (Character.isDigit(c)) {
    return true;
  }
  if (!Character.isLetter(c)) {
    return false;
  }
  return i == 0 || !Character.isLetterOrDigit(text.charAt(i - 1));
}

/**
 * Returns the start of the next "word" after {@code start}.
 * Digits are special-cased so each digit forms its own hump;
 * everything else delegates to NameUtil.nextWord.
 */
private static int nextWord(@NotNull String name, int start) {
  if (start < name.length() && Character.isDigit(name.charAt(start))) {
    return start + 1; //treat each digit as a separate hump
  }
  return NameUtil.nextWord(name, start);
}

/** True if any pattern position at or after {@code start} has the given flag set. */
private boolean hasFlag(int start, boolean[] flags) {
  for (int i = start; i < myPattern.length; i++) {
    if (flags[i]) {
      return true;
    }
  }
  return false;
}

/** True if the pattern contains a '.' at or after {@code start}. */
private boolean hasDots(int start) {
  for (int i = start; i < myPattern.length; i++) {
    if (myPattern[i] == '.') {
      return true;
    }
  }
  return false;
}

/**
 * Prepends the range [from, from+length) to {@code ranges},
 * merging it with the head range when the two are adjacent.
 */
private static FList<TextRange> prependRange(@NotNull FList<TextRange> ranges, int from, int length) {
  TextRange head = ranges.getHead();
  if (head != null && head.getStartOffset() == from + length) {
    return ranges.getTail().prepend(new TextRange(from, head.getEndOffset()));
  }
  return ranges.prepend(TextRange.from(from, length));
}

public int matchingDegree(@NotNull String name) {
  return matchingDegree(name, false);
}

public int matchingDegree(@NotNull String name, boolean valueStartCaseMatch) {
  return matchingDegree(name, valueStartCaseMatch, matchingFragments(name));
}

/**
 * Computes a ranking score for how well this pattern matches {@code name},
 * given the matched {@code fragments} (null fragments means no match:
 * Integer.MIN_VALUE). Higher is better. The score rewards: a word-start match
 * (+1000), earlier/longer fragments (via {@code integral}), case-exact
 * matches (weighted by {@code matchingCase}), not being preceded by a hard
 * separator, a match at offset 0, and a match reaching the end of the name.
 */
public int matchingDegree(@NotNull String name, boolean valueStartCaseMatch, @Nullable FList<TextRange> fragments) {
  if (fragments == null) return Integer.MIN_VALUE;
  if (fragments.isEmpty()) return 0;

  final TextRange first = fragments.getHead();
  boolean startMatch = first.getStartOffset() == 0;

  int matchingCase = 0;
  int p = -1;

  int integral = 0; // -sum of matching-char-count * hump-index over all matched humps; favors longer fragments matching earlier words
  int humpIndex = 1;
  int nextHumpStart = 0;
  for (TextRange range : fragments) {
    for (int i = range.getStartOffset(); i < range.getEndOffset(); i++) {
      // advance the hump cursor up to position i, tracking whether i itself starts a hump
      boolean isHumpStart = false;
      while (nextHumpStart <= i) {
        if (nextHumpStart == i) {
          isHumpStart = true;
        }
        nextHumpStart = nextWord(name, nextHumpStart);
        if (first != range) {
          humpIndex++;
        }
      }
      integral -= humpIndex;

      char c = name.charAt(i);
      // find the pattern character this name character was matched against
      p = StringUtil.indexOf(myPattern, c, p + 1, myPattern.length, false);
      if (p < 0) {
        break;
      }

      if (c == myPattern[p]) {
        if (isUpperCase[p]) matchingCase += 50; // strongly prefer user's uppercase matching uppercase: they made an effort to press Shift
        else if (i == 0 && startMatch) matchingCase += 15; // the very first letter case distinguishes classes in Java etc
        else if (isHumpStart) matchingCase += 1; // if a lowercase matches lowercase hump start, that also means something
      }
      else if (isHumpStart) {
        // disfavor hump starts where pattern letter case doesn't match name case
        matchingCase -= 1;
      }
    }
  }

  int startIndex = first.getStartOffset();
  boolean afterSeparator = StringUtil.indexOfAny(name, myHardSeparators, 0, startIndex) >= 0;
  boolean wordStart = startIndex == 0 || isWordStart(name, startIndex) && !isWordStart(name, startIndex - 1);
  boolean finalMatch = fragments.get(fragments.size() - 1).getEndOffset() == name.length();

  return (wordStart ? 1000 : 0) +
         integral * 10 +
         matchingCase * (startMatch && valueStartCaseMatch ? 10 : 1) +
         (afterSeparator ? 0 : 2) +
         (startMatch ? 1 : 0) +
         (finalMatch ? 1 : 0);
}

/** True if the pattern matches {@code name} starting at its first character. */
public boolean isStartMatch(@NotNull String name) {
  FList<TextRange> fragments = matchingFragments(name);
  return fragments != null && isStartMatch(fragments);
}

/** True if the (non-empty) fragment list begins at offset 0; an empty list counts as a start match. */
public static boolean isStartMatch(@NotNull Iterable<TextRange> fragments) {
  Iterator<TextRange> iterator = fragments.iterator();
  return !iterator.hasNext() || iterator.next().getStartOffset() == 0;
}

@Override
public boolean matches(@NotNull String name) {
  return matchingFragments(name) != null;
}

/**
 * Returns the list of name sub-ranges matched by the pattern, or null when
 * there is no match. Performs two cheap rejections first (name shorter than
 * the minimum, or not all meaningful pattern characters present in order as
 * either case variant) before running the recursive wildcard match with a
 * per-thread memoization state.
 */
@Nullable
public FList<TextRange> matchingFragments(@NotNull String name) {
  if (name.length() < myMinNameLength) {
    return null;
  }

  // quick check: every meaningful pattern char (in either case) must occur in order in the name
  int length = name.length();
  int patternIndex = 0;
  boolean isAscii = true;
  for (int i = 0; i < length; ++i) {
    char c = name.charAt(i);
    if (c >= 128) {
      isAscii = false;
    }
    if (patternIndex < myMeaningfulCharacters.length && (c == myMeaningfulCharacters[patternIndex] || c == myMeaningfulCharacters[patternIndex + 1])) {
      patternIndex += 2;
    }
  }
  if (patternIndex < myMinNameLength) {
    return null;
  }

  MatchingState state = myMatchingState.get();
  state.initializeState(isAscii, length);
  try {
    return matchWildcards(name, 0, 0, state);
  }
  finally {
    // release so reentrant calls on this thread hit the assertion rather than corrupt state
    state.releaseState();
  }
}

/**
 * After a wildcard (* or space), search for the first non-wildcard pattern character in the name starting from nameIndex
 * and try to {@link #matchFragment(String, int, int, MinusculeMatcher.MatchingState)} for it.
 */
@Nullable
private FList<TextRange> matchWildcards(@NotNull String name, int patternIndex, int nameIndex, MatchingState matchingState) {
  if (nameIndex < 0) {
    return null;
  }
  if (!isWildcard(patternIndex)) {
    if (patternIndex == myPattern.length) {
      return FList.emptyList();
    }
    return matchFragment(name, patternIndex, nameIndex, matchingState);
  }

  // collapse a run of consecutive wildcards
  do {
    patternIndex++;
  } while (isWildcard(patternIndex));

  if (patternIndex == myPattern.length) {
    boolean space = isPatternChar(patternIndex - 1, ' ');
    // the trailing space should match if the pattern ends with the last word part, or only its first hump character
    if (space && nameIndex != name.length() && (patternIndex < 2 || !NameUtil.isWordStart(myPattern[patternIndex - 2]))) {
      int spaceIndex = name.indexOf(' ', nameIndex);
      if (spaceIndex >= 0) {
        return FList.<TextRange>emptyList().prepend(TextRange.from(spaceIndex, 1));
      }
      return null;
    }
    return FList.emptyList();
  }

  FList<TextRange> ranges = matchFragment(name, patternIndex, nameIndex, matchingState);
  if (ranges != null) {
    return ranges;
  }

  return matchSkippingWords(name, patternIndex, nameIndex, true, matchingState);
}

/**
 * Enumerates places in name that could be matched by the pattern at patternIndex position
 * and invokes {@link #matchFragment(String, int, int, MinusculeMatcher.MatchingState)} at those candidate positions
 */
@Nullable
private FList<TextRange> matchSkippingWords(@NotNull String name, final int patternIndex, int nameIndex, boolean allowSpecialChars, MatchingState matchingState) {
  boolean star = isPatternChar(patternIndex - 1, '*');
  final char p = myPattern[patternIndex];
  while (true) {
    // after '*' any occurrence is a candidate; otherwise only word starts are
    int nextOccurrence = star ?
                         indexOfIgnoreCase(name, nameIndex + 1, p, patternIndex, matchingState.isAsciiName) :
                         indexOfWordStart(name, patternIndex, nameIndex);
    if (nextOccurrence < 0) {
      return null;
    }
    // pattern humps are allowed to match in words separated by " ()", lowercase characters aren't
    if (!allowSpecialChars && !myHasSeparators && !myHasHumps && StringUtil.containsAnyChar(name, myHardSeparators, nameIndex, nextOccurrence)) {
      return null;
    }
    // if the user has typed a dot, don't skip other dots between humps
    // but one pattern dot may match several name dots
    if (!allowSpecialChars && myHasDots && !isPatternChar(patternIndex - 1, '.') && StringUtil.contains(name, nameIndex, nextOccurrence, '.')) {
      return null;
    }
    // uppercase should match either uppercase or a word start
    if (!isUpperCase[patternIndex] ||
        Character.isUpperCase(name.charAt(nextOccurrence)) ||
        isWordStart(name, nextOccurrence) ||
        // accept uppercase matching lowercase if the whole prefix is uppercase and case sensitivity allows that
        !myHasHumps && myOptions != NameUtil.MatchingCaseSensitivity.ALL) {
      FList<TextRange> ranges = matchFragment(name, patternIndex, nextOccurrence, matchingState);
      if (ranges != null) {
        return ranges;
      }
    }
    nameIndex = nextOccurrence;
  }
}

/** Case-aware character comparison using the precomputed foldings for the pattern position. */
private boolean charEquals(char patternChar, int patternIndex, char c, boolean isIgnoreCase) {
  return patternChar == c || isIgnoreCase && (toLowerCase[patternIndex] == c || toUpperCase[patternIndex] == c);
}

/**
 * Memoizing wrapper around {@link #doMatchFragments}: records (patternIndex, nameIndex)
 * pairs that failed so backtracking never retries them.
 */
@Nullable
private FList<TextRange> matchFragment(@NotNull String name, int patternIndex, int nameIndex, MatchingState matchingState) {
  if (matchingState.hasFailed(patternIndex, nameIndex)) {
    return null;
  }
  FList<TextRange> result = doMatchFragments(name, patternIndex, nameIndex, matchingState);
  if (result == null) {
    matchingState.registerFailure(patternIndex, nameIndex);
  }
  return result;
}

/**
 * Attempts to match an alphanumeric sequence of pattern (starting at patternIndex)
 * to some continuous substring of name, starting from nameIndex.
 */
private FList<TextRange> doMatchFragments(String name, int patternIndex, int nameIndex, MatchingState matchingState) {
  if (!isFirstCharMatching(name, nameIndex, patternIndex)) {
    return null;
  }

  // exact middle matches have to be at least of length 3, to prevent too many irrelevant matches
  int minFragment = isPatternChar(patternIndex - 1, '*') &&
                    !isWildcard(patternIndex + 1) &&
                    Character.isLetterOrDigit(name.charAt(nameIndex)) &&
                    !isWordStart(name, nameIndex)
                    ? 3 : 1;
  // greedily extend the fragment while characters keep matching
  int i = 1;
  boolean ignoreCase = myOptions != NameUtil.MatchingCaseSensitivity.ALL;
  while (nameIndex + i < name.length() &&
         patternIndex + i < myPattern.length &&
         charEquals(myPattern[patternIndex+i], patternIndex+i, name.charAt(nameIndex + i), ignoreCase)) {
    if (isUpperCase[patternIndex + i] && myHasHumps) {
      // when an uppercase pattern letter matches lowercase name letter, try to find an uppercase (better) match further in the name
      if (myPattern[patternIndex + i] != name.charAt(nameIndex + i)) {
        if (i < minFragment) {
          return null;
        }
        int nextWordStart = indexOfWordStart(name, patternIndex + i, nameIndex + i);
        FList<TextRange> ranges = matchWildcards(name, patternIndex + i, nextWordStart, matchingState);
        if (ranges != null) {
          return prependRange(ranges, nameIndex, i);
        }
        // at least three consecutive uppercase letters shouldn't match lowercase
        if (i > 1 && isUpperCase[patternIndex + i - 1] && isUpperCase[patternIndex + i - 2]) {
          // but if there's a lowercase after them, it can match (in case shift was released a bit later)
          if (nameIndex + i + 1 == name.length() || patternIndex + i + 1 < myPattern.length && !isLowerCase[patternIndex + i + 1]) {
            return null;
          }
        }
      }
    }
    i++;
  }

  // we've found the longest fragment matching pattern and name

  if (patternIndex + i >= myPattern.length) {
    return FList.<TextRange>emptyList().prepend(TextRange.from(nameIndex, i));
  }

  // try to match the remainder of pattern with the remainder of name
  // it may not succeed with the longest matching fragment, then try shorter matches
  while (i >= minFragment || isWildcard(patternIndex + i)) {
    FList<TextRange> ranges = isWildcard(patternIndex + i) ?
                              matchWildcards(name, patternIndex + i, nameIndex + i, matchingState) :
                              matchSkippingWords(name, patternIndex + i, nameIndex + i, false, matchingState);
    if (ranges != null) {
      return prependRange(ranges, nameIndex, i);
    }
    i--;
  }
  return null;
}

/**
 * Checks whether the pattern character at patternIndex can match the name
 * character at nameIndex, including the FIRST_LETTER case-sensitivity rule
 * for the very first meaningful pattern character.
 */
private boolean isFirstCharMatching(@NotNull String name, int nameIndex, int patternIndex) {
  if (nameIndex >= name.length()) return false;

  boolean ignoreCase = myOptions != NameUtil.MatchingCaseSensitivity.ALL;
  char patternChar = myPattern[patternIndex];
  if (!charEquals(patternChar, patternIndex, name.charAt(nameIndex), ignoreCase)) return false;

  if (myOptions == NameUtil.MatchingCaseSensitivity.FIRST_LETTER &&
      (patternIndex == 0 || patternIndex == 1 && isWildcard(0)) &&
      hasCase(patternChar) &&
      Character.isUpperCase(patternChar) != Character.isUpperCase(name.charAt(0))) {
    return false;
  }
  return true;
}

/** True if the character has a case at all (caseless symbols are exempt from case rules). */
private static boolean hasCase(char patternChar) {
  return Character.isUpperCase(patternChar) || Character.isLowerCase(patternChar);
}

/** True if the pattern position holds a wildcard ('*' or ' '); out-of-range positions are not wildcards. */
private boolean isWildcard(int patternIndex) {
  if (patternIndex >= 0 && patternIndex < myPattern.length) {
    char pc = myPattern[patternIndex];
    return pc == ' ' || pc == '*';
  }
  return false;
}

/** Bounds-checked test for a specific character at a pattern position. */
private boolean isPatternChar(int patternIndex, char c) {
  return patternIndex >= 0 && patternIndex < myPattern.length && myPattern[patternIndex] == c;
}

/**
 * Finds the next word start in {@code name} after {@code startFrom} whose first character
 * matches (case-insensitively) the pattern character at patternIndex, or -1.
 * Lowercase pattern humps may not jump to later words unless preceded by a separator.
 */
private int indexOfWordStart(@NotNull String name, int patternIndex, int startFrom) {
  final char p = myPattern[patternIndex];
  if (startFrom >= name.length() ||
      myHasHumps && isLowerCase[patternIndex] &&
      !(patternIndex > 0 && isWordSeparator[patternIndex - 1])) {
    return -1;
  }
  int nextWordStart = startFrom;
  while (true) {
    nextWordStart = nextWord(name, nextWordStart);
    if (nextWordStart >= name.length()) {
      return -1;
    }
    if (charEquals(p, patternIndex, name.charAt(nextWordStart), true)) {
      return nextWordStart;
    }
  }
}

/**
 * Case-insensitive indexOf with a fast ASCII path that avoids the
 * (locale-aware) StringUtil folding when both name and pattern char are ASCII.
 */
private int indexOfIgnoreCase(String name, int fromIndex, char p, int patternIndex, boolean isAsciiName) {
  if (isAsciiName && IOUtil.isAscii(p)) {
    char pUpper = toUpperCase[patternIndex];
    char pLower = toLowerCase[patternIndex];
    for (int i = fromIndex; i < name.length(); i++) {
      char c = name.charAt(i);
      if (c == p || toUpperAscii(c) == pUpper || toLowerAscii(c) == pLower) {
        return i;
      }
    }
    return -1;
  }
  return StringUtil.indexOfIgnoreCase(name, p, fromIndex);
}

/** ASCII-only uppercase fold; non-letters pass through unchanged. */
private static char toUpperAscii(char c) {
  if (c >= 'a' && c <= 'z') {
    return (char)(c + ('A' - 'a'));
  }
  return c;
}

/** ASCII-only lowercase fold; non-letters pass through unchanged. */
private static char toLowerAscii(char c) {
  if (c >= 'A' && c <= 'Z') {
    return (char)(c - ('A' - 'a'));
  }
  return c;
}

@NonNls
@Override
public String toString() {
  return "MinusculeMatcher{myPattern=" + new String(myPattern) + ", myOptions=" + myOptions + '}';
}

/**
 * Per-thread scratch state for one matchingFragments invocation:
 * a bit set memoizing failed (patternIndex, nameIndex) pairs, keyed as
 * patternIndex * nameLength + nameIndex. The myBusy flag asserts the state
 * is not used reentrantly on the same thread.
 */
private static class MatchingState {
  private boolean myBusy; // guards against reentrant use of this thread-local state
  private int myNameLength;
  private boolean isAsciiName;
  private final BitSet myTable = new BitSet();

  void initializeState(boolean isAscii, int length) {
    assert !myBusy;
    myBusy = true;
    myNameLength = length;
    isAsciiName = isAscii;
    myTable.clear();
  }

  void releaseState() {
    assert myBusy;
    myBusy = false;
  }

  void registerFailure(int patternIndex, int nameIndex) {
    myTable.set(patternIndex * myNameLength + nameIndex);
  }

  boolean hasFailed(int patternIndex, int nameIndex) {
    return myTable.get(patternIndex * myNameLength + nameIndex);
  }
}
}
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.plugins.groovy.lang.psi.dataFlow.reachingDefs;

import com.intellij.psi.*;
import com.intellij.psi.util.PsiTreeUtil;
import gnu.trove.TIntHashSet;
import gnu.trove.TIntObjectHashMap;
import gnu.trove.TIntObjectProcedure;
import gnu.trove.TIntProcedure;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.lang.psi.GrControlFlowOwner;
import org.jetbrains.plugins.groovy.lang.psi.GroovyPsiElement;
import org.jetbrains.plugins.groovy.lang.psi.GroovyRecursiveElementVisitor;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrField;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrStatement;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrVariable;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.blocks.GrClosableBlock;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrReferenceExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrMember;
import org.jetbrains.plugins.groovy.lang.psi.controlFlow.Instruction;
import org.jetbrains.plugins.groovy.lang.psi.controlFlow.ReadWriteVariableInstruction;
import org.jetbrains.plugins.groovy.lang.psi.dataFlow.DFAEngine;
import org.jetbrains.plugins.groovy.lang.psi.impl.statements.expressions.TypesUtil;
import org.jetbrains.plugins.groovy.lang.psi.impl.synthetic.ClosureSyntheticParameter;
import org.jetbrains.plugins.groovy.lang.psi.impl.synthetic.GroovyScriptClass;
import org.jetbrains.plugins.groovy.lang.resolve.ResolveUtil;

import java.util.*;

import static org.jetbrains.plugins.groovy.lang.psi.controlFlow.OrderUtil.reversedPostOrder;

/**
 * Computes, via a reaching-definitions data-flow analysis over a Groovy control flow,
 * which variables flow into a statement fragment (inputs) and which flow out of it
 * (outputs). Used e.g. to decide parameters and return values when extracting the
 * fragment [first..last].
 *
 * @author ven
 */
public class ReachingDefinitionsCollector {
  private ReachingDefinitionsCollector() {
  }

  /**
   * Returns input/output variable information for the fragment delimited by
   * {@code first} and {@code last} (inclusive, siblings under the same parent)
   * inside {@code flowOwner}'s control {@code flow}.
   *
   * A variable is an input when some read inside the fragment (or a read reachable
   * from the fragment) has a definition outside the fragment; it is an output when
   * a read outside the fragment is reached by a definition inside the fragment.
   */
  @NotNull
  public static FragmentVariableInfos obtainVariableFlowInformation(@NotNull final GrStatement first,
                                                                    @NotNull final GrStatement last,
                                                                    @NotNull final GrControlFlowOwner flowOwner,
                                                                    @NotNull final Instruction[] flow) {
    final DefinitionMap dfaResult = inferDfaResult(flow);

    final LinkedHashSet<Integer> fragmentInstructions = getFragmentInstructions(first, last, flow);
    final int[] postorder = reversedPostOrder(flow);
    LinkedHashSet<Integer> reachableFromFragmentReads = getReachable(fragmentInstructions, flow, dfaResult, postorder);
    LinkedHashSet<Integer> fragmentReads = filterReads(fragmentInstructions, flow);

    final Map<String, VariableInfo> imap = new LinkedHashMap<>();
    final Map<String, VariableInfo> omap = new LinkedHashMap<>();
    final PsiManager manager = first.getManager();

    // reads inside the fragment with at least one definition outside it -> inputs
    for (final Integer ref : fragmentReads) {
      ReadWriteVariableInstruction rwInstruction = (ReadWriteVariableInstruction)flow[ref];
      String name = rwInstruction.getVariableName();
      final int[] defs = dfaResult.getDefinitions(ref);
      if (!allDefsInFragment(defs, fragmentInstructions)) {
        addVariable(name, imap, manager, getType(rwInstruction.getElement()));
      }
    }

    // reads outside the fragment reached by definitions inside it -> outputs
    for (final Integer ref : reachableFromFragmentReads) {
      ReadWriteVariableInstruction rwInstruction = (ReadWriteVariableInstruction)flow[ref];
      String name = rwInstruction.getVariableName();
      final int[] defs = dfaResult.getDefinitions(ref);
      if (anyDefInFragment(defs, fragmentInstructions)) {
        for (int def : defs) {
          if (fragmentInstructions.contains(def)) {
            PsiType outputType = getType(flow[def].getElement());
            addVariable(name, omap, manager, outputType);
          }
        }

        // if some definition preceding the read lies outside the fragment, the variable is also an input
        if (!allProperDefsInFragment(defs, ref, fragmentInstructions, postorder)) {
          PsiType inputType = getType(rwInstruction.getElement());
          addVariable(name, imap, manager, inputType);
        }
      }
    }

    addClosureUsages(imap, omap, first, last, flowOwner);

    final VariableInfo[] iarr = filterNonlocals(imap, last);
    final VariableInfo[] oarr = filterNonlocals(omap, last);

    return new FragmentVariableInfos() {
      @Override
      public VariableInfo[] getInputVariableNames() {
        return iarr;
      }

      @Override
      public VariableInfo[] getOutputVariableNames() {
        return oarr;
      }
    };
  }

  /** Runs the reaching-definitions DFA over the flow and collapses the per-instruction result. */
  private static DefinitionMap inferDfaResult(Instruction[] flow) {
    final ReachingDefinitionsDfaInstance dfaInstance = new ReachingDefinitionsDfaInstance(flow);
    final ReachingDefinitionsSemilattice lattice = new ReachingDefinitionsSemilattice();
    final DFAEngine<DefinitionMap> engine = new DFAEngine<>(flow, dfaInstance, lattice);
    return postprocess(engine.performForceDFA(), flow, dfaInstance);
  }

  /**
   * Accounts for variables captured by closures: a local used inside a closure counts as
   * an input when the closure is in the fragment but the variable is declared outside it,
   * and as an output when the variable is declared in the fragment but the closure is outside.
   */
  private static void addClosureUsages(final Map<String, VariableInfo> imap,
                                       final Map<String, VariableInfo> omap,
                                       final GrStatement first,
                                       final GrStatement last,
                                       GrControlFlowOwner flowOwner) {
    flowOwner.accept(new GroovyRecursiveElementVisitor() {
      @Override
      public void visitClosure(@NotNull GrClosableBlock closure) {
        addUsagesInClosure(imap, omap, closure, first, last);
        super.visitClosure(closure);
      }

      private void addUsagesInClosure(final Map<String, VariableInfo> imap,
                                      final Map<String, VariableInfo> omap,
                                      final GrClosableBlock closure,
                                      final GrStatement first,
                                      final GrStatement last) {
        closure.accept(new GroovyRecursiveElementVisitor() {
          @Override
          public void visitReferenceExpression(@NotNull GrReferenceExpression refExpr) {
            if (refExpr.isQualified()) {
              return;
            }
            PsiElement resolved = refExpr.resolve();
            if (!(resolved instanceof GrVariable)) {
              return;
            }
            GrVariable variable = (GrVariable)resolved;
            // variables declared inside the closure itself are irrelevant
            if (PsiTreeUtil.isAncestor(closure, variable, true)) {
              return;
            }
            // the implicit 'it' parameter belongs to the closure, not the surrounding code
            if (variable instanceof ClosureSyntheticParameter &&
                PsiTreeUtil.isAncestor(closure, ((ClosureSyntheticParameter)variable).getClosure(), false)) {
              return;
            }
            String name = variable.getName();
            if (!(variable instanceof GrField)) {
              if (!isInFragment(first, last, resolved)) {
                if (isInFragment(first, last, closure)) {
                  addVariable(name, imap, variable.getManager(), variable.getType());
                }
              }
              else {
                if (!isInFragment(first, last, closure)) {
                  addVariable(name, omap, variable.getManager(), variable.getType());
                }
              }
            }
          }
        });
      }
    });
  }

  /** Adds {@code type} as a possible type of variable {@code name}, creating the entry on first use. */
  private static void addVariable(String name, Map<String, VariableInfo> map, PsiManager manager, PsiType type) {
    VariableInfoImpl info = (VariableInfoImpl)map.get(name);
    if (info == null) {
      info = new VariableInfoImpl(name, manager);
      map.put(name, info);
    }
    info.addSubtype(type);
  }

  /** Keeps only instruction indices that are variable reads. */
  private static LinkedHashSet<Integer> filterReads(final LinkedHashSet<Integer> instructions, final Instruction[] flow) {
    final LinkedHashSet<Integer> result = new LinkedHashSet<>();
    for (final Integer i : instructions) {
      final Instruction instruction = flow[i];
      if (isReadInsn(instruction)) {
        result.add(i);
      }
    }
    return result;
  }

  private static boolean allDefsInFragment(int[] defs, LinkedHashSet<Integer> fragmentInstructions) {
    for (int def : defs) {
      if (!fragmentInstructions.contains(def)) return false;
    }

    return true;
  }

  /** Like allDefsInFragment, but only considers definitions that properly precede {@code ref} in postorder. */
  private static boolean allProperDefsInFragment(int[] defs, int ref, LinkedHashSet<Integer> fragmentInstructions, int[] postorder) {
    for (int def : defs) {
      if (!fragmentInstructions.contains(def) && postorder[def] < postorder[ref]) return false;
    }

    return true;
  }

  private static boolean anyDefInFragment(int[] defs, LinkedHashSet<Integer> fragmentInstructions) {
    for (int def : defs) {
      if (fragmentInstructions.contains(def)) return true;
    }

    return false;
  }

  /** Best-effort static type of the element behind a read/write instruction, or null. */
  @Nullable
  private static PsiType getType(PsiElement element) {
    if (element instanceof GrVariable) {
      return ((GrVariable)element).getTypeGroovy();
    }
    else if (element instanceof GrReferenceExpression) return ((GrReferenceExpression)element).getType();
    return null;
  }

  /**
   * Drops variables that are not local to the fragment context: properties resolvable at
   * {@code place} (script binding variables are kept) and names that resolve to classes.
   */
  private static VariableInfo[] filterNonlocals(Map<String, VariableInfo> infos, GrStatement place) {
    List<VariableInfo> result = new ArrayList<>();
    for (Iterator<VariableInfo> iterator = infos.values().iterator(); iterator.hasNext(); ) {
      VariableInfo info = iterator.next();
      String name = info.getName();
      GroovyPsiElement property = ResolveUtil.resolveProperty(place, name);
      if (property instanceof GrVariable) {
        iterator.remove();
      }
      else if (property instanceof GrReferenceExpression) {
        GrMember member = PsiTreeUtil.getParentOfType(property, GrMember.class);
        if (member == null) {
          continue;
        }
        else if (!member.hasModifierProperty(PsiModifier.STATIC)) {
          if (member.getContainingClass() instanceof GroovyScriptClass) {
            //binding variable
            continue;
          }
        }
      }
      if (ResolveUtil.resolveClass(place, name) == null) {
        result.add(info);
      }
    }
    return result.toArray(new VariableInfo[result.size()]);
  }

  /** Collects the numbers of all flow instructions whose PSI element lies inside [first..last]. */
  private static LinkedHashSet<Integer> getFragmentInstructions(GrStatement first, GrStatement last, Instruction[] flow) {
    LinkedHashSet<Integer> result = new LinkedHashSet<>();
    for (Instruction instruction : flow) {
      if (isInFragment(instruction, first, last)) {
        result.add(instruction.num());
      }
    }
    return result;
  }

  private static boolean isInFragment(Instruction instruction, GrStatement first, GrStatement last) {
    final PsiElement element = instruction.getElement();
    if (element == null) return false;
    return isInFragment(first, last, element);
  }

  /** True if {@code element} is (a descendant of) one of the siblings between first and last. */
  private static boolean isInFragment(GrStatement first, GrStatement last, PsiElement element) {
    final PsiElement parent = first.getParent();
    if (!PsiTreeUtil.isAncestor(parent, element, true)) return false;
    PsiElement run = element;
    // climb up to the direct child of the common parent, then check sibling order
    while (run.getParent() != parent) run = run.getParent();
    return isBetween(first, last, run);
  }

  /** True if {@code run} lies between {@code first} and {@code last} in the sibling chain (inclusive). */
  private static boolean isBetween(PsiElement first, PsiElement last, PsiElement run) {
    while (first != null && first != run) first = first.getNextSibling();
    if (first == null) return false;
    while (last != null && last != run) last = last.getPrevSibling();
    if (last == null) return false;

    return true;
  }

  /**
   * Finds reads (anywhere in the flow) that are reached by a definition inside the fragment,
   * either because the read is outside the fragment, or because control can leave and re-enter
   * the fragment between the definition and the read.
   */
  private static LinkedHashSet<Integer> getReachable(final LinkedHashSet<Integer> fragmentInsns,
                                                     final Instruction[] flow,
                                                     final DefinitionMap dfaResult,
                                                     final int[] postorder) {
    final LinkedHashSet<Integer> result = new LinkedHashSet<>();
    for (final Instruction insn : flow) {
      if (isReadInsn(insn)) {
        final int ref = insn.num();
        int[] definitions = dfaResult.getDefinitions(ref);
        if (definitions != null) {
          for (final int def : definitions) {
            if (fragmentInsns.contains(def) &&
                (!fragmentInsns.contains(ref) ||
                 postorder[ref] < postorder[def] && checkPathIsOutsideOfFragment(def, ref, flow, fragmentInsns))) {
              result.add(ref);
              break;
            }
          }
        }
      }
    }

    return result;
  }

  private static boolean checkPathIsOutsideOfFragment(int def, int ref, Instruction[] flow, LinkedHashSet<Integer> fragmentInsns) {
    Boolean path = findPath(flow[def], ref, fragmentInsns, false, new HashMap<>());
    assert path != null : "def=" + def + ", ref=" + ref;
    return path.booleanValue();
  }

  /**
   * return true if path is outside of fragment, null if there is no path, and false if path is inside fragment
   */
  @Nullable
  private static Boolean findPath(Instruction cur,
                                  int destination,
                                  LinkedHashSet<Integer> fragmentInsns,
                                  boolean wasOutside,
                                  HashMap<Instruction, Boolean> visited) {
    wasOutside = wasOutside || !fragmentInsns.contains(cur.num());
    // mark as in-progress (null) to cut cycles; overwritten with the real answer below
    visited.put(cur, null);
    Iterable<? extends Instruction> instructions = cur.allSuccessors();
    boolean pathExists = false;
    for (Instruction i : instructions) {
      if (i.num() == destination) return wasOutside;

      Boolean result;
      if (visited.containsKey(i)) {
        result = visited.get(i);
      }
      else {
        result = findPath(i, destination, fragmentInsns, wasOutside, visited);
        visited.put(i, result);
      }
      if (result != null) {
        if (result.booleanValue()) {
          visited.put(cur, true);
          return true;
        }
        pathExists = true;
      }
    }
    if (pathExists) {
      visited.put(cur, false);
      return false;
    }
    else {
      visited.put(cur, null);
      return null;
    }
  }

  private static boolean isReadInsn(Instruction insn) {
    return insn instanceof ReadWriteVariableInstruction && !((ReadWriteVariableInstruction)insn).isWrite();
  }

  /** Debug helper: textual dump of a raw DFA result. Not used in production code paths. */
  @SuppressWarnings({"UnusedDeclaration"})
  private static String dumpDfaResult(ArrayList<TIntObjectHashMap<TIntHashSet>> dfaResult, ReachingDefinitionsDfaInstance dfa) {
    final StringBuffer buffer = new StringBuffer();
    for (int i = 0; i < dfaResult.size(); i++) {
      TIntObjectHashMap<TIntHashSet> map = dfaResult.get(i);
      buffer.append("At ").append(i).append(":\n");
      map.forEachEntry(new TIntObjectProcedure<TIntHashSet>() {
        @Override
        public boolean execute(int i, TIntHashSet defs) {
          buffer.append(i).append(" -> ");
          defs.forEach(new TIntProcedure() {
            @Override
            public boolean execute(int i) {
              buffer.append(i).append(" ");
              return true;
            }
          });
          return true;
        }
      });
      buffer.append("\n");
    }

    return buffer.toString();
  }

  /**
   * Mutable accumulator for a variable's name and the least-upper-bound of the types
   * observed at its definitions/reads.
   */
  private static class VariableInfoImpl implements VariableInfo {
    @NotNull private final String myName;
    private final PsiManager myManager;

    @Nullable private PsiType myType;

    VariableInfoImpl(@NotNull String name, PsiManager manager) {
      myName = name;
      myManager = manager;
    }

    @Override
    @NotNull
    public String getName() {
      return myName;
    }

    @Override
    @Nullable
    public PsiType getType() {
      // an intersection type is collapsed to its first conjunct for presentation
      if (myType instanceof PsiIntersectionType) return ((PsiIntersectionType)myType).getConjuncts()[0];
      return myType;
    }

    /** Widens myType to cover {@code t}: keeps the more general of the two, or their LUB. */
    void addSubtype(PsiType t) {
      if (t != null) {
        if (myType == null) {
          myType = t;
        }
        else {
          if (!myType.isAssignableFrom(t)) {
            if (t.isAssignableFrom(myType)) {
              myType = t;
            }
            else {
              myType = TypesUtil.getLeastUpperBound(myType, t, myManager);
            }
          }
        }
      }
    }
  }

  /**
   * Collapses the per-instruction DFA results into one DefinitionMap keyed by read
   * instructions: for each read, copies the reaching definitions of its variable.
   */
  @NotNull
  private static DefinitionMap postprocess(@NotNull final List<DefinitionMap> dfaResult,
                                           @NotNull Instruction[] flow,
                                           @NotNull ReachingDefinitionsDfaInstance dfaInstance) {
    DefinitionMap result = new DefinitionMap();
    for (int i = 0; i < flow.length; i++) {
      Instruction insn = flow[i];
      if (insn instanceof ReadWriteVariableInstruction) {
        ReadWriteVariableInstruction rwInsn = (ReadWriteVariableInstruction)insn;
        if (!rwInsn.isWrite()) {
          int idx = dfaInstance.getVarIndex(rwInsn.getVariableName());
          result.copyFrom(dfaResult.get(i), idx, i);
        }
      }
    }
    return result;
  }
}
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.facebook.buck.jvm.java.abi;

import static org.junit.Assert.fail;

import com.facebook.buck.jvm.java.testutil.compiler.Classes;
import com.facebook.buck.jvm.java.testutil.compiler.TestCompiler;
import com.facebook.buck.testutil.integration.TestDataHelper;
import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.function.Function;
import javax.annotation.processing.AbstractProcessor;
import javax.annotation.processing.RoundEnvironment;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.Element;
import javax.lang.model.element.Name;
import javax.lang.model.element.TypeElement;
import javax.lang.model.util.Elements;
import org.junit.Rule;
import org.junit.runners.Parameterized;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.tree.ClassNode;
import org.objectweb.asm.tree.FieldNode;
import org.objectweb.asm.tree.MethodNode;

/**
 * Base class for tests that compare descriptors/signatures computed from the element model
 * against the values javac writes into class files. Runs parameterized in two modes: with the
 * dependency sources on the classpath, and without them (frontend-only javac).
 */
public class DescriptorAndSignatureFactoryTestBase {
  private static final String WITH_DEPS = "With Dependencies";
  private static final String WITHOUT_DEPS = "Without Dependencies";

  @Parameterized.Parameter public String testMode;

  @Parameterized.Parameters(name = "{0}")
  public static Object[] getParameters() {
    return new Object[] {WITH_DEPS, WITHOUT_DEPS};
  }

  @Rule public TestCompiler correctClassCompiler = new TestCompiler();
  @Rule public TestCompiler testCompiler = new TestCompiler();
  private Classes correctClasses;
  protected Elements elements;
  List<String> errors = new ArrayList<>();

  protected boolean isTestingWithDependencies() {
    // Compare by value, not reference: the Parameterized runner is not guaranteed to
    // pass the identical String instance through to the parameter field.
    return WITH_DEPS.equals(testMode);
  }

  /**
   * Compiles the golden ("correct") class files with full dependencies, then runs {@code r}
   * inside an annotation processing round of the test compilation.
   */
  protected void test(TestRunnable r) throws Exception {
    // Always compile with dependencies, so that we have the correct output to compare to
    generateCorrectClassFiles();
    runTest(r);
  }

  private void generateCorrectClassFiles() throws IOException {
    correctClassCompiler.addSourceFile(getSourceFile("Foo.java"));
    correctClassCompiler.addClasspathSourceFile(getSourceFile("Dependency.java"));
    correctClassCompiler.addClasspathSourceFile(getSourceFile("DependencyException.java"));
    correctClassCompiler.addClasspathSourceFile(getSourceFile("DependencyInterface.java"));
    correctClassCompiler.compile();
    correctClasses = correctClassCompiler.getClasses();
  }

  private void runTest(TestRunnable r) throws Exception {
    testCompiler.addSourceFile(getSourceFile("Foo.java"));
    if (WITH_DEPS.equals(testMode)) { // value comparison; see isTestingWithDependencies
      testCompiler.addClasspathSourceFile(getSourceFile("Dependency.java"));
      testCompiler.addClasspathSourceFile(getSourceFile("DependencyException.java"));
      testCompiler.addClasspathSourceFile(getSourceFile("DependencyInterface.java"));
    } else {
      testCompiler.useFrontendOnlyJavacTask();
    }
    // The runnable executes in the final processing round, when the whole element model is built
    testCompiler.setProcessors(
        Collections.singletonList(
            new AbstractProcessor() {
              @Override
              public Set<String> getSupportedOptions() {
                return Collections.emptySet();
              }

              @Override
              public Set<String> getSupportedAnnotationTypes() {
                return Collections.singleton("*");
              }

              @Override
              public SourceVersion getSupportedSourceVersion() {
                return SourceVersion.RELEASE_8;
              }

              @Override
              public boolean process(
                  Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
                if (roundEnv.processingOver()) {
                  try {
                    r.run();
                  } catch (Exception e) {
                    throw new AssertionError(e);
                  }
                }
                return false;
              }
            }));
    elements = testCompiler.getElements();
    testCompiler.enter();
  }

  private Path getSourceFile(String filename) {
    return TestDataHelper.getTestDataScenario(this, "descriptor_and_signature_factories")
        .resolve(filename);
  }

  /**
   * Walks all members of com.facebook.foo.Foo, comparing the expected value read from the
   * golden class file node against the actual value computed from the corresponding element.
   * Returns the accumulated mismatch descriptions.
   */
  protected List<String> getTestErrors(
      Function<FieldNode, String> fieldNodeExpectedValueGetter,
      Function<MethodNode, String> methodNodeExpectedValueGetter,
      Function<ClassNode, String> classNodeExpectedValueGetter,
      Function<Element, String> elementActualValueGetter)
      throws IOException {
    TypeElement fooElement = elements.getTypeElement("com.facebook.foo.Foo");
    findErrors(
        fooElement,
        fieldNodeExpectedValueGetter,
        methodNodeExpectedValueGetter,
        classNodeExpectedValueGetter,
        elementActualValueGetter);
    return errors;
  }

  private void findErrors(
      TypeElement typeElement,
      Function<FieldNode, String> fieldNodeExpectedValueGetter,
      Function<MethodNode, String> methodNodeExpectedValueGetter,
      Function<ClassNode, String> classNodeExpectedValueGetter,
      Function<Element, String> elementActualValueGetter)
      throws IOException {
    ClassNode typeNode = getClassNode(elements.getBinaryName(typeElement).toString());
    for (Element enclosedElement : typeElement.getEnclosedElements()) {
      Name elementName = enclosedElement.getSimpleName();
      String actual = elementActualValueGetter.apply(enclosedElement);
      switch (enclosedElement.getKind()) {
        case FIELD:
          checkValue(
              "Field",
              elementName,
              fieldNodeExpectedValueGetter.apply(getFieldNode(typeNode, elementName)),
              actual);
          break;
        case CONSTRUCTOR:
        case METHOD:
          checkValue(
              "Method",
              elementName,
              methodNodeExpectedValueGetter.apply(getMethodNode(typeNode, elementName)),
              actual);
          break;
        case ANNOTATION_TYPE:
        case CLASS:
        case ENUM:
        case INTERFACE:
          ClassNode innerTypeNode =
              getClassNode(elements.getBinaryName((TypeElement) enclosedElement).toString());
          checkValue("Class", elementName, classNodeExpectedValueGetter.apply(innerTypeNode), actual);
          // recurse into nested types
          findErrors(
              (TypeElement) enclosedElement,
              fieldNodeExpectedValueGetter,
              methodNodeExpectedValueGetter,
              classNodeExpectedValueGetter,
              elementActualValueGetter);
          break;
          // $CASES-OMITTED$
        default:
          fail(
              String.format(
                  "Didn't implement testing for element kind %s", enclosedElement.getKind()));
          continue;
      }
    }
  }

  /** Records a mismatch when expected and actual differ (null-safe comparison). */
  private void checkValue(String type, Name elementName, String expected, String actual) {
    if (!Objects.equals(expected, actual)) {
      errors.add(
          String.format(
              "%s %s:\n\tExpected: %s\n\tActual: %s", type, elementName, expected, actual));
    }
  }

  private FieldNode getFieldNode(ClassNode classNode, Name name) {
    return classNode.fields.stream()
        .filter(field -> name.contentEquals(field.name))
        .findFirst()
        .orElse(null);
  }

  private MethodNode getMethodNode(ClassNode classNode, Name name) {
    return classNode.methods.stream()
        .filter(method -> name.contentEquals(method.name))
        .findFirst()
        .orElse(null);
  }

  /** Loads the golden class file for the given binary name into an ASM tree node. */
  private ClassNode getClassNode(String classBinaryName) throws IOException {
    ClassNode classNode = new ClassNode(Opcodes.ASM7);
    correctClasses.acceptClassVisitor(classBinaryName, 0, classNode);
    return classNode;
  }

  public interface TestRunnable {
    void run() throws Exception;
  }
}
package jason.datastructure; import java.util.ArrayList; /* * https://www.topcoder.com/community/data-science/data-science-tutorials/using-tries/ * Trie performs better when number of entries is big. * Trie can perform prefix lookup. Hastable cann't. * * The tries can insert and findUsingArray strings in O(L) time (where L represent the length of a single word). This is much faster than set , * but is it a bit faster than a hash table. The set <string> and the hash tables can only findUsingArray in a dictionary words that match exactly with the single word that we are finding; the trie allow us to findUsingArray words that have a single character different, a prefix in common, a character missing, etc. */ public class Trie<T>{ public static final int SymbolLength=256; public static class Entry<E>{ public String key; public E value; public Entry(String k, E v){ key=k; value=v; } } //a tree node in the Trie structure public static class Node<E> { /* * For an internal node, the value can be null. If the value is null for internal node, * it means the key for this internal node does not exists. * Value should exist for all external nodes. 
*/ E value; Node<E>[] children=new Node[SymbolLength]; } Node<T> root; int size=0; public Trie(){ root=new Node<T>(); } public int size() { return size; } public void put(String key, T value) { if (key==null) { return; } if (value==null) { delete(key); } if (key.length()==0) { if (root.value==null) { size++; } root.value=value; return; } put(root, key, value, 0); } public T get(String key) { if (key.equals("")) { return root.value; } Node<T> valueNode=findPrefixNode(root, key, 0); if (valueNode==null) { return null; } return valueNode.value; } public boolean contains(String key) { return get(key)!=null; } private T get(Node<T> parent, String key, int childKeyPosition) { char ch=key.charAt(childKeyPosition); Node<T> current=parent.children[ch]; if (current==null) { return null; } if (childKeyPosition==key.length()-1) { return current.value; } else { return get(current, key, childKeyPosition+1); } } public void delete(String key) { if (key.equals("")) { if (root.value!=null) { size--; } root.value=null; return; } delete(root, key, 0); } private void delete(Node<T> parent, String key, int childKeyPosition) { char ch=key.charAt(childKeyPosition); Node<T> current=parent.children[ch]; if (current==null) { return; } if (childKeyPosition==key.length()-1) { //reach the end if (current.value!=null) { size--; } current.value=null; } else { delete(current, key, childKeyPosition+1); //shrink the tree. if (current.value==null && !hasChildren(current)) { parent.children[ch]=null; } } } private boolean hasChildren(Node<T> node) { for (Node<T> child: node.children) { if (child!=null) { return true; } } return false; } private void put(Node<T> parent, String key, T value, int childKeyPosition) { char ch=key.charAt(childKeyPosition); Node<T> current=parent.children[ch]; if (current==null) { //no such children, add one. 
current=new Node<T>(); parent.children[ch]=current; } //we reach the end of the key, if (childKeyPosition==key.length()-1) { if (current.value==null) { size++; } current.value=value; return; } else { put(current, key, value, childKeyPosition+1); } } public ArrayList<Entry<T>> collect() { ArrayList<Entry<T>> entries=new ArrayList<Entry<T>>(size); collect(root, new StringBuilder(""), entries); return entries; } public ArrayList<Entry<T>> collectPrefix(String prefix) { ArrayList<Entry<T>> entries=new ArrayList<Entry<T>>(size); Node<T> prefixNode=findPrefixNode(root, prefix, 0); if (prefixNode==null) { return entries; } collect(prefixNode, new StringBuilder(prefix), entries); return entries; } protected void collect(Node<T> start,StringBuilder startkey, ArrayList<Entry<T>> entries) { if (start.value!=null) { entries.add(new Entry<T>(startkey.toString(), start.value)); } int startKeyLength=startkey.length(); for (int i=0; i<SymbolLength; i++) { if (start.children[i]!=null) { collect(start.children[i], startkey.append((char)i), entries); startkey.setLength(startKeyLength); } } } protected Node<T> findPrefixNode(Node<T> parent, String key, int childKeyPosition){ char ch=key.charAt(childKeyPosition); Node<T> current=parent.children[ch]; if (current==null) { return null; } if (childKeyPosition==key.length()-1) { return current; } else { return findPrefixNode(current, key, childKeyPosition+1); } } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.fs;

import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_CHECKPOINT_INTERVAL_DEFAULT;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_CHECKPOINT_INTERVAL_KEY;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_DEFAULT;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.Date;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Options.Rename;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.util.Time;

import com.google.common.annotations.VisibleForTesting;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/** Provides a <i>trash</i> feature.  Files are moved to a user's trash
 * directory, a subdirectory of their home directory named ".Trash".  Files are
 * initially moved to a <i>current</i> sub-directory of the trash directory.
 * Within that sub-directory their original path is preserved.  Periodically
 * one may checkpoint the current trash and remove older checkpoints.  (This
 * design permits trash management without enumeration of the full trash
 * content, without date support in the filesystem, and without clock
 * synchronization.)
 */
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class TrashPolicyDefault extends TrashPolicy {
  private static final Logger LOG =
      LoggerFactory.getLogger(TrashPolicyDefault.class);

  /** Name of the "current" (not yet checkpointed) trash sub-directory. */
  private static final Path CURRENT = new Path("Current");

  /** Owner-only permission applied to trash directories created here. */
  private static final FsPermission PERMISSION =
      new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE);

  // SimpleDateFormat is not thread-safe; all uses below synchronize on the
  // formatter instance itself.
  private static final DateFormat CHECKPOINT = new SimpleDateFormat("yyMMddHHmmss");
  /** Format of checkpoint directories used prior to Hadoop 0.23. */
  private static final DateFormat OLD_CHECKPOINT =
      new SimpleDateFormat("yyMMddHHmm");
  private static final int MSECS_PER_MINUTE = 60*1000;

  // How often the Emptier wakes up, in milliseconds. deletionInterval and fs
  // are inherited from TrashPolicy (set in initialize()).
  private long emptierInterval;

  public TrashPolicyDefault() { }

  // Used by the Emptier to get a policy instance for each trash root.
  private TrashPolicyDefault(FileSystem fs, Configuration conf)
      throws IOException {
    initialize(conf, fs);
  }

  /**
   * Reads deletion and checkpoint intervals (configured in minutes) and
   * converts them to milliseconds. The {@code home} argument is unused.
   *
   * @deprecated Use {@link #initialize(Configuration, FileSystem)} instead.
   */
  @Override
  @Deprecated
  public void initialize(Configuration conf, FileSystem fs, Path home) {
    this.fs = fs;
    this.deletionInterval = (long)(conf.getFloat(
        FS_TRASH_INTERVAL_KEY, FS_TRASH_INTERVAL_DEFAULT)
        * MSECS_PER_MINUTE);
    this.emptierInterval = (long)(conf.getFloat(
        FS_TRASH_CHECKPOINT_INTERVAL_KEY, FS_TRASH_CHECKPOINT_INTERVAL_DEFAULT)
        * MSECS_PER_MINUTE);
  }

  /**
   * Reads deletion and checkpoint intervals (configured in minutes) and
   * converts them to milliseconds. Unlike the deprecated overload, a negative
   * deletion interval is clamped to 0 (trash disabled).
   */
  @Override
  public void initialize(Configuration conf, FileSystem fs) {
    this.fs = fs;
    this.deletionInterval = (long)(conf.getFloat(
        FS_TRASH_INTERVAL_KEY, FS_TRASH_INTERVAL_DEFAULT)
        * MSECS_PER_MINUTE);
    this.emptierInterval = (long)(conf.getFloat(
        FS_TRASH_CHECKPOINT_INTERVAL_KEY, FS_TRASH_CHECKPOINT_INTERVAL_DEFAULT)
        * MSECS_PER_MINUTE);
    if (deletionInterval < 0) {
      // NOTE(review): "interaval" typo is in the live log message; left as-is
      // because a doc-only change must not alter runtime strings.
      LOG.warn("Invalid value {} for deletion interval,"
          + " deletion interaval can not be negative."
          + "Changing to default value 0", deletionInterval);
      this.deletionInterval = 0;
    }
  }

  // Appends the (absolute) deleted file's path beneath basePath so the
  // original path is preserved inside the trash directory.
  private Path makeTrashRelativePath(Path basePath, Path rmFilePath) {
    return Path.mergePaths(basePath, rmFilePath);
  }

  /** Trash is enabled iff the configured deletion interval is positive. */
  @Override
  public boolean isEnabled() {
    return deletionInterval > 0;
  }

  /**
   * Moves {@code path} into the current trash directory, preserving its
   * original path beneath it.
   *
   * @return true if moved; false if trash is disabled, the path is already in
   *         trash, or the trash directory could not be created
   * @throws IOException if the path does not exist, contains the trash, or the
   *         rename ultimately fails (original failure attached as cause)
   */
  @SuppressWarnings("deprecation")
  @Override
  public boolean moveToTrash(Path path) throws IOException {
    if (!isEnabled())
      return false;

    if (!path.isAbsolute())                       // make path absolute
      path = new Path(fs.getWorkingDirectory(), path);

    // check that path exists; throws FileNotFoundException otherwise
    fs.getFileStatus(path);
    String qpath = fs.makeQualified(path).toString();

    Path trashRoot = fs.getTrashRoot(path);
    Path trashCurrent = new Path(trashRoot, CURRENT);
    if (qpath.startsWith(trashRoot.toString())) {
      return false;                               // already in trash
    }

    // Refuse to trash an ancestor of the trash directory itself.
    if (trashRoot.getParent().toString().startsWith(qpath)) {
      throw new IOException("Cannot move \"" + path +
          "\" to the trash, as it contains the trash");
    }

    Path trashPath = makeTrashRelativePath(trashCurrent, path);
    Path baseTrashPath = makeTrashRelativePath(trashCurrent, path.getParent());

    IOException cause = null;

    // try twice, in case checkpoint between the mkdirs() & rename()
    for (int i = 0; i < 2; i++) {
      try {
        if (!fs.mkdirs(baseTrashPath, PERMISSION)) {      // create current
          LOG.warn("Can't create(mkdir) trash directory: " + baseTrashPath);
          return false;
        }
      } catch (FileAlreadyExistsException e) {
        // find the path which is not a directory, and modify baseTrashPath
        // & trashPath, then mkdirs
        Path existsFilePath = baseTrashPath;
        while (!fs.exists(existsFilePath)) {
          existsFilePath = existsFilePath.getParent();
        }
        // Rename the conflicting file component by appending a timestamp,
        // then retry this loop iteration without consuming an attempt.
        baseTrashPath = new Path(baseTrashPath.toString().replace(
            existsFilePath.toString(), existsFilePath.toString() + Time.now())
        );
        trashPath = new Path(baseTrashPath, trashPath.getName());
        // retry, ignore current failure
        --i;
        continue;
      } catch (IOException e) {
        LOG.warn("Can't create trash directory: " + baseTrashPath, e);
        cause = e;
        break;
      }
      try {
        // if the target path in Trash already exists, then append with
        // a current time in millisecs.
        String orig = trashPath.toString();

        while(fs.exists(trashPath)) {
          trashPath = new Path(orig + Time.now());
        }

        // move to current trash
        fs.rename(path, trashPath,
            Rename.TO_TRASH);
        LOG.info("Moved: '" + path + "' to trash at: " + trashPath);
        return true;
      } catch (IOException e) {
        cause = e;
      }
    }
    throw (IOException)
        new IOException("Failed to move to trash: " + path).initCause(cause);
  }

  /** Checkpoints every trash root using the current time as the name. */
  @SuppressWarnings("deprecation")
  @Override
  public void createCheckpoint() throws IOException {
    createCheckpoint(new Date());
  }

  /** Checkpoints every trash root, naming the checkpoint after {@code date}. */
  @SuppressWarnings("deprecation")
  public void createCheckpoint(Date date) throws IOException {
    Collection<FileStatus> trashRoots = fs.getTrashRoots(false);
    for (FileStatus trashRoot: trashRoots) {
      LOG.info("TrashPolicyDefault#createCheckpoint for trashRoot: " +
          trashRoot.getPath());
      createCheckpoint(trashRoot.getPath(), date);
    }
  }

  /** Deletes expired checkpoints under every trash root. */
  @Override
  public void deleteCheckpoint() throws IOException {
    Collection<FileStatus> trashRoots = fs.getTrashRoots(false);
    for (FileStatus trashRoot : trashRoots) {
      LOG.info("TrashPolicyDefault#deleteCheckpoint for trashRoot: " +
          trashRoot.getPath());
      deleteCheckpoint(trashRoot.getPath());
    }
  }

  @Override
  public Path getCurrentTrashDir() {
    return new Path(fs.getTrashRoot(null), CURRENT);
  }

  @Override
  public Path getCurrentTrashDir(Path path) throws IOException {
    return new Path(fs.getTrashRoot(path), CURRENT);
  }

  @Override
  public Runnable getEmptier() throws IOException {
    return new Emptier(getConf(), emptierInterval);
  }

  /**
   * Background task that periodically checkpoints each trash root and deletes
   * checkpoints older than the deletion interval. Wakes on multiples of the
   * emptier interval so restarts don't drift the schedule.
   */
  protected class Emptier implements Runnable {

    private Configuration conf;
    private long emptierInterval;

    Emptier(Configuration conf, long emptierInterval) throws IOException {
      this.conf = conf;
      this.emptierInterval = emptierInterval;
      // A checkpoint interval longer than the deletion interval (or <= 0)
      // makes no sense; fall back to the deletion interval.
      if (emptierInterval > deletionInterval || emptierInterval <= 0) {
        LOG.info("The configured checkpoint interval is " +
            (emptierInterval / MSECS_PER_MINUTE) + " minutes." +
            " Using an interval of " +
            (deletionInterval / MSECS_PER_MINUTE) +
            " minutes that is used for deletion instead");
        this.emptierInterval = deletionInterval;
      }
      LOG.info("Namenode trash configuration: Deletion interval = "
          + (deletionInterval / MSECS_PER_MINUTE)
          + " minutes, Emptier interval = "
          + (this.emptierInterval / MSECS_PER_MINUTE) + " minutes.");
    }

    @Override
    public void run() {
      if (emptierInterval == 0)
        return;                                   // trash disabled
      long now = Time.now();
      long end;
      while (true) {
        // Sleep until the next multiple of emptierInterval.
        end = ceiling(now, emptierInterval);
        try {                                     // sleep for interval
          Thread.sleep(end - now);
        } catch (InterruptedException e) {
          break;                                  // exit on interrupt
        }

        try {
          now = Time.now();
          if (now >= end) {
            Collection<FileStatus> trashRoots;
            trashRoots = fs.getTrashRoots(true);      // list all trash dirs

            for (FileStatus trashRoot : trashRoots) {   // dump each trash
              if (!trashRoot.isDirectory())
                continue;
              try {
                TrashPolicyDefault trash = new TrashPolicyDefault(fs, conf);
                trash.deleteCheckpoint(trashRoot.getPath());
                trash.createCheckpoint(trashRoot.getPath(), new Date(now));
              } catch (IOException e) {
                // One bad trash root must not stop the sweep of the others.
                LOG.warn("Trash caught: "+e+". Skipping " +
                    trashRoot.getPath() + ".");
              }
            }
          }
        } catch (Exception e) {
          LOG.warn("RuntimeException during Trash.Emptier.run(): ", e);
        }
      }
      // Loop only exits on interrupt; release the FileSystem on the way out.
      try {
        fs.close();
      } catch(IOException e) {
        LOG.warn("Trash cannot close FileSystem: ", e);
      }
    }

    // Next multiple of interval at or after time.
    private long ceiling(long time, long interval) {
      return floor(time, interval) + interval;
    }
    // Largest multiple of interval at or below time.
    private long floor(long time, long interval) {
      return (time / interval) * interval;
    }

    @VisibleForTesting
    protected long getEmptierInterval() {
      return this.emptierInterval/MSECS_PER_MINUTE;
    }
  }

  /**
   * Renames {@code Current} under {@code trashRoot} to a checkpoint directory
   * named after {@code date}; on a name collision, retries with a numeric
   * suffix (up to 1000 attempts).
   */
  private void createCheckpoint(Path trashRoot, Date date) throws IOException {
    if (!fs.exists(new Path(trashRoot, CURRENT))) {
      return; // nothing to checkpoint
    }
    Path checkpointBase;
    synchronized (CHECKPOINT) { // SimpleDateFormat is not thread-safe
      checkpointBase = new Path(trashRoot, CHECKPOINT.format(date));
    }
    Path checkpoint = checkpointBase;
    Path current = new Path(trashRoot, CURRENT);

    int attempt = 0;
    while (true) {
      try {
        fs.rename(current, checkpoint, Rename.NONE);
        LOG.info("Created trash checkpoint: " + checkpoint.toUri().getPath());
        break;
      } catch (FileAlreadyExistsException e) {
        if (++attempt > 1000) {
          throw new IOException("Failed to checkpoint trash: " + checkpoint);
        }
        checkpoint = checkpointBase.suffix("-" + attempt);
      }
    }
  }

  /**
   * Deletes every checkpoint directory under {@code trashRoot} whose
   * timestamp-encoded name is older than the deletion interval. Entries whose
   * names don't parse as checkpoints are logged and skipped.
   */
  private void deleteCheckpoint(Path trashRoot) throws IOException {
    LOG.info("TrashPolicyDefault#deleteCheckpoint for trashRoot: " +
        trashRoot);

    FileStatus[] dirs = null;
    try {
      dirs = fs.listStatus(trashRoot); // scan trash sub-directories
    } catch (FileNotFoundException fnfe) {
      return;
    }

    long now = Time.now();
    for (int i = 0; i < dirs.length; i++) {
      Path path = dirs[i].getPath();
      String dir = path.toUri().getPath();
      String name = path.getName();

      if (name.equals(CURRENT.getName())) {       // skip current
        continue;
      }

      long time;
      try {
        time = getTimeFromCheckpoint(name);
      } catch (ParseException e) {
        LOG.warn("Unexpected item in trash: "+dir+". Ignoring.");
        continue;
      }

      if ((now - deletionInterval) > time) {
        if (fs.delete(path, true)) {
          LOG.info("Deleted trash checkpoint: "+dir);
        } else {
          LOG.warn("Couldn't delete checkpoint: " + dir + " Ignoring.");
        }
      }
    }
  }

  /**
   * Parses a checkpoint directory name into its timestamp, falling back to
   * the pre-0.23 (Hadoop 1.x) name format for upgraded clusters.
   */
  private long getTimeFromCheckpoint(String name) throws ParseException {
    long time;

    try {
      synchronized (CHECKPOINT) { // SimpleDateFormat is not thread-safe
        time = CHECKPOINT.parse(name).getTime();
      }
    } catch (ParseException pe) {
      // Check for old-style checkpoint directories left over
      // after an upgrade from Hadoop 1.x
      synchronized (OLD_CHECKPOINT) {
        time = OLD_CHECKPOINT.parse(name).getTime();
      }
    }

    return time;
  }
}
/*L
 *  Copyright Ekagra Software Technologies Ltd.
 *  Copyright SAIC, SAIC-Frederick
 *
 *  Distributed under the OSI-approved BSD 3-Clause License.
 *  See http://ncip.github.com/cacore-sdk/LICENSE.txt for details.
 */

package gov.nih.nci.system.web.struts.action;

import gov.nih.nci.system.util.SystemConstant;

import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;

import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;

import org.apache.log4j.Logger;
import org.apache.struts2.ServletActionContext;
import org.apache.struts2.dispatcher.SessionMap;
import org.apache.struts2.util.ServletContextAware;

import com.opensymphony.xwork2.ActionContext;
import com.opensymphony.xwork2.ActionSupport;

/**
 * Struts2 action that turns the search form's request parameters into a
 * {@code GetHTML?query=...} URL string (exposed via {@link #getQuery()}),
 * including an XQuery-like predicate section built from both plain and
 * ISO-21090-style dotted parameter names.
 */
public class Result extends BaseActionSupport {
	private static final long serialVersionUID = 1234567890L;
	private static Logger log = Logger.getLogger(Result.class.getName());

	//Query parameters
	private String query;          // the assembled GetHTML query URL
	private String btnSearch;      // submit button value; only "Submit" triggers a build
	private String searchObj;      // target search domain, possibly "role (ClassName)" form
	private String selectedDomain; // class name chosen in the UI

	/**
	 * Builds the query string when the form was submitted via the Submit
	 * button; otherwise leaves {@link #getQuery()} unset. Always returns
	 * {@code SUCCESS}.
	 */
	public String execute() throws Exception {
		HttpServletRequest request = ServletActionContext.getRequest();
		SessionMap session = (SessionMap) ActionContext.getContext().get(ActionContext.SESSION.toString());
		debugSessionAttributes(session);

		// BEGIN - build query
		String selectedSearchDomain=null;
		String query=null;

		String submitValue = getBtnSearch();
		log.debug("submitValue: " + submitValue);

		String className = getSelectedDomain();
		log.debug("className (selectedDomain): "+ getSelectedDomain());

		if(submitValue != null && submitValue.equalsIgnoreCase("Submit")) {
			query = "GetHTML?query=";
			selectedSearchDomain = getSearchObj();
			log.debug("selectedSearchDomain: "+ selectedSearchDomain);

			if (selectedSearchDomain != null && !selectedSearchDomain.equals("Please choose")) {
				String rolename=null;
				// When the search object differs from the selected class it is
				// assumed to carry a role prefix, e.g. "role (ClassName)" —
				// TODO(review): confirm this format against the search JSP.
				if (!selectedSearchDomain.equalsIgnoreCase(className)){
					rolename = getRolename(selectedSearchDomain);
					selectedSearchDomain = removeRolename(selectedSearchDomain);
				}
				query += selectedSearchDomain + "&";
				if (rolename != null && rolename.length() > 0){
					query += "rolename="+rolename + "&";
				}
				if (className != null && !className.equals("Please choose")) {
					query += className;
					log.debug("query with search object = " + query);
					query += generateQuery(request);
				}
			} else {
				// No search object chosen: search the selected class against itself.
				if (className != null && !className.equals("Please choose")) {
					query += className + "&" + className;
					log.debug("query with no search object = " + query);
					query += generateQuery(request);
				}
			}

			// Propagate session credentials, if present, as query parameters.
			String username = (String) session.get("Username");
			String password = (String) session.get("Password");

			if ((username != null) && (username.trim()).length() > 0)
				query = query + "&username=" + username;
			if ((password != null) && (password.trim()).length() > 0)
				query = query + "&password=" + password;

			log.debug("query: " + query);
			// NOTE(review): duplicated debug line below; kept byte-identical
			// because this is a documentation-only pass.
			log.debug("query: " + query);
			setQuery(query);
		}
		// END - build query

		return SUCCESS;
	}

	public String getQuery() {
		return query;
	}

	public void setQuery(String query) {
		this.query = query;
	}

	public String getBtnSearch() {
		return btnSearch;
	}

	public void setBtnSearch(String btnSearch) {
		this.btnSearch = btnSearch;
	}

	public String getSearchObj() {
		return searchObj;
	}

	public void setSearchObj(String searchObj) {
		this.searchObj = searchObj;
	}

	public String getSelectedDomain() {
		return selectedDomain;
	}

	public void setSelectedDomain(String selectedDomain) {
		this.selectedDomain = selectedDomain;
	}

	/**
	 * Builds the predicate part of the query from the request parameters.
	 * Plain parameters become {@code [@name=value]} terms; dotted names are
	 * treated as ISO data type paths, accumulated into a nested map first and
	 * then rendered as nested predicates. Numbered {@code part#} segments are
	 * collapsed to plain {@code part} at the end.
	 */
	private String generateQuery(HttpServletRequest request){
		StringBuilder sb = new StringBuilder();
		Enumeration<String> parameters = request.getParameterNames();
		Map<String, Map<String, List<Object>>> isoDataTypeNodes = new HashMap<String, Map<String, List<Object>>>();
		while(parameters.hasMoreElements()) {
			String parameterName = (String)parameters.nextElement();
			log.debug("param = " + parameterName);
			// Skip the form's control parameters; everything else is a search term.
			if(!parameterName.equals("klassName") && !parameterName.equals("searchObj")
					&& !parameterName.equals("BtnSearch") && !parameterName.equals("username")
					&& !parameterName.equals("password") && !parameterName.equals("selectedDomain")) {
				String parameterValue = (request.getParameter(parameterName)).trim();
				if (parameterValue.length() > 0) {
					log.debug("parameterValue: " + parameterValue);
					if (parameterName.indexOf('.') > 0) {
						// ISO data type parameter
						saveIsoNode(isoDataTypeNodes, parameterName, parameterValue);
					} else {
						// non-ISO data type parameter
						sb.append("[@").append(parameterName).append("=")
								.append(parameterValue).append("]");
					}
				}
			}
		}

		// Render the accumulated ISO nodes as nested predicates.
		Set<String> isoDataTypeNodeNames = isoDataTypeNodes.keySet();
		Iterator iter = isoDataTypeNodeNames.iterator();
		String nodeName = null;
		while (iter.hasNext()){
			nodeName = (String)iter.next();
			sb.append("[@").append(nodeName).append("=");
			generateIsoQuery(isoDataTypeNodes.get(nodeName), sb);
			sb.append("]");
		}

		//Change 'part#' references to just 'part'
		return sb.toString().replaceAll("part(\\d+)", "part");
	}

	/**
	 * Stores one dotted ISO parameter (e.g. {@code a.b.c=v}) into the nested
	 * map: each path segment becomes a map keyed by segment name whose value
	 * is a one-element list holding either a child map or, for the final
	 * segment, the parameter value.
	 */
	private void saveIsoNode(Map<String, Map<String, List<Object>>> isoDataTypeNodes,
			String parameterName, String parameterValue){
		String isoParamPrefix = parameterName.substring(0, parameterName.lastIndexOf('.'));
		log.debug("isoParamPrefix: " + isoParamPrefix);

		String[] isoParentNodes = isoParamPrefix.split("\\.");
		log.debug("isoParentNodes: " + isoParentNodes);

		Object childNode = null;
		Object parentNode = isoDataTypeNodes.get(isoParentNodes[0]);
		if (parentNode==null){
			// initialize
			Map<String,List<Object>> map = new HashMap<String,List<Object>>();
			isoDataTypeNodes.put(isoParentNodes[0],map);
			parentNode = isoDataTypeNodes.get(isoParentNodes[0]);
		}

		// Walk (and lazily create) the intermediate path segments.
		for(int i=1; i < isoParentNodes.length; i++){
			String isoParentNodeName = isoParentNodes[i];
			childNode = ((Map<String, List<Object>>)parentNode).get(isoParentNodeName);
			if (childNode==null){
				Map<String,List<Object>> map = new HashMap<String,List<Object>>();
				ArrayList<Object> tempList = new ArrayList<Object>();
				tempList.add(map);
				((Map<String, List<Object>>)parentNode).put(isoParentNodeName,tempList);
				parentNode = map;
			} else {
				parentNode = ((ArrayList<Object>)childNode).get(0);
			}
		}

		// Attach the leaf value under the last path segment.
		String isoParamKey = parameterName.substring(parameterName.lastIndexOf('.')+1);
		log.debug("isoParamKey: " + isoParamKey);
		log.debug("parameterValue: " + parameterValue);

		List<Object> nodeList = new ArrayList<Object>();
		nodeList.add(parameterValue);
		((Map<String, List<Object>>)parentNode).put(isoParamKey,nodeList);
	}

	/**
	 * Recursively renders one level of the nested ISO map into {@code query},
	 * emitting {@code [@name=...]} for each entry: string list elements are
	 * leaf values, nested HashMaps recurse one level deeper.
	 */
	private void generateIsoQuery(Map<String, List<Object>> isoDataTypeNode, StringBuilder query){
		String parentNodeName = null;
		Set<String> isoParentNodeNames = isoDataTypeNode.keySet();
		Iterator iter = isoParentNodeNames.iterator();
		while (iter.hasNext()){
			parentNodeName = (String)iter.next();
			log.debug("key: " + parentNodeName);
			query.append("[@").append(parentNodeName).append("=");
			List<Object> valueList = isoDataTypeNode.get(parentNodeName);
			for(Object nodeElement : valueList){
				if (nodeElement instanceof String){
					query.append((String)nodeElement).append("]");
				} else if (nodeElement instanceof java.util.HashMap){
					generateIsoQuery((Map<String, List<Object>>)nodeElement, query);
					query.append("]");
				}
			}
		}
	}

	// Extracts the role name: the text before the first space.
	// Assumes "role (ClassName)" format — TODO(review): throws
	// StringIndexOutOfBoundsException if no space is present; confirm callers
	// always pass the prefixed form.
	private String getRolename(String selectedSearchDomain){
		return selectedSearchDomain.substring(0,selectedSearchDomain.indexOf(" "));
	}

	// Extracts the class name between the parentheses of "role (ClassName)".
	private String removeRolename(String selectedSearchDomain){
		int beginIndex = selectedSearchDomain.indexOf("(")+1;
		int endIndex = selectedSearchDomain.indexOf(")");
		return selectedSearchDomain.substring(beginIndex, endIndex);
	}
}
package com.mackie.rustyjvm; interface TestVMInterfaceA {} interface TestVMInterfaceB extends TestVMInterfaceA {} interface TestVMInterfaceC extends TestVMInterfaceA {} interface TestVMInterfaceD extends TestVMInterfaceB, TestVMInterfaceC {} class TestVMSuper implements TestVMInterfaceA { protected long superLong; protected int superInt; public TestVMSuper(int intv) { superInt = intv; } public long virtualMethod(long a) { TestVM.nativeInt(200); TestVM.nativeLong(a); return a + 100; } public static double staticMethod(double a) { TestVM.nativeInt(2); TestVM.nativeDouble(a); a = a * 3; return a; } private void privateMethod() { TestVM.nativeInt(1337); } } class TestVMOtherSuper implements TestVMInterfaceB { public long virtualMethod(long a) { TestVM.nativeInt(2000); TestVM.nativeLong(a); return a + 100; } public static double staticMethod(double a) { TestVM.nativeInt(3); TestVM.nativeDouble(a); a = a * 4; return a; } } class TestVMOther extends TestVMOtherSuper { public long virtualMethod(long a) { TestVM.nativeInt(1000); a = super.virtualMethod(a + 10); TestVM.nativeInt(3000); TestVM.nativeLong(a); return a; } public static double staticMethod(double a) { TestVM.nativeInt(4); TestVM.nativeDouble(a); a = a * 5; return a; } } public class TestVM extends TestVMSuper implements TestVMInterfaceD { public static native void nativeBoolean(boolean i); public static native void nativeChar(char i); public static native void nativeByte(byte i); public static native void nativeShort(short i); public static native void nativeInt(int i); public static native void nativeLong(long i); public static native void nativeDouble(double i); public static native void nativeFloat(float i); public static native void nativeString(String s); public static void simple() { int a = 1; nativeInt(a); } public static void invoke() { TestVM vm = new TestVM(1, 1); TestVMSuper vmSuper = vm; TestVMSuper vmSuperReal = new TestVMSuper(2); TestVMOther other = new TestVMOther(); TestVMOtherSuper otherSuper 
= other; TestVMOtherSuper otherSuperReal = new TestVMOtherSuper(); nativeLong(vm.virtualMethod(1)); nativeLong(vmSuper.virtualMethod(1)); nativeLong(vmSuperReal.virtualMethod(1)); nativeLong(other.virtualMethod(1)); nativeLong(otherSuper.virtualMethod(1)); nativeLong(otherSuperReal.virtualMethod(1)); nativeDouble(vm.staticMethod(1)); nativeDouble(vmSuper.staticMethod(1)); nativeDouble(vmSuperReal.staticMethod(1)); nativeDouble(other.staticMethod(1)); nativeDouble(otherSuper.staticMethod(1)); nativeDouble(otherSuperReal.staticMethod(1)); vm.privateMethod(); } public static double staticMethod(double a) { nativeInt(1); nativeDouble(a); a = a * 2; return a; } public long virtualMethod(long a) { nativeInt(100); a = super.virtualMethod(a + 10); nativeInt(300); nativeLong(a); return a; } private void privateMethod() { nativeInt(42); } private static void castinstanceof() { Object vm = new TestVM(1, 1); Object vmSuperReal = new TestVMSuper(2); Object other = new TestVMOther(); Object nullObject = null; Object array = new TestVMSuper[0]; Object intArray = new int[2]; nativeBoolean(vm instanceof TestVM); nativeBoolean(vm instanceof TestVMSuper); nativeBoolean(vm instanceof TestVMOther); nativeBoolean(vm instanceof Object); nativeBoolean(vm instanceof TestVMInterfaceA); nativeBoolean(vm instanceof TestVMInterfaceB); nativeBoolean(vm instanceof TestVMInterfaceC); nativeBoolean(vm instanceof TestVMInterfaceD); nativeBoolean(vmSuperReal instanceof TestVM); nativeBoolean(vmSuperReal instanceof TestVMSuper); nativeBoolean(vmSuperReal instanceof TestVMOther); nativeBoolean(vmSuperReal instanceof Object); nativeBoolean(vmSuperReal instanceof TestVMInterfaceA); nativeBoolean(vmSuperReal instanceof TestVMInterfaceB); nativeBoolean(vmSuperReal instanceof TestVMInterfaceC); nativeBoolean(vmSuperReal instanceof TestVMInterfaceD); nativeBoolean(other instanceof TestVMOther); nativeBoolean(other instanceof Object); nativeBoolean(other instanceof TestVMInterfaceA); nativeBoolean(other 
instanceof TestVMInterfaceB); nativeBoolean(other instanceof TestVMInterfaceC); nativeBoolean(other instanceof TestVMInterfaceD); nativeBoolean(nullObject instanceof TestVM); nativeBoolean(nullObject instanceof TestVMSuper); nativeBoolean(nullObject instanceof TestVMOther); nativeBoolean(nullObject instanceof Object); nativeBoolean(nullObject instanceof String); nativeBoolean(nullObject instanceof TestVMInterfaceA); nativeBoolean(nullObject instanceof TestVMInterfaceB); nativeBoolean(nullObject instanceof TestVMInterfaceC); nativeBoolean(nullObject instanceof TestVMInterfaceD); nativeBoolean(array instanceof TestVM[]); nativeBoolean(array instanceof TestVMSuper[]); nativeBoolean(array instanceof TestVMOther[]); nativeBoolean(array instanceof Object[]); nativeBoolean(array instanceof String[]); nativeBoolean(array instanceof TestVMInterfaceA[]); nativeBoolean(array instanceof TestVMInterfaceB[]); nativeBoolean(array instanceof TestVMInterfaceC[]); nativeBoolean(array instanceof TestVMInterfaceD[]); nativeBoolean(array instanceof Cloneable); nativeBoolean(array instanceof java.io.Serializable); nativeBoolean(array instanceof TestVMInterfaceA); nativeBoolean(intArray instanceof int[]); nativeBoolean(intArray instanceof double[]); vmSuperReal = (TestVMSuper) vm; vmSuperReal = (TestVMSuper) null; } private static void add() { int a = 4; // first is constant folded by the compiler // second is calculated by us nativeInt(2 + 4); nativeInt(2 + a); a = 0x7FFFFFFF; nativeInt(0x7FFFFFFF + 0x7FFFFFFF); nativeInt(0x7FFFFFFF + a); nativeInt(-1 + 0x7FFFFFFF); nativeInt(-1 + a); long l = 4L; nativeLong(2L + 4L); nativeLong(2L + l); l = 0x7FFFFFFFL; nativeLong(0x7FFFFFFFL + 0x7FFFFFFFL); nativeLong(0x7FFFFFFFL + l); l = 0x7FFFFFFFFFFFFFFFL; nativeLong(0x7FFFFFFFFFFFFFFFL + 0x7FFFFFFFFFFFFFFFL); nativeLong(0x7FFFFFFFFFFFFFFFL + l); nativeLong(-1 + 0x7FFFFFFFFFFFFFFFL); nativeLong(-1 + l); float f = 0.1f; nativeFloat(0.1f + 2f); nativeFloat(f + 2f); double d = 0.1; nativeDouble(0.1 + 
2); nativeDouble(d + 2);
// TODO Test stranger float numbers?
}

/** Exercises int/long/float/double subtraction, including signed wrap-around cases. */
private static void sub() {
    int a = 4;
    nativeInt(2 - 4);
    nativeInt(2 - a);
    a = 0x7FFFFFFF;
    // int wrap-around: Integer.MIN_VALUE - Integer.MAX_VALUE
    nativeInt(0x80000000 - 0x7FFFFFFF);
    nativeInt(0x80000000 - a);
    long l = 4L;
    nativeLong(2L - 4L);
    nativeLong(2L - l);
    l = 0x7FFFFFFFL;
    nativeLong(0x80000000L - 0x7FFFFFFFL);
    nativeLong(0x80000000L - l);
    l = 0x7FFFFFFFFFFFFFFFL;
    // long wrap-around: Long.MIN_VALUE - Long.MAX_VALUE
    nativeLong(0x8000000000000000L - 0x7FFFFFFFFFFFFFFFL);
    nativeLong(0x8000000000000000L - l);
    float f = 0.1f;
    nativeFloat(0.1f - 2f);
    nativeFloat(f - 2f);
    double d = 0.1;
    nativeDouble(0.1 - 2);
    nativeDouble(d - 2);
    // TODO Test stranger float numbers?
}

/** Exercises multiplication, including int and long overflow. */
private static void mul() {
    int a = 4;
    nativeInt(2 * 4);
    nativeInt(2 * a);
    // overflows int
    nativeInt(0x40000001 * 4);
    nativeInt(0x40000001 * a);
    long l = 4L;
    nativeLong(2L * 4L);
    nativeLong(2L * l);
    nativeLong(0x40000001L * 4L * 4);
    nativeLong(0x40000001L * l * 4);
    // overflows long
    nativeLong(0x4000000000000001L * 4L);
    nativeLong(0x4000000000000001L * l);
    float f = 0.1f;
    nativeFloat(0.1f * 2f);
    nativeFloat(f * 2f);
    double d = 0.1;
    nativeDouble(0.1 * 2);
    nativeDouble(d * 2);
    // TODO Test stranger float numbers?
}

/** Exercises division, including the MIN_VALUE / -1 overflow corner case. */
private static void div() {
    int a = 4;
    nativeInt(6 / 4);
    nativeInt(6 / a);
    nativeInt(-6 / 4);
    nativeInt(-6 / a);
    a = -1;
    // Integer.MIN_VALUE / -1 overflows back to MIN_VALUE
    nativeInt(0x80000000 / -1);
    nativeInt(0x80000000 / a);
    // TODO test divide by 0
    long l = 4L;
    nativeLong(6L / 4L);
    nativeLong(6L / l);
    nativeLong(-6L / 4L);
    nativeLong(-6L / l);
    l = -1;
    // Long.MIN_VALUE / -1 overflows back to MIN_VALUE
    nativeLong(0x8000000000000000L / -1L);
    nativeLong(0x8000000000000000L / l);
    // TODO test divide by 0
    float f = 0.1f;
    nativeFloat(0.1f / 2f);
    nativeFloat(f / 2f);
    double d = 0.1;
    nativeDouble(0.1 / 2);
    nativeDouble(d / 2);
    // TODO Test stranger float numbers?
}

/** Exercises remainder, including negative operands and the MIN_VALUE % -1 corner case. */
private static void rem() {
    int a = 4;
    nativeInt(6 % 4);
    nativeInt(6 % a);
    nativeInt(-6 % 4);
    nativeInt(-6 % a);
    a = -1;
    nativeInt(0x80000000 % -1);
    nativeInt(0x80000000 % a);
    // TODO test divide by 0
    long l = 4L;
    nativeLong(6L % 4L);
    nativeLong(6L % l);
    nativeLong(-6L % 4L);
    nativeLong(-6L % l);
    l = -1;
    nativeLong(0x8000000000000000L % -1L);
    nativeLong(0x8000000000000000L % l);
    // TODO test divide by 0
    float f = 2.1f;
    nativeFloat(2.1f % 2f);
    nativeFloat(f % 2f);
    double d = 2.1;
    nativeDouble(2.1 % 2);
    nativeDouble(d % 2);
    // TODO Test stranger float numbers?
}

/** Exercises arithmetic negation; note -MIN_VALUE == MIN_VALUE for int and long. */
private static void neg() {
    int a = 4;
    nativeInt(-4);
    nativeInt(-a);
    a = -1;
    nativeInt(-(-1));
    nativeInt(-a);
    a = 0x80000000;
    // negating Integer.MIN_VALUE yields MIN_VALUE again
    nativeInt(-(0x80000000));
    nativeInt(-a);
    long l = 4L;
    nativeLong(-4L);
    nativeLong(-l);
    l = -1L;
    nativeLong(-(-1L));
    nativeLong(-l);
    l = 0x8000000000000000L;
    nativeLong(-(0x8000000000000000L));
    nativeLong(-l);
    float f = 0.1f;
    nativeFloat(-0.1f);
    nativeFloat(-f);
    double d = 0.1;
    nativeDouble(-0.1);
    nativeDouble(-d);
    // TODO Test stranger float numbers?
}

/**
 * Exercises <<, >> and >>>. Shift distances beyond the type width (33 for int,
 * 65 for long) test the JVM's masking of the shift amount (JLS 15.19).
 */
private static void shift() {
    // shift left
    int a = 0xF;
    nativeInt(0xF << 4);
    nativeInt(a << 4);
    nativeInt(0xF << 33);
    nativeInt(a << 33);
    a = 1;
    nativeInt(1 << 31);
    nativeInt(a << 31);
    a = 0x80000000;
    nativeInt(0x80000000 << 1);
    nativeInt(a << 1);
    long l = 0xFL;
    nativeLong(0xFL << 4);
    nativeLong(l << 4);
    nativeLong(0xFL << 65);
    nativeLong(l << 65);
    l = 1;
    nativeLong(1L << 63);
    nativeLong(l << 63);
    l = 0x8000000000000000L;
    nativeLong(0x8000000000000000L << 1);
    nativeLong(l << 1);
    // shift right
    a = 0xFF;
    nativeInt(0xFF >> 4);
    nativeInt(a >> 4);
    nativeInt(0xFF >> 33);
    nativeInt(a >> 33);
    a = 0x80000000;
    nativeInt(0x80000000 >> 1);
    nativeInt(a >> 1);
    a = -1;
    nativeInt(-1 >> 1);
    nativeInt(a >> 1);
    l = 0xFFL;
    nativeLong(0xFFL >> 4);
    nativeLong(l >> 4);
    nativeLong(0xFFL >> 65);
    nativeLong(l >> 65);
    l = 0x8000000000000000L;
    nativeLong(0x8000000000000000L >> 1);
    nativeLong(l >> 1);
    l = -1;
    nativeLong(-1 >> 1);
    nativeLong(l >> 1);
    // unsigned shift right
    a = 0xFF;
    nativeInt(0xFF >>> 4);
    nativeInt(a >>> 4);
    nativeInt(0xFF >>> 33);
    nativeInt(a >>> 33);
    a = 0x80000000;
    nativeInt(0x80000000 >>> 1);
    nativeInt(a >>> 1);
    a = -1;
    nativeInt(-1 >>> 1);
    nativeInt(a >>> 1);
    l = 0xFFL;
    nativeLong(0xFFL >>> 4);
    nativeLong(l >>> 4);
    nativeLong(0xFFL >>> 65);
    nativeLong(l >>> 65);
    l = 0x8000000000000000L;
    nativeLong(0x8000000000000000L >>> 1);
    nativeLong(l >>> 1);
    l = -1;
    nativeLong(-1L >>> 1);
    nativeLong(l >>> 1);
}

/** Exercises bitwise AND/OR/XOR on int and long operands. */
private static void bitops() {
    int a = 12; // 0b1100
    nativeInt(12 & 10); // 0b1010
    nativeInt(a & 10);
    nativeInt(12 | 10);
    nativeInt(a | 10);
    nativeInt(12 ^ 10);
    nativeInt(a ^ 10);
    long l = 12L;
    nativeLong(12L & 10L);
    nativeLong(l & 10L);
    nativeLong(12L | 10L);
    nativeLong(l | 10L);
    nativeLong(12L ^ 10L);
    nativeLong(l ^ 10L);
}

/** Exercises the iinc bytecode via compound +=/-= on a local, including overflow past MAX_VALUE. */
private static void iinc() {
    int a = 0x7FFFFFFF;
    a += 1;
    nativeInt(a);
    a -= 1;
    nativeInt(a);
    a += -15;
    nativeInt(a);
}

/** Exercises constant-loading bytecodes (iconst/ldc/ldc2_w and friends) for each primitive type. */
private static void constants() {
    nativeInt(0);
    nativeInt(1337);
    nativeInt(0x4000000);
    nativeFloat(0f);
    nativeFloat(1f);
    nativeFloat(2f);
    nativeFloat(1.337f);
    nativeDouble(0);
    nativeDouble(1);
    nativeDouble(1.337);
    nativeLong(0L);
    nativeLong(1L);
    nativeLong(1337L);
    nativeString(null);
    // TODO test constant string
}

/** Exercises primitive conversions: narrowing (byte/short/char), widening, and truncating casts. */
private static void conversions() {
    int a = 0x1FF;
    // narrowing to byte keeps the low 8 bits
    nativeByte((byte) 0x1FF);
    nativeByte((byte) a);
    a = 0x1FFFF;
    nativeShort((short) 0x1FFFF);
    nativeShort((short) a);
    a = 0x1FFFF;
    nativeChar((char) 0x1FFFF);
    nativeChar((char) a);
    // TODO test more numbers (NaN, inf,...)
    a = 5;
    nativeLong((long) 5);
    nativeLong((long) a);
    nativeFloat((float) 5);
    nativeFloat((float) a);
    nativeDouble((double) 5);
    nativeDouble((double) a);
    long l = 0x100000001L;
    // long -> int drops the high word
    nativeInt((int) 0x100000001L);
    nativeInt((int) l);
    nativeFloat((float) 0x100000001L);
    nativeFloat((float) l);
    nativeDouble((double) 0x100000001L);
    nativeDouble((double) l);
    float f = -2.1f;
    // float/double -> integral truncates toward zero
    nativeInt((int) -2.1f);
    nativeInt((int) f);
    nativeLong((long) -2.1f);
    nativeLong((long) f);
    nativeDouble((double) -2.1f);
    nativeDouble((double) f);
    double d = -2.1;
    nativeInt((int) -2.1);
    nativeInt((int) d);
    nativeLong((long) -2.1);
    nativeLong((long) d);
    nativeFloat((float) -2.1);
    nativeFloat((float) d);
}

/**
 * Exercises conditional branches: every int comparison form, reference
 * equality/null checks, and float/double/long comparisons including NaN
 * (all ordered comparisons against NaN are false).
 */
public static void jumps() {
    for(int i = 0; i < 2; i++) {
        nativeInt(-10 + i);
    }
    int i = 1;
    Object o = null;
    if(i < 1) { nativeInt(0); }
    if(i <= 1) { nativeInt(1); }
    if(i == 1) { nativeInt(2); }
    if(i != 1) { nativeInt(3); }
    if(i >= 1) { nativeInt(4); }
    if(i > 1) { nativeInt(5); }
    if(i < 0) { nativeInt(6); }
    if(i <= 0) { nativeInt(7); }
    if(i == 0) { nativeInt(8); }
    if(i != 0) { nativeInt(9); }
    if(i >= 0) { nativeInt(10); }
    if(i > 0) { nativeInt(11); }
    if(o == o) { nativeInt(12); }
    if(o != o) { nativeInt(13); }
    if(o == null) { nativeInt(14); }
    if(o != null) { nativeInt(15); }
    float f = 0.9f;
    double d = 1.1;
    long l = 1;
    nativeBoolean(d < 1.0);
    nativeBoolean(d > 1.0);
    nativeBoolean(f < 1.0f);
    nativeBoolean(f > 1.0f);
    nativeBoolean(l == 1);
    nativeBoolean(l > 1);
    nativeBoolean(l < 1);
    d = Double.NaN;
    f = Float.NaN;
    nativeBoolean(d < 1.0);
    nativeBoolean(d > 1.0);
    nativeBoolean(f < 1.0f);
    nativeBoolean(f > 1.0f);
}

/**
 * Exercises array creation (multianewarray and initializers), element
 * load/store, row aliasing in 2-D arrays, and arraylength.
 */
private static void arrays() {
    long[][] l = new long[2][3];
    int[][] i = new int[][] {{1}};
    long[][] l2 = new long[2][2];
    int[][] i2 = new int[][] {{1}};
    long[] l3 = new long[] {1, 2};
    nativeLong(l[0][1]);
    l[0][1] = 5;
    nativeLong(l[0][1]);
    nativeInt(i[0][0]);
    i[0][0] = 2;
    nativeInt(i[0][0]);
    nativeLong(l2[0][1]);
    // aliasing: l2[0] now refers to the same row object as l[0]
    l2[0] = l[0];
    nativeLong(l2[0][1]);
    nativeInt(i2[0][0]);
    i2[0] = i[0];
    nativeInt(i2[0][0]);
    nativeLong(l3[0]);
    nativeLong(l3[1]);
    nativeInt(l2.length);
    nativeInt(l2[0].length);
    nativeInt(l2[1].length);
}

// Instance fields used by object(): default-initialized, explicit initializer, and ctor-assigned.
private int intField;
private long longField = 2;
private double doubleField;

/**
 * Constructs a TestVM; b is forwarded to the superclass constructor,
 * a initializes intField and (doubled) doubleField.
 */
public TestVM(int a, int b) {
    super(b);
    intField = a;
    doubleField = a * 2;
}

/**
 * Exercises field access (getfield/putfield) on own and inherited fields,
 * dumping all observable fields after each mutation.
 */
private static void object() {
    TestVM a = new TestVM(10, 50);
    nativeInt(a.intField); nativeLong(a.longField); nativeDouble(a.doubleField); nativeLong(a.superLong); nativeInt(a.superInt);
    a.intField = 20;
    nativeInt(a.intField); nativeLong(a.longField); nativeDouble(a.doubleField); nativeLong(a.superLong); nativeInt(a.superInt);
    a.longField += 22;
    nativeInt(a.intField); nativeLong(a.longField); nativeDouble(a.doubleField); nativeLong(a.superLong); nativeInt(a.superInt);
    a.doubleField *= 2;
    nativeInt(a.intField); nativeLong(a.longField); nativeDouble(a.doubleField); nativeLong(a.superLong); nativeInt(a.superInt);
    a.superLong += 2;
    nativeInt(a.intField); nativeLong(a.longField); nativeDouble(a.doubleField); nativeLong(a.superLong); nativeInt(a.superInt);
    a.superInt = 200;
    nativeInt(a.intField); nativeLong(a.longField); nativeDouble(a.doubleField); nativeLong(a.superLong); nativeInt(a.superInt);
}
}
/* * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.config; import com.hazelcast.internal.serialization.InternalSerializationService; import com.hazelcast.internal.serialization.impl.DefaultSerializationServiceBuilder; import com.hazelcast.map.listener.MapPartitionLostListener; import com.hazelcast.nio.serialization.Data; import com.hazelcast.test.HazelcastParallelClassRunner; import com.hazelcast.test.annotation.ParallelTest; import com.hazelcast.test.annotation.QuickTest; import nl.jqno.equalsverifier.EqualsVerifier; import nl.jqno.equalsverifier.Warning; import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import java.util.EventListener; import java.util.List; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; @RunWith(HazelcastParallelClassRunner.class) @Category({QuickTest.class, ParallelTest.class}) public class MapConfigTest { @Test public void testGetName() { assertNull(new MapConfig().getName()); } @Test public void testSetName() { assertEquals("map-test-name", new MapConfig().setName("map-test-name").getName()); } @Test public void testGetBackupCount() { 
assertEquals(MapConfig.DEFAULT_BACKUP_COUNT, new MapConfig().getBackupCount()); } @Test public void testSetBackupCount() { assertEquals(0, new MapConfig().setBackupCount(0).getBackupCount()); assertEquals(1, new MapConfig().setBackupCount(1).getBackupCount()); assertEquals(2, new MapConfig().setBackupCount(2).getBackupCount()); assertEquals(3, new MapConfig().setBackupCount(3).getBackupCount()); } @Test(expected = IllegalArgumentException.class) public void testSetBackupCountLowerLimit() { new MapConfig().setBackupCount(MapConfig.MIN_BACKUP_COUNT - 1); } @Test public void testGetEvictionPercentage() { assertEquals(MapConfig.DEFAULT_EVICTION_PERCENTAGE, new MapConfig().getEvictionPercentage()); } @Test public void testMinEvictionCheckMillis() throws Exception { assertEquals(MapConfig.DEFAULT_MIN_EVICTION_CHECK_MILLIS, new MapConfig().getMinEvictionCheckMillis()); } @Test public void testSetEvictionPercentage() { assertEquals(50, new MapConfig().setEvictionPercentage(50).getEvictionPercentage()); } @Test(expected = IllegalArgumentException.class) public void testSetEvictionPercentageLowerLimit() { new MapConfig().setEvictionPercentage(MapConfig.MIN_EVICTION_PERCENTAGE - 1); } @Test(expected = IllegalArgumentException.class) public void testSetEvictionPercentageUpperLimit() { new MapConfig().setEvictionPercentage(MapConfig.MAX_EVICTION_PERCENTAGE + 1); } @Test public void testGetTimeToLiveSeconds() { assertEquals(MapConfig.DEFAULT_TTL_SECONDS, new MapConfig().getTimeToLiveSeconds()); } @Test public void testSetTimeToLiveSeconds() { assertEquals(1234, new MapConfig().setTimeToLiveSeconds(1234).getTimeToLiveSeconds()); } @Test public void testGetMaxIdleSeconds() { assertEquals(MapConfig.DEFAULT_MAX_IDLE_SECONDS, new MapConfig().getMaxIdleSeconds()); } @Test public void testSetMaxIdleSeconds() { assertEquals(1234, new MapConfig().setMaxIdleSeconds(1234).getMaxIdleSeconds()); } @Test public void testGetMaxSize() { assertEquals(MaxSizeConfig.DEFAULT_MAX_SIZE, new 
MapConfig().getMaxSizeConfig().getSize()); } @Test public void testSetMaxSize() { assertEquals(1234, new MapConfig().getMaxSizeConfig().setSize(1234).getSize()); } @Test public void testSetMaxSizeMustBePositive() { assertTrue(new MapConfig().getMaxSizeConfig().setSize(-1).getSize() > 0); } @Test public void testGetEvictionPolicy() { assertEquals(MapConfig.DEFAULT_EVICTION_POLICY, new MapConfig().getEvictionPolicy()); } @Test public void testSetEvictionPolicy() { assertEquals(EvictionPolicy.LRU, new MapConfig().setEvictionPolicy(EvictionPolicy.LRU).getEvictionPolicy()); } @Test public void testGetMapStoreConfig() { MapStoreConfig mapStoreConfig = new MapConfig().getMapStoreConfig(); assertNotNull(mapStoreConfig); assertFalse(mapStoreConfig.isEnabled()); } @Test public void testSetMapStoreConfig() { MapStoreConfig mapStoreConfig = new MapStoreConfig(); assertEquals(mapStoreConfig, new MapConfig().setMapStoreConfig(mapStoreConfig).getMapStoreConfig()); } @Test public void testGetNearCacheConfig() { assertNull(new MapConfig().getNearCacheConfig()); } @Test public void testSetNearCacheConfig() { NearCacheConfig nearCacheConfig = new NearCacheConfig(); assertEquals(nearCacheConfig, new MapConfig().setNearCacheConfig(nearCacheConfig).getNearCacheConfig()); } @Test public void configSetsForDefaultAlwaysIssue466() { Config config = new XmlConfigBuilder().build(); MapStoreConfig mapStoreConfig = new MapStoreConfig(); mapStoreConfig.setEnabled(true); mapStoreConfig.setWriteDelaySeconds(0); mapStoreConfig.setClassName("com.hazelcast.examples.DummyStore"); config.getMapConfig("test").setMapStoreConfig(mapStoreConfig); config.getMapConfig("default").setMapStoreConfig(null); assertNotNull(config.getMapConfig("test").getMapStoreConfig()); } @Test(expected = IllegalArgumentException.class) public void setAsyncBackupCount_whenItsNegative() { MapConfig config = new MapConfig(); config.setAsyncBackupCount(-1); } @Test public void setAsyncBackupCount_whenItsZero() { MapConfig config = 
new MapConfig(); config.setAsyncBackupCount(0); } @Test(expected = IllegalArgumentException.class) public void setAsyncBackupCount_whenTooLarge() { MapConfig config = new MapConfig(); // max allowed is 6 config.setAsyncBackupCount(200); } @Test(expected = IllegalArgumentException.class) public void setBackupCount_whenItsNegative() { MapConfig config = new MapConfig(); config.setBackupCount(-1); } @Test public void setBackupCount_whenItsZero() { MapConfig config = new MapConfig(); config.setBackupCount(0); } @Test(expected = IllegalArgumentException.class) public void setBackupCount_tooLarge() { MapConfig config = new MapConfig(); // max allowed is 6 config.setBackupCount(200); } @Test(expected = java.lang.UnsupportedOperationException.class) public void testReadOnlyMapStoreConfigSetWriteBatchSize() { new MapStoreConfigReadOnly(new MapStoreConfig()).setWriteBatchSize(1); } @Test(expected = java.lang.UnsupportedOperationException.class) public void testReadOnlyMapStoreConfigSetInitialLoadMode() { new MapStoreConfigReadOnly(new MapStoreConfig()).setInitialLoadMode(MapStoreConfig.InitialLoadMode.EAGER); } @Test public void testMapPartitionLostListenerConfig() { MapConfig mapConfig = new MapConfig(); MapPartitionLostListener listener = mock(MapPartitionLostListener.class); mapConfig.addMapPartitionLostListenerConfig(new MapPartitionLostListenerConfig(listener)); MapPartitionLostListenerConfig listenerConfig = new MapPartitionLostListenerConfig(); listenerConfig.setImplementation(listener); mapConfig.addMapPartitionLostListenerConfig(listenerConfig); List<MapPartitionLostListenerConfig> listenerConfigs = mapConfig.getPartitionLostListenerConfigs(); assertEquals(2, listenerConfigs.size()); assertEquals(listener, listenerConfigs.get(0).getImplementation()); assertEquals(listener, listenerConfigs.get(1).getImplementation()); } @Test(expected = UnsupportedOperationException.class) public void testMapPartitionLostListenerReadOnlyConfig_withClassName() { 
MapPartitionLostListenerConfigReadOnly readOnly = new MapPartitionLostListenerConfig().getAsReadOnly(); readOnly.setClassName("com.hz"); } @Test(expected = UnsupportedOperationException.class) public void testMapPartitionLostListenerReadOnlyConfig_withImplementation() { MapPartitionLostListener listener = mock(MapPartitionLostListener.class); MapPartitionLostListenerConfig listenerConfig = new MapPartitionLostListenerConfig(listener); MapPartitionLostListenerConfigReadOnly readOnly = listenerConfig.getAsReadOnly(); assertEquals(listener, readOnly.getImplementation()); readOnly.setImplementation(mock(MapPartitionLostListener.class)); } @Test(expected = UnsupportedOperationException.class) public void testMapPartitionLostListenerReadOnlyConfig_withEventListenerImplementation() { MapPartitionLostListenerConfigReadOnly readOnly = new MapPartitionLostListenerConfig().getAsReadOnly(); readOnly.setImplementation(mock(EventListener.class)); } @Test public void testMapPartitionLostListener_equalsWithClassName() { MapPartitionLostListenerConfig config1 = new MapPartitionLostListenerConfig(); config1.setClassName("com.hz"); MapPartitionLostListenerConfig config2 = new MapPartitionLostListenerConfig(); config2.setClassName("com.hz"); MapPartitionLostListenerConfig config3 = new MapPartitionLostListenerConfig(); config3.setClassName("com.hz2"); assertEquals(config1, config2); assertNotEquals(config1, config3); assertNotEquals(config2, config3); } @Test public void testMapPartitionLostListener_equalsWithImplementation() { MapPartitionLostListener listener = mock(MapPartitionLostListener.class); MapPartitionLostListenerConfig config1 = new MapPartitionLostListenerConfig(); config1.setImplementation(listener); MapPartitionLostListenerConfig config2 = new MapPartitionLostListenerConfig(); config2.setImplementation(listener); MapPartitionLostListenerConfig config3 = new MapPartitionLostListenerConfig(); assertEquals(config1, config2); assertNotEquals(config1, config3); 
assertNotEquals(config2, config3); } @Test(expected = ConfigurationException.class) public void givenCacheDeserializedValuesSetToALWAYS_whenSetOptimizeQueriesToFalse_thenThrowConfigurationException() { // given MapConfig mapConfig = new MapConfig(); mapConfig.setCacheDeserializedValues(CacheDeserializedValues.ALWAYS); // when mapConfig.setOptimizeQueries(false); } @Test(expected = ConfigurationException.class) public void givenCacheDeserializedValuesSetToNEVER_whenSetOptimizeQueriesToTrue_thenThrowConfigurationException() { // given MapConfig mapConfig = new MapConfig(); mapConfig.setCacheDeserializedValues(CacheDeserializedValues.NEVER); // when mapConfig.setOptimizeQueries(true); } @Test public void givenCacheDeserializedValuesIsDefault_whenSetOptimizeQueriesToTrue_thenSetCacheDeserializedValuesToALWAYS() { // given MapConfig mapConfig = new MapConfig(); // when mapConfig.setOptimizeQueries(true); // then CacheDeserializedValues cacheDeserializedValues = mapConfig.getCacheDeserializedValues(); assertEquals(CacheDeserializedValues.ALWAYS, cacheDeserializedValues); } @Test public void givenCacheDeserializedValuesIsDefault_thenIsOptimizeQueriesReturnFalse() { // given MapConfig mapConfig = new MapConfig(); // then boolean optimizeQueries = mapConfig.isOptimizeQueries(); assertFalse(optimizeQueries); } @Test public void givenCacheDeserializedValuesIsDefault_whenSetCacheDeserializedValuesToALWAYS_thenIsOptimizeQueriesReturnTrue() { // given MapConfig mapConfig = new MapConfig(); // when mapConfig.setCacheDeserializedValues(CacheDeserializedValues.ALWAYS); // then boolean optimizeQueries = mapConfig.isOptimizeQueries(); assertTrue(optimizeQueries); } @Test public void givenCacheDeserializedValuesIsDefault_whenSetCacheDeserializedValuesToNEVER_thenIsOptimizeQueriesReturnFalse() { // given MapConfig mapConfig = new MapConfig(); // when mapConfig.setCacheDeserializedValues(CacheDeserializedValues.NEVER); // then boolean optimizeQueries = mapConfig.isOptimizeQueries(); 
assertFalse(optimizeQueries); } @Test(expected = ConfigurationException.class) public void givenSetOptimizeQueryIsTrue_whenSetCacheDeserializedValuesToNEVER_thenThrowConfigurationException() { // given MapConfig mapConfig = new MapConfig(); mapConfig.setOptimizeQueries(true); // when mapConfig.setCacheDeserializedValues(CacheDeserializedValues.NEVER); } @Test(expected = ConfigurationException.class) public void givenSetOptimizeQueryIsFalse_whenSetCacheDeserializedValuesToALWAYS_thenThrowConfigurationException() { // given MapConfig mapConfig = new MapConfig(); mapConfig.setOptimizeQueries(false); // when mapConfig.setCacheDeserializedValues(CacheDeserializedValues.ALWAYS); } @Test(expected = ConfigurationException.class) public void givenSetOptimizeQueryIsTrue_whenSetCacheDeserializedValuesToINDEX_ONLY_thenThrowConfigurationException() { // given MapConfig mapConfig = new MapConfig(); mapConfig.setOptimizeQueries(true); // when mapConfig.setCacheDeserializedValues(CacheDeserializedValues.INDEX_ONLY); } @Test @Ignore(value = "this MapStoreConfig does not override equals/hashcode -> this cannot pass right now") public void givenSetCacheDeserializedValuesIsINDEX_ONLY_whenComparedWithOtherConfigWhereCacheIsINDEX_ONLY_thenReturnTrue() { // given MapConfig mapConfig = new MapConfig(); mapConfig.setCacheDeserializedValues(CacheDeserializedValues.INDEX_ONLY); // when MapConfig otherMapConfig = new MapConfig(); otherMapConfig.setCacheDeserializedValues(CacheDeserializedValues.INDEX_ONLY); // then assertEquals(mapConfig, otherMapConfig); } @Test public void givenDefaultConfig_whenSerializedAndDeserialized_noExceptionIsThrown() { MapConfig mapConfig = new MapConfig(); InternalSerializationService serializationService = new DefaultSerializationServiceBuilder().build(); Data data = serializationService.toData(mapConfig); MapConfig clone = serializationService.toObject(data); } @Test public void testEqualsAndHashCode() { EqualsVerifier.forClass(MapConfig.class) 
.allFieldsShouldBeUsedExcept("readOnly") .suppress(Warning.NULL_FIELDS, Warning.NONFINAL_FIELDS) .withPrefabValues(MaxSizeConfig.class, new MaxSizeConfig(300, MaxSizeConfig.MaxSizePolicy.PER_PARTITION), new MaxSizeConfig(100, MaxSizeConfig.MaxSizePolicy.PER_NODE)) .withPrefabValues(MapStoreConfig.class, new MapStoreConfig().setEnabled(true).setClassName("red"), new MapStoreConfig().setEnabled(true).setClassName("black")) .withPrefabValues(NearCacheConfig.class, new NearCacheConfig(10, 20, false, InMemoryFormat.BINARY), new NearCacheConfig(15, 25, true, InMemoryFormat.OBJECT)) .withPrefabValues(WanReplicationRef.class, new WanReplicationRef().setName("red"), new WanReplicationRef().setName("black")) .withPrefabValues(PartitioningStrategyConfig.class, new PartitioningStrategyConfig("red"), new PartitioningStrategyConfig("black")) .withPrefabValues(MapConfigReadOnly.class, new MapConfigReadOnly(new MapConfig("red")), new MapConfigReadOnly(new MapConfig("black"))) .verify(); } }
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ package org.elasticsearch.test; import com.carrotsearch.randomizedtesting.RandomizedContext; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.support.DestructiveOperations; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.node.MockNode; import org.elasticsearch.node.Node; import 
org.elasticsearch.node.NodeValidationException; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.MockScriptService; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.transport.TransportSettings; import org.junit.AfterClass; import org.junit.BeforeClass; import java.io.IOException; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.Stream; import static org.elasticsearch.cluster.coordination.ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING; import static org.elasticsearch.discovery.SettingsBasedSeedHostsProvider.DISCOVERY_SEED_HOSTS_SETTING; import static org.elasticsearch.test.NodeRoles.dataNode; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; /** * A test that keep a singleton node started for all tests that can be used to get * references to Guice injectors in unit tests. */ public abstract class ESSingleNodeTestCase extends ESTestCase { private static Node NODE = null; protected void startNode(long seed) throws Exception { assert NODE == null; NODE = RandomizedContext.current().runWithPrivateRandomness(seed, this::newNode); // we must wait for the node to actually be up and running. 
otherwise the node might have started, // elected itself master but might not yet have removed the // SERVICE_UNAVAILABLE/1/state not recovered / initialized block ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForGreenStatus().get(); assertFalse(clusterHealthResponse.isTimedOut()); client().admin().indices() .preparePutTemplate("one_shard_index_template") .setPatterns(Collections.singletonList("*")) .setOrder(0) .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)).get(); client().admin().indices() .preparePutTemplate("random-soft-deletes-template") .setPatterns(Collections.singletonList("*")) .setOrder(0) .setSettings(Settings.builder().put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), between(0, 1000))) .get(); } private static void stopNode() throws IOException, InterruptedException { Node node = NODE; NODE = null; IOUtils.close(node); if (node != null && node.awaitClose(10, TimeUnit.SECONDS) == false) { throw new AssertionError("Node couldn't close within 10 seconds."); } } @Override public void setUp() throws Exception { super.setUp(); //the seed has to be created regardless of whether it will be used or not, for repeatability long seed = random().nextLong(); // Create the node lazily, on the first test. This is ok because we do not randomize any settings, // only the cluster name. This allows us to have overridden properties for plugins and the version to use. 
if (NODE == null) { startNode(seed); } } @Override public void tearDown() throws Exception { logger.trace("[{}#{}]: cleaning up after test", getTestClass().getSimpleName(), getTestName()); ensureNoInitializingShards(); SearchService searchService = getInstanceFromNode(SearchService.class); assertThat(searchService.getActiveContexts(), equalTo(0)); assertThat(searchService.getOpenScrollContexts(), equalTo(0)); super.tearDown(); assertAcked( client().admin().indices().prepareDelete("*") .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN) .get()); Metadata metadata = client().admin().cluster().prepareState().get().getState().getMetadata(); assertThat("test leaves persistent cluster metadata behind: " + metadata.persistentSettings().keySet(), metadata.persistentSettings().size(), equalTo(0)); assertThat("test leaves transient cluster metadata behind: " + metadata.transientSettings().keySet(), metadata.transientSettings().size(), equalTo(0)); GetIndexResponse indices = client().admin().indices().prepareGetIndex() .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN) .addIndices("*") .get(); assertThat("test leaves indices that were not deleted: " + Strings.arrayToCommaDelimitedString(indices.indices()), indices.indices(), equalTo(Strings.EMPTY_ARRAY)); if (resetNodeAfterTest()) { assert NODE != null; stopNode(); //the seed can be created within this if as it will either be executed before every test method or will never be. startNode(random().nextLong()); } } @BeforeClass public static void setUpClass() throws Exception { stopNode(); } @AfterClass public static void tearDownClass() throws Exception { stopNode(); } /** * This method returns <code>true</code> if the node that is used in the background should be reset * after each test. This is useful if the test changes the cluster state metadata etc. The default is * <code>false</code>. 
*/ protected boolean resetNodeAfterTest() { return false; } /** The plugin classes that should be added to the node. */ protected Collection<Class<? extends Plugin>> getPlugins() { return Collections.emptyList(); } /** Helper method to create list of plugins without specifying generic types. */ @SafeVarargs @SuppressWarnings("varargs") // due to type erasure, the varargs type is non-reifiable, which causes this warning protected final Collection<Class<? extends Plugin>> pluginList(Class<? extends Plugin>... plugins) { return Arrays.asList(plugins); } /** Additional settings to add when creating the node. Also allows overriding the default settings. */ protected Settings nodeSettings() { return Settings.EMPTY; } /** True if a dummy http transport should be used, or false if the real http transport should be used. */ protected boolean addMockHttpTransport() { return true; } @Override protected List<String> filteredWarnings() { return Stream.concat(super.filteredWarnings().stream(), List.of("[index.data_path] setting was deprecated in Elasticsearch and will be removed in a future release! 
" + "See the breaking changes documentation for the next major version.").stream()) .collect(Collectors.toList()); } private Node newNode() { final Path tempDir = createTempDir(); final String nodeName = nodeSettings().get(Node.NODE_NAME_SETTING.getKey(), "node_s_0"); Settings settings = Settings.builder() .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), InternalTestCluster.clusterName("single-node-cluster", random().nextLong())) .put(DestructiveOperations.REQUIRES_NAME_SETTING.getKey(), false) .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) .put(Environment.PATH_REPO_SETTING.getKey(), tempDir.resolve("repo")) // TODO: use a consistent data path for custom paths // This needs to tie into the ESIntegTestCase#indexSettings() method .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), createTempDir().getParent()) .put(Node.NODE_NAME_SETTING.getKey(), nodeName) .put(EsExecutors.NODE_PROCESSORS_SETTING.getKey(), 1) // limit the number of threads created .put("transport.type", getTestTransportType()) .put(TransportSettings.PORT.getKey(), ESTestCase.getPortRange()) .put(dataNode()) .put(NodeEnvironment.NODE_ID_SEED_SETTING.getKey(), random().nextLong()) // default the watermarks low values to prevent tests from failing on nodes without enough disk space .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "1b") .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "1b") .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.getKey(), "1b") // turning on the real memory circuit breaker leads to spurious test failures. As have no full control over heap usage, we // turn it off for these tests. 
.put(HierarchyCircuitBreakerService.USE_REAL_MEMORY_USAGE_SETTING.getKey(), false) .putList(DISCOVERY_SEED_HOSTS_SETTING.getKey()) // empty list disables a port scan for other nodes .putList(INITIAL_MASTER_NODES_SETTING.getKey(), nodeName) .put(nodeSettings()) // allow test cases to provide their own settings or override these .build(); Collection<Class<? extends Plugin>> plugins = getPlugins(); if (plugins.contains(getTestTransportPlugin()) == false) { plugins = new ArrayList<>(plugins); plugins.add(getTestTransportPlugin()); } if (addMockHttpTransport()) { plugins.add(MockHttpTransport.TestPlugin.class); } plugins.add(MockScriptService.TestPlugin.class); Node node = new MockNode(settings, plugins, forbidPrivateIndexSettings()); try { node.start(); } catch (NodeValidationException e) { throw new RuntimeException(e); } return node; } /** * Returns a client to the single-node cluster. */ public Client client() { return wrapClient(NODE.client()); } public Client wrapClient(final Client client) { return client; } /** * Return a reference to the singleton node. */ protected Node node() { return NODE; } /** * Get an instance for a particular class using the injector of the singleton node. */ protected <T> T getInstanceFromNode(Class<T> clazz) { return NODE.injector().getInstance(clazz); } /** * Create a new index on the singleton node with empty index settings. */ protected IndexService createIndex(String index) { return createIndex(index, Settings.EMPTY); } /** * Create a new index on the singleton node with the provided index settings. */ protected IndexService createIndex(String index, Settings settings) { return createIndex(index, settings, null); } /** * Create a new index on the singleton node with the provided index settings. 
     */
    protected IndexService createIndex(String index, Settings settings, XContentBuilder mappings) {
        CreateIndexRequestBuilder createIndexRequestBuilder = client().admin().indices().prepareCreate(index).setSettings(settings);
        if (mappings != null) {
            createIndexRequestBuilder.setMapping(mappings);
        }
        return createIndex(index, createIndexRequestBuilder);
    }

    /**
     * Create a new index on the singleton node with the provided index settings.
     */
    // NOTE(review): 'type' only gates whether 'mappings' is applied and is never sent with the
    // request — presumably a remnant of mapping-type removal. Confirm before relying on it.
    protected IndexService createIndex(String index, Settings settings, String type, String... mappings) {
        CreateIndexRequestBuilder createIndexRequestBuilder = client().admin().indices().prepareCreate(index).setSettings(settings);
        if (type != null) {
            createIndexRequestBuilder.setMapping(mappings);
        }
        return createIndex(index, createIndexRequestBuilder);
    }

    /**
     * Executes the given create-index request, asserts it was acknowledged, and blocks until the
     * index reaches (at most) yellow health on this single-node cluster before resolving its
     * {@link IndexService}.
     */
    protected IndexService createIndex(String index, CreateIndexRequestBuilder createIndexRequestBuilder) {
        assertAcked(createIndexRequestBuilder.get());
        // Wait for the index to be allocated so that cluster state updates don't override
        // changes that would have been done locally
        ClusterHealthResponse health = client().admin().cluster()
                .health(Requests.clusterHealthRequest(index).waitForYellowStatus().waitForEvents(Priority.LANGUID)
                        .waitForNoRelocatingShards(true)).actionGet();
        assertThat(health.getStatus(), lessThanOrEqualTo(ClusterHealthStatus.YELLOW));
        assertThat("Cluster must be a single node cluster", health.getNumberOfDataNodes(), equalTo(1));
        IndicesService instanceFromNode = getInstanceFromNode(IndicesService.class);
        return instanceFromNode.indexServiceSafe(resolveIndex(index));
    }

    /**
     * Resolves an index name to an {@link Index} (name plus uuid) via the get-index API,
     * failing the test if the index does not exist.
     */
    public Index resolveIndex(String index) {
        GetIndexResponse getIndexResponse = client().admin().indices().prepareGetIndex().setIndices(index).get();
        assertTrue("index " + index + " not found", getIndexResponse.getSettings().containsKey(index));
        String uuid = getIndexResponse.getSettings().get(index).get(IndexMetadata.SETTING_INDEX_UUID);
        return new Index(index, uuid);
    }

    /**
     * Create a new search context.
     */
    protected SearchContext createSearchContext(IndexService indexService) {
        return new TestSearchContext(indexService);
    }

    /**
     * Ensures the cluster has a green state via the cluster health API. This method will also wait for relocations.
     * It is useful to ensure that all action on the cluster have finished and all shards that were currently relocating
     * are now allocated and started.
     */
    public ClusterHealthStatus ensureGreen(String... indices) {
        return ensureGreen(TimeValue.timeValueSeconds(30), indices);
    }

    /**
     * Ensures the cluster has a green state via the cluster health API. This method will also wait for relocations.
     * It is useful to ensure that all action on the cluster have finished and all shards that were currently relocating
     * are now allocated and started.
     *
     * @param timeout time out value to set on {@link org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest}
     */
    public ClusterHealthStatus ensureGreen(TimeValue timeout, String... indices) {
        ClusterHealthResponse actionGet = client().admin().cluster()
                .health(Requests.clusterHealthRequest(indices).timeout(timeout).waitForGreenStatus().waitForEvents(Priority.LANGUID)
                        .waitForNoRelocatingShards(true)).actionGet();
        if (actionGet.isTimedOut()) {
            // Dump the full cluster state and pending tasks to the log before failing,
            // so a timed-out wait can be diagnosed post-mortem.
            logger.info("ensureGreen timed out, cluster state:\n{}\n{}", client().admin().cluster().prepareState().get().getState(),
                    client().admin().cluster().preparePendingClusterTasks().get());
            assertThat("timed out waiting for green state", actionGet.isTimedOut(), equalTo(false));
        }
        assertThat(actionGet.getStatus(), equalTo(ClusterHealthStatus.GREEN));
        logger.debug("indices {} are green", indices.length == 0 ? "[_all]" : indices);
        return actionGet.getStatus();
    }

    @Override
    protected NamedXContentRegistry xContentRegistry() {
        return getInstanceFromNode(NamedXContentRegistry.class);
    }

    /** Whether index creation forbids private (reserved) index settings; tests may override to relax. */
    protected boolean forbidPrivateIndexSettings() {
        return true;
    }

    /**
     * waits until all shard initialization is completed.
     *
     * inspired by {@link ESRestTestCase}
     */
    protected void ensureNoInitializingShards() {
        ClusterHealthResponse actionGet = client().admin()
            .cluster()
            .health(Requests.clusterHealthRequest("_all").waitForNoInitializingShards(true))
            .actionGet();
        assertFalse("timed out waiting for shards to initialize", actionGet.isTimedOut());
    }

}
/** * Copyright (c) 2013, Sana * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the Sana nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL Sana BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */
package org.sana.android.content;

import java.io.File;
import java.io.FileNotFoundException;

import org.sana.android.db.DBUtils;
import org.sana.android.db.DatabaseManager;
import org.sana.android.db.DatabaseOpenHelper;
import org.sana.android.db.TableHelper;
import org.sana.android.db.impl.ConceptsHelper;
import org.sana.android.db.impl.EncounterTasksHelper;
import org.sana.android.db.impl.EncountersHelper;
import org.sana.android.db.impl.EventsHelper;
import org.sana.android.db.impl.InstructionsHelper;
import org.sana.android.db.impl.NotificationsHelper;
import org.sana.android.db.impl.ObservationsHelper;
import org.sana.android.db.impl.ObserversHelper;
import org.sana.android.db.impl.ProceduresHelper;
import org.sana.android.db.impl.SubjectsHelper;

import android.content.ContentProvider;
import android.content.ContentUris;
import android.content.ContentValues;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteQueryBuilder;
import android.net.Uri;
import android.os.ParcelFileDescriptor;
import android.text.TextUtils;
import android.util.Log;

/**
 * Abstract implementation of {@link android.content.ContentProvider} for
 * classes extending {@link org.sana.api.IModel} each of which maps to a table within
 * the database. This implementation uses the
 * {@link org.sana.android.content.Uris Uris} for mapping the
 * {@link android.net.Uri Uri's} to each table and extensions of the
 * {@link org.sana.android.db.TableHelper} class to handle interactions for
 * each table. Extending classes should only need to implement the {@link #onCreate()}
 * method which provides the database name and version.
* * @author Sana Development * */ public abstract class ModelContentProvider extends ContentProvider { public static final String TAG = ModelContentProvider.class.getSimpleName(); public static final String AUTHORITY = "org.sana.provider"; protected static final String DATABASE = "models.db"; protected DatabaseOpenHelper mOpener; protected DatabaseManager mManager; // match types public static final int ITEMS = 0; public static final int ITEM_ID = 1; static final ModelMatcher mMatcher = ModelMatcher.getInstance(); protected String getTable(Uri uri){ return getTableHelper(uri).getTable(); } protected TableHelper<?> getTableHelper(Uri uri){ int match = Uris.getContentDescriptor(uri); switch(match){ case(Uris.CONCEPT): return ConceptsHelper.getInstance(); case(Uris.ENCOUNTER): return EncountersHelper.getInstance(); case(Uris.EVENT): return EventsHelper.getInstance(); case(Uris.INSTRUCTION): return InstructionsHelper.getInstance(); case(Uris.NOTIFICATION): return NotificationsHelper.getInstance(); case(Uris.OBSERVATION): return ObservationsHelper.getInstance(); case(Uris.OBSERVER): return ObserversHelper.getInstance(); case(Uris.PROCEDURE): return ProceduresHelper.getInstance(); case(Uris.SUBJECT): return SubjectsHelper.getInstance(); case(Uris.ENCOUNTER_TASK): return EncounterTasksHelper.getInstance(); default: throw new IllegalArgumentException("Invalid uri in " +"getTableHelper(): " + uri.toString()); } } /* (non-Javadoc) * @see android.content.ContentProvider#delete(android.net.Uri, java.lang.String, java.lang.String[]) */ @Override public synchronized int delete(Uri uri, String selection, String[] selectionArgs) { Log.d(TAG, "delete() uri=" + uri + ", selection= " + selection + ", selectionArgs=" + ((selectionArgs != null)?TextUtils.join(",", selectionArgs):"null") + " );"); String whereClause = DBUtils.getWhereClause(uri, Uris.getDescriptor(uri), selection); switch(Uris.getTypeDescriptor(uri)){ case(Uris.ITEM_ID): selection = DBUtils.getWhereClauseWithID(uri, 
selection); break; case(Uris.ITEM_UUID): selection = DBUtils.getWhereClauseWithUUID(uri, selection); default: } TableHelper<?> helper = getTableHelper(uri); String table = helper.getTable(); SQLiteDatabase db = DatabaseManager.getInstance().openDatabase(); int count = db.delete(table, selection, selectionArgs); DatabaseManager.getInstance().closeDatabase(); getContext().getContentResolver().notifyChange(uri, null); return count; } /* (non-Javadoc) * @see android.content.ContentProvider#getType(android.net.Uri) */ @Override public String getType(Uri uri) { return Uris.getType(uri); } /* (non-Javadoc) * @see android.content.ContentProvider#insert(android.net.Uri, android.content.ContentValues) */ @Override public synchronized Uri insert(Uri uri, ContentValues values) { Log.d(TAG, "insert(" + uri.toString() +", N = " + String.valueOf((values == null)?0:values.size()) + " values.)"); TableHelper<?> helper = getTableHelper(uri); // set default insert values and execute values = helper.onInsert(values); String table = helper.getTable(); SQLiteDatabase db = DatabaseManager.getInstance().openDatabase();//mOpener.getWritableDatabase(); long id = db.insert(table, null, values); DatabaseManager.getInstance().closeDatabase(); Uri result = ContentUris.withAppendedId(uri, id); getContext().getContentResolver().notifyChange(uri, null); Log.d(TAG, "insert(): Successfully inserted => " + result); return result; } /* (non-Javadoc) * @see android.content.ContentProvider#query(android.net.Uri, java.lang.String[], java.lang.String, java.lang.String[], java.lang.String) */ @Override public synchronized Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs, String sortOrder) { Log.d(TAG, ".query(" + uri.toString() +");"); TableHelper<?> helper = getTableHelper(uri); // set query and execute sortOrder = (TextUtils.isEmpty(sortOrder))? 
helper.onSort(uri): sortOrder; switch(Uris.getTypeDescriptor(uri)){ case(Uris.ITEM_ID): selection = DBUtils.getWhereClauseWithID(uri, selection); break; case(Uris.ITEM_UUID): selection = DBUtils.getWhereClauseWithUUID(uri, selection); default: } Log.d(TAG, ".query(.) selection = " + selection); String uriQS = DBUtils.convertUriQueryToSelect(uri); Log.d(TAG, ".query(.) uri qs = " + selection); if(!TextUtils.isEmpty(uriQS)){ selection = String.format("%s %s", selection, uriQS); Log.d(TAG, ".query(.) selection --> " + selection); } SQLiteQueryBuilder qb = new SQLiteQueryBuilder(); qb.setTables(helper.getTable()); SQLiteDatabase db = DatabaseManager.getInstance().openDatabase();//mOpener.getReadableDatabase(); //Cursor cursor = helper.onQuery(db, projection, selection, selectionArgs, sortOrder); Cursor cursor = qb.query(db, projection, selection, selectionArgs, null, null, sortOrder); cursor.setNotificationUri(getContext().getContentResolver(), uri); Log.d(TAG, ".query(" + uri.toString() +") count = " + ((cursor!=null)?cursor.getCount():0)); return cursor; } /* (non-Javadoc) * @see android.content.ContentProvider#update(android.net.Uri, android.content.ContentValues, java.lang.String, java.lang.String[]) */ @Override public synchronized int update(Uri uri, ContentValues values, String selection, String[] selectionArgs) { Log.d(TAG, ".update(" + uri.toString() +");");//mOpener.getWritableDatabase(); // set any default update values TableHelper<?> helper = getTableHelper(uri); values = helper.onUpdate(uri, values); String table = helper.getTable(); // set selection and execute switch(Uris.getTypeDescriptor(uri)){ case(Uris.ITEM_ID): selection = DBUtils.getWhereClauseWithID(uri, selection); break; case(Uris.ITEM_UUID): selection = DBUtils.getWhereClauseWithUUID(uri, selection); default: } SQLiteDatabase db = DatabaseManager.getInstance().openDatabase(); int result = db.update(table, values, selection, selectionArgs); DatabaseManager.getInstance().closeDatabase(); 
getContext().getContentResolver().notifyChange(uri, null); return result; } /* * (non-Javadoc) * @see android.content.ContentProvider#openFile(android.net.Uri, java.lang.String) */ @Override public ParcelFileDescriptor openFile(Uri uri, String mode) throws FileNotFoundException { Log.i(TAG, "openFile()" + uri); Log.d(TAG,"...uri: " + uri); Log.d(TAG,"...mode: " + mode); String ext = getTableHelper(uri).getFileExtension(); int match = Uris.getContentDescriptor(uri); switch(match){ case(Uris.OBSERVATION): case(Uris.SUBJECT): break; default: throw new FileNotFoundException("Unsupported content type. No files."); } TableHelper<?> helper = getTableHelper(uri); String column = helper.getFileColumn(); Cursor c = query(uri, new String[]{ column }, null, null, null); String path = null; if (c != null) { try{ if(c.moveToFirst()){ // Should never get more than one back if(c.getCount() > 1) throw new IllegalArgumentException( "Vaild for single row only"); // get file and open path = c.getString(0); Log.d(TAG, "...opening file path: " + path); } else { throw new IllegalArgumentException("Invalid Uri: " + uri); } } finally { c.close(); } } File fopen; int modeBits = modeToMode(mode); // Create file if (TextUtils.isEmpty(path)){ Log.d(TAG,"...path was empty."); if(modeBits == ParcelFileDescriptor.MODE_READ_ONLY) throw new IllegalArgumentException("Read only open on empty File path " +"in column '" + column + "' for uri: " + uri); // Open in read write with no file name long id = ContentUris.parseId(uri); File dir = getContext().getExternalFilesDir(helper.getTable()); boolean created = dir.mkdirs(); Log.d(TAG,"...created parent dirs: " + created); //File dir = new File(fDir,helper.getTable()); dir.mkdirs(); fopen = new File(dir,String.format("%s.%s", id,ext)); // Update file column with absolute path ContentValues values = new ContentValues(); values.put(column, fopen.getAbsolutePath()); int updated = getContext().getContentResolver().update(uri, values, null,null); Log.d(TAG, 
"updated: " + updated + ", file: " + fopen.getAbsolutePath()); } else { fopen = new File(path); } Log.d(TAG,"...opening file: " + fopen.getAbsolutePath()); Log.d(TAG,"...opening in mode: " + mode); return ParcelFileDescriptor.open(fopen, modeBits); } protected final int modeToMode(String mode){ int modeBits; if ("r".equals(mode)) { modeBits = ParcelFileDescriptor.MODE_READ_ONLY; } else if ("w".equals(mode) || "wt".equals(mode)) { modeBits = ParcelFileDescriptor.MODE_WRITE_ONLY | ParcelFileDescriptor.MODE_CREATE | ParcelFileDescriptor.MODE_TRUNCATE; } else if ("wa".equals(mode)) { modeBits = ParcelFileDescriptor.MODE_WRITE_ONLY | ParcelFileDescriptor.MODE_CREATE | ParcelFileDescriptor.MODE_APPEND; } else if ("rw".equals(mode)) { modeBits = ParcelFileDescriptor.MODE_READ_WRITE | ParcelFileDescriptor.MODE_CREATE; } else if ("rwt".equals(mode)) { modeBits = ParcelFileDescriptor.MODE_READ_WRITE | ParcelFileDescriptor.MODE_CREATE | ParcelFileDescriptor.MODE_TRUNCATE; } else { throw new IllegalArgumentException("Bad mode: " + mode); } return modeBits; } }
/*
 * Copyright LWJGL. All rights reserved.
 * License terms: https://www.lwjgl.org/license
 * MACHINE GENERATED FILE, DO NOT EDIT
 */
package org.lwjgl.util.vma;

import javax.annotation.*;

import java.nio.*;

import org.lwjgl.*;
import org.lwjgl.system.*;

import static org.lwjgl.system.Checks.*;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;

// NOTE(review): this file is produced by the LWJGL generator; the changes below are
// comment-only. Any substantive change belongs in the generator template, not here.

/**
 * General statistics from current state of Allocator.
 *
 * <h3>Layout</h3>
 *
 * <pre><code>
 * struct VmaStats {
 *     {@link VmaStatInfo VmaStatInfo} memoryType[32];
 *     {@link VmaStatInfo VmaStatInfo} memoryHeap[16];
 *     {@link VmaStatInfo VmaStatInfo} total;
 * }</code></pre>
 */
public class VmaStats extends Struct implements NativeResource {

    /** The struct size in bytes. */
    public static final int SIZEOF;

    /** The struct alignment in bytes. */
    public static final int ALIGNOF;

    /** The struct member offsets. */
    public static final int
        MEMORYTYPE,
        MEMORYHEAP,
        TOTAL;

    static {
        Layout layout = __struct(
            __array(VmaStatInfo.SIZEOF, VmaStatInfo.ALIGNOF, 32),
            __array(VmaStatInfo.SIZEOF, VmaStatInfo.ALIGNOF, 16),
            __member(VmaStatInfo.SIZEOF, VmaStatInfo.ALIGNOF)
        );

        SIZEOF = layout.getSize();
        ALIGNOF = layout.getAlignment();

        MEMORYTYPE = layout.offsetof(0);
        MEMORYHEAP = layout.offsetof(1);
        TOTAL = layout.offsetof(2);
    }

    /**
     * Creates a {@code VmaStats} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
     * visible to the struct instance and vice versa.
     *
     * <p>The created instance holds a strong reference to the container object.</p>
     */
    public VmaStats(ByteBuffer container) {
        super(memAddress(container), __checkContainer(container, SIZEOF));
    }

    @Override
    public int sizeof() { return SIZEOF; }

    /** @return a {@link VmaStatInfo}.Buffer view of the {@code memoryType} field. */
    @NativeType("VmaStatInfo[32]")
    public VmaStatInfo.Buffer memoryType() { return nmemoryType(address()); }
    /** @return a {@link VmaStatInfo} view of the struct at the specified index of the {@code memoryType} field. */
    public VmaStatInfo memoryType(int index) { return nmemoryType(address(), index); }
    /** @return a {@link VmaStatInfo}.Buffer view of the {@code memoryHeap} field. */
    @NativeType("VmaStatInfo[16]")
    public VmaStatInfo.Buffer memoryHeap() { return nmemoryHeap(address()); }
    /** @return a {@link VmaStatInfo} view of the struct at the specified index of the {@code memoryHeap} field. */
    public VmaStatInfo memoryHeap(int index) { return nmemoryHeap(address(), index); }
    /** @return a {@link VmaStatInfo} view of the {@code total} field. */
    public VmaStatInfo total() { return ntotal(address()); }

    // -----------------------------------

    /** Returns a new {@code VmaStats} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
    public static VmaStats malloc() {
        return wrap(VmaStats.class, nmemAllocChecked(SIZEOF));
    }

    /** Returns a new {@code VmaStats} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
    public static VmaStats calloc() {
        return wrap(VmaStats.class, nmemCallocChecked(1, SIZEOF));
    }

    /** Returns a new {@code VmaStats} instance allocated with {@link BufferUtils}. */
    public static VmaStats create() {
        ByteBuffer container = BufferUtils.createByteBuffer(SIZEOF);
        return wrap(VmaStats.class, memAddress(container), container);
    }

    /** Returns a new {@code VmaStats} instance for the specified memory address. */
    public static VmaStats create(long address) {
        return wrap(VmaStats.class, address);
    }

    /** Like {@link #create(long) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static VmaStats createSafe(long address) {
        return address == NULL ? null : wrap(VmaStats.class, address);
    }

    /**
     * Returns a new {@link VmaStats.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static VmaStats.Buffer malloc(int capacity) {
        return wrap(Buffer.class, nmemAllocChecked(__checkMalloc(capacity, SIZEOF)), capacity);
    }

    /**
     * Returns a new {@link VmaStats.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static VmaStats.Buffer calloc(int capacity) {
        return wrap(Buffer.class, nmemCallocChecked(capacity, SIZEOF), capacity);
    }

    /**
     * Returns a new {@link VmaStats.Buffer} instance allocated with {@link BufferUtils}.
     *
     * @param capacity the buffer capacity
     */
    public static VmaStats.Buffer create(int capacity) {
        ByteBuffer container = __create(capacity, SIZEOF);
        return wrap(Buffer.class, memAddress(container), capacity, container);
    }

    /**
     * Create a {@link VmaStats.Buffer} instance at the specified memory.
     *
     * @param address  the memory address
     * @param capacity the buffer capacity
     */
    public static VmaStats.Buffer create(long address, int capacity) {
        return wrap(Buffer.class, address, capacity);
    }

    /** Like {@link #create(long, int) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static VmaStats.Buffer createSafe(long address, int capacity) {
        return address == NULL ? null : wrap(Buffer.class, address, capacity);
    }

    // -----------------------------------

    /** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
    @Deprecated
    public static VmaStats mallocStack() { return malloc(stackGet()); }

    /** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
    @Deprecated
    public static VmaStats callocStack() { return calloc(stackGet()); }

    /** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
    @Deprecated
    public static VmaStats mallocStack(MemoryStack stack) { return malloc(stack); }

    /** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
    @Deprecated
    public static VmaStats callocStack(MemoryStack stack) { return calloc(stack); }

    /** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
    @Deprecated
    public static VmaStats.Buffer mallocStack(int capacity) { return malloc(capacity, stackGet()); }

    /** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
    @Deprecated
    public static VmaStats.Buffer callocStack(int capacity) { return calloc(capacity, stackGet()); }

    /** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
    @Deprecated
    public static VmaStats.Buffer mallocStack(int capacity, MemoryStack stack) { return malloc(capacity, stack); }

    /** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
    @Deprecated
    public static VmaStats.Buffer callocStack(int capacity, MemoryStack stack) { return calloc(capacity, stack); }

    /**
     * Returns a new {@code VmaStats} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack the stack from which to allocate
     */
    public static VmaStats malloc(MemoryStack stack) {
        return wrap(VmaStats.class, stack.nmalloc(ALIGNOF, SIZEOF));
    }

    /**
     * Returns a new {@code VmaStats} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack the stack from which to allocate
     */
    public static VmaStats calloc(MemoryStack stack) {
        return wrap(VmaStats.class, stack.ncalloc(ALIGNOF, 1, SIZEOF));
    }

    /**
     * Returns a new {@link VmaStats.Buffer} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static VmaStats.Buffer malloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
    }

    /**
     * Returns a new {@link VmaStats.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static VmaStats.Buffer calloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
    }

    // -----------------------------------

    /** Unsafe version of {@link #memoryType}. */
    public static VmaStatInfo.Buffer nmemoryType(long struct) { return VmaStatInfo.create(struct + VmaStats.MEMORYTYPE, 32); }
    /** Unsafe version of {@link #memoryType(int) memoryType}. */
    public static VmaStatInfo nmemoryType(long struct, int index) {
        return VmaStatInfo.create(struct + VmaStats.MEMORYTYPE + check(index, 32) * VmaStatInfo.SIZEOF);
    }
    /** Unsafe version of {@link #memoryHeap}. */
    public static VmaStatInfo.Buffer nmemoryHeap(long struct) { return VmaStatInfo.create(struct + VmaStats.MEMORYHEAP, 16); }
    /** Unsafe version of {@link #memoryHeap(int) memoryHeap}. */
    public static VmaStatInfo nmemoryHeap(long struct, int index) {
        return VmaStatInfo.create(struct + VmaStats.MEMORYHEAP + check(index, 16) * VmaStatInfo.SIZEOF);
    }
    /** Unsafe version of {@link #total}. */
    public static VmaStatInfo ntotal(long struct) { return VmaStatInfo.create(struct + VmaStats.TOTAL); }

    // -----------------------------------

    /** An array of {@link VmaStats} structs. */
    public static class Buffer extends StructBuffer<VmaStats, Buffer> implements NativeResource {

        private static final VmaStats ELEMENT_FACTORY = VmaStats.create(-1L);

        /**
         * Creates a new {@code VmaStats.Buffer} instance backed by the specified container.
         *
         * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
         * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
         * by {@link VmaStats#SIZEOF}, and its mark will be undefined.
         *
         * <p>The created buffer instance holds a strong reference to the container object.</p>
         */
        public Buffer(ByteBuffer container) {
            super(container, container.remaining() / SIZEOF);
        }

        /** Creates a {@code VmaStats.Buffer} over {@code cap} structs starting at {@code address}. */
        public Buffer(long address, int cap) {
            super(address, null, -1, 0, cap, cap);
        }

        // Package-private constructor used internally when slicing/duplicating buffers.
        Buffer(long address, @Nullable ByteBuffer container, int mark, int pos, int lim, int cap) {
            super(address, container, mark, pos, lim, cap);
        }

        @Override
        protected Buffer self() {
            return this;
        }

        @Override
        protected VmaStats getElementFactory() {
            return ELEMENT_FACTORY;
        }

        /** @return a {@link VmaStatInfo}.Buffer view of the {@code memoryType} field. */
        @NativeType("VmaStatInfo[32]")
        public VmaStatInfo.Buffer memoryType() { return VmaStats.nmemoryType(address()); }
        /** @return a {@link VmaStatInfo} view of the struct at the specified index of the {@code memoryType} field. */
        public VmaStatInfo memoryType(int index) { return VmaStats.nmemoryType(address(), index); }
        /** @return a {@link VmaStatInfo}.Buffer view of the {@code memoryHeap} field. */
        @NativeType("VmaStatInfo[16]")
        public VmaStatInfo.Buffer memoryHeap() { return VmaStats.nmemoryHeap(address()); }
        /** @return a {@link VmaStatInfo} view of the struct at the specified index of the {@code memoryHeap} field. */
        public VmaStatInfo memoryHeap(int index) { return VmaStats.nmemoryHeap(address(), index); }
        /** @return a {@link VmaStatInfo} view of the {@code total} field. */
        public VmaStatInfo total() { return VmaStats.ntotal(address()); }

    }

}
package mcjty.rftoolsdim.dimensions.world;

import mcjty.lib.varia.Logging;
import mcjty.rftoolsdim.api.dimension.IRFToolsWorldProvider;
import mcjty.rftoolsdim.config.GeneralConfiguration;
import mcjty.rftoolsdim.config.PowerConfiguration;
import mcjty.rftoolsdim.dimensions.DimensionInformation;
import mcjty.rftoolsdim.dimensions.DimensionStorage;
import mcjty.rftoolsdim.dimensions.ModDimensions;
import mcjty.rftoolsdim.dimensions.RfToolsDimensionManager;
import mcjty.rftoolsdim.dimensions.description.WeatherDescriptor;
import mcjty.rftoolsdim.dimensions.types.ControllerType;
import mcjty.rftoolsdim.dimensions.types.PatreonType;
import mcjty.rftoolsdim.dimensions.types.SkyType;
import mcjty.rftoolsdim.dimensions.types.TerrainType;
import mcjty.rftoolsdim.network.PacketGetDimensionEnergy;
import mcjty.rftoolsdim.network.RFToolsDimMessages;
import net.minecraft.entity.Entity;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.Vec3d;
import net.minecraft.world.DimensionType;
import net.minecraft.world.World;
import net.minecraft.world.WorldProvider;
import net.minecraft.world.WorldServer;
import net.minecraft.world.biome.Biome;
import net.minecraft.world.biome.BiomeProvider;
import net.minecraft.world.biome.BiomeProviderSingle;
import net.minecraft.world.gen.IChunkGenerator;
import net.minecraft.world.storage.WorldInfo;
import net.minecraftforge.common.DimensionManager;
import net.minecraftforge.fml.common.FMLCommonHandler;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;

import java.util.Set;

//@Optional.InterfaceList(@Optional.Interface(iface = "ivorius.reccomplex.dimensions.DimensionDictionary$Handler", modid = "reccomplex"))
/**
 * Forge {@link WorldProvider} for RFTools-generated dimensions.
 *
 * <p>The provider lazily resolves its {@link DimensionInformation} (which may not yet be
 * available when the provider is constructed — on clients it arrives via network sync),
 * derives a per-dimension seed from the dimension descriptor, wires up the custom biome
 * provider and sky renderers, and scales sky/fog/sun brightness by the dimension's stored
 * power level ("power blackout" effect).</p>
 */
public class GenericWorldProvider extends WorldProvider implements /*@todo implements DimensionDictionary.Handler,*/ IRFToolsWorldProvider {
    public static final String RFTOOLS_DIMENSION = "rftools_dimension";

    // Lazily resolved; may be null until getDimensionInformation() succeeds (see that method).
    private DimensionInformation dimensionInformation;
    // Lazily resolved per-world energy storage; see getStorage().
    private DimensionStorage storage;
    // Effective seed for this dimension; computed in setSeed().
    private long seed;

    private Set<String> dimensionTypes = null;    // Used for Recurrent Complex support

    /**
     * Derives the seed for this dimension from a base seed.
     *
     * <p>Old worlds (before VERSION_DIMLETSSEED) used a simple {@code dim * 13 + seed}
     * formula; newer worlds delegate to the dimension descriptor so the seed depends on
     * the dimlets used to create the dimension.</p>
     */
    private long calculateSeed(long seed, int dim) {
        if (dimensionInformation == null || dimensionInformation.getWorldVersion() < DimensionInformation.VERSION_DIMLETSSEED) {
            return dim * 13L + seed;
        } else {
            return dimensionInformation.getDescriptor().calculateSeed(seed);
        }
    }

    public World getWorld() {
        return world;
    }

    @Override
    public DimensionType getDimensionType() {
        return ModDimensions.rftoolsType;
    }

    @Override
    public long getSeed() {
        // Old worlds (before VERSION_CORRECTSEED) kept the vanilla seed for compatibility.
        if (dimensionInformation == null || dimensionInformation.getWorldVersion() < DimensionInformation.VERSION_CORRECTSEED) {
            return super.getSeed();
        } else {
            return seed;
        }
    }

    /** Installs dimension information (e.g. after client sync) and refreshes sky renderers. */
    public void setDimensionInformation(DimensionInformation info) {
        dimensionInformation = info;
        setupSkyRenderers();
    }

    /**
     * Lazily resolves the dimension information for this provider's dimension.
     *
     * @return the dimension information, or {@code null} if it is not (yet) available
     *         (a RuntimeException is logged in that case but not thrown)
     */
    public DimensionInformation getDimensionInformation() {
        if (dimensionInformation == null) {
            int dim = getDimension();
            if(FMLCommonHandler.instance().getEffectiveSide() == Side.CLIENT) {
                dimensionInformation = RfToolsDimensionManager.getDimensionManagerClient().getDimensionInformation(dim);
            } else {
                // Note: we cannot use world here since we are possibly still busy setting up our world so the 'mapStorage'
                // is always correct here. So we have to use the overworld.
                dimensionInformation = RfToolsDimensionManager.getDimensionManager(DimensionManager.getWorld(0)).getDimensionInformation(dim);
            }
            if (dimensionInformation == null) {
                Logging.getLogger().catching(new RuntimeException("Dimension information for dimension " + dim + " is missing!"));
            } else {
                setSeed(dim);
//                setupProviderInfo();
            }
        }
        return dimensionInformation;
    }

    @Override
    public String getSaveFolder() {
        return "RFTOOLS" + getDimension();
    }

//    @Override
//    @Optional.Method(modid = "reccomplex")
//    public Set<String> getDimensionTypes() {
//        getDimensionInformation();
//        if (dimensionInformation == null) {
//            return Collections.EMPTY_SET;
//        }
//        if (dimensionTypes == null) {
//            dimensionTypes = new HashSet<String>();
//            dimensionTypes.add(DimensionDictionary.INFINITE);
//            dimensionTypes.add("RFTOOLS_DIMENSION");
//            // @todo temporary. This should probably be in the TerrainType enum.
//            switch (dimensionInformation.getTerrainType()) {
//                case TERRAIN_VOID:
//                case TERRAIN_ISLAND:
//                case TERRAIN_ISLANDS:
//                case TERRAIN_CHAOTIC:
//                case TERRAIN_PLATEAUS:
//                case TERRAIN_GRID:
//                    dimensionTypes.add(DimensionDictionary.NO_TOP_LIMIT);
//                    dimensionTypes.add(DimensionDictionary.NO_BOTTOM_LIMIT);
//                    break;
//                case TERRAIN_FLAT:
//                case TERRAIN_AMPLIFIED:
//                case TERRAIN_NORMAL:
//                case TERRAIN_NEARLANDS:
//                    dimensionTypes.add(DimensionDictionary.NO_TOP_LIMIT);
//                    dimensionTypes.add(DimensionDictionary.BOTTOM_LIMIT);
//                    break;
//                case TERRAIN_CAVERN_OLD:
//                    dimensionTypes.add(DimensionDictionary.BOTTOM_LIMIT);
//                    dimensionTypes.add(DimensionDictionary.TOP_LIMIT);
//                    break;
//                case TERRAIN_CAVERN:
//                case TERRAIN_LOW_CAVERN:
//                case TERRAIN_FLOODED_CAVERN:
//                    dimensionTypes.add(DimensionDictionary.BOTTOM_LIMIT);
//                    dimensionTypes.add(DimensionDictionary.NO_TOP_LIMIT);
//                    break;
//            }
//            if (dimensionInformation.hasStructureType(StructureType.STRUCTURE_RECURRENTCOMPLEX)) {
//                Collections.addAll(dimensionTypes, dimensionInformation.getDimensionTypes());
//            }
//        }
//        return dimensionTypes;
//    }
//

    /**
     * Computes and caches {@link #seed} for the given dimension id.
     *
     * <p>Priority: an explicitly forced seed wins; otherwise a dimlet-provided base seed
     * is combined with the dimension id; otherwise the overworld seed is used as base.
     * Silently returns when dimension information cannot be resolved.</p>
     */
    private void setSeed(int dim) {
        if (dimensionInformation == null) {
            if (world == null) {
                return;
            }
            dimensionInformation = RfToolsDimensionManager.getDimensionManager(world).getDimensionInformation(dim);
            if (dimensionInformation == null) {
                Logging.log("Error: setSeed() called with null diminfo. Error ignored!");
                return;
            }
        }
        long forcedSeed = dimensionInformation.getForcedDimensionSeed();
        if (forcedSeed != 0) {
            Logging.log("Forced seed for dimension " + dim + ": " + forcedSeed);
            seed = forcedSeed;
        } else {
            long baseSeed = dimensionInformation.getBaseSeed();
            if (baseSeed != 0) {
                seed = calculateSeed(baseSeed, dim) ;
            } else {
                seed = calculateSeed(world.getSeed(), dim) ;
            }
        }
//        seed = dimensionInformation.getBaseSeed();
//        System.out.println("seed = " + seed);
    }

    // Lazily fetches the per-world dimension energy storage.
    private DimensionStorage getStorage() {
        if (storage == null) {
            storage = DimensionStorage.getDimensionStorage(world);
        }
        return storage;
    }

//    @Override
//    public void registerWorldChunkManager() {
//        getDimensionInformation();
//        setupProviderInfo();
//    }

    @Override
    protected void generateLightBrightnessTable() {
        getDimensionInformation();
        // Invertigo terrain gets a custom (brighter, inverted-curve) light table.
        if (dimensionInformation != null && dimensionInformation.getTerrainType() == TerrainType.TERRAIN_INVERTIGO) {
            for (int i = 0; i <= 15; ++i) {
                float f1 = 1.0F - i / 15.0F;
                this.lightBrightnessTable[i] = (1.0F - f1) / (f1 * 3.0F + 1.0F) * 1.0F + 1.0F;
            }
            return;
        }
        super.generateLightBrightnessTable();
    }

    @Override
    public BiomeProvider getBiomeProvider() {
        if (biomeProvider == null) {
            createBiomeProviderInternal();
        }
        return biomeProvider;
    }

    @Override
    protected void init() {
        super.init();
        if (world instanceof WorldServer) {
            createBiomeProviderInternal();
            return;
        }
        // We are on a client here and we don't have sufficient information right here (dimension information has not synced yet)
        biomeProvider = null;
    }

    /**
     * Builds the biome provider matching the dimension's controller type:
     * single-biome, vanilla default (with this dimension's seed), or the generic
     * RFTools controller-driven provider. Falls back to vanilla when dimension
     * information is unavailable.
     */
    private void createBiomeProviderInternal() {
        getDimensionInformation();
        if (dimensionInformation != null) {
            ControllerType type = dimensionInformation.getControllerType();
            if (type == ControllerType.CONTROLLER_SINGLE) {
                this.biomeProvider = new BiomeProviderSingle(dimensionInformation.getBiomes().get(0));
            } else if (type == ControllerType.CONTROLLER_DEFAULT) {
                WorldInfo worldInfo = world.getWorldInfo();
                // Wrap the world info so the vanilla provider sees this dimension's seed.
                worldInfo = new WorldInfo(worldInfo) {
                    @Override
                    public long getSeed() {
                        return seed;
                    }
                };
                this.biomeProvider = new BiomeProvider(worldInfo);
            } else {
                this.biomeProvider = new GenericBiomeProvider(world.getWorldInfo()) {
                    @Override
                    public DimensionInformation getDimensionInformation() {
                        return dimensionInformation;      // Hack to get the dimension information in the superclass.
                    }
                };
            }
        } else {
            this.biomeProvider = new BiomeProvider(world.getWorldInfo());
        }

        if (dimensionInformation != null) {
            this.hasSkyLight = dimensionInformation.getTerrainType().hasSky();
            setupSkyRenderers();
        }
    }

    /**
     * Registers the client-side sky/cloud renderers matching the dimension's sky
     * descriptor and patreon flags. No-op on the server.
     * NOTE(review): assumes dimensionInformation is non-null when called on the
     * client — confirm against callers.
     */
    public void setupSkyRenderers() {
        if (world.isRemote) {
            SkyType skyType;
            if (dimensionInformation.isPatreonBitSet(PatreonType.PATREON_DARKCORVUS)) {
                skyType = SkyType.SKY_STARS3;
            } else {
                skyType = dimensionInformation.getSkyDescriptor().getSkyType();
            }
            if (!hasSkyLight) {
                SkyRenderer.registerNoSky(this);
            } else if (skyType.skyboxType != null) {
                SkyRenderer.registerSkybox(this, skyType);
            } else if (skyType == SkyType.SKY_ENDER) {
                SkyRenderer.registerEnderSky(this);
            } else {
                SkyRenderer.registerSky(this, dimensionInformation);
            }
            if (dimensionInformation.isPatreonBitSet(PatreonType.PATREON_KENNEY)) {
                SkyRenderer.registerKenneyCloudRenderer(this);
            }
        }
    }

    @Override
    public double getHorizon() {
        getDimensionInformation();
        if (dimensionInformation != null && dimensionInformation.getTerrainType().hasNoHorizon()) {
            return 0;
        } else {
            return super.getHorizon();
        }
    }

    @Override
    public boolean isSurfaceWorld() {
        getDimensionInformation();
        if (dimensionInformation == null) {
            return super.isSurfaceWorld();
        }
        return dimensionInformation.getTerrainType().hasSky();
    }

//    @Override
//    public String getDimensionName() {
//        return RFTOOLS_DIMENSION;
//    }
//
//    @Override
//    public String getWelcomeMessage() {
//        return "Entering the rftools dimension!";
//    }

    @Override
    public boolean canRespawnHere() {
        return false;
    }

    /**
     * Bed behaviour is configurable: 0 = deny sleeping, 1 = vanilla behaviour
     * (typically an explosion in non-overworld dimensions), 2 = set spawn point
     * without sleeping.
     */
    @Override
    public WorldSleepResult canSleepAt(EntityPlayer player, BlockPos pos) {
        switch (GeneralConfiguration.bedBehaviour) {
            case 0:
                Logging.message(player, "You cannot sleep in this dimension!");
                return WorldSleepResult.DENY;
            // In case 1, just do the usual thing (this typically mean explosion).
            case 2:
                player.setSpawnChunk(pos, true, getDimension());
                Logging.message(player, "Spawn point set!");
                return WorldSleepResult.DENY;
        }
        return super.canSleepAt(player, pos);
    }

    /**
     * Respawn in this dimension only when configured/allowed AND the dimension
     * still has at least 1000 units of power; otherwise fall back to the
     * configured spawn dimension.
     */
    @Override
    public int getRespawnDimension(EntityPlayerMP player) {
        getDimensionInformation();
        if (GeneralConfiguration.respawnSameDim || (dimensionInformation != null && dimensionInformation.isRespawnHere())) {
            DimensionStorage dimensionStorage = getStorage();
            long power = dimensionStorage.getEnergyLevel(getDimension());
            if (power < 1000) {
                return GeneralConfiguration.spawnDimension;
            } else {
                return getDimension();
            }
        }
        return GeneralConfiguration.spawnDimension;
    }

    @Override
    public IChunkGenerator createChunkGenerator() {
        int dim = getDimension();
        // Make sure the seed is resolved before the generator captures it.
        setSeed(dim);
        return new GenericChunkGenerator(world, seed);
    }

    @Override
    public Biome getBiomeForCoords(BlockPos pos) {
        return super.getBiomeForCoords(pos);
    }

    @Override
    public int getActualHeight() {
        return 256;
    }

    // Client-side throttle: last time (ms) a power-level request was sent from getFogColor().
    private static long lastFogTime = 0;

    @SideOnly(Side.CLIENT)
    @Override
    public float getCloudHeight() {
        getDimensionInformation();
        if (dimensionInformation != null && dimensionInformation.getTerrainType() == TerrainType.TERRAIN_INVERTIGO) {
            return 5;
        }
        return super.getCloudHeight();
    }

    /**
     * Fog color scaled by the sky descriptor's fog factors and the power-blackout
     * factor. Requests a fresh energy level from the server at most once a second.
     */
    @Override
    @SideOnly(Side.CLIENT)
    public Vec3d getFogColor(float angle, float dt) {
        int dim = getDimension();
        if (System.currentTimeMillis() - lastFogTime > 1000) {
            lastFogTime = System.currentTimeMillis();
            RFToolsDimMessages.INSTANCE.sendToServer(new PacketGetDimensionEnergy(dim));
        }
        float factor = calculatePowerBlackout(dim);
        getDimensionInformation();
        Vec3d color = super.getFogColor(angle, dt);
        if (dimensionInformation == null) {
            return color;
        } else {
            float r = dimensionInformation.getSkyDescriptor().getFogColorFactorR() * factor;
            float g = dimensionInformation.getSkyDescriptor().getFogColorFactorG() * factor;
            float b = dimensionInformation.getSkyDescriptor().getFogColorFactorB() * factor;
            return new Vec3d(color.x * r, color.y * g, color.z * b);
        }
    }

    // Client-side throttle: last time (ms) a power-level request was sent from getSkyColor().
    private static long lastTime = 0;

    /**
     * Sky color scaled by the sky descriptor's color factors and the power-blackout
     * factor. Requests a fresh energy level from the server at most once a second.
     */
    @Override
    @SideOnly(Side.CLIENT)
    public Vec3d getSkyColor(Entity cameraEntity, float partialTicks) {
        int dim = getDimension();
        if (System.currentTimeMillis() - lastTime > 1000) {
            lastTime = System.currentTimeMillis();
            RFToolsDimMessages.INSTANCE.sendToServer(new PacketGetDimensionEnergy(dim));
        }
        float factor = calculatePowerBlackout(dim);
        getDimensionInformation();
        Vec3d skyColor = super.getSkyColor(cameraEntity, partialTicks);
        if (dimensionInformation == null) {
            return skyColor;
        } else {
            float r = dimensionInformation.getSkyDescriptor().getSkyColorFactorR() * factor;
            float g = dimensionInformation.getSkyDescriptor().getSkyColorFactorG() * factor;
            float b = dimensionInformation.getSkyDescriptor().getSkyColorFactorB() * factor;
            return new Vec3d(skyColor.x * r, skyColor.y * g, skyColor.z * b);
        }
    }

    @Override
    @SideOnly(Side.CLIENT)
    public Vec3d getCloudColor(float partialTicks) {
        getDimensionInformation();
        Vec3d cloudColor = super.getCloudColor(partialTicks);
        // Kenney patreon clouds keep the vanilla color (they use a custom renderer).
        if (dimensionInformation == null || dimensionInformation.isPatreonBitSet(PatreonType.PATREON_KENNEY)) {
            return cloudColor;
        } else {
            float r = dimensionInformation.getSkyDescriptor().getCloudColorFactorR();
            float g = dimensionInformation.getSkyDescriptor().getCloudColorFactorG();
            float b = dimensionInformation.getSkyDescriptor().getCloudColorFactorB();
            return new Vec3d(cloudColor.x * r, cloudColor.y * g, cloudColor.z * b);
        }
    }

    /**
     * Maps the dimension's remaining power to a brightness factor in (0, 1]:
     * full brightness above WARN0, then piecewise-linearly darker through the
     * WARN1/WARN2/WARN3 thresholds down toward 0 as power runs out.
     */
    private float calculatePowerBlackout(int dim) {
        float factor = 1.0f;
        long power = getStorage().getEnergyLevel(dim);
        if (power < PowerConfiguration.DIMPOWER_WARN3) {
            factor = ((float) power) / PowerConfiguration.DIMPOWER_WARN3 * 0.2f;
        } else if (power < PowerConfiguration.DIMPOWER_WARN2) {
            factor = (float) (power - PowerConfiguration.DIMPOWER_WARN3) / (PowerConfiguration.DIMPOWER_WARN2 - PowerConfiguration.DIMPOWER_WARN3) * 0.3f + 0.2f;
        } else if (power < PowerConfiguration.DIMPOWER_WARN1) {
            factor = (float) (power - PowerConfiguration.DIMPOWER_WARN2) / (PowerConfiguration.DIMPOWER_WARN1 - PowerConfiguration.DIMPOWER_WARN2) * 0.3f + 0.5f;
        } else if (power < PowerConfiguration.DIMPOWER_WARN0) {
            factor = (float) (power - PowerConfiguration.DIMPOWER_WARN1) / (PowerConfiguration.DIMPOWER_WARN0 - PowerConfiguration.DIMPOWER_WARN1) * 0.2f + 0.8f;
        }
        return factor;
    }

    @Override
    @SideOnly(Side.CLIENT)
    public float getSunBrightness(float par1) {
        getDimensionInformation();
        if (dimensionInformation == null) {
            return super.getSunBrightness(par1);
        }
        int dim = getDimension();
        float factor = calculatePowerBlackout(dim);
        return super.getSunBrightness(par1) * dimensionInformation.getSkyDescriptor().getSunBrightnessFactor() * factor;
    }

    @Override
    @SideOnly(Side.CLIENT)
    public float getStarBrightness(float par1) {
        getDimensionInformation();
        if (dimensionInformation == null) {
            return super.getStarBrightness(par1);
        }
        return super.getStarBrightness(par1) * dimensionInformation.getSkyDescriptor().getStarBrightnessFactor();
    }

    /**
     * Server-side: forces the dimension's configured rain/thunder strength after
     * vanilla weather updates. A descriptor value of -1 (anything <= -0.5) means
     * "leave vanilla weather alone"; values near 0 are clamped and turn the
     * weather flag off entirely.
     */
    @Override
    public void updateWeather() {
        super.updateWeather();
        if (!world.isRemote) {
            getDimensionInformation();
            if (dimensionInformation != null) {
                WeatherDescriptor descriptor = dimensionInformation.getWeatherDescriptor();
                float rs = descriptor.getRainStrength();
                if (rs > -0.5f) {
                    world.rainingStrength = rs;
                    if (Math.abs(world.rainingStrength) < 0.001) {
                        world.prevRainingStrength = 0;
                        world.rainingStrength = 0;
                        world.getWorldInfo().setRaining(false);
                    }
                }
                float ts = descriptor.getThunderStrength();
                if (ts > -0.5f) {
                    world.thunderingStrength = ts;
                    if (Math.abs(world.thunderingStrength) < 0.001) {
                        world.prevThunderingStrength = 0;
                        world.thunderingStrength = 0;
                        world.getWorldInfo().setThundering(false);
                    }
                }
            }
        }
    }

    /**
     * Celestial angle (sun position): fixed at 0.5 for sky-less terrains,
     * overridden by a fixed angle when configured, optionally time-scaled,
     * otherwise vanilla.
     */
    @Override
    public float calculateCelestialAngle(long time, float dt) {
        getDimensionInformation();
        if (dimensionInformation == null) {
            return super.calculateCelestialAngle(time, dt);
        }
        if (!dimensionInformation.getTerrainType().hasSky()) {
            return 0.5F;
        }
        if (dimensionInformation.getCelestialAngle() == null) {
            if (dimensionInformation.getTimeSpeed() == null) {
                return super.calculateCelestialAngle(time, dt);
            } else {
                return super.calculateCelestialAngle((long) (time * dimensionInformation.getTimeSpeed()), dt);
            }
        } else {
            return dimensionInformation.getCelestialAngle();
        }
    }

    //------------------------ RFToolsWorldProvider

    @Override
    public long getCurrentRF() {
        // DimensionStorage dimensionStorage = DimensionStorage.getDimensionStorage(worldObj);
        return getStorage().getEnergyLevel(getDimension());
    }
}
package org.jboss.aerogear.unifiedpush.rest.registry.installations;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.UUID;

import javax.inject.Inject;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.conn.DnsResolver;
import org.apache.http.conn.socket.ConnectionSocketFactory;
import org.apache.http.conn.socket.PlainConnectionSocketFactory;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.BasicHttpClientConnectionManager;
import org.apache.http.impl.conn.SystemDefaultDnsResolver;
import org.jboss.aerogear.unifiedpush.api.Alias;
import org.jboss.aerogear.unifiedpush.api.PushApplication;
import org.jboss.aerogear.unifiedpush.cassandra.dao.AliasDao;
import org.jboss.aerogear.unifiedpush.rest.RestEndpointTest;
import org.jboss.aerogear.unifiedpush.rest.WebConfigTest;
import org.jboss.aerogear.unifiedpush.rest.util.HttpBasicHelper;
import org.jboss.aerogear.unifiedpush.service.annotations.LoggedInUser;
import org.jboss.aerogear.unifiedpush.service.impl.AliasServiceImpl;
import org.jboss.aerogear.unifiedpush.service.impl.spring.IKeycloakService;
import org.jboss.aerogear.unifiedpush.service.impl.spring.OAuth2Configuration;
import org.jboss.resteasy.client.jaxrs.ResteasyClient;
import org.jboss.resteasy.client.jaxrs.ResteasyClientBuilder;
import org.jboss.resteasy.client.jaxrs.ResteasyWebTarget;
import org.junit.Assert;
import org.junit.Test;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.context.SpringBootTest.WebEnvironment;

import com.datastax.driver.core.utils.UUIDs;
import com.google.gson.Gson;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Integration tests for the alias REST endpoints ({@code /rest/alias/*}).
 *
 * <p>The {@code isassociated}/{@code registered} tests resolve a fake domain to
 * localhost through a custom {@link DnsResolver} so the server-side domain/subdomain
 * matching logic can be exercised end-to-end. The register/delete tests talk to the
 * endpoint through a RESTEasy client with HTTP basic auth.</p>
 */
@SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT, classes = { WebConfigTest.class })
public class AliasEndpointTest extends RestEndpointTest {
    public static final String LOCALHOST = "localhost";
    public static final UUID APP_ID = UUIDs.timeBased();
    public static final String OTHER = "other-";

    private static final String realmName = "test-app";
    private static final String clientName = realmName;
    private static final String userName = "testUser@mail.com";
    private static final String userMail = userName;
    private static final String userPassword = "password";
    private static final String domain = "testDomain.com";

    public static final String NEW = "new";

    @Inject
    private IKeycloakService keycloakService;
    @Inject
    private AliasDao aliasDao;

    @Test
    public void isAssociatedPositiveWithClientNameAtRequest() {
        UUID userId = UUIDs.timeBased();
        init(domain, realmName, clientName, userName, userMail, userPassword, APP_ID, userId);

        final String hostName = clientName + keycloakService.separator() + domain;
        AliasServiceImpl.Associated associated = queryAssociated(hostName, userName);

        assertNotNull(associated);
        assertTrue(associated.isAssociated());
        assertEquals(clientName + keycloakService.separator() + domain, associated.getSubdomain());
    }

    @Test
    public void isAssociatedPositiveWithoutClientNameAtRequest() {
        UUID userId = UUIDs.timeBased();
        init(domain, realmName, clientName, userName, userMail, userPassword, APP_ID, userId);

        AliasServiceImpl.Associated associated = queryAssociated(domain, userName);

        assertNotNull(associated);
        assertTrue(associated.isAssociated());
        assertEquals(clientName + keycloakService.separator() + domain, associated.getSubdomain());
    }

    @Test
    public void isAssociatedNegativeWithOtherDomain() {
        UUID userId = UUIDs.timeBased();
        init(domain, realmName, clientName, userName, userMail, userPassword, APP_ID, userId);

        // Query through a domain the user was never registered under.
        final String otherDomain = OTHER + domain;
        final String hostName = clientName + keycloakService.separator() + otherDomain;
        AliasServiceImpl.Associated associated = queryAssociated(hostName, userName);

        assertNotNull(associated);
        assertFalse(associated.isAssociated());
    }

    @Test
    public void isAssociatedNegativeUserInOtherRealm() {
        UUID newAppId = UUIDs.timeBased();
        UUID userId = UUIDs.timeBased();
        UUID newUserId = UUIDs.timeBased();

        String newRealmName = NEW + realmName;
        String newClientName = NEW + clientName;
        String newUserName = NEW + userName;
        String newUserMail = NEW + userMail;

        // User exists, but only in a different realm/application.
        init(domain, newRealmName, newClientName, newUserName, newUserMail, userPassword, newAppId, newUserId);
        init(domain, realmName, clientName, userName, userMail, userPassword, APP_ID, userId);

        final String hostName = clientName + keycloakService.separator() + domain;
        AliasServiceImpl.Associated associated = queryAssociated(hostName, newUserName);

        assertNotNull(associated);
        assertFalse(associated.isAssociated());
    }

    @Test
    public void isAssociatedNegativeUserDoesntExist() {
        UUID userId = UUIDs.timeBased();
        init(domain, realmName, clientName, userName, userMail, userPassword, APP_ID, userId);

        String notUser = "Not" + userName;
        AliasServiceImpl.Associated associated = queryAssociated(domain, notUser);

        assertNotNull(associated);
        assertFalse(associated.isAssociated());
    }

    @Test
    public void registeredPositiveWithoutClientNameAtRequest() {
        UUID userId = UUIDs.timeBased();
        init(domain, realmName, clientName, userName, userMail, userPassword, APP_ID, userId);

        Boolean isRegistered = queryRegistered(domain, userName);

        assertNotNull(isRegistered);
        assertTrue(isRegistered);
    }

    @Test
    public void registeredPositiveWithClientNameAtRequest() {
        UUID userId = UUIDs.timeBased();
        init(domain, realmName, clientName, userName, userMail, userPassword, APP_ID, userId);

        final String hostName = clientName + keycloakService.separator() + domain;
        Boolean isRegistered = queryRegistered(hostName, userName);

        assertNotNull(isRegistered);
        assertTrue(isRegistered);
    }

    @Test
    public void registeredNegativeUserDoesntExist() {
        UUID userId = UUIDs.timeBased();
        init(domain, realmName, clientName, userName, userMail, userPassword, APP_ID, userId);

        String notUser = "Not" + userName;
        Boolean isRegistered = queryRegistered(domain, notUser);

        assertNotNull(isRegistered);
        assertFalse(isRegistered);
    }

    @Test
    public void registeredNegativeUserRegisteredInOtherRealm() {
        UUID newAppId = UUIDs.timeBased();
        UUID userId = UUIDs.timeBased();
        UUID newUserId = UUIDs.timeBased();

        String newRealmName = NEW + realmName;
        String newClientName = NEW + clientName;
        String newUserName = NEW + userName;
        String newUserMail = NEW + userMail;

        init(domain, newRealmName, newClientName, newUserName, newUserMail, userPassword, newAppId, newUserId);
        init(domain, realmName, clientName, userName, userMail, userPassword, APP_ID, userId);

        final String hostName = clientName + keycloakService.separator() + domain;
        Boolean isRegistered = queryRegistered(hostName, newUserName);

        assertNotNull(isRegistered);
        assertFalse(isRegistered);
    }

    /**
     * Executes {@code GET /rest/alias/isassociated/<aliasName>} against {@code hostName}
     * (resolved to localhost via the custom DNS resolver) and parses the JSON body.
     * Fails the test on I/O errors.
     */
    private AliasServiceImpl.Associated queryAssociated(String hostName, String aliasName) {
        HttpClient httpClient = getHttpClientWithCustomDns(hostName);
        HttpGet httpRequest = new HttpGet("http://" + hostName + ":8080/rest/alias/isassociated/" + aliasName);
        try {
            return parseResponse(httpClient.execute(httpRequest), AliasServiceImpl.Associated.class);
        } catch (IOException e) {
            e.printStackTrace();
            fail(e.getMessage());
            return null; // unreachable: fail() throws
        }
    }

    /**
     * Executes {@code GET /rest/alias/registered/<aliasName>} against {@code hostName}
     * (resolved to localhost via the custom DNS resolver) and parses the JSON body.
     * Fails the test on I/O errors.
     */
    private Boolean queryRegistered(String hostName, String aliasName) {
        HttpClient httpClient = getHttpClientWithCustomDns(hostName);
        HttpGet httpRequest = new HttpGet("http://" + hostName + ":8080/rest/alias/registered/" + aliasName);
        try {
            return parseResponse(httpClient.execute(httpRequest), Boolean.class);
        } catch (IOException e) {
            e.printStackTrace();
            fail(e.getMessage());
            return null; // unreachable: fail() throws
        }
    }

    /**
     * Shared fixture: sets the enforced OAuth2 domain, (re)creates the Keycloak realm,
     * client and verified user, and stores the matching alias row in the DB.
     */
    private void init(String domain, String realmName, String clientName, String userName, String userMail,
            String userPassword, UUID appId, UUID userId) {
        System.setProperty(OAuth2Configuration.KEY_OAUTH2_ENFORE_DOMAIN, domain);

        // Create realm
        keycloakService.createRealmIfAbsent(realmName);

        // Create client (replacing any leftover application with the same name)
        PushApplication newApplication = new PushApplication();
        newApplication.setName(clientName);
        newApplication.setPushApplicationID(appId.toString());
        PushApplication oldApplication = pushApplicationService.findByName(clientName);
        if (oldApplication != null) {
            pushApplicationService.removePushApplication(oldApplication);
        }
        pushApplicationService.addPushApplication(newApplication, new LoggedInUser(userName));
        keycloakService.createClientIfAbsent(newApplication);
        keycloakService.setDirectAccessGrantsEnabled(clientName, realmName, true);

        // Create user at newly created realm
        keycloakService.delete(userName, clientName);
        keycloakService.createVerifiedUserIfAbsent(userName, userPassword, Collections.emptyList(), realmName);
        keycloakService.setPasswordUpdateRequired(userName, realmName, false);
        keycloakService.updateUserPassword(userName, userPassword, userPassword, clientName);

        // Create user at DB
        String other = "";
        Alias alias = new Alias(UUID.fromString(newApplication.getPushApplicationID()), userId, userMail, other);
        aliasDao.create(alias);
    }

    /**
     * Reads the full response body and deserializes it with Gson.
     *
     * <p>Fix: the previous implementation sized the buffer with
     * {@code InputStream.available()} and issued a single {@code read()} — both are
     * unreliable (available() is only an estimate and read() may return fewer bytes)
     * and it opened {@code getContent()} twice. This version drains the stream fully
     * and closes it.</p>
     */
    private <T> T parseResponse(HttpResponse httpResponse, Class<T> responseType) throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (InputStream content = httpResponse.getEntity().getContent()) {
            byte[] chunk = new byte[4096];
            int read;
            while ((read = content.read(chunk)) != -1) {
                buffer.write(chunk, 0, read);
            }
        }
        Gson gson = new Gson();
        return gson.fromJson(buffer.toString("UTF-8"), responseType);
    }

    /**
     * Builds an HttpClient whose DNS resolver maps {@code hostName} to localhost,
     * so requests to the fake test domain hit the locally running server.
     */
    private HttpClient getHttpClientWithCustomDns(String hostName) {
        DnsResolver dnsResolver = new SystemDefaultDnsResolver() {
            @Override
            public InetAddress[] resolve(final String host) throws UnknownHostException {
                if (host.equalsIgnoreCase(hostName)) {
                    /* If we match the host we're trying to talk to,
                       return the IP address we want, not what is in DNS */
                    return new InetAddress[] { InetAddress.getByName(LOCALHOST) };
                } else {
                    /* Else, resolve it as we would normally */
                    return super.resolve(host);
                }
            }
        };

        /* HttpClientConnectionManager allows us to use custom DnsResolver */
        BasicHttpClientConnectionManager connManager = new BasicHttpClientConnectionManager(
                /* We're forced to create a SocketFactory Registry. Passing null
                   doesn't force a default Registry, so we re-invent the wheel. */
                RegistryBuilder.<ConnectionSocketFactory>create()
                        .register("http", PlainConnectionSocketFactory.getSocketFactory())
                        .register("https", SSLConnectionSocketFactory.getSocketFactory())
                        .build(),
                null, /* Default ConnectionFactory */
                null, /* Default SchemePortResolver */
                dnsResolver /* Our DnsResolver */
        );

        /* build HttpClient that will use our DnsResolver */
        return HttpClientBuilder.create()
                .setConnectionManager(connManager)
                .build();
    }

    @Test
    public void registerAlias() {
        ResteasyClient client = new ResteasyClientBuilder().build();
        ResteasyWebTarget target = client.target(getRestFullPath() + "/alias");

        Alias original = new Alias(UUID.fromString(DEFAULT_APP_ID), UUIDs.timeBased(), "Supprot@AeroGear.org");

        // Create Alias
        Response response = HttpBasicHelper.basic(target.request(), DEFAULT_APP_ID, DEFAULT_APP_PASS)
                .post(Entity.entity(original, MediaType.APPLICATION_JSON_TYPE));
        assertEquals(200, response.getStatus());
        response.close();

        // Query for previously created alias by alias lower(name)
        target = client.target(getRestFullPath() + "/alias/name/" + original.getEmail().toLowerCase());
        response = HttpBasicHelper.basic(target.request(), DEFAULT_APP_ID, DEFAULT_APP_PASS).get();
        assertEquals(200, response.getStatus());
        Alias alias = response.readEntity(Alias.class);
        // Fix: was a non-short-circuit '&', which NPEs instead of failing cleanly.
        assertNotNull(alias);
        assertEquals(original.getEmail(), alias.getEmail());
        response.close();

        // Query for previously created alias by alias id
        target = client.target(getRestFullPath() + "/alias/" + original.getId());
        response = HttpBasicHelper.basic(target.request(), DEFAULT_APP_ID, DEFAULT_APP_PASS).get();
        assertEquals(200, response.getStatus());
        alias = response.readEntity(Alias.class);
        assertNotNull(alias);
        assertEquals(original.getEmail(), alias.getEmail());
        response.close();
    }

    @Test
    public void registerAliases() {
        ResteasyWebTarget target = getAllAliasesTarget(getRestFullPath());

        List<Alias> aliases = new ArrayList<>();
        aliases.add(new Alias(UUID.fromString(DEFAULT_APP_ID), UUIDs.timeBased(), "Supprot@AeroGear.org"));
        aliases.add(new Alias(UUID.fromString(DEFAULT_APP_ID), UUIDs.timeBased(), "Test@AeroGear.org"));
        aliases.add(new Alias(UUID.fromString(DEFAULT_APP_ID), UUIDs.timeBased(), "Help@AeroGear.org"));

        // Create 3 Aliases
        Response response = target.request().post(Entity.entity(aliases, MediaType.APPLICATION_JSON_TYPE));
        assertEquals(200, response.getStatus());
        response.close();

        // Re-Create 3 Aliases
        response = HttpBasicHelper.basic(target.request(), DEFAULT_APP_ID, DEFAULT_APP_PASS)
                .post(Entity.entity(aliases, MediaType.APPLICATION_JSON_TYPE));
        assertEquals(200, response.getStatus());
        response.close();

        // Re-Create 2 first Aliases
        // Fix: was subList(0, 1), which only re-sent one alias despite the intent above.
        response = HttpBasicHelper.basic(target.request(), DEFAULT_APP_ID, DEFAULT_APP_PASS)
                .post(Entity.entity(aliases.subList(0, 2), MediaType.APPLICATION_JSON_TYPE));
        assertEquals(200, response.getStatus());
        response.close();

        // Query for previously created aliases
        for (Alias alias : aliases) {
            target = getAliasByNameTarget(getRestFullPath(), alias.getEmail());
            response = HttpBasicHelper.basic(target.request(), DEFAULT_APP_ID, DEFAULT_APP_PASS).get();
            assertEquals(200, response.getStatus());
            Alias aliasObj = response.readEntity(Alias.class);
            assertNotNull(aliasObj);
            assertEquals(alias.getEmail(), aliasObj.getEmail());
            response.close();
        }
    }

    @Test
    public void delete() {
        ResteasyClient client = new ResteasyClientBuilder().build();
        ResteasyWebTarget target = client.target(getRestFullPath() + "/alias");

        Alias original = new Alias(UUID.fromString(DEFAULT_APP_ID), UUIDs.timeBased(), "Supprot888@AeroGear.org");

        // Create Alias
        Response response = HttpBasicHelper.basic(target.request(), DEFAULT_APP_ID, DEFAULT_APP_PASS)
                .post(Entity.entity(original, MediaType.APPLICATION_JSON_TYPE));
        assertEquals(200, response.getStatus());
        response.close();

        // Query for previously created alias by alias lower(name)
        target = client.target(getRestFullPath() + "/alias/name/" + original.getEmail().toLowerCase());
        response = HttpBasicHelper.basic(target.request(), DEFAULT_APP_ID, DEFAULT_APP_PASS).get();
        assertEquals(200, response.getStatus());
        Alias alias = response.readEntity(Alias.class);
        assertNotNull(alias);
        assertEquals(original.getEmail(), alias.getEmail());
        assertEquals(original.getId(), alias.getId());
        response.close();

        // Delete alias
        target = client.target(getRestFullPath() + "/alias/" + alias.getId());
        response = HttpBasicHelper.basic(target.request(), DEFAULT_APP_ID, DEFAULT_APP_PASS).delete();
        assertEquals(200, response.getStatus());
        response.close();

        // Query for previously deleted alias by alias id
        target = client.target(getRestFullPath() + "/alias/" + original.getId());
        response = HttpBasicHelper.basic(target.request(), DEFAULT_APP_ID, DEFAULT_APP_PASS).get();
        assertEquals(200, response.getStatus());
        Assert.assertTrue(!response.hasEntity());
        response.close();
    }
}
/*
 * Copyright 2012-2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.autoconfigure.jdbc;

import java.sql.SQLException;

import javax.sql.DataSource;
import javax.sql.XADataSource;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.tomcat.jdbc.pool.DataSourceProxy;
import org.springframework.beans.factory.BeanFactoryUtils;
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.condition.ConditionOutcome;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.autoconfigure.condition.SpringBootCondition;
import org.springframework.boot.autoconfigure.jdbc.DataSourceInitializerPostProcessor.Registrar;
import org.springframework.boot.autoconfigure.jdbc.metadata.DataSourcePoolMetadataProvidersConfiguration;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Condition;
import org.springframework.context.annotation.ConditionContext;
import org.springframework.context.annotation.Conditional;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.core.Ordered;
import org.springframework.core.annotation.Order;
import org.springframework.core.type.AnnotatedTypeMetadata;
import org.springframework.jdbc.core.JdbcOperations;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType;

/**
 * {@link EnableAutoConfiguration Auto-configuration} for {@link DataSource}.
 *
 * @author Dave Syer
 * @author Phillip Webb
 */
@Configuration
@ConditionalOnClass({ DataSource.class, EmbeddedDatabaseType.class })
@EnableConfigurationProperties(DataSourceProperties.class)
@Import({ Registrar.class, DataSourcePoolMetadataProvidersConfiguration.class })
public class DataSourceAutoConfiguration {

	// FIX: marked final — the logger is assigned once and never reassigned.
	private static final Log logger = LogFactory
			.getLog(DataSourceAutoConfiguration.class);

	/**
	 * Determines if the {@code dataSource} being used by Spring was created from
	 * {@link EmbeddedDataSourceConfiguration}.
	 * @param beanFactory the bean factory to inspect for the {@code dataSource} bean
	 * @return true if the data source was auto-configured.
	 */
	public static boolean containsAutoConfiguredDataSource(
			ConfigurableListableBeanFactory beanFactory) {
		try {
			BeanDefinition beanDefinition = beanFactory.getBeanDefinition("dataSource");
			return EmbeddedDataSourceConfiguration.class.getName().equals(
					beanDefinition.getFactoryBeanName());
		}
		catch (NoSuchBeanDefinitionException ex) {
			// No "dataSource" bean at all, so it cannot have been auto-configured.
			return false;
		}
	}

	@Conditional(DataSourceAutoConfiguration.EmbeddedDataSourceCondition.class)
	@ConditionalOnMissingBean({ DataSource.class, XADataSource.class })
	@Import(EmbeddedDataSourceConfiguration.class)
	protected static class EmbeddedConfiguration {

	}

	@Configuration
	@ConditionalOnMissingBean(DataSourceInitializer.class)
	protected static class DataSourceInitializerConfiguration {

		@Bean
		public DataSourceInitializer dataSourceInitializer() {
			return new DataSourceInitializer();
		}

	}

	@Conditional(DataSourceAutoConfiguration.NonEmbeddedDataSourceCondition.class)
	@ConditionalOnMissingBean({ DataSource.class, XADataSource.class })
	protected static class NonEmbeddedConfiguration {

		@Autowired
		private DataSourceProperties properties;

		@Bean
		@ConfigurationProperties(prefix = DataSourceProperties.PREFIX)
		public DataSource dataSource() {
			DataSourceBuilder factory = DataSourceBuilder
					.create(this.properties.getClassLoader())
					.driverClassName(this.properties.getDriverClassName())
					.url(this.properties.getUrl())
					.username(this.properties.getUsername())
					.password(this.properties.getPassword());
			return factory.build();
		}

	}

	@Configuration
	@Conditional(DataSourceAutoConfiguration.DataSourceAvailableCondition.class)
	protected static class JdbcTemplateConfiguration {

		@Autowired(required = false)
		private DataSource dataSource;

		@Bean
		@ConditionalOnMissingBean(JdbcOperations.class)
		public JdbcTemplate jdbcTemplate() {
			return new JdbcTemplate(this.dataSource);
		}

		@Bean
		@ConditionalOnMissingBean(NamedParameterJdbcOperations.class)
		public NamedParameterJdbcTemplate namedParameterJdbcTemplate() {
			return new NamedParameterJdbcTemplate(this.dataSource);
		}

	}

	@Configuration
	@ConditionalOnProperty(prefix = "spring.datasource", name = "jmx-enabled")
	@ConditionalOnClass(name = "org.apache.tomcat.jdbc.pool.DataSourceProxy")
	@Conditional(DataSourceAutoConfiguration.DataSourceAvailableCondition.class)
	@ConditionalOnMissingBean(name = "dataSourceMBean")
	protected static class TomcatDataSourceJmxConfiguration {

		@Bean
		public Object dataSourceMBean(DataSource dataSource) {
			if (dataSource instanceof DataSourceProxy) {
				try {
					return ((DataSourceProxy) dataSource).createPool().getJmxPool();
				}
				catch (SQLException ex) {
					// FIX: include the exception so the cause is not lost in logs.
					logger.warn("Cannot expose DataSource to JMX (could not connect)",
							ex);
				}
			}
			return null;
		}

	}

	/**
	 * {@link Condition} to test is a supported non-embedded {@link DataSource} type is
	 * available.
	 */
	static class NonEmbeddedDataSourceCondition extends SpringBootCondition {

		@Override
		public ConditionOutcome getMatchOutcome(ConditionContext context,
				AnnotatedTypeMetadata metadata) {
			if (getDataSourceClassLoader(context) != null) {
				return ConditionOutcome.match("supported DataSource class found");
			}
			return ConditionOutcome.noMatch("missing supported DataSource");
		}

		/**
		 * Returns the class loader for the {@link DataSource} class. Used to ensure that
		 * the driver class can actually be loaded by the data source.
		 */
		private ClassLoader getDataSourceClassLoader(ConditionContext context) {
			Class<?> dataSourceClass = new DataSourceBuilder(context.getClassLoader())
					.findType();
			return (dataSourceClass == null ? null : dataSourceClass.getClassLoader());
		}

	}

	/**
	 * {@link Condition} to detect when an embedded {@link DataSource} type can be used.
	 */
	static class EmbeddedDataSourceCondition extends SpringBootCondition {

		private final SpringBootCondition nonEmbedded = new NonEmbeddedDataSourceCondition();

		@Override
		public ConditionOutcome getMatchOutcome(ConditionContext context,
				AnnotatedTypeMetadata metadata) {
			// A usable non-embedded DataSource always wins over an embedded one.
			if (anyMatches(context, metadata, this.nonEmbedded)) {
				return ConditionOutcome
						.noMatch("existing non-embedded database detected");
			}
			EmbeddedDatabaseType type = EmbeddedDatabaseConnection.get(
					context.getClassLoader()).getType();
			if (type == null) {
				return ConditionOutcome.noMatch("no embedded database detected");
			}
			return ConditionOutcome.match("embedded database " + type + " detected");
		}

	}

	/**
	 * {@link Condition} to detect when a {@link DataSource} is available (either because
	 * the user provided one or because one will be auto-configured)
	 */
	@Order(Ordered.LOWEST_PRECEDENCE - 10)
	static class DataSourceAvailableCondition extends SpringBootCondition {

		private final SpringBootCondition nonEmbedded = new NonEmbeddedDataSourceCondition();

		private final SpringBootCondition embeddedCondition = new EmbeddedDataSourceCondition();

		@Override
		public ConditionOutcome getMatchOutcome(ConditionContext context,
				AnnotatedTypeMetadata metadata) {
			if (hasBean(context, DataSource.class)
					|| hasBean(context, XADataSource.class)) {
				return ConditionOutcome
						.match("existing bean configured database detected");
			}
			if (anyMatches(context, metadata, this.nonEmbedded, this.embeddedCondition)) {
				return ConditionOutcome.match("existing auto database detected");
			}
			return ConditionOutcome.noMatch("no existing bean configured database");
		}

		private boolean hasBean(ConditionContext context, Class<?> type) {
			return BeanFactoryUtils.beanNamesForTypeIncludingAncestors(
					context.getBeanFactory(), type, true, false).length > 0;
		}

	}

}
package me.reciprocal.api.auth;

import java.io.UnsupportedEncodingException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.Base64;
import java.util.UUID;

import me.reciprocal.api.app.exceptions.InternalServerErrorException;
import me.reciprocal.api.auth.exceptions.InvalidAccessTokenException;
import me.reciprocal.api.auth.exceptions.LoginException;
import me.reciprocal.api.people.Person;
import me.reciprocal.api.people.PersonRepository;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

/**
 * REST endpoint handling login and internal request authorisation.
 */
@RestController
@RequestMapping(value = "/login")
public class AuthService {

    @Autowired
    private AuthRepository authRepository;

    @Autowired
    private PersonRepository personRepository;

    // FIX: Spring property placeholders use the "${...}" syntax; the original
    // "@{reciprocal.auth.salt}" is not a placeholder, so the literal string
    // "@{reciprocal.auth.salt}" was injected as the application salt.
    @Value("${reciprocal.auth.salt}")
    private String applicationSalt;

    // Number of extra digest rounds applied when hashing a password.
    private static final int HASH_ITERATION_COUNT = 1000;

    /**
     * Authenticates a user and issues a fresh access token.
     *
     * @param username submitted username
     * @param password submitted plaintext password
     * @return an {@link AuthResponse} holding the new access token and the person
     * @throws LoginException if the credentials are rejected
     * @throws InternalServerErrorException if the token could not be persisted
     */
    @RequestMapping(value = "/", method = RequestMethod.POST)
    public AuthResponse login(@RequestParam(value = "username", required = true) final String username,
            @RequestParam(value = "password", required = true) final String password)
            throws LoginException, InternalServerErrorException {
        // ALWAYS call checkPassword() in order to be resistant towards time signature attacks and
        // prevent information leaking about whether there is an account with the username `username`.
        final Person person = this.personRepository.getByUsername(username);

        // TODO re-enable password checking in production
        // NOTE(review): with this block commented out ANY password is accepted;
        // this must not ship as-is.
        /*
        if (!this.checkPassword(password, person)) {
            throw new LoginException(username);
        }
        */

        // Regenerate on the (unlikely) chance of a token collision.
        String accessToken = AuthService.generateAccessToken();
        while (this.authRepository.accessTokenExists(accessToken)) {
            accessToken = AuthService.generateAccessToken();
        }

        this.authRepository.storeAccessToken(accessToken, person.getId());

        // TODO: maybe delete this extra check?
        if (!this.authRepository.accessTokenExists(accessToken)) {
            throw new InternalServerErrorException(
                    String.format("Failed to persist access token '%s' to store.", accessToken));
        }

        return new AuthResponse(accessToken, person);
    }

    /**
     * Authorise an API request.
     * <p>
     * This method is used internally within the API for request authorisation.
     *
     * @param rawTokenValue the raw value of the HTTP Authorization header
     * @return the ID of the person associated with the specified access token
     * @throws InvalidAccessTokenException if the access token is not of the correct form
     */
    public long authorise(final String rawTokenValue) throws InvalidAccessTokenException {
        String accessToken = null;
        try {
            accessToken = AuthService.extractAccessToken(rawTokenValue);
        } catch (IllegalArgumentException exception) {
            throw new InvalidAccessTokenException(accessToken);
        }

        if ((accessToken == null) || (accessToken.length() == 0)) {
            throw new InvalidAccessTokenException(accessToken);
        }

        final Long personId = this.authRepository.getPersonId(accessToken);
        if (personId == null) {
            throw new InvalidAccessTokenException(accessToken);
        }
        return personId;
    }

    /**
     * Extract access token from the raw Authorization HTTP header.
     *
     * @param rawTokenValue raw value of Authorization HTTP header
     * @return API access token of request
     * @throws IllegalArgumentException if {@code rawTokenValue} does not start with 'Bearer '
     */
    private static String extractAccessToken(final String rawTokenValue)
            throws IllegalArgumentException {
        // startsWith covers both the length and the prefix check of the original.
        if (!rawTokenValue.startsWith("Bearer ")) {
            throw new IllegalArgumentException("Access token does not begin with 'Bearer '.");
        }
        return rawTokenValue.substring(7);
    }

    /**
     * Generate an access token.
     *
     * @return a random 32-hex-character access token
     */
    private static String generateAccessToken() {
        return UUID.randomUUID().toString().replace("-", "");
    }

    /**
     * Checks if {@code password} is the correct password for the specified {@code person}.
     * <p>
     * Implementation is based on
     * <a href="https://www.owasp.org/index.php/Hashing_Java">OWASP guidelines</a>.
     * A dummy digest/salt is hashed even when {@code person} is null so that timing
     * does not leak whether the account exists.
     *
     * @param password password to check
     * @param person Person entity claimed to have {@code password} as their password
     * @return true if the password is correct, false otherwise
     * @throws InternalServerErrorException if base64 decoding fails, the hashing algorithm used
     *         cannot be found, or the byte[] encoding is not supported
     */
    private boolean checkPassword(final String password, final Person person)
            throws InternalServerErrorException {
        boolean validParameters = false;
        String digest = "000000000000000000000000000=";
        String salt = "00000000000=";

        if (person != null) {
            validParameters = true;
            digest = person.getPassword();
            salt = person.getSalt();
        }

        try {
            Base64.Decoder decoder = Base64.getDecoder();
            final byte[] byteDigest = decoder.decode(digest);
            final byte[] byteSalt = decoder.decode(salt);
            final byte[] generatedDigest = this.hash(AuthService.HASH_ITERATION_COUNT,
                    password, byteSalt, this.applicationSalt.getBytes("UTF-8"));
            // FIX: MessageDigest.isEqual is a constant-time comparison;
            // Arrays.equals short-circuits and can leak timing information.
            return MessageDigest.isEqual(generatedDigest, byteDigest) && validParameters;
        } catch (NoSuchAlgorithmException | UnsupportedEncodingException exception) {
            throw new InternalServerErrorException(
                    "API server failed to handle login inputs correctly.");
        }
    }

    /**
     * Hashes {@code plaintext}.
     *
     * @param iterationCount number of iterations of the hashing algorithm
     * @param plaintext plaintext input to be hashed
     * @param salt the salt to use in hashing the plaintext
     * @param applicationSalt the application salt
     * @return a byte[] of the hash of {@code plaintext}
     * @throws NoSuchAlgorithmException if the hashing algorithm used is not supported
     * @throws UnsupportedEncodingException if the string encoding used is not supported
     */
    private byte[] hash(final int iterationCount, final String plaintext, final byte[] salt,
            final byte[] applicationSalt) throws NoSuchAlgorithmException,
            UnsupportedEncodingException {
        MessageDigest messageDigest = MessageDigest.getInstance("SHA-512");
        messageDigest.reset();
        messageDigest.update(generateExtendedSalt(salt, applicationSalt));
        byte[] input = messageDigest.digest(plaintext.getBytes("UTF-8"));
        for (int i = 0; i < iterationCount; i++) {
            messageDigest.reset();
            input = messageDigest.digest(input);
        }
        return input;
    }

    /**
     * Generates an extended salt by augmenting the provided salt with the application salt.
     *
     * @param salt the salt to extend
     * @param applicationSalt the application salt
     * @return the application salt with {@code salt} appended
     * @throws UnsupportedEncodingException if the string encoding used is not supported
     */
    private byte[] generateExtendedSalt(final byte[] salt, final byte[] applicationSalt)
            throws UnsupportedEncodingException {
        byte[] extendedSalt = Arrays.copyOf(applicationSalt,
                applicationSalt.length + salt.length);
        // FIX: the original loop read salt[i] with i starting at
        // applicationSalt.length, which runs past the end of `salt`
        // (ArrayIndexOutOfBoundsException whenever applicationSalt is non-empty).
        System.arraycopy(salt, 0, extendedSalt, applicationSalt.length, salt.length);
        return extendedSalt;
    }
}
package com.example.wheather.view; import android.app.ProgressDialog; import android.content.Context; import android.content.Intent; import android.content.res.Resources; import android.graphics.Color; import android.os.Bundle; import android.support.design.widget.CoordinatorLayout; import android.support.design.widget.NavigationView; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.view.GravityCompat; import android.support.v4.widget.DrawerLayout; import android.support.v7.app.ActionBarDrawerToggle; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.Toolbar; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import com.example.wheather.R; import com.example.wheather.data.AppSettings; import com.example.wheather.data.mapper.DataConst; import com.example.wheather.data.DataManager; import com.example.wheather.data.gps.GPSTracker; import com.example.wheather.data.utils.LogUtils; import com.example.wheather.data.utils.Message; import com.example.wheather.view.fragments.HistoryFragment; import com.example.wheather.view.fragments.WeatherWeekFragment; import com.example.wheather.view.widget.TabletDrawerLayout; import com.google.gson.Gson; import rx.subscriptions.CompositeSubscription; public class MainActivity extends AppCompatActivity implements NavigationView.OnNavigationItemSelectedListener { private TabletDrawerLayout drawerLayout; private NavigationView navigationView; boolean isDrawerLocked = false; ProgressDialog progressDialog; CompositeSubscription subscription = new CompositeSubscription(); GPSTracker gpsTracker; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); initGPS(); initUI(); initModel(); } private void initGPS() { gpsTracker = new GPSTracker(MainActivity.this); if(gpsTracker.canGetLocation()){ double latitude = gpsTracker.getLatitude(); 
double longitude = gpsTracker.getLongitude(); AppSettings.setCoordLon(longitude); AppSettings.setCoordLat(latitude); LogUtils.E("Coords lat:"+latitude+"; lon"+longitude); gpsTracker.stopUsingGPS(); }else{ gpsTracker.showSettingsAlert(); } } @Override public void onDestroy(){ if(subscription!=null && !subscription.isUnsubscribed()) subscription.unsubscribe(); if(gpsTracker!=null) gpsTracker.stopUsingGPS(); super.onDestroy(); } private void initModel() { loadInitialFragment(); } private void loadInitialFragment() { FragmentManager fragmentManager = getSupportFragmentManager(); Fragment fragment; fragment = new WeatherWeekFragment(); navigationView.getMenu().getItem(1).setChecked(true); String tag = AppConst.FRAGMENT_WEEK; fragmentManager.beginTransaction() .add(R.id.detail_container, fragment, tag) .commit(); } private void initUI(){ Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar); setSupportActionBar(toolbar); initDrawer(toolbar); findViewById(R.id.btNavExitApp).setOnClickListener(v -> finish()); } private String getFragmentFromPrefs(){ return getSharedPreferences(AppConst.THEME_PREFERENCE, MODE_PRIVATE) .getString(AppConst.EXTRA_FRAGMENT_KEY, AppConst.FRAGMENT_WEEK); } private void saveFragment(String fragmentValue){ getSharedPreferences(AppConst.THEME_PREFERENCE, MODE_PRIVATE).edit() .putString(AppConst.EXTRA_FRAGMENT_KEY, fragmentValue).apply(); } private void initDrawer(Toolbar toolbar) { drawerLayout = (TabletDrawerLayout) findViewById(R.id.drawer_layout); navigationView = (NavigationView) findViewById(R.id.nav_view); isDrawerLocked = checkIsTablet(); if(isDrawerLocked){ drawerLayout.setDrawerLockMode(DrawerLayout.LOCK_MODE_LOCKED_OPEN); drawerLayout.setScrimColor(Color.TRANSPARENT); drawerLayout.setTablet(true); float navWidth = (float) getResources().getDimensionPixelSize(R.dimen.nav_width); offsetTabletViews(navWidth); drawerLayout.setDrawerListener(null); }else { ActionBarDrawerToggle toggle = new ActionBarDrawerToggle(this, drawerLayout, toolbar, 
R.string.navigation_drawer_open, R.string.navigation_drawer_close); drawerLayout.setDrawerListener(toggle); toggle.syncState(); } navigationView.setNavigationItemSelectedListener(this); } private void offsetTabletViews(float navWidth) { View mainContent = findViewById(R.id.main_content); mainContent.setX(navWidth); int deviceWidth = getResources().getDisplayMetrics().widthPixels; ViewGroup.LayoutParams layoutParams = mainContent.getLayoutParams(); layoutParams.width = deviceWidth - (int) navWidth; mainContent.setLayoutParams(layoutParams); View container = findViewById(R.id.detail_container); CoordinatorLayout.LayoutParams coorparams = (CoordinatorLayout.LayoutParams)container.getLayoutParams(); coorparams.width = deviceWidth - (int) navWidth; container.setLayoutParams(coorparams); } private boolean checkIsTablet(){ try { return getResources().getBoolean(R.bool.isTablet); } catch (Resources.NotFoundException ignore) { return false; } } @Override public void onResume(){ super.onResume(); if(isDrawerLocked) { drawerLayout.setDrawerLockMode(DrawerLayout.LOCK_MODE_LOCKED_OPEN); }else { drawerLayout.setDrawerLockMode(DrawerLayout.LOCK_MODE_UNLOCKED); } } @Override public void onBackPressed() { LogUtils.E("onBackPressed"); if(isDrawerLocked){ if(AppSettings.isShowOnExit()){ Message.AlertClose(this).show(); }else { super.onBackPressed(); } } else { DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout); if (drawer.isDrawerOpen(GravityCompat.START)) { drawer.closeDrawer(GravityCompat.START); } else { if(AppSettings.isShowOnExit()){ Message.AlertClose(this).show(); }else { super.onBackPressed(); } } } } @SuppressWarnings("StatementWithEmptyBody") @Override public boolean onNavigationItemSelected(MenuItem item) { int id = item.getItemId(); FragmentManager fragmentManager = getSupportFragmentManager(); Fragment fragment = null; String tag = null; if (id == R.id.nav_day) { if(DataManager.getInstance().getFirstDay()!=null) { Intent intent = new 
Intent(MainActivity.this, DetailActivity.class); String data = new Gson().toJson(DataManager.getInstance().getFirstDay()); intent.putExtra(AppConst.EXTRA_DAY_KEY, data); getSharedPreferences(AppConst.THEME_PREFERENCE, Context.MODE_PRIVATE).edit().putString(AppConst.THEME_KEY, DataConst.Theme.getThemeBy(DataManager.getInstance().getFirstDay().getConditionId())).apply(); startActivity(intent); } return true; } else if (id == R.id.nav_week) { item.setChecked(true); saveFragment(AppConst.FRAGMENT_WEEK); fragment = new WeatherWeekFragment(); tag = AppConst.FRAGMENT_WEEK; } else if (id == R.id.nav_history) { item.setChecked(true); saveFragment(AppConst.FRAGMENT_HISTORY); fragment = new HistoryFragment(); tag = AppConst.FRAGMENT_HISTORY; } else if (id == R.id.nav_settings) { startActivity(new Intent(MainActivity.this, SettingsActivity.class)); return true; } fragmentManager.beginTransaction() .add(R.id.detail_container, fragment, tag) .commit(); if(!isDrawerLocked) { DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout); drawer.closeDrawer(GravityCompat.START); } return true; } }
package org.ovirt.engine.core.dao;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang.StringUtils;
import org.ovirt.engine.core.common.businessentities.ArchitectureType;
import org.ovirt.engine.core.common.businessentities.QuotaEnforcementTypeEnum;
import org.ovirt.engine.core.common.businessentities.VM;
import org.ovirt.engine.core.common.businessentities.VmDevice;
import org.ovirt.engine.core.common.utils.Pair;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.core.compat.Version;
import org.ovirt.engine.core.dal.dbbroker.DbFacadeUtils;
import org.ovirt.engine.core.utils.collections.MultiValueMapUtils;
import org.springframework.jdbc.core.RowMapper;

/**
 * <code>VmDAODbFacadeImpl</code> provides a concrete implementation of {@link VmDAO}. The functionality is code
 * refactored out of {@code org.ovirt.engine.core.dal.dbbroker.DbFacade}.
 *
 * Each method delegates to a named stored procedure via the inherited calls handler;
 * {@code VMRowMapper} maps result-set rows to {@link VM} entities.
 */
public class VmDAODbFacadeImpl extends BaseDAODbFacade implements VmDAO {

    /** Unfiltered single-VM lookup by guid (no user-level permission filtering). */
    @Override
    public VM get(Guid id) {
        return get(id, null, false);
    }

    /**
     * Fetches a VM by guid, optionally filtered by the permissions of {@code userID}
     * when {@code isFiltered} is true.
     */
    @Override
    public VM get(Guid id, Guid userID, boolean isFiltered) {
        return getCallsHandler().executeRead("GetVmByVmGuid", VMRowMapper.instance, getCustomMapSqlParameterSource()
                .addValue("vm_guid", id).addValue("user_id", userID).addValue("is_filtered", isFiltered));
    }

    /** Looks a VM up by name, scoped to a single data center. */
    @Override
    public VM getByNameForDataCenter(Guid dataCenterId, String name, Guid userID, boolean isFiltered) {
        return getCallsHandler().executeRead("GetVmByVmNameForDataCenter",
                VMRowMapper.instance,
                getCustomMapSqlParameterSource()
                        .addValue("data_center_id", dataCenterId).addValue("vm_name", name).addValue("user_id",
                                userID).addValue("is_filtered", isFiltered));
    }

    /** Finds the VM whose hibernation (suspend) volume has the given image id. */
    @Override
    public VM getForHibernationImage(Guid id) {
        return getCallsHandler().executeRead("GetVmByHibernationImageId",
                VMRowMapper.instance,
                getCustomMapSqlParameterSource()
                        .addValue("image_id", id));
    }

    /**
     * Returns the VMs attached to disk {@code id}, partitioned by whether the
     * disk device is plugged (map key). Snapshot-attached VMs are included only
     * when {@code includeVmsSnapshotAttachedTo} is set.
     */
    @Override
    public Map<Boolean, List<VM>> getForDisk(Guid id, boolean includeVmsSnapshotAttachedTo) {
        Map<Boolean, List<VM>> result = new HashMap<Boolean, List<VM>>();
        List<Pair<VM, VmDevice>> vms = getVmsWithPlugInfo(id);
        for (Pair<VM, VmDevice> pair : vms) {
            VmDevice device = pair.getSecond();
            // A null snapshot id means the device belongs to the active VM, not a snapshot.
            if (includeVmsSnapshotAttachedTo || device.getSnapshotId() == null) {
                MultiValueMapUtils.addToMap(device.getIsPlugged(), pair.getFirst(), result);
            }
        }
        return result;
    }

    @Override
    public List<VM> getAllVMsWithDisksOnOtherStorageDomain(Guid storageDomainGuid) {
        return getCallsHandler().executeReadList("GetAllVMsWithDisksOnOtherStorageDomain",
                VMRowMapper.instance,
                getCustomMapSqlParameterSource().addValue("storage_domain_id", storageDomainGuid));
    }

    /** Flat-list variant of {@link #getForDisk} (no plugged/unplugged partitioning). */
    @Override
    public List<VM> getVmsListForDisk(Guid id, boolean includeVmsSnapshotAttachedTo) {
        List<VM> result = new ArrayList<>();
        List<Pair<VM, VmDevice>> vms = getVmsWithPlugInfo(id);
        for (Pair<VM, VmDevice> pair : vms) {
            if (includeVmsSnapshotAttachedTo || pair.getSecond().getSnapshotId() == null) {
                result.add(pair.getFirst());
            }
        }
        return result;
    }

    @Override
    public List<VM> getVmsListByInstanceType(Guid id) {
        return getCallsHandler().executeReadList("GetVmsByInstanceTypeId",
                VMRowMapper.instance,
                getCustomMapSqlParameterSource()
                        .addValue("instance_type_id", id));
    }

    /** Returns (VM, disk device) pairs for every VM referencing disk {@code id}. */
    public List<Pair<VM, VmDevice>> getVmsWithPlugInfo(Guid id) {
        return getCallsHandler().executeReadList
                ("GetVmsByDiskId",
                        VMWithPlugInfoRowMapper.instance,
                        getCustomMapSqlParameterSource().addValue("disk_guid", id));
    }

    @Override
    public List<VM> getAllForUser(Guid id) {
        return getCallsHandler().executeReadList("GetVmsByUserId",
                VMRowMapper.instance,
                getCustomMapSqlParameterSource()
                        .addValue("user_id", id));
    }

    @Override
    public List<VM> getAllForUserWithGroupsAndUserRoles(Guid id) {
        return getCallsHandler().executeReadList("GetVmsByUserIdWithGroupsAndUserRoles", VMRowMapper.instance,
                getCustomMapSqlParameterSource()
                        .addValue("user_id", id));
    }

    @Override
    public List<VM> getAllForAdGroupByName(String name) {
        return getCallsHandler().executeReadList("GetVmsByAdGroupNames",
                VMRowMapper.instance,
                getCustomMapSqlParameterSource()
                        .addValue("ad_group_names", name));
    }

    /** All VMs derived from template {@code id}. */
    @Override
    public List<VM> getAllWithTemplate(Guid id) {
        return getCallsHandler().executeReadList("GetVmsByVmtGuid",
                VMRowMapper.instance,
                getCustomMapSqlParameterSource()
                        .addValue("vmt_guid", id));
    }

    @Override
    public List<VM> getAllRunningForVds(Guid id) {
        return getCallsHandler().executeReadList("GetVmsRunningOnVds",
                VMRowMapper.instance,
                getCustomMapSqlParameterSource()
                        .addValue("vds_id", id));
    }

    @Override
    public List<VM> getAllRunningOnOrMigratingToVds(Guid id) {
        return getCallsHandler().executeReadList("GetVmsRunningOnOrMigratingToVds",
                VMRowMapper.instance,
                getCustomMapSqlParameterSource()
                        .addValue("vds_id", id));
    }

    /** Same as {@link #getAllRunningForVds}, but keyed by VM id for O(1) lookup. */
    @Override
    public Map<Guid, VM> getAllRunningByVds(Guid id) {
        HashMap<Guid, VM> map = new HashMap<Guid, VM>();
        for (VM vm : getAllRunningForVds(id)) {
            map.put(vm.getId(), vm);
        }
        return map;
    }

    /**
     * Runs a caller-supplied SQL query directly.
     * NOTE(review): the query string is executed as-is — callers must never build
     * it from untrusted input (SQL-injection risk); confirm all call sites.
     */
    @Override
    public List<VM> getAllUsingQuery(String query) {
        return jdbcTemplate.query(query, VMRowMapper.instance);
    }

    @Override
    public List<VM> getAllForStorageDomain(Guid id) {
        return getCallsHandler().executeReadList("GetVmsByStorageDomainId",
                VMRowMapper.instance,
                getCustomMapSqlParameterSource()
                        .addValue("storage_domain_id", id));
    }

    @Override
    public List<VM> getAllVmsRelatedToQuotaId(Guid quotaId) {
        return getCallsHandler().executeReadList("getAllVmsRelatedToQuotaId",
                VMRowMapper.instance,
                getCustomMapSqlParameterSource()
                        .addValue("quota_id", quotaId));
    }

    /** Batch lookup; ids are passed to the stored procedure as a comma-joined string. */
    @Override
    public List<VM> getVmsByIds(List<Guid> vmsIds) {
        return getCallsHandler().executeReadList("GetVmsByIds",
                VMRowMapper.instance,
                getCustomMapSqlParameterSource().addValue("vms_ids", StringUtils.join(vmsIds, ',')));
    }

    @Override
    public List<VM> getAllActiveForStorageDomain(Guid id) {
        return getCallsHandler().executeReadList("GetActiveVmsByStorageDomainId",
                VMRowMapper.instance,
getCustomMapSqlParameterSource() .addValue("storage_domain_id", id)); } @Override public List<VM> getAll() { return getAll(null, false); } @Override public List<VM> getAll(Guid userID, boolean isFiltered) { return getCallsHandler().executeReadList("GetAllFromVms", VMRowMapper.instance, getCustomMapSqlParameterSource().addValue("user_id", userID).addValue("is_filtered", isFiltered)); } @Override public void saveIsInitialized(Guid vmid, boolean isInitialized) { getCallsHandler().executeModification("UpdateIsInitialized", getCustomMapSqlParameterSource() .addValue("vm_guid", vmid) .addValue("is_initialized", isInitialized)); } @Override public void remove(Guid id) { getCallsHandler().executeModification("DeleteVm", getCustomMapSqlParameterSource() .addValue("vm_guid", id)); } @Override public List<VM> getAllForNetwork(Guid id) { return getCallsHandler().executeReadList("GetVmsByNetworkId", VMRowMapper.instance, getCustomMapSqlParameterSource() .addValue("network_id", id)); } @Override public List<VM> getAllForVnicProfile(Guid vNicProfileId) { return getCallsHandler().executeReadList("GetVmsByVnicProfileId", VMRowMapper.instance, getCustomMapSqlParameterSource().addValue("vnic_profile_id", vNicProfileId)); } @Override public List<VM> getAllForVdsGroup(Guid vds_group_id) { return getCallsHandler().executeReadList("GetVmsByVdsGroupId", VMRowMapper.instance, getCustomMapSqlParameterSource() .addValue("vds_group_id", vds_group_id)); } @Override public List<VM> getAllForVmPool(Guid vmPoolId) { return getCallsHandler().executeReadList("GetVmsByVmPoolId", VMRowMapper.instance, getCustomMapSqlParameterSource() .addValue("vm_pool_id", vmPoolId)); } @Override public List<VM> getAllFailedAutoStartVms() { return getCallsHandler().executeReadList("GetFailedAutoStartVms", VMRowMapper.instance, getCustomMapSqlParameterSource()); } @Override public List<VM> getAllMigratingToHost(Guid vdsId) { return getCallsHandler().executeReadList("GetVmsMigratingToVds", VMRowMapper.instance, 
getCustomMapSqlParameterSource() .addValue("vds_id", vdsId)); } @Override public void updateOriginalTemplateName(Guid originalTemplateId, String originalTemplateName) { getCallsHandler().executeModification("UpdateOriginalTemplateName", getCustomMapSqlParameterSource() .addValue("original_template_id", originalTemplateId) .addValue("original_template_name", originalTemplateName) ); } @Override public List<VM> getAllRunningByCluster(Guid clusterId) { return getCallsHandler().executeReadList("GetRunningVmsByClusterId", VMRowMapper.instance, getCustomMapSqlParameterSource() .addValue("cluster_id", clusterId)); } @Override public List<Guid> getVmIdsForVersionUpdate(Guid baseTemplateId) { return getCallsHandler().executeReadList("getVmIdsForVersionUpdate", createGuidMapper(), getCustomMapSqlParameterSource() .addValue("base_template_id", baseTemplateId)); } static final class VMRowMapper implements RowMapper<VM> { public static final VMRowMapper instance = new VMRowMapper(); @Override public VM mapRow(ResultSet rs, int rowNum) throws SQLException { VM entity = new VM(); entity.setStaticData(VmStaticDAODbFacadeImpl.getRowMapper().mapRow(rs, rowNum)); entity.setDynamicData(VmDynamicDAODbFacadeImpl.getRowMapper().mapRow(rs, rowNum)); entity.setStatisticsData(VmStatisticsDaoDbFacadeImpl.getRowMapper().mapRow(rs, rowNum)); entity.setQuotaName(rs.getString("quota_name")); entity.setQuotaEnforcementType(QuotaEnforcementTypeEnum.forValue(rs.getInt("quota_enforcement_type"))); entity.setVdsGroupName(rs.getString("vds_group_name")); entity.setVdsGroupDescription(rs.getString("vds_group_description")); entity.setVmtName(rs.getString("vmt_name")); entity.setVmtMemSizeMb(rs.getInt("vmt_mem_size_mb")); entity.setVmtOsId(rs.getInt("vmt_os")); entity.setVmtCreationDate(DbFacadeUtils.fromDate(rs.getTimestamp("vmt_creation_date"))); entity.setVmtChildCount(rs.getInt("vmt_child_count")); entity.setVmtNumOfCpus(rs.getInt("vmt_num_of_cpus")); 
entity.setVmtNumOfSockets(rs.getInt("vmt_num_of_sockets")); entity.setVmtCpuPerSocket(rs.getInt("vmt_cpu_per_socket")); entity.setVmtDescription(rs.getString("vmt_description")); entity.setVmPoolName(rs.getString("vm_pool_name")); entity.setVmPoolId(getGuid(rs, "vm_pool_id")); entity.setRunOnVdsName(rs.getString("run_on_vds_name")); entity.setVdsGroupCpuName(rs.getString("vds_group_cpu_name")); entity.setStoragePoolId(getGuidDefaultEmpty(rs, "storage_pool_id")); entity.setStoragePoolName(rs.getString("storage_pool_name")); entity.setTransparentHugePages(rs.getBoolean("transparent_hugepages")); entity.setVdsGroupCompatibilityVersion(new Version(rs.getString("vds_group_compatibility_version"))); entity.setTrustedService(rs.getBoolean("trusted_service")); entity.setClusterArch(ArchitectureType.forValue(rs.getInt("architecture"))); entity.setVmPoolSpiceProxy(rs.getString("vm_pool_spice_proxy")); entity.setVdsGroupSpiceProxy(rs.getString("vds_group_spice_proxy")); entity.setNextRunConfigurationExists(rs.getBoolean("next_run_config_exists")); return entity; } } private static final class VMWithPlugInfoRowMapper implements RowMapper<Pair<VM, VmDevice>> { public static final VMWithPlugInfoRowMapper instance = new VMWithPlugInfoRowMapper(); @Override public Pair<VM, VmDevice> mapRow(ResultSet rs, int rowNum) throws SQLException { @SuppressWarnings("synthetic-access") Pair<VM, VmDevice> entity = new Pair<>(); entity.setFirst(VMRowMapper.instance.mapRow(rs, rowNum)); entity.setSecond(VmDeviceDAODbFacadeImpl.VmDeviceRowMapper.instance.mapRow(rs, rowNum)); return entity; } } }
/*
 * Copyright (c) 2009, 2012, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

/* @test
 * @bug 6595866
 * @summary Test java.io.File operations with sym links
 * @build SymLinks Util
 * @run main SymLinks
 */

import java.io.*;
import java.nio.file.*;
import java.nio.file.attribute.*;
import static java.nio.file.LinkOption.*;

/**
 * Regression test: exercises java.io.File operations (createNewFile, mkdir,
 * delete, renameTo, list, isFile/isDirectory/isHidden, length,
 * lastModified/setLastModified, setReadOnly/setWritable) against symbolic
 * links and chains of symbolic links, verifying which operations follow
 * links and which operate on the link itself.
 */
public class SymLinks {
    final static PrintStream out = System.out;

    // test scratch directory; defaults to the current directory
    final static File top = new File(System.getProperty("test.dir", "."));

    // files used by the test:
    // link2link2file -> link2file -> foofile
    // link2link2dir -> link2dir -> foodir
    // link2link2nobody -> link2nobody -> (non-existent target)
    final static File file = new File(top, "foofile");
    final static File link2file = new File(top, "link2file");
    final static File link2link2file = new File(top, "link2link2file");
    final static File dir = new File(top, "foodir");
    final static File link2dir = new File(top, "link2dir");
    final static File link2link2dir = new File(top, "link2link2dir");
    final static File link2nobody = new File(top, "link2nobody");
    final static File link2link2nobody = new File(top, "link2link2nobody");

    /**
     * Setup files, directories, and sym links used by test.
     */
    static void setup() throws IOException {
        // link2link2file -> link2file -> foofile
        FileOutputStream fos = new FileOutputStream(file);
        try {
            // 16KB of data so the file has a non-zero length for the length() tests
            fos.write(new byte[16*1024]);
        } finally {
            fos.close();
        }
        mklink(link2file, file);
        mklink(link2link2file, link2file);

        // link2link2dir -> link2dir -> dir
        assertTrue(dir.mkdir());
        mklink(link2dir, dir);
        mklink(link2link2dir, link2dir);

        // link2link2nobody -> link2nobody -> <does-not-exist>
        mklink(link2nobody, new File(top, "DoesNotExist"));
        mklink(link2link2nobody, link2nobody);
    }

    /**
     * Remove files, directories, and sym links used by test.
     */
    static void cleanup() throws IOException {
        // deleteIfExists is used for the links so that dangling links are
        // removed as well (File.delete follows no special link handling here)
        if (file != null)
            file.delete();
        if (link2file != null)
            Files.deleteIfExists(link2file.toPath());
        if (link2link2file != null)
            Files.deleteIfExists(link2link2file.toPath());
        if (dir != null)
            dir.delete();
        if (link2dir != null)
            Files.deleteIfExists(link2dir.toPath());
        if (link2link2dir != null)
            Files.deleteIfExists(link2link2dir.toPath());
        if (link2nobody != null)
            Files.deleteIfExists(link2nobody.toPath());
        if (link2link2nobody != null)
            Files.deleteIfExists(link2link2nobody.toPath());
    }

    /**
     * Creates a sym link source->target
     */
    static void mklink(File source, File target) throws IOException {
        Files.createSymbolicLink(source.toPath(), target.toPath());
    }

    /**
     * Returns true if the "link" exists and is a sym link.
     */
    static boolean isSymLink(File link) {
        return Files.isSymbolicLink(link.toPath());
    }

    /**
     * Returns the last modified time of a sym link.
     */
    static long lastModifiedOfSymLink(File link) throws IOException {
        // NOFOLLOW_LINKS reads the attributes of the link itself, not the target
        BasicFileAttributes attrs =
            Files.readAttributes(link.toPath(), BasicFileAttributes.class, NOFOLLOW_LINKS);
        assertTrue(attrs.isSymbolicLink());
        return attrs.lastModifiedTime().toMillis();
    }

    /**
     * Returns true if sym links are supported on the file system where
     * "dir" exists.
     */
    static boolean supportsSymLinks(File dir) {
        // probe by creating (and immediately deleting) a link; unsupported
        // platforms/filesystems surface as UnsupportedOperationException or
        // IOException (e.g. missing privilege on Windows)
        Path link = dir.toPath().resolve("link");
        Path target = dir.toPath().resolve("target");
        try {
            Files.createSymbolicLink(link, target);
            Files.delete(link);
            return true;
        } catch (UnsupportedOperationException x) {
            return false;
        } catch (IOException x) {
            return false;
        }
    }

    static void assertTrue(boolean v) {
        if (!v) throw new RuntimeException("Test failed");
    }

    static void assertFalse(boolean v) {
        assertTrue(!v);
    }

    static void header(String h) {
        out.println();
        out.println();
        out.println("-- " + h + " --");
    }

    /**
     * Tests go here.
     */
    static void go() throws IOException {

        // check setup
        assertTrue(file.isFile());
        assertTrue(isSymLink(link2file));
        assertTrue(isSymLink(link2link2file));
        assertTrue(dir.isDirectory());
        assertTrue(isSymLink(link2dir));
        assertTrue(isSymLink(link2link2dir));
        assertTrue(isSymLink(link2nobody));
        assertTrue(isSymLink(link2link2nobody));

        // createNewFile fails for every existing path, including links —
        // even links to non-existent targets (the link itself exists)
        header("createNewFile");
        assertFalse(file.createNewFile());
        assertFalse(link2file.createNewFile());
        assertFalse(link2link2file.createNewFile());
        assertFalse(dir.createNewFile());
        assertFalse(link2dir.createNewFile());
        assertFalse(link2link2dir.createNewFile());
        assertFalse(link2nobody.createNewFile());
        assertFalse(link2link2nobody.createNewFile());

        header("mkdir");
        assertFalse(file.mkdir());
        assertFalse(link2file.mkdir());
        assertFalse(link2link2file.mkdir());
        assertFalse(dir.mkdir());
        assertFalse(link2dir.mkdir());
        assertFalse(link2link2dir.mkdir());
        assertFalse(link2nobody.mkdir());
        assertFalse(link2link2nobody.mkdir());

        // delete removes the link, never the target
        header("delete");
        File link = new File(top, "mylink");
        try {
            mklink(link, file);
            assertTrue(link.delete());
            assertTrue(!isSymLink(link));
            assertTrue(file.exists());

            mklink(link, link2file);
            assertTrue(link.delete());
            assertTrue(!isSymLink(link));
            assertTrue(link2file.exists());

            mklink(link, dir);
            assertTrue(link.delete());
            assertTrue(!isSymLink(link));
            assertTrue(dir.exists());

            mklink(link, link2dir);
            assertTrue(link.delete());
            assertTrue(!isSymLink(link));
            assertTrue(link2dir.exists());

            mklink(link, link2nobody);
            assertTrue(link.delete());
            assertTrue(!isSymLink(link));
            assertTrue(isSymLink(link2nobody));
        } finally {
            Files.deleteIfExists(link.toPath());
        }

        // renameTo moves the link itself; the target stays put
        header("renameTo");
        File newlink = new File(top, "newlink");
        assertTrue(link2file.renameTo(newlink));
        try {
            assertTrue(file.exists());
            assertTrue(isSymLink(newlink));
            assertTrue(!isSymLink(link2file));
        } finally {
            newlink.renameTo(link2file);    // restore link
        }

        assertTrue(link2dir.renameTo(newlink));
        try {
            assertTrue(dir.exists());
            assertTrue(isSymLink(newlink));
            assertTrue(!isSymLink(link2dir));
        } finally {
            newlink.renameTo(link2dir);     // restore link
        }

        // list follows links to directories; returns null for non-directories
        header("list");
        final String name = "entry";
        File entry = new File(dir, name);
        try {
            assertTrue(dir.list().length == 0);   // directory should be empty
            assertTrue(link2dir.list().length == 0);
            assertTrue(link2link2dir.list().length == 0);

            assertTrue(entry.createNewFile());
            assertTrue(dir.list().length == 1);
            assertTrue(dir.list()[0].equals(name));

            // access directory by following links
            assertTrue(link2dir.list().length == 1);
            assertTrue(link2dir.list()[0].equals(name));
            assertTrue(link2link2dir.list().length == 1);
            assertTrue(link2link2dir.list()[0].equals(name));

            // files that are not directories
            assertTrue(link2file.list() == null);
            assertTrue(link2nobody.list() == null);
        } finally {
            entry.delete();
        }

        // isFile/isDirectory/isHidden follow links
        header("isXXX");
        assertTrue(file.isFile());
        assertTrue(link2file.isFile());
        assertTrue(link2link2file.isFile());
        assertTrue(dir.isDirectory());
        assertTrue(link2dir.isDirectory());
        assertTrue(link2link2dir.isDirectory());

        // on Windows we test with the DOS hidden attribute set
        if (System.getProperty("os.name").startsWith("Windows")) {
            DosFileAttributeView view = Files
                .getFileAttributeView(file.toPath(), DosFileAttributeView.class);
            view.setHidden(true);
            try {
                assertTrue(file.isHidden());
                assertTrue(link2file.isHidden());
                assertTrue(link2link2file.isHidden());
            } finally {
                view.setHidden(false);
            }
            assertFalse(file.isHidden());
            assertFalse(link2file.isHidden());
            assertFalse(link2link2file.isHidden());
        }

        // length follows links; a dangling link reports 0
        header("length");
        long len = file.length();
        assertTrue(len > 0L);
        // these tests should follow links
        assertTrue(link2file.length() == len);
        assertTrue(link2link2file.length() == len);
        assertTrue(link2nobody.length() == 0L);

        header("lastModified / setLastModified");
        // need time to diff between link and file
        long origLastModified = file.lastModified();
        assertTrue(origLastModified != 0L);
        try {
            Thread.sleep(2000);
        } catch (InterruptedException x) {
            // intentionally ignored: the sleep only widens the timestamp gap
        }
        file.setLastModified(System.currentTimeMillis());

        long lastModified = file.lastModified();
        assertTrue(lastModified != origLastModified);
        // the link's own timestamp is unchanged; File.lastModified follows
        assertTrue(lastModifiedOfSymLink(link2file) != lastModified);
        assertTrue(lastModifiedOfSymLink(link2link2file) != lastModified);
        assertTrue(link2file.lastModified() == lastModified);
        assertTrue(link2link2file.lastModified() == lastModified);
        assertTrue(link2nobody.lastModified() == 0L);

        origLastModified = dir.lastModified();
        assertTrue(origLastModified != 0L);
        dir.setLastModified(0L);
        assertTrue(dir.lastModified() == 0L);
        assertTrue(link2dir.lastModified() == 0L);
        assertTrue(link2link2dir.lastModified() == 0L);
        dir.setLastModified(origLastModified);

        // permission changes made through a link affect the target
        header("setXXX / canXXX");
        assertTrue(file.canRead());
        assertTrue(file.canWrite());
        assertTrue(link2file.canRead());
        assertTrue(link2file.canWrite());
        assertTrue(link2link2file.canRead());
        assertTrue(link2link2file.canWrite());

        // skipped for privileged users, who can write regardless of mode bits
        if (!Util.isPrivileged() && file.setReadOnly()) {
            assertFalse(file.canWrite());
            assertFalse(link2file.canWrite());
            assertFalse(link2link2file.canWrite());

            assertTrue(file.setWritable(true));             // make writable
            assertTrue(file.canWrite());
            assertTrue(link2file.canWrite());
            assertTrue(link2link2file.canWrite());

            assertTrue(link2file.setReadOnly());            // make read only
            assertFalse(file.canWrite());
            assertFalse(link2file.canWrite());
            assertFalse(link2link2file.canWrite());

            assertTrue(link2link2file.setWritable(true));   // make writable
            assertTrue(file.canWrite());
            assertTrue(link2file.canWrite());
            assertTrue(link2link2file.canWrite());
        }
    }

    public static void main(String[] args) throws IOException {
        // the whole test is a no-op on filesystems without symlink support
        if (supportsSymLinks(top)) {
            try {
                setup();
                go();
            } finally {
                cleanup();
            }
        }
    }
}
package ru.job4j.maps; import java.util.Arrays; import java.util.Iterator; /** * Represent my implementation of map. Holds elements as pair key - value. * Access to values executes by key. * * @param <K> type of key of stored elements. * @param <V> type of value of sored elements. * @author abondarev. * @since 10.09.2017. */ public class Directory<K, V> { /** * The default initial capacity. */ private static final int DEFAULT_CAPACITY = 1 << 4; /** * The default initial load factor. */ private static final double DEFAULT_LOAD_FACTOR = 0.75d; /** * The array represent inner container for storing elements. */ private Node<K, V>[] table; /** * Amount of stored elements. */ private int size; /** * Value of current capacity. */ private int capacity; /** * Value of load factor that represent percent of load the the table. */ private double loadFactor; /** * The threshold of the container after that inner container change itself * capacity (threshold = capacity * loadFactor). */ private int threshold; /** * The default constructor. */ public Directory() { this(DEFAULT_CAPACITY, DEFAULT_LOAD_FACTOR); } /** * The constructor takes value of capacity as parameter. * * @param capacity it's value of capacity. */ public Directory(int capacity) { this(capacity, DEFAULT_LOAD_FACTOR); } /** * The constructor takes two parameters(capacity & loadFactor) and based * on it values construct instance of directory. * * @param capacity it's value of capacity. * @param loadFactor it's value of loadFactor. */ public Directory(int capacity, double loadFactor) { if (capacity > Integer.MAX_VALUE / 2 || capacity < 0) { throw new IllegalArgumentException("Illegal value of capacity"); } if (loadFactor > 1d || loadFactor < 0.1d) { throw new IllegalArgumentException("Illegal value of loadFactor."); } this.capacity = capacity; this.loadFactor = loadFactor; this.threshold = tableSizeFor(this.capacity); this.table = new Node[this.capacity]; } /** * Insert element into inner container. 
* * @param key for access to value. * @param value of element. * @return return true if element added. */ public boolean insert(K key, V value) { boolean result = false; if (key != null) { checkSize(); int h = hash(key); int index = hashToIndex(h); if (this.table[index] == null) { this.table[index] = new Node<>(h, key, value); size++; result = true; } } return result; } /** * Returns value of element associated with key. * * @param key for access to value. * @return value of element or null if element associated with key not found. */ public V get(K key) { V result = null; if (key == null) { throw new IllegalArgumentException("Invalid argument"); } int index = hashToIndex(hash(key)); if (this.table[index] != null && this.table[index].key.equals(key)) { result = this.table[index].value; } return result; } /** * Deletes element from inner container by its key. * * @param key of element. * @return true if element deleted. */ public boolean delete(K key) { boolean result = false; if (key != null) { int index = hashToIndex(hash(key)); if (this.table[index] != null) { this.table[index] = null; size--; result = true; } } return false; } /** * Returns amount of stored elements. * * @return amount of stored elements. */ public int size() { return this.size; } /** * Returns iterator for the Directory elements. * * @return iterator for the Directory elements. */ public Iterator<Node<K, V>> iterator() { return new Iterator<Node<K, V>>() { private int pointer = 0; @Override public boolean hasNext() { boolean result = false; while (pointer < capacity) { if (table[pointer++] != null) { result = true; break; } } return result; } @Override public Node<K, V> next() { return table[pointer++]; } }; } /** * Calculate max amount elements hold in table after that table will be * resize. * * @param cap capacity of table. * @return threshold. 
*/ private int tableSizeFor(int cap) { double result = (double) cap * loadFactor; return (int) result; } /** * Check size of the table and resize it if need. */ private void checkSize() { if (this.size + 1 >= threshold) { this.capacity = this.capacity << 1; this.threshold = tableSizeFor(this.capacity); this.table = Arrays.copyOf(this.table, this.capacity); } } /** * The class represents bucket for storing pair key - value. * * @param <K> type of key. * @param <V> type of value. */ private class Node<K, V> { /** * The hash of bucket. */ private final int hash; /** * The key for access to associated value. */ private final K key; /** * The value associated with key. */ private V value; /** * The constructor. * * @param hash for node. * @param key key for node. * @param value value for node. */ Node(int hash, K key, V value) { this.hash = hash; this.key = key; this.value = value; } } /** * Calculate hash-function for key specified as parameter. * * @param key instance for whose calculating hash-function. * @param <K> type of key. * @return result calculation hash-function. */ private static <K> int hash(K key) { int h = key.hashCode(); return h ^ (h >>> 16); } /** * Calculate index by hash of key. * * @param hash of key. * @return index for element. */ private int hashToIndex(int hash) { return hash & (this.capacity - 1); } }
/*
 * Copyright (C) 2018 the original author or authors.
 *
 * This file is part of jBB Application Project.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * You may obtain a copy of the License at
 *        http://www.apache.org/licenses/LICENSE-2.0
 */

package org.jbb.system.impl.logging;

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.isA;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

import com.google.common.collect.Sets;
import javax.validation.ConstraintViolation;
import javax.validation.Validator;
import org.jbb.lib.eventbus.JbbEventBus;
import org.jbb.lib.logging.ConfigurationRepository;
import org.jbb.lib.logging.jaxb.Configuration;
import org.jbb.system.api.logging.LoggingConfigurationException;
import org.jbb.system.api.logging.model.AddingModeGroup;
import org.jbb.system.api.logging.model.AppLogger;
import org.jbb.system.api.logging.model.LogAppender;
import org.jbb.system.api.logging.model.LoggingConfiguration;
import org.jbb.system.event.LogAppenderCreatedEvent;
import org.jbb.system.event.LogAppenderRemovedEvent;
import org.jbb.system.event.LogAppenderUpdatedEvent;
import org.jbb.system.event.LoggerCreatedEvent;
import org.jbb.system.event.LoggerRemovedEvent;
import org.jbb.system.event.LoggerUpdatedEvent;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;

/**
 * Unit tests for {@code DefaultLoggingSettingsService}. All collaborators
 * (repository, mapper, editors, browsers, validator, event bus) are Mockito
 * mocks; the tests verify delegation, bean validation of appenders/loggers
 * (failures surface as {@link LoggingConfigurationException}), null-argument
 * rejection, and that the matching event is posted on the event bus for each
 * create/update/delete operation.
 */
@RunWith(MockitoJUnitRunner.class)
public class DefaultLoggingSettingsServiceTest {
    @Mock
    private ConfigurationRepository configRepositoryMock;

    @Mock
    private LoggingConfigMapper configMapperMock;

    @Mock
    private AppenderEditor appenderEditorMock;

    @Mock
    private LoggerEditor loggerEditorMock;

    @Mock
    private StatusListenerEditor statusListenerEditorMock;

    @Mock
    private AppenderBrowser appenderBrowserMock;

    @Mock
    private LoggerBrowser loggerBrowserMock;

    @Mock
    private Validator validatorMock;

    @Mock
    private JbbEventBus eventBusMock;

    @InjectMocks
    private DefaultLoggingSettingsService loggingSettingsService;

    @Test
    public void shouldDelegateConfigurationFromRepository_toConfigMapper() {
        // given
        Configuration conf = mock(Configuration.class);
        given(configRepositoryMock.getConfiguration()).willReturn(conf);

        // when
        loggingSettingsService.getLoggingConfiguration();

        // then
        Mockito.verify(configMapperMock, times(1)).buildConfiguration(eq(conf));
    }

    @Test(expected = NullPointerException.class)
    public void shouldThrowNPE_whenNullAppenderPassedToAddMethod() {
        // when
        loggingSettingsService.addAppender(null);

        // then
        // throw NullPointerException
    }

    @Test(expected = LoggingConfigurationException.class)
    public void shouldThrowLoggingConfigurationException_whenValidationFailedDuringAddingAppender() {
        // given
        given(validatorMock.validate(any(), any())).willReturn(Sets.newHashSet(mock(ConstraintViolation.class)));

        // when
        loggingSettingsService.addAppender(mock(LogAppender.class));

        // then
        // throw LoggingConfigurationException
    }

    @Test
    public void shouldDelegateToAppenderEditor_whenValidationPassedDuringAddingAppender() {
        // given
        LogAppender appender = mock(LogAppender.class);
        given(validatorMock.validate(any(), any())).willReturn(Sets.newHashSet());

        // when
        loggingSettingsService.addAppender(appender);

        // then
        verify(appenderEditorMock, times(1)).add(eq(appender));
        verify(validatorMock, times(1)).validate(any(), any(), eq(AddingModeGroup.class));
    }

    @Test
    public void shouldEmitLogAppenderAddedEvent_whenAddingAppenderInvoked() {
        // given
        LogAppender appender = mock(LogAppender.class);
        given(validatorMock.validate(any(), any())).willReturn(Sets.newHashSet());

        // when
        loggingSettingsService.addAppender(appender);

        // then
        verify(eventBusMock).post(isA(LogAppenderCreatedEvent.class));
    }

    @Test(expected = NullPointerException.class)
    public void shouldThrowNPE_whenNullAppenderPassedToUpdateMethod() {
        // when
        loggingSettingsService.updateAppender(null);

        // then
        // throw NullPointerException
    }

    @Test(expected = LoggingConfigurationException.class)
    public void shouldThrowLoggingConfigurationException_whenValidationFailedDuringUpdatingAppender() {
        // given
        given(validatorMock.validate(any(), any())).willReturn(Sets.newHashSet(mock(ConstraintViolation.class)));

        // when
        loggingSettingsService.updateAppender(mock(LogAppender.class));

        // then
        // throw LoggingConfigurationException
    }

    @Test
    public void shouldDelegateToAppenderEditor_whenValidationPassedDuringUpdatingAppender() {
        // given
        LogAppender appender = mock(LogAppender.class);
        given(validatorMock.validate(any(), any())).willReturn(Sets.newHashSet());

        // when
        loggingSettingsService.updateAppender(appender);

        // then
        verify(appenderEditorMock, times(1)).update(eq(appender));
        verify(validatorMock, times(1)).validate(any());
    }

    @Test
    public void shouldEmitLogAppenderUpdatedEvent_whenUpdatingAppenderInvoked() {
        // given
        LogAppender appender = mock(LogAppender.class);
        given(validatorMock.validate(any(), any())).willReturn(Sets.newHashSet());

        // when
        loggingSettingsService.updateAppender(appender);

        // then
        verify(eventBusMock).post(isA(LogAppenderUpdatedEvent.class));
    }

    @Test(expected = NullPointerException.class)
    public void shouldThrowNPE_whenNullAppenderPassedToDeleteMethod() {
        // when
        loggingSettingsService.deleteAppender(null);

        // then
        // throw NullPointerException
    }

    @Test
    public void shouldDelegateToAppenderEditor_whenDeleteAppender() {
        // given
        LogAppender appender = mock(LogAppender.class);

        // when
        loggingSettingsService.deleteAppender(appender);

        // then
        verify(appenderEditorMock, times(1)).delete(eq(appender));
    }

    @Test
    public void shouldEmitLogAppenderRemovedEvent_whenRemovingAppenderInvoked() {
        // given
        LogAppender appender = mock(LogAppender.class);

        // when
        loggingSettingsService.deleteAppender(appender);

        // then
        verify(eventBusMock).post(isA(LogAppenderRemovedEvent.class));
    }

    @Test(expected = NullPointerException.class)
    public void shouldThrowNPE_whenNullLoggerPassedToAddMethod() {
        // when
        loggingSettingsService.addLogger(null);

        // then
        // throw NullPointerException
    }

    @Test(expected = LoggingConfigurationException.class)
    public void shouldThrowLoggingConfigurationException_whenValidationFailedDuringAddingLogger() {
        // given
        given(validatorMock.validate(any(), any())).willReturn(Sets.newHashSet(mock(ConstraintViolation.class)));

        // when
        loggingSettingsService.addLogger(mock(AppLogger.class));

        // then
        // throw LoggingConfigurationException
    }

    @Test
    public void shouldDelegateToLoggerEditor_whenValidationPassedDuringAddingLogger() {
        // given
        AppLogger logger = mock(AppLogger.class);
        given(validatorMock.validate(any(), any())).willReturn(Sets.newHashSet());

        // when
        loggingSettingsService.addLogger(logger);

        // then
        verify(loggerEditorMock, times(1)).add(eq(logger));
        verify(validatorMock, times(1)).validate(any(), any(), eq(AddingModeGroup.class));
    }

    @Test
    public void shouldEmitLoggerAddedEvent_whenAddingLoggerInvoked() {
        // given
        AppLogger logger = mock(AppLogger.class);
        given(validatorMock.validate(any(), any())).willReturn(Sets.newHashSet());

        // when
        loggingSettingsService.addLogger(logger);

        // then
        verify(eventBusMock).post(isA(LoggerCreatedEvent.class));
    }

    @Test(expected = NullPointerException.class)
    public void shouldThrowNPE_whenNullLoggerPassedToUpdateMethod() {
        // when
        loggingSettingsService.updateLogger(null);

        // then
        // throw NullPointerException
    }

    @Test(expected = LoggingConfigurationException.class)
    public void shouldThrowLoggingConfigurationException_whenValidationFailedDuringUpdatingLogger() {
        // given
        given(validatorMock.validate(any(), any())).willReturn(Sets.newHashSet(mock(ConstraintViolation.class)));

        // when
        loggingSettingsService.updateLogger(mock(AppLogger.class));

        // then
        // throw LoggingConfigurationException
    }

    @Test
    public void shouldDelegateToLoggerEditor_whenValidationPassedDuringUpdatingLogger() {
        // given
        AppLogger logger = mock(AppLogger.class);
        given(validatorMock.validate(any(), any())).willReturn(Sets.newHashSet());

        // when
        loggingSettingsService.updateLogger(logger);

        // then
        verify(loggerEditorMock, times(1)).update(eq(logger));
        verify(validatorMock, times(1)).validate(any());
    }

    @Test
    public void shouldEmitLoggerUpdatedEvent_whenUpdatingLoggerInvoked() {
        // given
        AppLogger logger = mock(AppLogger.class);
        given(validatorMock.validate(any(), any())).willReturn(Sets.newHashSet());

        // when
        loggingSettingsService.updateLogger(logger);

        // then
        verify(eventBusMock).post(isA(LoggerUpdatedEvent.class));
    }

    @Test(expected = NullPointerException.class)
    public void shouldThrowNPE_whenNullLoggerPassedToDeleteMethod() {
        // when
        loggingSettingsService.deleteLogger(null);

        // then
        // throw NullPointerException
    }

    @Test
    public void shouldDelegateToLoggerEditor_whenDeleteLogger() {
        // given
        AppLogger logger = mock(AppLogger.class);

        // when
        loggingSettingsService.deleteLogger(logger);

        // then
        verify(loggerEditorMock, times(1)).delete(eq(logger));
    }

    @Test
    public void shouldEmitLoggerRemovedEvent_whenRemovingLoggerInvoked() {
        // given
        AppLogger logger = mock(AppLogger.class);

        // when
        loggingSettingsService.deleteLogger(logger);

        // then
        verify(eventBusMock).post(isA(LoggerRemovedEvent.class));
    }

    @Test
    public void shouldPersistNewConfig_withEnableDebugInfoFlag() {
        // given
        boolean enableDebugInfo = true;
        given(configRepositoryMock.getConfiguration()).willReturn(new Configuration());

        // when
        loggingSettingsService.enableDebugLoggingFrameworkMode(enableDebugInfo);

        // then
        verify(statusListenerEditorMock, times(1)).setAppropriateStatusListener(
            any(Configuration.class), eq(enableDebugInfo)
        );
    }

    @Test
    public void shouldPersistNewConfig_withPackagingData() {
        // given
        boolean packagingData = true;
        given(configRepositoryMock.getConfiguration()).willReturn(new Configuration());

        // when
        loggingSettingsService.showPackagingData(packagingData);

        // then
        verify(configRepositoryMock, times(1)).persistNewConfiguration(
            argThat(Configuration::isPackagingData)
        );
    }

    @Test
    public void shouldDelegateSearchQuery_toAppenderBrowser() {
        // given
        LoggingConfiguration loggingConfigurationMock = mock(LoggingConfiguration.class);
        given(configMapperMock.buildConfiguration(any())).willReturn(loggingConfigurationMock);

        // when
        loggingSettingsService.getAppender("foo");

        // then
        verify(appenderBrowserMock, times(1)).searchForAppenderWithName(eq(loggingConfigurationMock), eq("foo"));
    }

    @Test
    public void shouldDelegateSearchQuery_toLoggerBrowser() {
        // given
        LoggingConfiguration loggingConfigurationMock = mock(LoggingConfiguration.class);
        given(configMapperMock.buildConfiguration(any())).willReturn(loggingConfigurationMock);

        // when
        loggingSettingsService.getLogger("bar");

        // then
        verify(loggerBrowserMock, times(1)).searchForLoggerWithName(eq(loggingConfigurationMock), eq("bar"));
    }
}
package at.grabner.example.circleprogressview;

import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.CompoundButton;
import android.widget.SeekBar;
import android.widget.Spinner;
import android.widget.Switch;

import java.util.ArrayList;
import java.util.List;

import at.grabner.circleprogress.AnimationState;
import at.grabner.circleprogress.AnimationStateChangedListener;
import at.grabner.circleprogress.CircleProgressView;
import at.grabner.circleprogress.TextMode;
import at.grabner.circleprogress.UnitPosition;

/**
 * Demo activity for the CircleProgressView widget: wires a progress circle to
 * spin/unit toggles, two seek bars (value and spinner length), and a spinner
 * selecting the unit position. The large commented-out region in onCreate is
 * intentional sample/usage documentation kept from upstream.
 */
public class MainActivity extends AppCompatActivity {

    /**
     * The log tag.
     */
    private final static String TAG = "MainActivity";

    CircleProgressView mCircleView;
    Switch mSwitchSpin;
    Switch mSwitchShowUnit;
    SeekBar mSeekBar;
    SeekBar mSeekBarSpinnerLength;
    // Whether the unit text is shown next to the value; mirrored by mSwitchShowUnit.
    Boolean mShowUnit = true;
    Spinner mSpinner;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        mCircleView = (CircleProgressView) findViewById(R.id.circleView);

        // Log every progress change reported by the view.
        mCircleView.setOnProgressChangedListener(new CircleProgressView.OnProgressChangedListener() {
            @Override
            public void onProgressChanged(float value) {
                Log.d(TAG, "Progress Changed: " + value);
            }
        });

        //value setting
        // mCircleView.setMaxValue(100);
        // mCircleView.setValue(0);
        // mCircleView.setValueAnimated(24);

        //growing/rotating counter-clockwise
        // mCircleView.setDirection(Direction.CCW)
        //
        //show unit
        // mCircleView.setUnit("%");
        // mCircleView.setUnitVisible(mShowUnit);
        //
        // //text sizes
        // mCircleView.setTextSize(50); // text size set, auto text size off
        // mCircleView.setUnitSize(40); // if i set the text size i also have to set the unit size
        // mCircleView.setAutoTextSize(true); // enable auto text size, previous values are overwritten
        // //if you want the calculated text sizes to be bigger/smaller you can do so via
        // mCircleView.setUnitScale(0.9f);
        // mCircleView.setTextScale(0.9f);
        //
        //// //custom typeface
        //// Typeface font = Typeface.createFromAsset(getAssets(), "fonts/ANDROID_ROBOT.ttf");
        //// mCircleView.setTextTypeface(font);
        //// mCircleView.setUnitTextTypeface(font);
        //
        //
        // //color
        // //you can use a gradient
        // mCircleView.setBarColor(getResources().getColor(R.color.primary), getResources().getColor(R.color.accent));
        //
        // //colors of text and unit can be set via
        // mCircleView.setTextColor(Color.RED);
        // mCircleView.setTextColor(Color.BLUE);
        // //or to use the same color as in the gradient
        // mCircleView.setTextColorAuto(true); //previous set values are ignored
        //
        // //text mode
        // mCircleView.setText("Text"); //shows the given text in the circle view
        // mCircleView.setTextMode(TextMode.TEXT); // Set text mode to text to show text
        //
        // //in the following text modes, the text is ignored
        // mCircleView.setTextMode(TextMode.VALUE); // Shows the current value
        // mCircleView.setTextMode(TextMode.PERCENT); // Shows current percent of the current value from the max value

        //spinning
        // mCircleView.spin(); // start spinning
        // mCircleView.stopSpinning(); // stops spinning. Spinner gets shorter until it disappears.
        // mCircleView.setValueAnimated(24); // stops spinning. Spinner spins until on top. Then fills to set value.

        //animation callbacks
        //this example shows how to show a loading text if it is in spinning mode, and the current percent value otherwise.
        mCircleView.setShowTextWhileSpinning(true); // Show/hide text in spinning mode
        mCircleView.setText("Loading...");
        mCircleView.setOnAnimationStateChangedListener(
                new AnimationStateChangedListener() {
                    @Override
                    public void onAnimationStateChanged(AnimationState _animationState) {
                        switch (_animationState) {
                            case IDLE:
                            case ANIMATING:
                            case START_ANIMATING_AFTER_SPINNING:
                                mCircleView.setTextMode(TextMode.PERCENT); // show percent if not spinning
                                mCircleView.setUnitVisible(mShowUnit);
                                break;
                            case SPINNING:
                                mCircleView.setTextMode(TextMode.TEXT); // show text while spinning
                                mCircleView.setUnitVisible(false);
                                // NOTE(review): no break here — SPINNING falls through to END_SPINNING.
                                // Harmless today (END_SPINNING only breaks) but confirm it is intentional.
                            case END_SPINNING:
                                break;
                            case END_SPINNING_START_ANIMATING:
                                break;
                        }
                    }
                }
        );

        // region setup other ui elements
        //Setup Switch
        // Toggles spin mode on/off.
        mSwitchSpin = (Switch) findViewById(R.id.switch1);
        mSwitchSpin.setOnCheckedChangeListener(
                new CompoundButton.OnCheckedChangeListener() {
                    @Override
                    public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                        if (isChecked) {
                            mCircleView.spin();
                        } else {
                            mCircleView.stopSpinning();
                        }
                    }
                }
        );

        // Toggles unit visibility and keeps mShowUnit in sync for the animation listener above.
        mSwitchShowUnit = (Switch) findViewById(R.id.switch2);
        mSwitchShowUnit.setChecked(mShowUnit);
        mSwitchShowUnit.setOnCheckedChangeListener(
                new CompoundButton.OnCheckedChangeListener() {
                    @Override
                    public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                        mCircleView.setUnitVisible(isChecked);
                        mShowUnit = isChecked;
                    }
                }
        );

        //Setup SeekBar
        // Animates the circle to the chosen value once the user releases the bar.
        mSeekBar = (SeekBar) findViewById(R.id.seekBar);
        mSeekBar.setMax(100);
        mSeekBar.setOnSeekBarChangeListener(
                new SeekBar.OnSeekBarChangeListener() {
                    @Override
                    public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                    }

                    @Override
                    public void onStartTrackingTouch(SeekBar seekBar) {
                    }

                    @Override
                    public void onStopTrackingTouch(SeekBar seekBar) {
                        mCircleView.setValueAnimated(seekBar.getProgress(), 1500);
                        mSwitchSpin.setChecked(false);
                    }
                }
        );

        // Controls the arc length (degrees, 0-360) of the spinner bar.
        mSeekBarSpinnerLength = (SeekBar) findViewById(R.id.seekBar2);
        mSeekBarSpinnerLength.setMax(360);
        mSeekBarSpinnerLength.setOnSeekBarChangeListener(
                new SeekBar.OnSeekBarChangeListener() {
                    @Override
                    public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                    }

                    @Override
                    public void onStartTrackingTouch(SeekBar seekBar) {
                    }

                    @Override
                    public void onStopTrackingTouch(SeekBar seekBar) {
                        mCircleView.setSpinningBarLength(seekBar.getProgress());
                    }
                });

        // Spinner selecting where the unit text is drawn relative to the value.
        mSpinner = (Spinner) findViewById(R.id.spinner);
        List<String> list = new ArrayList<String>();
        list.add("Left Top");
        list.add("Left Bottom");
        list.add("Right Top");
        list.add("Right Bottom");
        list.add("Top");
        list.add("Bottom");
        ArrayAdapter<String> dataAdapter = new ArrayAdapter<String>(this, android.R.layout.simple_spinner_item, list);
        mSpinner.setAdapter(dataAdapter);
        mSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
            @Override
            public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
                // Positions map 1:1 to the adapter entries added above.
                switch (position) {
                    case 0:
                        mCircleView.setUnitPosition(UnitPosition.LEFT_TOP);
                        break;
                    case 1:
                        mCircleView.setUnitPosition(UnitPosition.LEFT_BOTTOM);
                        break;
                    case 2:
                        mCircleView.setUnitPosition(UnitPosition.RIGHT_TOP);
                        break;
                    case 3:
                        mCircleView.setUnitPosition(UnitPosition.RIGHT_BOTTOM);
                        break;
                    case 4:
                        mCircleView.setUnitPosition(UnitPosition.TOP);
                        break;
                    case 5:
                        mCircleView.setUnitPosition(UnitPosition.BOTTOM);
                        break;
                }
            }

            @Override
            public void onNothingSelected(AdapterView<?> parent) {
            }
        });
        mSpinner.setSelection(2);
        //endregion

        // new LongOperation().execute();
    }

    @Override
    protected void onStart() {
        super.onStart();
    }

    /**
     * Demo background task (currently only launched from the commented-out call in onCreate):
     * starts spinning on the UI thread, sleeps 2s, then animates the value to 42.
     */
    private class LongOperation extends AsyncTask<Void, Void, Void> {

        @Override
        protected Void doInBackground(Void... params) {
            MainActivity.this.runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    mCircleView.setValue(0);
                    mCircleView.spin();
                }
            });
            try {
                Thread.sleep(2000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            return null;
        }

        @Override
        protected void onPostExecute(Void aVoid) {
            mCircleView.setValueAnimated(42);
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package gobblin.cluster; import com.google.common.base.Joiner; import java.util.Map; import java.util.Set; import org.I0Itec.zkclient.DataUpdater; import org.apache.helix.AccessOption; import org.apache.helix.ConfigAccessor; import org.apache.helix.HelixAdmin; import org.apache.helix.HelixDataAccessor; import org.apache.helix.HelixManager; import org.apache.helix.PropertyPathConfig; import org.apache.helix.PropertyType; import org.apache.helix.ZNRecord; import org.apache.helix.manager.zk.ZKHelixAdmin; import org.apache.helix.manager.zk.ZKHelixDataAccessor; import org.apache.helix.manager.zk.ZkBaseDataAccessor; import org.apache.helix.manager.zk.ZkClient; import org.apache.helix.store.HelixPropertyStore; import org.apache.helix.store.zk.ZkHelixPropertyStore; import org.apache.helix.task.JobDag; import org.apache.helix.task.TaskConstants; import org.apache.helix.task.TaskDriver; import org.apache.helix.task.TaskState; import org.apache.helix.task.TaskUtil; import org.apache.helix.task.WorkflowConfig; import org.apache.helix.task.WorkflowContext; import org.apache.log4j.Logger; /** * #HELIX-0.6.7-WORKAROUND * Replacement TaskDriver methods to workaround bugs and changes in behavior for the 0.6.7 
 * upgrade
 */
public class GobblinHelixTaskDriver {
  /** For logging */
  private static final Logger LOG = Logger.getLogger(GobblinHelixTaskDriver.class);

  private final HelixDataAccessor _accessor;
  private final ConfigAccessor _cfgAccessor;
  private final HelixPropertyStore<ZNRecord> _propertyStore;
  private final HelixAdmin _admin;
  private final String _clusterName;
  // Delegate used for read-side operations (workflow config/context lookups).
  private final TaskDriver _taskDriver;

  public GobblinHelixTaskDriver(HelixManager manager) {
    // Note: getClusterManagmentTool (sic) is the Helix API's own spelling.
    this(manager.getClusterManagmentTool(), manager.getHelixDataAccessor(), manager
        .getConfigAccessor(), manager.getHelixPropertyStore(), manager.getClusterName());
  }

  public GobblinHelixTaskDriver(ZkClient client, String clusterName) {
    this(client, new ZkBaseDataAccessor<ZNRecord>(client), clusterName);
  }

  public GobblinHelixTaskDriver(ZkClient client, ZkBaseDataAccessor<ZNRecord> baseAccessor, String clusterName) {
    this(new ZKHelixAdmin(client), new ZKHelixDataAccessor(clusterName, baseAccessor),
        new ConfigAccessor(client), new ZkHelixPropertyStore<ZNRecord>(baseAccessor,
            PropertyPathConfig.getPath(PropertyType.PROPERTYSTORE, clusterName), null), clusterName);
  }

  public GobblinHelixTaskDriver(HelixAdmin admin, HelixDataAccessor accessor, ConfigAccessor cfgAccessor,
      HelixPropertyStore<ZNRecord> propertyStore, String clusterName) {
    _admin = admin;
    _accessor = accessor;
    _cfgAccessor = cfgAccessor;
    _propertyStore = propertyStore;
    _clusterName = clusterName;
    _taskDriver = new TaskDriver(admin, accessor, cfgAccessor, propertyStore, clusterName);
  }

  /**
   * Delete a job from an existing named queue,
   * the queue has to be stopped prior to this call
   *
   * @param queueName
   * @param jobName
   */
  public void deleteJob(final String queueName, final String jobName) {
    WorkflowConfig workflowCfg = _taskDriver.getWorkflowConfig(queueName);

    if (workflowCfg == null) {
      throw new IllegalArgumentException("Queue " + queueName + " does not yet exist!");
    }
    if (workflowCfg.isTerminable()) {
      // Terminable workflows are one-shot workflows, not queues.
      throw new IllegalArgumentException(queueName + " is not a queue!");
    }

    boolean isRecurringWorkflow =
        (workflowCfg.getScheduleConfig() != null && workflowCfg.getScheduleConfig().isRecurring());

    if (isRecurringWorkflow) {
      // For a recurring queue the job must be removed from both the currently
      // scheduled instance and the queue template itself.
      WorkflowContext wCtx = _taskDriver.getWorkflowContext(queueName);

      String lastScheduledQueue = wCtx.getLastScheduledSingleWorkflow();

      // delete the current scheduled one
      deleteJobFromScheduledQueue(lastScheduledQueue, jobName, true);

      // Remove the job from the original queue template's DAG
      removeJobFromDag(queueName, jobName);

      // delete the ideal state and resource config for the template job
      final String namespacedJobName = TaskUtil.getNamespacedJobName(queueName, jobName);
      _admin.dropResource(_clusterName, namespacedJobName);

      // Delete the job template from property store
      String jobPropertyPath =
          Joiner.on("/")
              .join(TaskConstants.REBALANCER_CONTEXT_ROOT, namespacedJobName);
      _propertyStore.remove(jobPropertyPath, AccessOption.PERSISTENT);
    } else {
      deleteJobFromScheduledQueue(queueName, jobName, false);
    }
  }

  /**
   * delete a job from a scheduled (non-recurrent) queue.
   *
   * @param queueName
   * @param jobName
   */
  private void deleteJobFromScheduledQueue(final String queueName, final String jobName,
      boolean isRecurrent) {
    WorkflowConfig workflowCfg = _taskDriver.getWorkflowConfig(queueName);

    if (workflowCfg == null) {
      // When try to delete recurrent job, it could be either not started or finished. So
      // there may not be a workflow config.
      if (isRecurrent) {
        return;
      } else {
        throw new IllegalArgumentException("Queue " + queueName + " does not yet exist!");
      }
    }

    WorkflowContext wCtx = _taskDriver.getWorkflowContext(queueName);
    if (wCtx != null && wCtx.getWorkflowState() == null) {
      throw new IllegalStateException("Queue " + queueName + " does not have a valid work state!");
    }

    // #HELIX-0.6.7-WORKAROUND
    // This check is removed to get the same behavior as 0.6.6-SNAPSHOT until new APIs to support delete are provided
    //String workflowState =
    //    (wCtx != null) ? wCtx.getWorkflowState().name() : TaskState.NOT_STARTED.name();
    //if (workflowState.equals(TaskState.IN_PROGRESS.name())) {
    //  throw new IllegalStateException("Queue " + queueName + " is still in progress!");
    //}

    removeJob(queueName, jobName);
  }

  // Removes the job's rebalancer context znode; returns whether the ZK remove succeeded.
  private boolean removeJobContext(HelixPropertyStore<ZNRecord> propertyStore, String jobResource) {
    return propertyStore.remove(
        Joiner.on("/").join(TaskConstants.REBALANCER_CONTEXT_ROOT, jobResource),
        AccessOption.PERSISTENT);
  }

  // Full removal of a job from a (non-recurring) queue: DAG entry, ideal state /
  // resource config, queue JOB_STATES entry, and the job's property-store context.
  private void removeJob(String queueName, String jobName) {
    // Remove the job from the queue in the DAG
    removeJobFromDag(queueName, jobName);

    // delete the ideal state and resource config for the job
    final String namespacedJobName = TaskUtil.getNamespacedJobName(queueName, jobName);
    _admin.dropResource(_clusterName, namespacedJobName);

    // update queue's property to remove job from JOB_STATES if it is already started.
    removeJobStateFromQueue(queueName, jobName);

    // Delete the job from property store
    // NOTE(review): this passes the bare jobName, whereas deleteJob() removes the
    // namespaced path (queueName_jobName) for recurring queues — confirm the bare
    // name is the intended property-store key here.
    removeJobContext(_propertyStore, jobName);
  }

  /** Remove the job name from the DAG from the queue configuration */
  private void removeJobFromDag(final String queueName, final String jobName) {
    final String namespacedJobName = TaskUtil.getNamespacedJobName(queueName, jobName);

    DataUpdater<ZNRecord> dagRemover = new DataUpdater<ZNRecord>() {
      @Override
      public ZNRecord update(ZNRecord currentData) {
        if (currentData == null) {
          LOG.error("Could not update DAG for queue: " + queueName + " ZNRecord is null.");
          return null;
        }
        // Add the node to the existing DAG
        JobDag jobDag = JobDag.fromJson(
            currentData.getSimpleField(WorkflowConfig.WorkflowConfigProperty.Dag.name()));
        Set<String> allNodes = jobDag.getAllNodes();
        if (!allNodes.contains(namespacedJobName)) {
          LOG.warn(
              "Could not delete job from queue " + queueName + ", job " + jobName + " not exists");
          return currentData;
        }
        String parent = null;
        String child = null;
        // remove the node from the queue
        // (a queue DAG is a chain, so the removed node has at most one parent and one child)
        for (String node : allNodes) {
          if (jobDag.getDirectChildren(node).contains(namespacedJobName)) {
            parent = node;
            jobDag.removeParentToChild(parent, namespacedJobName);
          } else if (jobDag.getDirectParents(node).contains(namespacedJobName)) {
            child = node;
            jobDag.removeParentToChild(namespacedJobName, child);
          }
        }
        // re-link the removed node's neighbors so the chain stays connected
        if (parent != null && child != null) {
          jobDag.addParentToChild(parent, child);
        }
        jobDag.removeNode(namespacedJobName);

        // Save the updated DAG
        try {
          currentData
              .setSimpleField(WorkflowConfig.WorkflowConfigProperty.Dag.name(), jobDag.toJson());
        } catch (Exception e) {
          throw new IllegalStateException(
              "Could not remove job " + jobName + " from DAG of queue " + queueName, e);
        }
        return currentData;
      }
    };

    String path = _accessor.keyBuilder().resourceConfig(queueName).getPath();
    if (!_accessor.getBaseDataAccessor().update(path, dagRemover, AccessOption.PERSISTENT)) {
      throw new IllegalArgumentException(
          "Could not remove job " + jobName + " from DAG of queue " + queueName);
    }
  }

  /** update queue's property to remove job from JOB_STATES if it is already started. */
  private void removeJobStateFromQueue(final String queueName, final String jobName) {
    final String namespacedJobName = TaskUtil.getNamespacedJobName(queueName, jobName);
    String queuePropertyPath =
        Joiner.on("/")
            .join(TaskConstants.REBALANCER_CONTEXT_ROOT, queueName, TaskUtil.CONTEXT_NODE);

    DataUpdater<ZNRecord> updater = new DataUpdater<ZNRecord>() {
      @Override
      public ZNRecord update(ZNRecord currentData) {
        if (currentData != null) {
          Map<String, String> states = currentData.getMapField(WorkflowContext.JOB_STATES);
          if (states != null && states.containsKey(namespacedJobName)) {
            states.keySet().remove(namespacedJobName);
          }
        }
        return currentData;
      }
    };
    if (!_propertyStore.update(queuePropertyPath, updater, AccessOption.PERSISTENT)) {
      // Best-effort: a stale/missing JOB_STATES entry is only logged, not fatal.
      LOG.warn("Fail to remove job state for job " + namespacedJobName + " from queue " + queueName);
    }
  }
}
package com.nirima.jenkins.plugins.docker.builder;

import com.github.dockerjava.api.DockerClient;
import com.github.dockerjava.api.command.PullImageCmd;
import com.github.dockerjava.api.exception.DockerClientException;
import com.github.dockerjava.api.exception.DockerException;
import com.github.dockerjava.api.exception.NotFoundException;
import com.github.dockerjava.api.model.PullResponseItem;
import com.github.dockerjava.core.command.PullImageResultCallback;
import com.google.common.base.Strings;
import com.nirima.jenkins.plugins.docker.DockerCloud;
import com.nirima.jenkins.plugins.docker.DockerSimpleTemplate;
import com.nirima.jenkins.plugins.docker.DockerTemplateBase;
import hudson.Extension;
import hudson.Launcher;
import hudson.model.AbstractBuild;
import hudson.model.Run;
import hudson.model.TaskListener;
import io.jenkins.docker.client.DockerAPI;
import org.jenkinsci.plugins.docker.commons.credentials.DockerRegistryEndpoint;
import org.jenkinsci.plugins.tokenmacro.TokenMacro;
import org.kohsuke.stapler.DataBoundConstructor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.PrintStream;

/**
 * Build step that allows run container through existed DockerCloud
 *
 * @author magnayn
 */
public class DockerBuilderControlOptionRun extends DockerBuilderControlCloudOption {
    private static final long serialVersionUID = -3444073364874467342L;
    private static final Logger LOG = LoggerFactory.getLogger(DockerBuilderControlOptionRun.class);

    public final String image;
    private String pullCredentialsId;
    // Lazily built from pullCredentialsId in getRegistry(); transient so it is
    // reconstructed after deserialization rather than persisted.
    private transient DockerRegistryEndpoint registry;
    public final String dnsString;
    public final String network;
    public final String dockerCommand;
    public final String mountsString;
    public final String volumesFrom;
    public final String environmentsString;
    public final boolean privileged;
    public final boolean tty;
    public final String hostname;
    public final String user;
    public final String extraGroupsString;
    public final String bindPorts;
    public final Integer memoryLimit;
    public final Integer memorySwap;
    public final Long cpuPeriod;
    public final Long cpuQuota;
    public final Integer cpuShares;
    public final Integer shmSize;
    public final boolean bindAllPorts;
    public final String macAddress;

    // Bound from the Jenkins job-configuration form; parameter names must match
    // the form field names, so they cannot be renamed.
    @DataBoundConstructor
    public DockerBuilderControlOptionRun(String cloudName,
                                         String image,
                                         String pullCredentialsId,
                                         String dnsString,
                                         String network,
                                         String dockerCommand,
                                         String mountsString,
                                         String volumesFrom,
                                         String environmentsString,
                                         String hostname,
                                         String user,
                                         String extraGroupsString,
                                         Integer memoryLimit,
                                         Integer memorySwap,
                                         Long cpuPeriod,
                                         Long cpuQuota,
                                         Integer cpuShares,
                                         Integer shmSize,
                                         String bindPorts,
                                         boolean bindAllPorts,
                                         boolean privileged,
                                         boolean tty,
                                         String macAddress) {
        super(cloudName);
        this.image = image;
        this.pullCredentialsId = pullCredentialsId;
        this.dnsString = dnsString;
        this.network = network;
        this.dockerCommand = dockerCommand;
        this.mountsString = mountsString;
        this.volumesFrom = volumesFrom;
        this.environmentsString = environmentsString;
        this.privileged = privileged;
        this.tty = tty;
        this.hostname = hostname;
        this.user = user;
        this.extraGroupsString = extraGroupsString;
        this.bindPorts = bindPorts;
        this.memoryLimit = memoryLimit;
        this.memorySwap = memorySwap;
        this.cpuPeriod = cpuPeriod;
        this.cpuQuota = cpuQuota;
        this.cpuShares = cpuShares;
        this.shmSize = shmSize;
        this.bindAllPorts = bindAllPorts;
        this.macAddress = macAddress;
    }

    // Lazily constructs the registry endpoint from the stored credentials id.
    public DockerRegistryEndpoint getRegistry() {
        if (registry == null) {
            registry = new DockerRegistryEndpoint(null, pullCredentialsId);
        }
        return registry;
    }

    /**
     * Pulls the (variable-expanded) image and then starts a container from it on the
     * configured cloud, recording the started container on the build's launch action.
     */
    @Override
    public void execute(Run<?, ?> build, Launcher launcher, TaskListener listener)
            throws DockerException, IOException {
        final PrintStream llog = listener.getLogger();
        final DockerCloud cloud = getCloud(build,launcher);
        final DockerAPI dockerApi = cloud.getDockerApi();

        // Token-macro expansion of user-configured fields against the build.
        String xImage = expand(build, image);
        String xCommand = expand(build, dockerCommand);
        String xHostname = expand(build, hostname);
        String xUser = expand(build, user);

        LOG.info("Pulling image {}", xImage);
        llog.println("Pulling image " + xImage);

        // need a client that will tolerate lengthy pauses for a docker pull
        try(final DockerClient clientWithoutReadTimeout = dockerApi.getClient(0)) {
            executePullOnDocker(build, llog, xImage, clientWithoutReadTimeout);
        }
        // but the remainder can use a normal client with the default timeout
        try(final DockerClient client = dockerApi.getClient()) {
            executeOnDocker(build, llog, xImage, xCommand, xHostname, xUser, client);
        }
    }

    // Runs the pull synchronously, streaming status lines to the build log.
    private void executePullOnDocker(Run<?, ?> build, PrintStream llog, String xImage,
            DockerClient client) throws DockerException {
        PullImageResultCallback resultCallback = new PullImageResultCallback() {
            @Override
            public void onNext(PullResponseItem item) {
                // Only report coarse status lines, not per-layer progress ticks.
                if (item.getStatus() != null && item.getProgress() == null) {
                    // NOTE(review): print (no newline) — status lines will run together
                    // in the build log; confirm whether println was intended.
                    llog.print(item.getId() + ":" + item.getStatus());
                    LOG.info("{} : {}", item.getId(), item.getStatus());
                }
                super.onNext(item);
            }
        };

        PullImageCmd cmd = client.pullImageCmd(xImage);
        DockerCloud.setRegistryAuthentication(cmd, getRegistry(), build.getParent().getParent());
        try {
            cmd.exec(resultCallback).awaitCompletion();
        } catch (InterruptedException e) {
            throw new DockerClientException("Interrupted while pulling image", e);
        }
    }

    // Verifies the image exists locally, builds a run template from this step's
    // settings, starts the container, and registers it with the build's launch action.
    private void executeOnDocker(Run<?, ?> build, PrintStream llog, String xImage,
            String xCommand, String xHostname, String xUser, DockerClient client)
            throws DockerException {
        try {
            client.inspectImageCmd(xImage).exec();
        } catch (NotFoundException e) {
            throw new DockerClientException("Failed to pull image: " + image, e);
        }

        DockerTemplateBase template = new DockerSimpleTemplate(xImage,
                pullCredentialsId,
                dnsString,
                network,
                xCommand,
                mountsString,
                volumesFrom,
                environmentsString,
                xHostname,
                xUser,
                extraGroupsString,
                memoryLimit,
                memorySwap,
                cpuPeriod,
                cpuQuota,
                cpuShares,
                shmSize,
                bindPorts,
                bindAllPorts,
                privileged,
                tty,
                macAddress,
                null);

        LOG.info("Starting container for image {}", xImage);
        llog.println("Starting container for image " + xImage);
        String containerId = DockerCloud.runContainer(template, client);

        LOG.info("Started container {}", containerId);
        llog.println("Started container " + containerId);

        getLaunchAction(build).started(client, containerId);
    }

    // Best-effort token-macro expansion; on any failure the original text is
    // returned unchanged (intentional — expansion is optional, not fatal).
    // NOTE(review): @SuppressWarnings("unused") looks stale — this method is
    // called from execute(); confirm before relying on it staying suppressed.
    @SuppressWarnings("unused")
    private static String expand(Run<?, ?> build, String text) {
        try {
            if (build instanceof AbstractBuild && !Strings.isNullOrEmpty(text)) {
                return TokenMacro.expandAll((AbstractBuild) build, TaskListener.NULL, text);
            }
        } catch (Exception e) {
            LOG.info("Unable to expand variables in text {}", text);
        }

        return text;
    }

    @Extension
    public static final class DescriptorImpl extends DockerBuilderControlOptionDescriptor {
        @Override
        public String getDisplayName() {
            return "Run Container";
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.sling.validation.impl.setup;

import org.apache.sling.api.resource.LoginException;
import org.apache.sling.api.resource.PersistenceException;
import org.apache.sling.api.resource.Resource;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.commons.testing.jcr.RepositoryProvider;
import org.apache.sling.commons.testing.jcr.RepositoryUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.jcr.Node;
import javax.jcr.NodeIterator;
import javax.jcr.PathNotFoundException;
import javax.jcr.Repository;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.servlet.http.HttpServletRequest;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

/**
 * Test-support implementation of {@link ResourceResolver} backed by a real
 * in-memory JCR repository (via {@link RepositoryProvider}).
 * <p>
 * Only the operations needed by the validation tests are implemented; every
 * other {@code ResourceResolver} method throws
 * {@link UnsupportedOperationException}. All returned resources are
 * {@link MockedResource} instances of type {@code nt:unstructured} and are
 * tracked so they can be closed together in {@link #close()}.
 * <p>
 * Not thread-safe beyond the lazy session creation in {@link #createSession()}.
 */
public class MockedResourceResolver implements ResourceResolver {

    private static final Logger LOG = LoggerFactory.getLogger(MockedResourceResolver.class);

    /** Fixed search paths reported by {@link #getSearchPath()}. */
    private static final String[] SEARCH_PATHS = new String[] {"/apps", "/libs"};

    /** Public so tests can reach the underlying repository directly. */
    public final RepositoryProvider repoProvider;

    /** Every resource handed out through {@link #register(MockedResource)}; closed in {@link #close()}. */
    private List<MockedResource> resources = new LinkedList<MockedResource>();

    /** Lazily created administrative session; {@code null} after {@link #close()}. */
    private Session session;

    /**
     * Creates the resolver, opens the administrative session and registers the
     * Sling node types needed by the tests.
     *
     * @throws Exception if the repository cannot be obtained or the node types
     *             cannot be registered
     */
    public MockedResourceResolver() throws Exception {
        this.repoProvider = RepositoryProvider.instance();
        createSession();
        RepositoryUtil.registerSlingNodeTypes(session);
    }

    /**
     * Returns the administrative session, creating it lazily on first use (or
     * after {@link #close()} has discarded the previous one).
     *
     * @return the shared administrative session, never {@code null}
     * @throws RepositoryException if the login fails
     */
    public Session createSession() throws RepositoryException {
        synchronized (this) {
            if (session != null) {
                return session;
            }
            session = repoProvider.getRepository().loginAdministrative(null);
            return session;
        }
    }

    /**
     * Supports adapting to {@link Session} and {@link Repository} only.
     */
    @SuppressWarnings("unchecked")
    public <AdapterType> AdapterType adaptTo(Class<AdapterType> type) {
        if (type.equals(Session.class)) {
            try {
                return (AdapterType) createSession();
            } catch (RepositoryException e) {
                throw new RuntimeException("RepositoryException: " + e, e);
            }
        } else if (type.equals(Repository.class)) {
            try {
                return (AdapterType) repoProvider.getRepository();
            } catch (RepositoryException e) {
                throw new RuntimeException("RepositoryException: " + e, e);
            }
        }
        throw new UnsupportedOperationException("Not implemented");
    }

    public Resource resolve(HttpServletRequest request, String absPath) {
        throw new UnsupportedOperationException("Not implemented");
    }

    public Resource resolve(String absPath) {
        throw new UnsupportedOperationException("Not implemented");
    }

    @Deprecated
    public Resource resolve(HttpServletRequest request) {
        throw new UnsupportedOperationException("Not implemented");
    }

    public String map(String resourcePath) {
        throw new UnsupportedOperationException("Not implemented");
    }

    public String map(HttpServletRequest request, String resourcePath) {
        throw new UnsupportedOperationException("Not implemented");
    }

    /**
     * Returns a fresh {@link MockedResource} for {@code path} if a node exists
     * at that path, or {@code null} when the path is not found.
     */
    public Resource getResource(String path) {
        Session session;
        try {
            session = createSession();
            // Existence check only; the node itself is re-read by MockedResource.
            session.getNode(path);
        } catch (PathNotFoundException e) {
            return null;
        } catch (RepositoryException e) {
            throw new RuntimeException("RepositoryException: " + e, e);
        }
        return new MockedResource(this, path, "nt:unstructured");
    }

    /**
     * Resolves {@code path} relative to {@code base}. Only relative child
     * paths are supported, matching the original test usage.
     */
    public Resource getResource(Resource base, String path) {
        if (base.getPath().equals("/")) {
            return getResource("/" + path);
        } else {
            return getResource(base.getPath() + "/" + path);
        }
    }

    public String[] getSearchPath() {
        return SEARCH_PATHS;
    }

    /**
     * Iterates the JCR child nodes of {@code parent}, wrapping each one in a
     * {@link MockedResource} of type {@code nt:unstructured}.
     */
    public Iterator<Resource> listChildren(Resource parent) {
        try {
            Node node = parent.adaptTo(Node.class);
            final NodeIterator nodes = node.getNodes();
            return new Iterator<Resource>() {
                public void remove() {
                    throw new UnsupportedOperationException();
                }

                public Resource next() {
                    Node next = nodes.nextNode();
                    try {
                        return new MockedResource(MockedResourceResolver.this, next.getPath(),
                                "nt:unstructured");
                    } catch (RepositoryException e) {
                        throw new RuntimeException("RepositoryException: " + e, e);
                    }
                }

                public boolean hasNext() {
                    return nodes.hasNext();
                }
            };
        } catch (RepositoryException e) {
            throw new RuntimeException("RepositoryException: " + e, e);
        }
    }

    public Iterable<Resource> getChildren(Resource parent) {
        throw new UnsupportedOperationException("Not implemented");
    }

    /**
     * Executes a JCR query and wraps every result node in a
     * {@link MockedResource}. Query failures are logged and yield an empty
     * iterator rather than propagating.
     */
    public Iterator<Resource> findResources(String query, String language) {
        List<Resource> resources = new ArrayList<Resource>();
        try {
            // Use createSession() instead of the raw field so the query also
            // works after close() has discarded the previous session.
            NodeIterator iterator = createSession().getWorkspace().getQueryManager()
                    .createQuery(query, language).execute().getNodes();
            while (iterator.hasNext()) {
                Node n = iterator.nextNode();
                Resource resource = new MockedResource(this, n);
                resources.add(resource);
            }
        } catch (RepositoryException e) {
            LOG.error("Unable to execute JCR query", e);
        }
        return resources.iterator();
    }

    public Iterator<Map<String, Object>> queryResources(String query, String language) {
        throw new UnsupportedOperationException("Not implemented");
    }

    public ResourceResolver clone(Map<String, Object> authenticationInfo) throws LoginException {
        throw new UnsupportedOperationException("Not implemented");
    }

    public boolean isLive() {
        throw new UnsupportedOperationException("Not implemented");
    }

    /**
     * Closes all registered resources and logs out of the session. The session
     * field is reset so a later {@link #createSession()} opens a new one.
     */
    public void close() {
        // Iterate a copy: MockedResource.close() may interact with the
        // registration list while we are walking it.
        for (MockedResource r : new ArrayList<MockedResource>(resources)) {
            r.close();
        }
        if (session != null) {
            if (session.isLive()) {
                session.logout();
            }
            session = null;
        }
    }

    /**
     * Tracks a resource so it is closed when this resolver is closed.
     */
    public void register(MockedResource mockedResource) {
        resources.add(mockedResource);
    }

    public String getUserID() {
        throw new UnsupportedOperationException("Not implemented");
    }

    public Iterator<String> getAttributeNames() {
        throw new UnsupportedOperationException("Not implemented");
    }

    public Object getAttribute(String name) {
        throw new UnsupportedOperationException("Not implemented");
    }

    /**
     * Removes the backing JCR node and forgets the resource. The change is not
     * persisted until {@link #commit()} is called.
     *
     * @throws PersistenceException if the node removal fails
     */
    public void delete(Resource resource) throws PersistenceException {
        // List.remove is a no-op for absent elements; no contains() check needed.
        resources.remove(resource);
        Node node = resource.adaptTo(Node.class);
        try {
            node.remove();
        } catch (RepositoryException e) {
            throw new PersistenceException("RepositoryException: " + e, e);
        }
    }

    /**
     * Creates a child node under {@code parent} named {@code name}. The
     * optional {@code jcr:primaryType} property selects the node type; the
     * remaining properties must be {@code String}, {@code Boolean} or
     * {@code String[]} values.
     *
     * @return the resource for the newly created child
     * @throws PersistenceException declared for interface compatibility;
     *             repository failures surface as {@link RuntimeException}
     */
    public Resource create(Resource parent, String name, Map<String, Object> properties)
            throws PersistenceException {
        final Node parentNode = parent.adaptTo(Node.class);
        try {
            final Node child;
            if (properties != null && properties.containsKey("jcr:primaryType")) {
                child = parentNode.addNode(name, (String) properties.get("jcr:primaryType"));
            } else {
                child = parentNode.addNode(name);
            }
            if (properties != null) {
                final Iterator<Entry<String, Object>> it = properties.entrySet().iterator();
                while (it.hasNext()) {
                    final Entry<String, Object> entry = it.next();
                    if (entry.getKey().equals("jcr:primaryType")) {
                        // Already consumed as the node type above.
                        continue;
                    }
                    if (entry.getValue() instanceof String) {
                        child.setProperty(entry.getKey(), (String) entry.getValue());
                    } else if (entry.getValue() instanceof Boolean) {
                        child.setProperty(entry.getKey(), (Boolean) entry.getValue());
                    } else if (entry.getValue() instanceof String[]) {
                        child.setProperty(entry.getKey(), (String[]) entry.getValue());
                    } else {
                        throw new UnsupportedOperationException("Not implemented");
                    }
                }
            }
            return getResource(parent, name);
        } catch (RepositoryException e) {
            throw new RuntimeException(e);
        }
    }

    public void revert() {
        throw new UnsupportedOperationException("Not implemented");
    }

    /**
     * Saves all pending session changes.
     *
     * @throws PersistenceException if the JCR save fails
     */
    public void commit() throws PersistenceException {
        try {
            // createSession() keeps this safe even after close() nulled the field.
            createSession().save();
        } catch (final RepositoryException re) {
            throw new PersistenceException("Unable to commit changes.", re);
        }
    }

    public boolean hasChanges() {
        throw new UnsupportedOperationException("Not implemented");
    }

    public String getParentResourceType(Resource resource) {
        // Not needed by the tests.
        return null;
    }

    public String getParentResourceType(String resourceType) {
        // Not needed by the tests.
        return null;
    }

    public boolean isResourceType(Resource resource, String resourceType) {
        // Not needed by the tests.
        return false;
    }

    public void refresh() {
        // Intentionally a no-op for the mock.
    }
}
/* * JBoss, Home of Professional Open Source * Copyright 2013 Red Hat Inc. and/or its affiliates and other contributors * as indicated by the @authors tag. All rights reserved. */ package org.searchisko.api.reindexer; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.enterprise.event.Event; import org.junit.Assert; import org.junit.Test; import org.mockito.Mockito; import org.searchisko.api.ContentObjectFields; import org.searchisko.api.service.ContributorProfileService; import org.searchisko.api.service.ContributorService; import org.searchisko.api.service.ProjectService; import org.searchisko.api.service.ProviderService; import org.searchisko.api.service.ProviderServiceTest; import org.searchisko.api.service.SearchClientService; import org.searchisko.api.tasker.Task; import org.searchisko.api.tasker.TaskConfigurationException; import org.searchisko.api.tasker.UnsupportedTaskException; import org.searchisko.persistence.service.ContentPersistenceService; /** * Unit test for {@link ReindexingTaskFactory}. 
* * @author Vlastimil Elias (velias at redhat dot com) */ public class ReindexingTaskFactoryTest { @Test public void listSupportedTaskTypes() { ReindexingTaskFactory tested = new ReindexingTaskFactory(); List<String> t = tested.listSupportedTaskTypes(); Assert.assertEquals(ReindexingTaskTypes.values().length, t.size()); Assert.assertTrue(t.contains(ReindexingTaskTypes.REINDEX_FROM_PERSISTENCE.getTaskType())); } @Test public void createTask() throws TaskConfigurationException, UnsupportedTaskException { ReindexingTaskFactory tested = getTested(); try { tested.createTask("nonsense", null); Assert.fail("UnsupportedTaskException expected"); } catch (UnsupportedTaskException e) { // OK } } @Test public void createTask_REINDEX_FROM_PERSISTENCE() throws TaskConfigurationException, UnsupportedTaskException { ReindexingTaskFactory tested = getTested(); // case - missing content type in configuration try { tested.createTask(ReindexingTaskTypes.REINDEX_FROM_PERSISTENCE.getTaskType(), null); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { // OK } // case - missing content type in configuration try { Map<String, Object> config = new HashMap<String, Object>(); tested.createTask(ReindexingTaskTypes.REINDEX_FROM_PERSISTENCE.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { // OK } // case - missing content type in configuration try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_SYS_CONTENT_TYPE, " "); tested.createTask(ReindexingTaskTypes.REINDEX_FROM_PERSISTENCE.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("sys_content_type configuration property must be defined", e.getMessage()); } // case - nonexisting content type in configuration { Mockito.when(tested.providerService.findContentType("mytype")).thenReturn(null); Map<String, 
Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_SYS_CONTENT_TYPE, "mytype"); try { tested.createTask(ReindexingTaskTypes.REINDEX_FROM_PERSISTENCE.getTaskType(), config); } catch (TaskConfigurationException e) { Assert.assertEquals("Content type 'mytype' doesn't exists.", e.getMessage()); } } // case - nonpersistent content type in configuration { Map<String, Object> typeDef = new HashMap<String, Object>(); typeDef.put(ProviderService.PERSIST, false); Mockito.when(tested.providerService.findContentType("mytype")).thenReturn( ProviderServiceTest.createProviderContentTypeInfo(typeDef)); Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_SYS_CONTENT_TYPE, "mytype"); try { tested.createTask(ReindexingTaskTypes.REINDEX_FROM_PERSISTENCE.getTaskType(), config); } catch (TaskConfigurationException e) { Assert.assertEquals("Content type 'mytype' is not persisted.", e.getMessage()); } } // case - everything is OK { Map<String, Object> typeDef = new HashMap<String, Object>(); typeDef.put(ProviderService.PERSIST, true); Mockito.when(tested.providerService.findContentType("mytype")).thenReturn( ProviderServiceTest.createProviderContentTypeInfo(typeDef)); Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_SYS_CONTENT_TYPE, "mytype"); Task task = tested.createTask(ReindexingTaskTypes.REINDEX_FROM_PERSISTENCE.getTaskType(), config); Assert.assertEquals(ReindexFromPersistenceTask.class, task.getClass()); ReindexFromPersistenceTask ctask = (ReindexFromPersistenceTask) task; Assert.assertEquals("mytype", ctask.sysContentType); Assert.assertEquals(tested.contentPersistenceService, ctask.contentPersistenceService); Assert.assertEquals(tested.providerService, ctask.providerService); Assert.assertEquals(tested.searchClientService, ctask.searchClientService); Assert.assertEquals(tested.eventBeforeIndexed, ctask.eventBeforeIndexed); } } @Test public void 
createTask_RENORMALIZE_BY_CONTENT_TYPE() throws TaskConfigurationException, UnsupportedTaskException { ReindexingTaskFactory tested = getTested(); // case - missing content type in configuration try { tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTENT_TYPE.getTaskType(), null); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { // OK } // case - missing content type in configuration try { Map<String, Object> config = new HashMap<String, Object>(); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTENT_TYPE.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { // OK } // case - missing content type in configuration try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_SYS_CONTENT_TYPE, " "); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTENT_TYPE.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("sys_content_type configuration property must be defined", e.getMessage()); } // case - nonexisting content type in configuration { Mockito.when(tested.providerService.findContentType("mytype")).thenReturn(null); Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_SYS_CONTENT_TYPE, "mytype"); try { tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTENT_TYPE.getTaskType(), config); } catch (TaskConfigurationException e) { Assert.assertEquals("Content type 'mytype' doesn't exists.", e.getMessage()); } } // case - everything is OK { Map<String, Object> typeDef = new HashMap<String, Object>(); Mockito.when(tested.providerService.findContentType("mytype")).thenReturn( ProviderServiceTest.createProviderContentTypeInfo(typeDef)); Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_SYS_CONTENT_TYPE, "mytype"); Task task = 
tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTENT_TYPE.getTaskType(), config); Assert.assertEquals(RenormalizeByContentTypeTask.class, task.getClass()); RenormalizeByContentTypeTask ctask = (RenormalizeByContentTypeTask) task; Assert.assertEquals("mytype", ctask.sysContentType); Assert.assertEquals(tested.providerService, ctask.providerService); Assert.assertEquals(tested.searchClientService, ctask.searchClientService); } } @Test public void createTask_RENORMALIZE_BY_PROJECT_CODE() throws TaskConfigurationException, UnsupportedTaskException { ReindexingTaskFactory tested = getTested(); // case - missing project code in configuration try { tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_PROJECT_CODE.getTaskType(), null); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { // OK } // case - missing project code in configuration try { Map<String, Object> config = new HashMap<String, Object>(); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_PROJECT_CODE.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { // OK } // case - missing project code in configuration try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_PROJECT_CODE, " "); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_PROJECT_CODE.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("project_code configuration property must be defined", e.getMessage()); } try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_PROJECT_CODE, new String[] {}); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_PROJECT_CODE.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("project_code configuration property must be defined", 
e.getMessage()); } try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_PROJECT_CODE, new String[] { " " }); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_PROJECT_CODE.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("project_code configuration property must be defined", e.getMessage()); } try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_PROJECT_CODE, new ArrayList<String>()); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_PROJECT_CODE.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("project_code configuration property must be defined", e.getMessage()); } try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_PROJECT_CODE, Arrays.asList(new String[] { " ", "" })); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_PROJECT_CODE.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("project_code configuration property must be defined", e.getMessage()); } // case - everything is OK { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_PROJECT_CODE, "myproject"); Task task = tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_PROJECT_CODE.getTaskType(), config); Assert.assertEquals(RenormalizeByEsValueTask.class, task.getClass()); RenormalizeByEsValueTask ctask = (RenormalizeByEsValueTask) task; Assert.assertEquals(ContentObjectFields.SYS_PROJECT, ctask.esField); Assert.assertEquals("myproject", ctask.esValues[0]); Assert.assertEquals(tested.providerService, ctask.providerService); Assert.assertEquals(tested.searchClientService, ctask.searchClientService); } { Map<String, Object> config = new 
HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_PROJECT_CODE, new String[] { "myproject", "myproject2" }); Task task = tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_PROJECT_CODE.getTaskType(), config); Assert.assertEquals(RenormalizeByEsValueTask.class, task.getClass()); RenormalizeByEsValueTask ctask = (RenormalizeByEsValueTask) task; Assert.assertEquals(ContentObjectFields.SYS_PROJECT, ctask.esField); Assert.assertEquals("myproject", ctask.esValues[0]); Assert.assertEquals("myproject2", ctask.esValues[1]); Assert.assertEquals(tested.providerService, ctask.providerService); Assert.assertEquals(tested.searchClientService, ctask.searchClientService); } { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_PROJECT_CODE, Arrays.asList(new String[] { "myproject", "myproject2" })); Task task = tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_PROJECT_CODE.getTaskType(), config); Assert.assertEquals(RenormalizeByEsValueTask.class, task.getClass()); RenormalizeByEsValueTask ctask = (RenormalizeByEsValueTask) task; Assert.assertEquals(ContentObjectFields.SYS_PROJECT, ctask.esField); Assert.assertEquals("myproject", ctask.esValues[0]); Assert.assertEquals("myproject2", ctask.esValues[1]); Assert.assertEquals(tested.providerService, ctask.providerService); Assert.assertEquals(tested.searchClientService, ctask.searchClientService); } } @Test public void createTask_RENORMALIZE_BY_CONTRIBUTOR_CODE() throws TaskConfigurationException, UnsupportedTaskException { ReindexingTaskFactory tested = getTested(); // case - missing project code in configuration try { tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTRIBUTOR_CODE.getTaskType(), null); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { // OK } // case - missing project code in configuration try { Map<String, Object> config = new HashMap<String, Object>(); 
tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTRIBUTOR_CODE.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { // OK } // case - missing project code in configuration try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_CODE, " "); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTRIBUTOR_CODE.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("contributor_code configuration property must be defined", e.getMessage()); } try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_CODE, new String[] {}); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTRIBUTOR_CODE.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("contributor_code configuration property must be defined", e.getMessage()); } try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_CODE, new String[] { " " }); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTRIBUTOR_CODE.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("contributor_code configuration property must be defined", e.getMessage()); } try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_CODE, new ArrayList<String>()); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTRIBUTOR_CODE.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("contributor_code configuration property must be defined", e.getMessage()); } try { Map<String, Object> config = new HashMap<String, 
Object>(); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_CODE, Arrays.asList(new String[] { " ", "" })); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTRIBUTOR_CODE.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("contributor_code configuration property must be defined", e.getMessage()); } // case - everything is OK { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_CODE, "myproject"); Task task = tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTRIBUTOR_CODE.getTaskType(), config); Assert.assertEquals(RenormalizeByEsValueTask.class, task.getClass()); RenormalizeByEsValueTask ctask = (RenormalizeByEsValueTask) task; Assert.assertEquals(ContentObjectFields.SYS_CONTRIBUTORS, ctask.esField); Assert.assertEquals("myproject", ctask.esValues[0]); Assert.assertEquals(tested.providerService, ctask.providerService); Assert.assertEquals(tested.searchClientService, ctask.searchClientService); } { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_CODE, new String[] { "myproject", "myproject2" }); Task task = tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTRIBUTOR_CODE.getTaskType(), config); Assert.assertEquals(RenormalizeByEsValueTask.class, task.getClass()); RenormalizeByEsValueTask ctask = (RenormalizeByEsValueTask) task; Assert.assertEquals(ContentObjectFields.SYS_CONTRIBUTORS, ctask.esField); Assert.assertEquals("myproject", ctask.esValues[0]); Assert.assertEquals("myproject2", ctask.esValues[1]); Assert.assertEquals(tested.providerService, ctask.providerService); Assert.assertEquals(tested.searchClientService, ctask.searchClientService); } { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_CODE, Arrays.asList(new String[] { "myproject", "myproject2" })); Task task = 
tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTRIBUTOR_CODE.getTaskType(), config); Assert.assertEquals(RenormalizeByEsValueTask.class, task.getClass()); RenormalizeByEsValueTask ctask = (RenormalizeByEsValueTask) task; Assert.assertEquals(ContentObjectFields.SYS_CONTRIBUTORS, ctask.esField); Assert.assertEquals("myproject", ctask.esValues[0]); Assert.assertEquals("myproject2", ctask.esValues[1]); Assert.assertEquals(tested.providerService, ctask.providerService); Assert.assertEquals(tested.searchClientService, ctask.searchClientService); } } @Test public void createTask_RENORMALIZE_BY_CONTRIBUTOR_LOOKUP_ID() throws TaskConfigurationException, UnsupportedTaskException { ReindexingTaskFactory tested = getTested(); // case - missing project code in configuration try { tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTRIBUTOR_LOOKUP_ID.getTaskType(), null); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { // OK } // case - missing both config values in configuration try { Map<String, Object> config = new HashMap<String, Object>(); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTRIBUTOR_LOOKUP_ID.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { // OK } // case - missing CFG_CONTRIBUTOR_ID_TYPE in configuration try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_ID_VALUE, "value"); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTRIBUTOR_LOOKUP_ID.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("contributor_id_type configuration property must be defined", e.getMessage()); } try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_ID_TYPE, ""); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_ID_VALUE, "value"); 
tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTRIBUTOR_LOOKUP_ID.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("contributor_id_type configuration property must be defined", e.getMessage()); } try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_ID_TYPE, " "); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_ID_VALUE, "value"); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTRIBUTOR_LOOKUP_ID.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("contributor_id_type configuration property must be defined", e.getMessage()); } // case - missing CFG_CONTRIBUTOR_ID_VALUE in configuration try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_ID_TYPE, "idtype"); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTRIBUTOR_LOOKUP_ID.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("contributor_id_value configuration property must be defined", e.getMessage()); } try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_ID_TYPE, "idtype"); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_ID_VALUE, " "); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTRIBUTOR_LOOKUP_ID.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("contributor_id_value configuration property must be defined", e.getMessage()); } try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_ID_TYPE, "idtype"); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_ID_VALUE, new String[] {}); 
tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTRIBUTOR_LOOKUP_ID.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("contributor_id_value configuration property must be defined", e.getMessage()); } try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_ID_TYPE, "idtype"); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_ID_VALUE, new String[] { " " }); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTRIBUTOR_LOOKUP_ID.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("contributor_id_value configuration property must be defined", e.getMessage()); } try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_ID_TYPE, "idtype"); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_ID_VALUE, new ArrayList<String>()); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTRIBUTOR_LOOKUP_ID.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("contributor_id_value configuration property must be defined", e.getMessage()); } try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_ID_TYPE, "idtype"); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_ID_VALUE, Arrays.asList(new String[] { " ", "" })); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTRIBUTOR_LOOKUP_ID.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("contributor_id_value configuration property must be defined", e.getMessage()); } // case - everything is OK { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_ID_TYPE, 
"idtype"); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_ID_VALUE, "idvalue"); Task task = tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTRIBUTOR_LOOKUP_ID.getTaskType(), config); Assert.assertEquals(RenormalizeByEsLookedUpValuesTask.class, task.getClass()); RenormalizeByEsLookedUpValuesTask ctask = (RenormalizeByEsLookedUpValuesTask) task; Assert.assertEquals(ContributorService.SEARCH_INDEX_NAME, ctask.lookupIndex); Assert.assertEquals(ContributorService.SEARCH_INDEX_TYPE, ctask.lookupType); Assert.assertEquals("idtype", ctask.lookupField); Assert.assertEquals("idvalue", ctask.esValues[0]); Assert.assertEquals(tested.providerService, ctask.providerService); Assert.assertEquals(tested.searchClientService, ctask.searchClientService); } { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_ID_TYPE, "idtype"); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_ID_VALUE, new String[] { "myproject", "myproject2" }); Task task = tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTRIBUTOR_LOOKUP_ID.getTaskType(), config); Assert.assertEquals(RenormalizeByEsLookedUpValuesTask.class, task.getClass()); RenormalizeByEsLookedUpValuesTask ctask = (RenormalizeByEsLookedUpValuesTask) task; Assert.assertEquals(ContributorService.SEARCH_INDEX_NAME, ctask.lookupIndex); Assert.assertEquals(ContributorService.SEARCH_INDEX_TYPE, ctask.lookupType); Assert.assertEquals("idtype", ctask.lookupField); Assert.assertEquals("myproject", ctask.esValues[0]); Assert.assertEquals("myproject2", ctask.esValues[1]); Assert.assertEquals(tested.providerService, ctask.providerService); Assert.assertEquals(tested.searchClientService, ctask.searchClientService); } { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_ID_TYPE, "idtype"); config.put(ReindexingTaskFactory.CFG_CONTRIBUTOR_ID_VALUE, Arrays.asList(new String[] { "myproject", "myproject2" })); Task task = 
tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_CONTRIBUTOR_LOOKUP_ID.getTaskType(), config); Assert.assertEquals(RenormalizeByEsLookedUpValuesTask.class, task.getClass()); RenormalizeByEsLookedUpValuesTask ctask = (RenormalizeByEsLookedUpValuesTask) task; Assert.assertEquals(ContributorService.SEARCH_INDEX_NAME, ctask.lookupIndex); Assert.assertEquals(ContributorService.SEARCH_INDEX_TYPE, ctask.lookupType); Assert.assertEquals("idtype", ctask.lookupField); Assert.assertEquals("myproject", ctask.esValues[0]); Assert.assertEquals("myproject2", ctask.esValues[1]); Assert.assertEquals(tested.providerService, ctask.providerService); Assert.assertEquals(tested.searchClientService, ctask.searchClientService); } } @Test public void createTask_RENORMALIZE_BY_PROJECT_LOOKUP_ID() throws TaskConfigurationException, UnsupportedTaskException { ReindexingTaskFactory tested = getTested(); // case - missing project code in configuration try { tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_PROJECT_LOOKUP_ID.getTaskType(), null); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { // OK } // case - missing both config values in configuration try { Map<String, Object> config = new HashMap<String, Object>(); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_PROJECT_LOOKUP_ID.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { // OK } // case - missing CFG_PROJECT_ID_TYPE in configuration try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_PROJECT_ID_VALUE, "value"); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_PROJECT_LOOKUP_ID.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("project_id_type configuration property must be defined", e.getMessage()); } try { Map<String, Object> config = new HashMap<String, Object>(); 
config.put(ReindexingTaskFactory.CFG_PROJECT_ID_TYPE, ""); config.put(ReindexingTaskFactory.CFG_PROJECT_ID_VALUE, "value"); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_PROJECT_LOOKUP_ID.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("project_id_type configuration property must be defined", e.getMessage()); } try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_PROJECT_ID_TYPE, " "); config.put(ReindexingTaskFactory.CFG_PROJECT_ID_VALUE, "value"); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_PROJECT_LOOKUP_ID.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("project_id_type configuration property must be defined", e.getMessage()); } // case - missing CFG_PROJECT_ID_VALUE in configuration try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_PROJECT_ID_TYPE, "idtype"); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_PROJECT_LOOKUP_ID.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("project_id_value configuration property must be defined", e.getMessage()); } try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_PROJECT_ID_TYPE, "idtype"); config.put(ReindexingTaskFactory.CFG_PROJECT_ID_VALUE, " "); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_PROJECT_LOOKUP_ID.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("project_id_value configuration property must be defined", e.getMessage()); } try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_PROJECT_ID_TYPE, "idtype"); 
config.put(ReindexingTaskFactory.CFG_PROJECT_ID_VALUE, new String[] {}); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_PROJECT_LOOKUP_ID.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("project_id_value configuration property must be defined", e.getMessage()); } try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_PROJECT_ID_TYPE, "idtype"); config.put(ReindexingTaskFactory.CFG_PROJECT_ID_VALUE, new String[] { " " }); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_PROJECT_LOOKUP_ID.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("project_id_value configuration property must be defined", e.getMessage()); } try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_PROJECT_ID_TYPE, "idtype"); config.put(ReindexingTaskFactory.CFG_PROJECT_ID_VALUE, new ArrayList<String>()); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_PROJECT_LOOKUP_ID.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("project_id_value configuration property must be defined", e.getMessage()); } try { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_PROJECT_ID_TYPE, "idtype"); config.put(ReindexingTaskFactory.CFG_PROJECT_ID_VALUE, Arrays.asList(new String[] { " ", "" })); tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_PROJECT_LOOKUP_ID.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { Assert.assertEquals("project_id_value configuration property must be defined", e.getMessage()); } // case - everything is OK { Map<String, Object> config = new HashMap<String, Object>(); 
config.put(ReindexingTaskFactory.CFG_PROJECT_ID_TYPE, "idtype"); config.put(ReindexingTaskFactory.CFG_PROJECT_ID_VALUE, "idvalue"); Task task = tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_PROJECT_LOOKUP_ID.getTaskType(), config); Assert.assertEquals(RenormalizeByEsLookedUpValuesTask.class, task.getClass()); RenormalizeByEsLookedUpValuesTask ctask = (RenormalizeByEsLookedUpValuesTask) task; Assert.assertEquals(ProjectService.SEARCH_INDEX_NAME, ctask.lookupIndex); Assert.assertEquals(ProjectService.SEARCH_INDEX_TYPE, ctask.lookupType); Assert.assertEquals("idtype", ctask.lookupField); Assert.assertEquals("idvalue", ctask.esValues[0]); Assert.assertEquals(tested.providerService, ctask.providerService); Assert.assertEquals(tested.searchClientService, ctask.searchClientService); } { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_PROJECT_ID_TYPE, "idtype"); config.put(ReindexingTaskFactory.CFG_PROJECT_ID_VALUE, new String[] { "myproject", "myproject2" }); Task task = tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_PROJECT_LOOKUP_ID.getTaskType(), config); Assert.assertEquals(RenormalizeByEsLookedUpValuesTask.class, task.getClass()); RenormalizeByEsLookedUpValuesTask ctask = (RenormalizeByEsLookedUpValuesTask) task; Assert.assertEquals(ProjectService.SEARCH_INDEX_NAME, ctask.lookupIndex); Assert.assertEquals(ProjectService.SEARCH_INDEX_TYPE, ctask.lookupType); Assert.assertEquals("idtype", ctask.lookupField); Assert.assertEquals("myproject", ctask.esValues[0]); Assert.assertEquals("myproject2", ctask.esValues[1]); Assert.assertEquals(tested.providerService, ctask.providerService); Assert.assertEquals(tested.searchClientService, ctask.searchClientService); } { Map<String, Object> config = new HashMap<String, Object>(); config.put(ReindexingTaskFactory.CFG_PROJECT_ID_TYPE, "idtype"); config.put(ReindexingTaskFactory.CFG_PROJECT_ID_VALUE, Arrays.asList(new String[] { "myproject", "myproject2" })); Task task = 
tested.createTask(ReindexingTaskTypes.RENORMALIZE_BY_PROJECT_LOOKUP_ID.getTaskType(), config); Assert.assertEquals(RenormalizeByEsLookedUpValuesTask.class, task.getClass()); RenormalizeByEsLookedUpValuesTask ctask = (RenormalizeByEsLookedUpValuesTask) task; Assert.assertEquals(ProjectService.SEARCH_INDEX_NAME, ctask.lookupIndex); Assert.assertEquals(ProjectService.SEARCH_INDEX_TYPE, ctask.lookupType); Assert.assertEquals("idtype", ctask.lookupField); Assert.assertEquals("myproject", ctask.esValues[0]); Assert.assertEquals("myproject2", ctask.esValues[1]); Assert.assertEquals(tested.providerService, ctask.providerService); Assert.assertEquals(tested.searchClientService, ctask.searchClientService); } } @Test public void createTask_UPDATE_CONTRIBUTOR_PROFILE() throws TaskConfigurationException, UnsupportedTaskException { ReindexingTaskFactory tested = getTested(); // case - invalid configuration throws exception { try { Map<String, Object> config = new HashMap<String, Object>(); tested.createTask(ReindexingTaskTypes.UPDATE_CONTRIBUTOR_PROFILE.getTaskType(), config); Assert.fail("TaskConfigurationException expected"); } catch (TaskConfigurationException e) { // OK } } // case - everything is OK { Map<String, Object> config = new HashMap<String, Object>(); config.put(UpdateContributorProfileTask.CFG_CONTRIBUTOR_TYPE_SPECIFIC_CODE_TYPE, "cct"); Mockito.when(tested.contributorProfileService.isContributorCodeTypesSupported("cct")).thenReturn(true); Task task = tested.createTask(ReindexingTaskTypes.UPDATE_CONTRIBUTOR_PROFILE.getTaskType(), config); Assert.assertEquals(UpdateContributorProfileTask.class, task.getClass()); UpdateContributorProfileTask ctask = (UpdateContributorProfileTask) task; Assert.assertEquals(tested.contributorProfileService, ctask.contributorProfileService); } } @Test public void createTask_REINDEX_CONTRIBUTOR() throws TaskConfigurationException, UnsupportedTaskException { ReindexingTaskFactory tested = getTested(); Task task = 
tested.createTask(ReindexingTaskTypes.REINDEX_CONTRIBUTOR.getTaskType(), null); Assert.assertEquals(ReindexSearchableEntityTask.class, task.getClass()); ReindexSearchableEntityTask ctask = (ReindexSearchableEntityTask) task; Assert.assertEquals(tested.contributorService, ctask.searchableEntityService); } @Test public void createTask_REINDEX_PROJECT() throws TaskConfigurationException, UnsupportedTaskException { ReindexingTaskFactory tested = getTested(); Task task = tested.createTask(ReindexingTaskTypes.REINDEX_PROJECT.getTaskType(), null); Assert.assertEquals(ReindexSearchableEntityTask.class, task.getClass()); ReindexSearchableEntityTask ctask = (ReindexSearchableEntityTask) task; Assert.assertEquals(tested.projectService, ctask.searchableEntityService); } @Test public void testGetConfigInteger() throws TaskConfigurationException { String PROP_NAME = "testproperty"; Map<String, Object> taskConfig = new HashMap<>(); // empty value and not mandatory Assert.assertNull(ReindexingTaskFactory.getConfigInteger(taskConfig, PROP_NAME, false)); // valid value - Integer taskConfig.put(PROP_NAME, new Integer(1)); Assert.assertEquals(new Integer(1), ReindexingTaskFactory.getConfigInteger(taskConfig, PROP_NAME, false)); // valid value - string taskConfig.put(PROP_NAME, "1"); Assert.assertEquals(new Integer(1), ReindexingTaskFactory.getConfigInteger(taskConfig, PROP_NAME, false)); } @Test(expected = TaskConfigurationException.class) public void testGetConfigIntegerMissingValue() throws TaskConfigurationException { ReindexingTaskFactory.getConfigInteger(new HashMap<String, Object>(), "property", true); } @Test(expected = TaskConfigurationException.class) public void testGetConfigIntegerBadValue() throws TaskConfigurationException { String PROP_NAME = "testproperty"; Map<String, Object> taskConfig = new HashMap<>(); taskConfig.put(PROP_NAME, "badvalue"); ReindexingTaskFactory.getConfigInteger(taskConfig, PROP_NAME, false); } @SuppressWarnings("unchecked") private 
ReindexingTaskFactory getTested() { ReindexingTaskFactory tested = new ReindexingTaskFactory(); tested.contentPersistenceService = Mockito.mock(ContentPersistenceService.class); tested.providerService = Mockito.mock(ProviderService.class); tested.searchClientService = Mockito.mock(SearchClientService.class); tested.eventBeforeIndexed = Mockito.mock(Event.class); tested.contributorProfileService = Mockito.mock(ContributorProfileService.class); tested.contributorService = Mockito.mock(ContributorService.class); tested.projectService = Mockito.mock(ProjectService.class); return tested; } }
/*
 * Copyright (C) 2016 Peter Gregus for GravityBox Project (C3C076@xda)
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.ceco.marshmallow.gravitybox;

import com.ceco.marshmallow.gravitybox.managers.StatusBarIconManager;
import com.ceco.marshmallow.gravitybox.managers.SysUiManagers;
import com.ceco.marshmallow.gravitybox.managers.StatusBarIconManager.ColorInfo;
import com.ceco.marshmallow.gravitybox.managers.StatusBarIconManager.IconManagerListener;

import android.content.Context;
import android.content.Intent;
import android.graphics.PorterDuff;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.Icon;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;

import de.robv.android.xposed.XC_MethodHook;
import de.robv.android.xposed.XSharedPreferences;
import de.robv.android.xposed.XposedBridge;
import de.robv.android.xposed.XposedHelpers;
import de.robv.android.xposed.callbacks.XCallback;

/**
 * Xposed module hooking SystemUI status bar classes (by fully-qualified class name
 * strings) to recolor status bar icons via the shared {@code SysUiManagers.IconManager}.
 * All SystemUI access is reflective ({@link XposedHelpers}); the target field/method
 * names below are assumptions about the hooked ROM's SystemUI build — they are not
 * verifiable from this file alone.
 */
public class ModStatusbarColor {
    private static final String TAG = "GB:ModStatusbarColor";
    public static final String PACKAGE_NAME = "com.android.systemui";
    // Fully-qualified names of the SystemUI classes this module hooks reflectively.
    private static final String CLASS_PHONE_STATUSBAR = "com.android.systemui.statusbar.phone.PhoneStatusBar";
    private static final String CLASS_STATUSBAR_ICON_VIEW = "com.android.systemui.statusbar.StatusBarIconView";
    private static final String CLASS_STATUSBAR_ICON = "com.android.internal.statusbar.StatusBarIcon";
    private static final String CLASS_SB_TRANSITIONS = "com.android.systemui.statusbar.phone.PhoneStatusBarTransitions";
    private static final String CLASS_SB_ICON_CTRL = "com.android.systemui.statusbar.phone.StatusBarIconController";
    private static final boolean DEBUG = false;

    // Broadcast sent once PhoneStatusBar.makeStatusBarView has completed.
    public static final String ACTION_PHONE_STATUSBAR_VIEW_MADE = "gravitybox.intent.action.PHONE_STATUSBAR_VIEW_MADE";

    // Hooked PhoneStatusBar instance; set in the makeStatusBarView after-hook,
    // read reflectively by updateStatusIcons/updateSettingsButton.
    private static Object mPhoneStatusBar;

    private static void log(String message) {
        XposedBridge.log(TAG + ": " + message);
    }

    // in process hooks
    /**
     * Installs all hooks. Runs inside the SystemUI process; any failure is caught
     * and logged via XposedBridge so a missing class/method on an unexpected ROM
     * does not crash SystemUI.
     *
     * @param prefs module preferences (currently unused here)
     * @param classLoader SystemUI class loader used to resolve the hooked classes
     */
    public static void init(final XSharedPreferences prefs, final ClassLoader classLoader) {
        try {
            final Class<?> phoneStatusbarClass = XposedHelpers.findClass(CLASS_PHONE_STATUSBAR, classLoader);
            final Class<?> statusbarIconViewClass = XposedHelpers.findClass(CLASS_STATUSBAR_ICON_VIEW, classLoader);
            final Class<?> sbTransitionsClass = XposedHelpers.findClass(CLASS_SB_TRANSITIONS, classLoader);

            // After the status bar view is built: remember the PhoneStatusBar instance,
            // register for icon manager change callbacks and announce readiness via broadcast.
            // PRIORITY_LOWEST so other hooks of makeStatusBarView run first.
            XposedHelpers.findAndHookMethod(phoneStatusbarClass, "makeStatusBarView", new XC_MethodHook(XCallback.PRIORITY_LOWEST) {
                @Override
                protected void afterHookedMethod(final MethodHookParam param) throws Throwable {
                    mPhoneStatusBar = param.thisObject;
                    Context context = (Context) XposedHelpers.getObjectField(param.thisObject, "mContext");
                    if (SysUiManagers.IconManager != null) {
                        SysUiManagers.IconManager.registerListener(mIconManagerListener);
                    }
                    Intent i = new Intent(ACTION_PHONE_STATUSBAR_VIEW_MADE);
                    context.sendBroadcast(i);
                }
            });

            // Replace the result of StatusBarIconView.getIcon(StatusBarIcon) with a
            // recolored drawable — only for non-notification icons and only while
            // coloring is enabled.
            XposedHelpers.findAndHookMethod(statusbarIconViewClass, "getIcon", CLASS_STATUSBAR_ICON, new XC_MethodHook() {
                @Override
                protected void afterHookedMethod(MethodHookParam param) throws Throwable {
                    // mNotification == null means this view shows a system status icon,
                    // not a notification icon (assumption about SystemUI internals).
                    if (SysUiManagers.IconManager != null && SysUiManagers.IconManager.isColoringEnabled()
                            && XposedHelpers.getObjectField(param.thisObject, "mNotification") == null) {
                        final String iconPackage = (String) XposedHelpers.getObjectField(param.args[0], "pkg");
                        if (DEBUG) log("statusbarIconView.getIcon: iconPackage=" + iconPackage);
                        Drawable d = getColoredDrawable(((View) param.thisObject).getContext(), iconPackage,
                                (Icon) XposedHelpers.getObjectField(param.args[0], "icon"));
                        if (d != null) {
                            param.setResult(d);
                        }
                    }
                }
            });

            // Mirror the alpha values PhoneStatusBarTransitions computes for the given
            // bar mode onto the icon manager.
            XposedHelpers.findAndHookMethod(sbTransitionsClass, "applyMode", int.class, boolean.class, new XC_MethodHook() {
                @Override
                protected void afterHookedMethod(MethodHookParam param) throws Throwable {
                    if (SysUiManagers.IconManager != null) {
                        final float signalClusterAlpha = (Float) XposedHelpers.callMethod(
                                param.thisObject, "getNonBatteryClockAlphaFor", (Integer) param.args[0]);
                        final float textAndBatteryAlpha = (Float) XposedHelpers.callMethod(
                                param.thisObject, "getBatteryClockAlpha", (Integer) param.args[0]);
                        SysUiManagers.IconManager.setIconAlpha(signalClusterAlpha, textAndBatteryAlpha);
                    }
                }
            });

            // Propagate SystemUI's own icon tint (mIconTint) to the icon manager
            // whenever StatusBarIconController re-applies it.
            XposedHelpers.findAndHookMethod(CLASS_SB_ICON_CTRL, classLoader, "applyIconTint", new XC_MethodHook() {
                @Override
                protected void afterHookedMethod(MethodHookParam param) throws Throwable {
                    if (SysUiManagers.IconManager != null) {
                        SysUiManagers.IconManager.setIconTint(
                                XposedHelpers.getIntField(param.thisObject, "mIconTint"));
                    }
                }
            });
        } catch (Throwable t) {
            XposedBridge.log(t);
        }
    }

    // Re-tints the visible status icons and the settings button whenever the icon
    // manager reports a color or style change.
    private static IconManagerListener mIconManagerListener = new IconManagerListener() {
        @Override
        public void onIconManagerStatusChanged(int flags, ColorInfo colorInfo) {
            if ((flags & (StatusBarIconManager.FLAG_ICON_COLOR_CHANGED
                    | StatusBarIconManager.FLAG_ICON_STYLE_CHANGED)) != 0) {
                updateStatusIcons("mStatusIcons");
                updateStatusIcons("mStatusIconsKeyguard");
                updateSettingsButton();
            }
        }
    };

    /**
     * Resolves a (possibly recolored) drawable for a status bar icon.
     * For SystemUI's own icons a themed replacement from the icon manager is tried
     * first; otherwise the Icon is loaded normally and a color filter is applied or
     * cleared depending on whether coloring is enabled.
     *
     * NOTE(review): unlike the hook callers, this method dereferences
     * SysUiManagers.IconManager without a null check — callers are expected to have
     * verified it (the getIcon hook does; updateStatusIcons relies on the listener
     * only firing after registration). Confirm no other caller exists.
     *
     * @param ctx context used to load the drawable
     * @param pkg source package of the icon; null is treated as SystemUI's own
     * @param icon the Icon to resolve; null yields null
     * @return the drawable to show, or null if none could be resolved
     */
    private static Drawable getColoredDrawable(Context ctx, String pkg, Icon icon) {
        if (icon == null) return null;

        Drawable d = null;
        if (pkg == null || PACKAGE_NAME.equals(pkg)) {
            final int iconId = (int) XposedHelpers.callMethod(icon, "getResId");
            d = SysUiManagers.IconManager.getBasicIcon(iconId);
            if (d != null) {
                return d;
            }
        }
        d = icon.loadDrawable(ctx);
        if (d != null) {
            if (SysUiManagers.IconManager.isColoringEnabled()) {
                // mutate() so the filter does not leak onto other users of the same drawable
                d = SysUiManagers.IconManager.applyColorFilter(d.mutate(), PorterDuff.Mode.SRC_IN);
            } else {
                d.clearColorFilter();
            }
        }
        return d;
    }

    /**
     * Re-resolves the drawable of every StatusBarIconView inside the named icon
     * container of SystemUI's StatusBarIconController.
     *
     * @param statusIcons reflective field name of the container on mIconController
     *        ("mStatusIcons" or "mStatusIconsKeyguard")
     */
    private static void updateStatusIcons(String statusIcons) {
        if (mPhoneStatusBar == null) return;

        try {
            Object icCtrl = XposedHelpers.getObjectField(mPhoneStatusBar, "mIconController");
            ViewGroup vg = (ViewGroup) XposedHelpers.getObjectField(icCtrl, statusIcons);
            final int childCount = vg.getChildCount();
            for (int i = 0; i < childCount; i++) {
                // only touch actual StatusBarIconView children (exact class-name match)
                if (!vg.getChildAt(i).getClass().getName().equals(CLASS_STATUSBAR_ICON_VIEW)) {
                    continue;
                }
                ImageView v = (ImageView) vg.getChildAt(i);
                final Object sbIcon = XposedHelpers.getObjectField(v, "mIcon");
                if (sbIcon != null) {
                    final String iconPackage = (String) XposedHelpers.getObjectField(sbIcon, "pkg");
                    Drawable d = getColoredDrawable(v.getContext(), iconPackage,
                            (Icon) XposedHelpers.getObjectField(sbIcon, "icon"));
                    if (d != null) {
                        v.setImageDrawable(d);
                    }
                }
            }
        } catch (Throwable t) {
            XposedBridge.log(t);
        }
    }

    /**
     * Applies or clears the icon-manager color filter on the quick-settings header's
     * settings button. Samsung ROMs name the field "mSettingButton" instead of
     * "mSettingsButton".
     */
    private static void updateSettingsButton() {
        if (mPhoneStatusBar == null || SysUiManagers.IconManager == null) return;

        try {
            Object header = XposedHelpers.getObjectField(mPhoneStatusBar, "mHeader");
            ImageView settingsButton = (ImageView) XposedHelpers.getObjectField(
                    header, Utils.isSamsungRom() ? "mSettingButton" : "mSettingsButton");
            if (SysUiManagers.IconManager.isColoringEnabled()) {
                settingsButton.setColorFilter(SysUiManagers.IconManager.getIconColor(),
                        PorterDuff.Mode.SRC_IN);
            } else {
                settingsButton.clearColorFilter();
            }
        } catch (Throwable t) {
            XposedBridge.log(t);
        }
    }
}