gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Copyright (C) 2015 Free Construction Sp. z.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.sites.liberation.util;

import com.google.api.client.auth.oauth2.Credential;
import com.google.api.client.auth.oauth2.TokenResponse;
import com.google.api.client.auth.oauth2.TokenResponseException;
import com.google.api.client.googleapis.auth.oauth2.*;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.http.javanet.NetHttpTransport;
import com.google.api.client.json.jackson2.JacksonFactory;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.io.*;
import java.net.*;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;

/**
 * OAuth2 helper for the Google Sites export tool.
 *
 * <p>Loads client credentials from {@code config.properties}, loads or creates a
 * saved refresh token in {@code token.properties}, and exchanges/refreshes tokens
 * against the Google OAuth2 endpoints. Several methods call
 * {@link System#exit(int)} on failure — this class is intended for CLI use only.
 *
 * @author bestplay9@me.com
 */
public class Auth {

    private static final Logger LOGGER = LogManager.getLogger(Auth.class);

    // Last user-visible error message; logged and echoed to stdout.
    private String loggerError = null;

    // OAuth scope granting access to Google Sites feeds.
    private List<String> SCOPES = Arrays.asList("https://sites.google.com/feeds");

    // Populated from config.properties by loadConfig().
    private static String CLIENT_ID = null;
    private static String CLIENT_SECRET = null;
    private static String REDIRECT_URI = null;

    // Populated from token.properties or a fresh token exchange.
    private String REFRESH_TOKEN = null;

    // Credential callers use after loadConfig()/refreshSession() succeed.
    public Credential credential = null;

    // Resolved from the classpath; may be null when the resource is absent.
    private URL TOKEN_PATH = getClass().getClassLoader().getResource("token.properties");
    private URL CONFIG_PATH = getClass().getClassLoader().getResource("config.properties");

    /**
     * Loads all important variables from the config file and token file.
     * It also generates missing files from the token given by the user (refresh session).
     *
     * @param auth_token one-time authorization code pasted by the user, or {@code null}
     * @throws Exception on unrecoverable configuration/token errors
     */
    public void loadConfig(String auth_token) throws Exception {
        File f1 = null;
        try {
            // NPE from a null CONFIG_PATH lands here as well and is treated as "missing file".
            f1 = new File(CONFIG_PATH.toURI());
        } catch (Exception e) {
            loggerError = "Configuration file does not exist! (config.properties)";
            LOGGER.warn(loggerError);
            System.out.println(loggerError);
            System.exit(1);
        }
        if (f1.exists() && !f1.isDirectory()) {
            Properties properties = new Properties();
            // try-with-resources: the original leaked this stream on every call.
            try (InputStream res = new FileInputStream(f1)) {
                properties.load(res);
            } catch (Exception e) {
                throw new IllegalStateException(e);
            }
            REDIRECT_URI = properties.getProperty("redirect_url");
            CLIENT_ID = properties.getProperty("client_id");
            CLIENT_SECRET = properties.getProperty("client_secret");
        }
        File f = null;
        try {
            f = new File(TOKEN_PATH.toURI());
        } catch (Exception e) {
            // No saved token: either exchange the code the user supplied, or tell
            // the user how to obtain one and exit.
            if (auth_token != null) {
                refreshSession(auth_token);
            } else {
                LOGGER.warn("No token file found. Authorize Google App first (url: http://tiny.cc/fcauth), then rerun app with option: -t <TOKEN>");
                System.out.println("http://tiny.cc/fcauth\n");
                System.out.println("alternatively: " + generateAuthUrl() + "\n");
                System.out.println("Open browser and paste url given above, let google application access your Google Sites and copy generated token.");
                System.out.println("Then reopen app with command: APP -t <PASTED_TOKEN>\n");
                System.exit(1);
            }
        }
        if (f.exists() && !f.isDirectory()) {
            Properties token_properties = new Properties();
            // try-with-resources: the original leaked this stream on every call.
            try (InputStream res = new FileInputStream(f)) {
                token_properties.load(res);
            } catch (Exception e) {
                throw new IllegalStateException(e);
            }
            REFRESH_TOKEN = token_properties.getProperty("refresh_token");
            credential = refreshAccessToken();
        }
    }

    /**
     * Gets the directory path of the working jar-with-dependencies.
     *
     * @deprecated relies on {@code java.class.path} pointing at the jar; use
     *             {@link #getClassPath()} instead.
     */
    @Deprecated
    private String getPathOfJar() {
        File f = new File(System.getProperty("java.class.path"));
        File dir = f.getAbsoluteFile().getParentFile();
        return dir.toString();
    }

    /**
     * Refreshes the session of a user that has already granted permissions to the
     * Google App, then persists the new refresh token.
     *
     * @param accessToken one-time authorization code from the consent screen
     */
    public void refreshSession(String accessToken) throws IOException {
        credential = getCredentials(accessToken);
        REFRESH_TOKEN = credential.getRefreshToken();
        saveRefreshToken();
    }

    /**
     * Returns the decoded filesystem path of the second classpath URL.
     *
     * <p>NOTE(review): casting the system class loader to {@link URLClassLoader}
     * only works on Java 8 and earlier, and {@code urls[1]} assumes a specific
     * classpath layout — confirm before running on newer JVMs.
     */
    public String getClassPath() throws UnsupportedEncodingException {
        ClassLoader cl = ClassLoader.getSystemClassLoader();
        URL[] urls = ((URLClassLoader) cl).getURLs();
        return URLDecoder.decode(urls[1].getFile().substring(1), "UTF-8");
    }

    /**
     * Saves the new refresh token to the external token config file, then exits
     * so the user can rerun the app without the {@code -t} parameter.
     */
    public void saveRefreshToken() {
        try {
            Properties props = new Properties();
            props.setProperty("refresh_token", REFRESH_TOKEN);
            File f = new File(getClassPath() + "token.properties");
            // try-with-resources: the original never closed this stream, so the
            // token file could be left unflushed/locked.
            try (OutputStream out = new FileOutputStream(f)) {
                props.store(out, "File contains sensitive token which automatically log in user.");
            }
            LOGGER.info("Token file has been successfully saved! Rerun app without -t parameter.");
            System.out.println("Token file has been successfully saved! Now try to use App normally! (without -t parameter)");
            System.exit(1);
        } catch (Exception e) {
            throw new IllegalStateException(e);
        }
    }

    /**
     * Opens a browser on the Google App API authentication website, allowing the
     * user to grant privileges to the Google App.
     * ONLY FOR GUI!
     */
    public void openBrowserAndGetToken() {
        try {
            java.awt.Desktop.getDesktop().browse(new URI(generateAuthUrl()));
        } catch (URISyntaxException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Generates the Google API authorization URL (offline access, forced consent
     * so a refresh token is always issued).
     */
    public String generateAuthUrl() {
        LOGGER.debug("Google API App Authorization URL was generated!");
        return new GoogleAuthorizationCodeRequestUrl(CLIENT_ID, REDIRECT_URI, SCOPES)
                .setAccessType("offline").setApprovalPrompt("force").build();
    }

    /**
     * Gets new credentials for the one-time authorization code given by the user.
     *
     * @param accessToken the authorization code to exchange for tokens
     */
    public Credential getCredentials(String accessToken) throws IOException {
        HttpTransport transport = new NetHttpTransport();
        JacksonFactory jsonFactory = new JacksonFactory();
        GoogleTokenResponse response = new GoogleAuthorizationCodeTokenRequest(
                transport, jsonFactory, CLIENT_ID, CLIENT_SECRET, accessToken, REDIRECT_URI).execute();
        LOGGER.debug("New authorization data has been downloaded - accessToken: "
                + response.getAccessToken() + ", refreshToken: " + response.getRefreshToken());
        return new GoogleCredential.Builder()
                .setClientSecrets(CLIENT_ID, CLIENT_SECRET)
                .setJsonFactory(jsonFactory).setTransport(transport).build()
                .setAccessToken(response.getAccessToken())
                .setRefreshToken(response.getRefreshToken());
    }

    /**
     * Refreshes the access token using the stored REFRESH_TOKEN.
     *
     * <p>On a token error the details are printed and an empty access token is
     * returned inside the credential (best-effort, matching CLI behavior).
     */
    public Credential refreshAccessToken() throws IOException {
        HttpTransport transport = new NetHttpTransport();
        JacksonFactory jsonFactory = new JacksonFactory();
        TokenResponse response = new TokenResponse();
        try {
            response = new GoogleRefreshTokenRequest(
                    transport, jsonFactory, REFRESH_TOKEN, CLIENT_ID, CLIENT_SECRET).execute();
            LOGGER.debug("New accessToken (" + response.getAccessToken()
                    + ") has been downloaded (by refreshToken in token file) with Google API Authorization!");
        } catch (TokenResponseException e) {
            LOGGER.warn("Downloaded accessToken is empty! Please check provided token (property of -t parameter)");
            if (e.getDetails() != null) {
                System.err.println("Error: " + e.getDetails().getError());
                if (e.getDetails().getErrorDescription() != null) {
                    System.err.println(e.getDetails().getErrorDescription());
                }
                if (e.getDetails().getErrorUri() != null) {
                    System.err.println(e.getDetails().getErrorUri());
                }
            } else {
                System.err.println(e.getMessage());
            }
        }
        return new GoogleCredential.Builder()
                .setClientSecrets(CLIENT_ID, CLIENT_SECRET)
                .setJsonFactory(jsonFactory).setTransport(transport).build()
                .setAccessToken(response.getAccessToken());
    }
}
/** */
package br.usp.icmc.amenu.aMenu.impl;

import br.usp.icmc.amenu.aMenu.AMenuPackage;
import br.usp.icmc.amenu.aMenu.Config;
import br.usp.icmc.amenu.aMenu.Configurations;

import java.util.Collection;

import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.InternalEList;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Configurations</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * <ul>
 *   <li>{@link br.usp.icmc.amenu.aMenu.impl.ConfigurationsImpl#getConfig_top <em>Config top</em>}</li>
 *   <li>{@link br.usp.icmc.amenu.aMenu.impl.ConfigurationsImpl#getConfig_children <em>Config children</em>}</li>
 * </ul>
 * </p>
 *
 * @generated
 */
// NOTE: this class is EMF-generated; do not hand-edit logic — changes outside
// user-doc regions are lost when the model is regenerated.
public class ConfigurationsImpl extends MinimalEObjectImpl.Container implements Configurations {
    /**
     * The cached value of the '{@link #getConfig_top() <em>Config top</em>}' containment reference list.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getConfig_top()
     * @generated
     * @ordered
     */
    protected EList<Config> config_top;

    /**
     * The cached value of the '{@link #getConfig_children() <em>Config children</em>}' containment reference list.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getConfig_children()
     * @generated
     * @ordered
     */
    protected EList<Config> config_children;

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected ConfigurationsImpl() {
        super();
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return AMenuPackage.Literals.CONFIGURATIONS;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EList<Config> getConfig_top() {
        // Lazily created containment list; cached for subsequent calls.
        if (config_top == null) {
            config_top = new EObjectContainmentEList<Config>(Config.class, this, AMenuPackage.CONFIGURATIONS__CONFIG_TOP);
        }
        return config_top;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EList<Config> getConfig_children() {
        // Lazily created containment list; cached for subsequent calls.
        if (config_children == null) {
            config_children = new EObjectContainmentEList<Config>(Config.class, this, AMenuPackage.CONFIGURATIONS__CONFIG_CHILDREN);
        }
        return config_children;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
        switch (featureID) {
            case AMenuPackage.CONFIGURATIONS__CONFIG_TOP:
                return ((InternalEList<?>)getConfig_top()).basicRemove(otherEnd, msgs);
            case AMenuPackage.CONFIGURATIONS__CONFIG_CHILDREN:
                return ((InternalEList<?>)getConfig_children()).basicRemove(otherEnd, msgs);
        }
        return super.eInverseRemove(otherEnd, featureID, msgs);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case AMenuPackage.CONFIGURATIONS__CONFIG_TOP:
                return getConfig_top();
            case AMenuPackage.CONFIGURATIONS__CONFIG_CHILDREN:
                return getConfig_children();
        }
        return super.eGet(featureID, resolve, coreType);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case AMenuPackage.CONFIGURATIONS__CONFIG_TOP:
                getConfig_top().clear();
                getConfig_top().addAll((Collection<? extends Config>)newValue);
                return;
            case AMenuPackage.CONFIGURATIONS__CONFIG_CHILDREN:
                getConfig_children().clear();
                getConfig_children().addAll((Collection<? extends Config>)newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case AMenuPackage.CONFIGURATIONS__CONFIG_TOP:
                getConfig_top().clear();
                return;
            case AMenuPackage.CONFIGURATIONS__CONFIG_CHILDREN:
                getConfig_children().clear();
                return;
        }
        super.eUnset(featureID);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case AMenuPackage.CONFIGURATIONS__CONFIG_TOP:
                return config_top != null && !config_top.isEmpty();
            case AMenuPackage.CONFIGURATIONS__CONFIG_CHILDREN:
                return config_children != null && !config_children.isEmpty();
        }
        return super.eIsSet(featureID);
    }

} //ConfigurationsImpl
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.common.block;

import io.airlift.slice.SliceOutput;

import javax.annotation.Nullable;

import java.util.OptionalInt;

import static com.facebook.presto.common.block.ArrayBlock.createArrayBlockInternal;
import static com.facebook.presto.common.block.BlockUtil.appendNullToIsNullArray;
import static com.facebook.presto.common.block.BlockUtil.appendNullToOffsetsArray;
import static com.facebook.presto.common.block.BlockUtil.checkArrayRange;
import static com.facebook.presto.common.block.BlockUtil.checkValidPositions;
import static com.facebook.presto.common.block.BlockUtil.checkValidRegion;
import static com.facebook.presto.common.block.BlockUtil.compactArray;
import static com.facebook.presto.common.block.BlockUtil.compactOffsets;
import static com.facebook.presto.common.block.BlockUtil.countAndMarkSelectedPositionsFromOffsets;
import static com.facebook.presto.common.block.BlockUtil.countSelectedPositionsFromOffsets;
import static com.facebook.presto.common.block.BlockUtil.internalPositionInRange;
import static com.facebook.presto.common.block.MapBlockBuilder.verify;

/**
 * Shared implementation for array-typed blocks. Array position {@code i}
 * (relative to {@code getOffsetBase()}) spans element positions
 * {@code getOffset(i)} (inclusive) to {@code getOffset(i + 1)} (exclusive)
 * within the raw element block.
 */
public abstract class AbstractArrayBlock
        implements Block
{
    // Flat block holding the elements of every array, back to back.
    protected abstract Block getRawElementBlock();

    // Offsets into the raw element block; indexed by position + getOffsetBase().
    protected abstract int[] getOffsets();

    public abstract int getOffsetBase();

    /**
     * @return the underlying valueIsNull array, or null when all values are guaranteed to be non-null
     */
    @Nullable
    protected abstract boolean[] getValueIsNull();

    // Start offset (into the raw element block) of the array at the given position.
    final int getOffset(int position)
    {
        return getOffsets()[position + getOffsetBase()];
    }

    @Override
    public String getEncodingName()
    {
        return ArrayBlockEncoding.NAME;
    }

    @Override
    public final Block copyPositions(int[] positions, int offset, int length)
    {
        checkArrayRange(positions, offset, length);

        int[] newOffsets = new int[length + 1];
        boolean[] newValueIsNull = mayHaveNull() ? new boolean[length] : null;

        // First traversal will populate newValueIsNull mask (if present) and
        // newOffsets to determine the total number of value positions involved
        for (int i = 0; i < length; i++) {
            int position = positions[i + offset];
            newOffsets[i + 1] = newOffsets[i] + (getOffset(position + 1) - getOffset(position));
            if (isNull(position)) {
                // newValueIsNull will be present unless mayHaveNull() and isNull() implementations are inconsistent
                newValueIsNull[i] = true;
            }
        }

        int totalElements = newOffsets[length];
        if (totalElements == 0) {
            // No elements selected, copy a zero-length region from the elements block
            Block newValues = getRawElementBlock().copyRegion(0, 0);
            return createArrayBlockInternal(0, length, newValueIsNull, newOffsets, newValues);
        }

        // Second traversal will use the newOffsets and total element positions to
        // populate a correctly sized values position selection array
        int[] elementPositions = new int[totalElements];
        int currentOffset = 0;
        for (int i = 0; i < length; i++) {
            int elementsLength = newOffsets[i + 1] - newOffsets[i];
            if (elementsLength > 0) {
                int valuesStartOffset = getOffset(positions[i + offset]);
                for (int j = 0; j < elementsLength; j++) {
                    elementPositions[currentOffset++] = valuesStartOffset + j;
                }
            }
        }
        verify(currentOffset == elementPositions.length, "unexpected element positions");
        Block newValues = getRawElementBlock().copyPositions(elementPositions, 0, elementPositions.length);
        return createArrayBlockInternal(0, length, newValueIsNull, newOffsets, newValues);
    }

    @Override
    public Block getRegion(int position, int length)
    {
        int positionCount = getPositionCount();
        checkValidRegion(positionCount, position, length);

        // Zero-copy view: shares the offsets/null arrays and shifts the offset base.
        return createArrayBlockInternal(
                position + getOffsetBase(),
                length,
                getValueIsNull(),
                getOffsets(),
                getRawElementBlock());
    }

    @Override
    public OptionalInt fixedSizeInBytesPerPosition()
    {
        return OptionalInt.empty(); // size per position is variable based on the number of entries in each array
    }

    @Override
    public long getRegionSizeInBytes(int position, int length)
    {
        int positionCount = getPositionCount();
        checkValidRegion(positionCount, position, length);

        int valueStart = getOffsets()[getOffsetBase() + position];
        int valueEnd = getOffsets()[getOffsetBase() + position + length];

        // Element bytes plus per-position overhead (one offset int + one null byte).
        return getRawElementBlock().getRegionSizeInBytes(valueStart, valueEnd - valueStart) + ((Integer.BYTES + Byte.BYTES) * (long) length);
    }

    @Override
    public long getRegionLogicalSizeInBytes(int position, int length)
    {
        int positionCount = getPositionCount();
        checkValidRegion(positionCount, position, length);

        int valueStart = getOffsets()[getOffsetBase() + position];
        int valueEnd = getOffsets()[getOffsetBase() + position + length];

        return getRawElementBlock().getRegionLogicalSizeInBytes(valueStart, valueEnd - valueStart) + ((Integer.BYTES + Byte.BYTES) * (long) length);
    }

    @Override
    public long getApproximateRegionLogicalSizeInBytes(int position, int length)
    {
        int positionCount = getPositionCount();
        checkValidRegion(positionCount, position, length);

        int valueStart = getOffset(position);
        int valueEnd = getOffset(position + length);

        return getRawElementBlock().getApproximateRegionLogicalSizeInBytes(valueStart, valueEnd - valueStart) + ((Integer.BYTES + Byte.BYTES) * (long) length);
    }

    @Override
    public final long getPositionsSizeInBytes(boolean[] positions, int selectedArrayPositions)
    {
        int positionCount = getPositionCount();
        checkValidPositions(positions, positionCount);
        if (selectedArrayPositions == 0) {
            return 0;
        }
        if (selectedArrayPositions == positionCount) {
            return getSizeInBytes();
        }

        Block rawElementBlock = getRawElementBlock();
        OptionalInt fixedPerElementSizeInBytes = rawElementBlock.fixedSizeInBytesPerPosition();
        int[] offsets = getOffsets();
        int offsetBase = getOffsetBase();
        long elementsSizeInBytes;

        if (fixedPerElementSizeInBytes.isPresent()) {
            // Fixed element width: a count of selected elements is enough.
            elementsSizeInBytes = fixedPerElementSizeInBytes.getAsInt() * (long) countSelectedPositionsFromOffsets(positions, offsets, offsetBase);
        }
        else if (rawElementBlock instanceof RunLengthEncodedBlock) {
            // RLE blocks don't have fixed size per position, but accept null for the positions array
            elementsSizeInBytes = rawElementBlock.getPositionsSizeInBytes(null, countSelectedPositionsFromOffsets(positions, offsets, offsetBase));
        }
        else {
            // General case: expand the selected array positions into a per-element mask.
            boolean[] selectedElements = new boolean[rawElementBlock.getPositionCount()];
            int selectedElementCount = countAndMarkSelectedPositionsFromOffsets(positions, offsets, offsetBase, selectedElements);
            elementsSizeInBytes = rawElementBlock.getPositionsSizeInBytes(selectedElements, selectedElementCount);
        }
        return elementsSizeInBytes + ((Integer.BYTES + Byte.BYTES) * (long) selectedArrayPositions);
    }

    @Override
    public Block copyRegion(int position, int length)
    {
        int positionCount = getPositionCount();
        checkValidRegion(positionCount, position, length);

        int startValueOffset = getOffset(position);
        int endValueOffset = getOffset(position + length);
        Block newValues = getRawElementBlock().copyRegion(startValueOffset, endValueOffset - startValueOffset);

        int[] newOffsets = compactOffsets(getOffsets(), position + getOffsetBase(), length);
        boolean[] valueIsNull = getValueIsNull();
        boolean[] newValueIsNull = valueIsNull == null ? null : compactArray(valueIsNull, position + getOffsetBase(), length);

        // compact* return the original arrays when already compact; in that case
        // this block is its own copy.
        if (newValues == getRawElementBlock() && newOffsets == getOffsets() && newValueIsNull == valueIsNull) {
            return this;
        }
        return createArrayBlockInternal(0, length, newValueIsNull, newOffsets, newValues);
    }

    @Override
    public Block getBlock(int position)
    {
        checkReadablePosition(position);

        int startValueOffset = getOffset(position);
        int endValueOffset = getOffset(position + 1);
        return getRawElementBlock().getRegion(startValueOffset, endValueOffset - startValueOffset);
    }

    @Override
    public void writePositionTo(int position, BlockBuilder blockBuilder)
    {
        checkReadablePosition(position);
        blockBuilder.appendStructureInternal(this, position);
    }

    @Override
    public void writePositionTo(int position, SliceOutput output)
    {
        if (isNull(position)) {
            // Null marker byte.
            output.writeByte(0);
        }
        else {
            int startValueOffset = getOffset(position);
            int endValueOffset = getOffset(position + 1);
            int numberOfElements = endValueOffset - startValueOffset;

            // Non-null marker, element count, then each element in order.
            output.writeByte(1);
            output.writeInt(numberOfElements);
            Block rawElementBlock = getRawElementBlock();
            for (int i = startValueOffset; i < endValueOffset; i++) {
                rawElementBlock.writePositionTo(i, output);
            }
        }
    }

    protected Block getSingleValueBlockInternal(int position)
    {
        checkReadablePosition(position);

        int startValueOffset = getOffset(position);
        int valueLength = getOffset(position + 1) - startValueOffset;
        Block newValues = getRawElementBlock().copyRegion(startValueOffset, valueLength);

        return createArrayBlockInternal(
                0,
                1,
                isNull(position) ? new boolean[] {true} : null,
                new int[] {0, valueLength},
                newValues);
    }

    @Override
    public long getEstimatedDataSizeForStats(int position)
    {
        checkReadablePosition(position);

        if (isNull(position)) {
            return 0;
        }

        int startValueOffset = getOffset(position);
        int endValueOffset = getOffset(position + 1);

        Block rawElementBlock = getRawElementBlock();
        long size = 0;
        for (int i = startValueOffset; i < endValueOffset; i++) {
            size += rawElementBlock.getEstimatedDataSizeForStats(i);
        }
        return size;
    }

    @Override
    public boolean mayHaveNull()
    {
        return getValueIsNull() != null;
    }

    // Applies the function to the element range backing the array at the given position.
    public <T> T apply(ArrayBlockFunction<T> function, int position)
    {
        checkReadablePosition(position);

        int startValueOffset = getOffset(position);
        int endValueOffset = getOffset(position + 1);
        return function.apply(getRawElementBlock(), startValueOffset, endValueOffset - startValueOffset);
    }

    protected final void checkReadablePosition(int position)
    {
        if (position < 0 || position >= getPositionCount()) {
            throw new IllegalArgumentException("position is not valid");
        }
    }

    public interface ArrayBlockFunction<T>
    {
        T apply(Block block, int startPosition, int length);
    }

    @Override
    public Block getBlockUnchecked(int internalPosition)
    {
        // "Unchecked" variant: internalPosition already includes the offset base.
        int startValueOffset = getOffsets()[internalPosition];
        int endValueOffset = getOffsets()[internalPosition + 1];
        return getRawElementBlock().getRegion(startValueOffset, endValueOffset - startValueOffset);
    }

    @Override
    public boolean isNullUnchecked(int internalPosition)
    {
        assert mayHaveNull() : "no nulls present";
        assert internalPositionInRange(internalPosition, getOffsetBase(), getPositionCount());
        return getValueIsNull()[internalPosition];
    }

    @Override
    public Block appendNull()
    {
        boolean[] valueIsNull = appendNullToIsNullArray(getValueIsNull(), getOffsetBase(), getPositionCount());
        int[] offsets = appendNullToOffsetsArray(getOffsets(), getOffsetBase(), getPositionCount());

        return createArrayBlockInternal(
                getOffsetBase(),
                getPositionCount() + 1,
                valueIsNull,
                offsets,
                getRawElementBlock());
    }
}
/* * Copyright (c) 2010-2018 Evolveum * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.evolveum.midpoint.repo.common.expression; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.function.Function; import javax.xml.namespace.QName; import com.evolveum.midpoint.prism.*; import com.evolveum.midpoint.prism.Visitor; import com.evolveum.midpoint.prism.crypto.EncryptionException; import com.evolveum.midpoint.prism.crypto.Protector; import com.evolveum.midpoint.prism.delta.ItemDelta; import com.evolveum.midpoint.prism.delta.PlusMinusZero; import com.evolveum.midpoint.prism.delta.PrismValueDeltaSetTriple; import com.evolveum.midpoint.prism.path.ItemPath; import com.evolveum.midpoint.prism.path.ItemPathSegment; import com.evolveum.midpoint.prism.path.NameItemPathSegment; import com.evolveum.midpoint.prism.polystring.PolyString; import com.evolveum.midpoint.prism.query.*; import com.evolveum.midpoint.prism.util.ItemDeltaItem; import com.evolveum.midpoint.prism.util.JavaTypeConverter; import com.evolveum.midpoint.prism.util.ObjectDeltaObject; import com.evolveum.midpoint.prism.util.PrismUtil; import com.evolveum.midpoint.prism.xml.XmlTypeConverter; import com.evolveum.midpoint.prism.xml.XsdTypeMapper; import com.evolveum.midpoint.repo.common.ObjectResolver; import com.evolveum.midpoint.schema.constants.ExpressionConstants; 
import com.evolveum.midpoint.schema.result.OperationResult; import com.evolveum.midpoint.security.api.MidPointPrincipal; import com.evolveum.midpoint.security.api.SecurityContextManager; import com.evolveum.midpoint.task.api.Task; import com.evolveum.midpoint.util.DOMUtil; import com.evolveum.midpoint.util.Holder; import com.evolveum.midpoint.util.PrettyPrinter; import com.evolveum.midpoint.util.exception.CommunicationException; import com.evolveum.midpoint.util.exception.ConfigurationException; import com.evolveum.midpoint.util.exception.ExpressionEvaluationException; import com.evolveum.midpoint.util.exception.ObjectNotFoundException; import com.evolveum.midpoint.util.exception.SchemaException; import com.evolveum.midpoint.util.exception.SecurityViolationException; import com.evolveum.midpoint.util.exception.SystemException; import com.evolveum.midpoint.util.logging.LoggingUtils; import com.evolveum.midpoint.util.logging.Trace; import com.evolveum.midpoint.util.logging.TraceManager; import com.evolveum.midpoint.xml.ns._public.common.common_3.ExpressionType; import com.evolveum.midpoint.xml.ns._public.common.common_3.FocusType; import com.evolveum.midpoint.xml.ns._public.common.common_3.MappingType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectReferenceType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectType; import com.evolveum.midpoint.xml.ns._public.common.common_3.QueryInterpretationOfNoValueType; import com.evolveum.midpoint.xml.ns._public.common.common_3.UserType; import com.evolveum.midpoint.xml.ns._public.common.common_3.VariableBindingDefinitionType; import com.evolveum.prism.xml.ns._public.types_3.ItemPathType; import com.evolveum.prism.xml.ns._public.types_3.ProtectedStringType; import org.jetbrains.annotations.Nullable; /** * @author semancik * */ public class ExpressionUtil { private static final Trace LOGGER = TraceManager.getTrace(ExpressionUtil.class); public static <V extends PrismValue> 
PrismValueDeltaSetTriple<V> toOutputTriple( PrismValueDeltaSetTriple<V> resultTriple, ItemDefinition outputDefinition, Function<Object, Object> additionalConvertor, final ItemPath residualPath, final Protector protector, final PrismContext prismContext) { PrismValueDeltaSetTriple<V> clonedTriple = resultTriple.clone(); final Class<?> resultTripleValueClass = resultTriple.getRealValueClass(); if (resultTripleValueClass == null) { // triple is empty. type does not matter. return clonedTriple; } Class<?> expectedJavaType = XsdTypeMapper.toJavaType(outputDefinition.getTypeName()); if (expectedJavaType == null) { expectedJavaType = prismContext.getSchemaRegistry() .getCompileTimeClass(outputDefinition.getTypeName()); } if (resultTripleValueClass == expectedJavaType) { return clonedTriple; } final Class<?> finalExpectedJavaType = expectedJavaType; clonedTriple.accept((Visitor) visitable -> { if (visitable instanceof PrismPropertyValue<?>) { PrismPropertyValue<Object> pval = (PrismPropertyValue<Object>) visitable; Object realVal = pval.getValue(); if (realVal != null) { if (Structured.class.isAssignableFrom(resultTripleValueClass)) { if (residualPath != null && !residualPath.isEmpty()) { realVal = ((Structured) realVal).resolve(residualPath); } } if (finalExpectedJavaType != null) { Object convertedVal = convertValue(finalExpectedJavaType, additionalConvertor, realVal, protector, prismContext); pval.setValue(convertedVal); } } } }); return clonedTriple; } /** * Slightly more powerful version of "convert" as compared to * JavaTypeConverter. This version can also encrypt/decrypt and also handles * polystrings. 
*/ public static <I, O> O convertValue(Class<O> finalExpectedJavaType, Function<Object, Object> additionalConvertor, I inputVal, Protector protector, PrismContext prismContext) { if (inputVal == null) { return null; } if (finalExpectedJavaType.isInstance(inputVal)) { return (O) inputVal; } Object intermediateVal; if (finalExpectedJavaType == ProtectedStringType.class) { String valueToEncrypt; if (inputVal instanceof String) { valueToEncrypt = (String) inputVal; } else { valueToEncrypt = JavaTypeConverter.convert(String.class, inputVal); } try { intermediateVal = protector.encryptString(valueToEncrypt); } catch (EncryptionException e) { throw new SystemException(e.getMessage(), e); } } else if (inputVal instanceof ProtectedStringType) { try { intermediateVal = protector.decryptString((ProtectedStringType) inputVal); } catch (EncryptionException e) { throw new SystemException(e.getMessage(), e); } } else { intermediateVal = inputVal; } if (additionalConvertor != null) { intermediateVal = additionalConvertor.apply(intermediateVal); } O convertedVal = JavaTypeConverter.convert(finalExpectedJavaType, intermediateVal); PrismUtil.recomputeRealValue(convertedVal, prismContext); return convertedVal; } /** * normalizeValuesToDelete: Whether to normalize container values that are to be deleted, i.e. convert them * from id-only to full data (MID-4863). * TODO: * 1. consider setting this parameter to true at some other places where it might be relevant * 2. 
consider normalizing delete deltas earlier in the clockwork, probably at the very beginning of the operation */ public static Object resolvePath(ItemPath path, ExpressionVariables variables, boolean normalizeValuesToDelete, Object defaultContext, ObjectResolver objectResolver, String shortDesc, Task task, OperationResult result) throws SchemaException, ObjectNotFoundException, CommunicationException, ConfigurationException, SecurityViolationException, ExpressionEvaluationException { Object root = defaultContext; ItemPath relativePath = path; ItemPathSegment first = path.first(); String varDesc = "default context"; if (first.isVariable()) { QName varName = ((NameItemPathSegment) first).getName(); varDesc = "variable " + PrettyPrinter.prettyPrint(varName); relativePath = path.rest(); if (variables.containsKey(varName)) { root = variables.get(varName); } else { throw new SchemaException("No variable with name " + varName + " in " + shortDesc); } } if (root == null) { return null; } if (relativePath.isEmpty()) { return root; } if (normalizeValuesToDelete) { root = normalizeValuesToDelete(root); } if (root instanceof ObjectReferenceType) { root = resolveReference((ObjectReferenceType) root, objectResolver, varDesc, shortDesc, task, result); } if (root instanceof Objectable) { return (((Objectable) root).asPrismObject()).find(relativePath); } if (root instanceof PrismObject<?>) { return ((PrismObject<?>) root).find(relativePath); } else if (root instanceof PrismContainer<?>) { return ((PrismContainer<?>) root).find(relativePath); } else if (root instanceof PrismContainerValue<?>) { return ((PrismContainerValue<?>) root).find(relativePath); } else if (root instanceof Item<?, ?>) { // Except for container (which is handled above) throw new SchemaException( "Cannot apply path " + relativePath + " to " + root + " in " + shortDesc); } else if (root instanceof ObjectDeltaObject<?>) { return ((ObjectDeltaObject<?>) root).findIdi(relativePath); } else if (root instanceof 
ItemDeltaItem<?, ?>) { return ((ItemDeltaItem<?, ?>) root).findIdi(relativePath); } else { throw new IllegalArgumentException( "Unexpected root " + root + " (relative path:" + relativePath + ") in " + shortDesc); } } private static Object normalizeValuesToDelete(Object root) { if (root instanceof ObjectDeltaObject<?>) { return ((ObjectDeltaObject<?>) root).normalizeValuesToDelete(true); } else if (root instanceof ItemDeltaItem<?, ?>) { // TODO normalize as well return root; } else { return root; } } public static <V extends PrismValue, F extends FocusType> Collection<V> computeTargetValues(VariableBindingDefinitionType target, Object defaultTargetContext, ExpressionVariables variables, ObjectResolver objectResolver, String contextDesc, Task task, OperationResult result) throws SchemaException, ObjectNotFoundException, CommunicationException, ConfigurationException, SecurityViolationException, ExpressionEvaluationException { if (target == null) { // Is this correct? What about default targets? return null; } ItemPathType itemPathType = target.getPath(); if (itemPathType == null) { // Is this correct? What about default targets? return null; } ItemPath path = itemPathType.getItemPath(); Object object = resolvePath(path, variables, false, defaultTargetContext, objectResolver, contextDesc, task, result); if (object == null) { return new ArrayList<>(); } else if (object instanceof Item) { return ((Item) object).getValues(); } else if (object instanceof PrismValue) { return (List<V>) Collections.singletonList((PrismValue) object); } else if (object instanceof ItemDeltaItem) { ItemDeltaItem<V, ?> idi = (ItemDeltaItem<V, ?>) object; PrismValueDeltaSetTriple<V> triple = idi.toDeltaSetTriple(); return triple != null ? triple.getNonNegativeValues() : new ArrayList<>(); } else { throw new IllegalStateException("Unsupported target value(s): " + object.getClass() + " (" + object + ")"); } } // TODO what about collections of values? 
/**
 * Converts a prism-level variable value into the "scripting-friendly" form handed to
 * expression evaluators: objects become Objectable, container values become Containerable,
 * property values become real values, and single-valued items collapse to a single real value.
 * An ObjectReferenceType is dereferenced first. Values with no definition fall back to raw
 * prism values. Anything unrecognized is returned unchanged.
 */
public static Object convertVariableValue(Object originalValue, String variableName, ObjectResolver objectResolver,
        String contextDescription, PrismContext prismContext, Task task, OperationResult result)
        throws ExpressionSyntaxException, ObjectNotFoundException, CommunicationException, ConfigurationException,
        SecurityViolationException, ExpressionEvaluationException {
    if (originalValue instanceof PrismValue) {
        ((PrismValue) originalValue).setPrismContext(prismContext); // TODO - or revive? Or make sure prismContext is set here?
    } else if (originalValue instanceof Item) {
        ((Item) originalValue).setPrismContext(prismContext); // TODO - or revive? Or make sure prismContext is set here?
    }
    if (originalValue instanceof ObjectReferenceType) {
        try {
            originalValue = resolveReference((ObjectReferenceType) originalValue, objectResolver, variableName,
                    contextDescription, task, result);
        } catch (SchemaException e) {
            // Re-wrap so the error message carries the variable name and evaluation context.
            throw new ExpressionSyntaxException("Schema error during variable " + variableName + " resolution in "
                    + contextDescription + ": " + e.getMessage(), e);
        }
    }
    if (originalValue instanceof PrismObject<?>) {
        return ((PrismObject<?>) originalValue).asObjectable();
    }
    if (originalValue instanceof PrismContainerValue<?>) {
        return ((PrismContainerValue<?>) originalValue).asContainerable();
    }
    if (originalValue instanceof PrismPropertyValue<?>) {
        return ((PrismPropertyValue<?>) originalValue).getValue();
    }
    if (originalValue instanceof PrismReferenceValue) {
        // Only convert when a definition is present; otherwise fall through and return the raw value.
        if (((PrismReferenceValue) originalValue).getDefinition() != null) {
            return ((PrismReferenceValue) originalValue).asReferencable();
        }
    }
    if (originalValue instanceof PrismProperty<?>) {
        PrismProperty<?> prop = (PrismProperty<?>) originalValue;
        PrismPropertyDefinition<?> def = prop.getDefinition();
        if (def != null) {
            // Single-valued property collapses to one real value; multi-valued yields the collection.
            if (def.isSingleValue()) {
                return prop.getRealValue();
            } else {
                return prop.getRealValues();
            }
        } else {
            return prop.getValues();
        }
    }
    if (originalValue instanceof PrismReference) {
        PrismReference prop = (PrismReference) originalValue;
        PrismReferenceDefinition def = prop.getDefinition();
        if (def != null) {
            if (def.isSingleValue()) {
                return prop.getRealValue();
            } else {
                return prop.getRealValues();
            }
        } else {
            return prop.getValues();
        }
    }
    if (originalValue instanceof PrismContainer<?>) {
        PrismContainer<?> container = (PrismContainer<?>) originalValue;
        PrismContainerDefinition<?> def = container.getDefinition();
        if (def != null) {
            if (def.isSingleValue()) {
                return container.getRealValue();
            } else {
                return container.getRealValues();
            }
        } else {
            return container.getValues();
        }
    }
    return originalValue;
}

/**
 * Dereferences an object reference via the resolver. Each checked exception from the
 * resolver is re-thrown with the variable name and context prepended to its message,
 * preserving the original exception as the cause.
 *
 * @throws SchemaException when the reference has no OID
 */
private static PrismObject<?> resolveReference(ObjectReferenceType ref, ObjectResolver objectResolver,
        String varDesc, String contextDescription, Task task, OperationResult result)
        throws SchemaException, ObjectNotFoundException, CommunicationException, ConfigurationException,
        SecurityViolationException, ExpressionEvaluationException {
    if (ref.getOid() == null) {
        throw new SchemaException(
                "Null OID in reference in variable " + varDesc + " in " + contextDescription);
    } else {
        try {
            ObjectType objectType = objectResolver.resolve(ref, ObjectType.class, null, contextDescription, task, result);
            if (objectType == null) {
                throw new IllegalArgumentException(
                        "Resolve returned null for " + ref + " in " + contextDescription);
            }
            return objectType.asPrismObject();
        } catch (ObjectNotFoundException e) {
            throw new ObjectNotFoundException("Object not found during variable " + varDesc + " resolution in "
                    + contextDescription + ": " + e.getMessage(), e, ref.getOid());
        } catch (SchemaException e) {
            throw new SchemaException("Schema error during variable " + varDesc + " resolution in "
                    + contextDescription + ": " + e.getMessage(), e);
        } catch (CommunicationException e) {
            throw new CommunicationException("Communication error during variable " + varDesc + " resolution in "
                    + contextDescription + ": " + e.getMessage(), e);
        } catch (ConfigurationException e) {
            throw new ConfigurationException("Configuration error during variable " + varDesc + " resolution in "
                    + contextDescription + ": " + e.getMessage(), e);
        } catch (SecurityViolationException e) {
            throw new SecurityViolationException("Security violation during variable " + varDesc + " resolution in "
                    + contextDescription + ": " + e.getMessage(), e);
        } catch (ExpressionEvaluationException e) {
            throw new ExpressionEvaluationException("Expression evaluation error during variable " + varDesc
                    + " resolution in " + contextDescription + ": " + e.getMessage(), e);
        }
    }
}

/**
 * Resolves an item path to an item DEFINITION (rather than a value), starting from either
 * a variable's definition or the default container definition. Leading non-name segments
 * are skipped before resolution.
 */
public static <ID extends ItemDefinition> ID resolveDefinitionPath(ItemPath path, ExpressionVariables variables,
        PrismContainerDefinition<?> defaultContext, String shortDesc) throws SchemaException {
    // Skip any leading non-name segments (e.g. id segments) — only name segments select definitions.
    while (path != null && !path.isEmpty() && !(path.first() instanceof NameItemPathSegment)) {
        path = path.rest();
    }
    Object root = defaultContext;
    ItemPath relativePath = path;
    NameItemPathSegment first = (NameItemPathSegment) path.first();
    if (first.isVariable()) {
        relativePath = path.rest();
        QName varName = first.getName();
        if (variables.containsKey(varName)) {
            Object varValue = variables.get(varName);
            // Extract the definition from whatever form the variable value takes.
            if (varValue instanceof ItemDeltaItem<?, ?>) {
                root = ((ItemDeltaItem<?, ?>) varValue).getDefinition();
            } else if (varValue instanceof Item<?, ?>) {
                root = ((Item<?, ?>) varValue).getDefinition();
            } else if (varValue instanceof Objectable) {
                root = ((Objectable) varValue).asPrismObject().getDefinition();
            } else if (varValue instanceof ItemDefinition) {
                root = varValue;
            } else {
                throw new IllegalStateException("Unexpected content of variable " + varName + ": " + varValue
                        + " (" + varValue.getClass() + ")");
            }
            if (root == null) {
                throw new IllegalStateException(
                        "Null definition in content of variable " + varName + ": " + varValue);
            }
        } else {
            throw new SchemaException("No variable with name " + varName + " in " + shortDesc);
        }
    }
    if (root == null) {
        return null;
    }
    if (relativePath.isEmpty()) {
        return (ID) root;
    }
    // NOTE(review): this local is never used — every branch below returns directly.
    ItemDefinition result = null;
    if (root instanceof PrismObjectDefinition<?>) {
        return ((PrismObjectDefinition<?>) root).findItemDefinition(relativePath);
    } else if (root instanceof PrismContainerDefinition<?>) {
        return ((PrismContainerDefinition<?>) root).findItemDefinition(relativePath);
    } else if (root instanceof ItemDefinition) {
        // Except for container (which is handled above)
        throw new SchemaException(
                "Cannot apply path " + relativePath + " to " + root + " in " + shortDesc);
    } else {
        throw new IllegalArgumentException("Unexpected root " + root + " in " + shortDesc);
    }
}

/**
 * Wraps an arbitrary value into an ItemDeltaItem view: an ItemDeltaItem passes through,
 * a PrismObject becomes a no-delta ObjectDeltaObject, an Item becomes a no-delta IDI,
 * and a bare ItemDelta becomes a delta-only IDI.
 */
public static <IV extends PrismValue, ID extends ItemDefinition> ItemDeltaItem<IV, ID> toItemDeltaItem(
        Object object, ObjectResolver objectResolver, String string, OperationResult result) {
    if (object == null) {
        return null;
    }
    if (object instanceof ItemDeltaItem<?, ?>) {
        return (ItemDeltaItem<IV, ID>) object;
    }
    if (object instanceof PrismObject<?>) {
        return (ItemDeltaItem<IV, ID>) new ObjectDeltaObject((PrismObject<?>) object, null, (PrismObject<?>) object);
    } else if (object instanceof Item<?, ?>) {
        return new ItemDeltaItem<>((Item<IV, ID>) object, null, (Item<IV, ID>) object);
    } else if (object instanceof ItemDelta<?, ?>) {
        return new ItemDeltaItem<>(null, (ItemDelta<IV, ID>) object, null);
    } else {
        throw new IllegalArgumentException("Unexpected object " + object + " " + object.getClass());
    }
}

/**
 * Returns a clone of the query with all embedded filter expressions evaluated.
 * The original query is not modified.
 */
public static ObjectQuery evaluateQueryExpressions(ObjectQuery origQuery, ExpressionVariables variables,
        ExpressionFactory expressionFactory, PrismContext prismContext, String shortDesc, Task task,
        OperationResult result) throws SchemaException, ObjectNotFoundException, ExpressionEvaluationException,
        CommunicationException, ConfigurationException, SecurityViolationException {
    if (origQuery == null) {
        return null;
    }
    ObjectQuery query = origQuery.clone();
    ObjectFilter evaluatedFilter = evaluateFilterExpressionsInternal(query.getFilter(), variables,
            expressionFactory, prismContext, shortDesc, task, result);
    query.setFilter(evaluatedFilter);
    return query;
}

/**
 * Public entry point for evaluating expressions embedded in a filter; null-safe wrapper
 * around {@link #evaluateFilterExpressionsInternal}.
 */
public static ObjectFilter evaluateFilterExpressions(ObjectFilter origFilter, ExpressionVariables variables,
        ExpressionFactory expressionFactory, PrismContext prismContext, String shortDesc, Task task,
        OperationResult result) throws SchemaException, ObjectNotFoundException, ExpressionEvaluationException,
        CommunicationException, ConfigurationException, SecurityViolationException {
    if (origFilter == null) {
        return null;
    }
    return evaluateFilterExpressionsInternal(origFilter, variables, expressionFactory, prismContext,
            shortDesc, task, result);
}

/**
 * Returns true when any value filter anywhere in the (possibly nested) filter tree
 * carries an unevaluated expression.
 */
public static boolean hasExpressions(@Nullable ObjectFilter filter) {
    if (filter == null) {
        return false;
    }
    Holder<Boolean> result = new Holder<>(false);
    filter.accept(f -> {
        if (f instanceof ValueFilter) {
            ValueFilter<?, ?> vf = (ValueFilter<?, ?>) f;
            if (vf.getExpression() != null) {
                result.setValue(true);
            }
        }
    });
    return result.getValue();
}

/**
 * Recursively walks a filter tree and replaces each expression-bearing leaf filter
 * (InOid, FullText, Value) with a clone holding the evaluated value(s). Logical,
 * Exists and Type filters are rebuilt with evaluated sub-filters; Org/All/None/Undefined
 * pass through unchanged. An empty evaluation result is mapped through
 * {@link #createFilterForNoValue} according to the expression's no-value policy.
 */
private static ObjectFilter evaluateFilterExpressionsInternal(ObjectFilter filter, ExpressionVariables variables,
        ExpressionFactory expressionFactory, PrismContext prismContext, String shortDesc, Task task,
        OperationResult result) throws SchemaException, ObjectNotFoundException, ExpressionEvaluationException,
        CommunicationException, ConfigurationException, SecurityViolationException {
    if (filter == null) {
        return null;
    }
    if (filter instanceof InOidFilter) {
        ExpressionWrapper expressionWrapper = ((InOidFilter) filter).getExpression();
        if (expressionWrapper == null || expressionWrapper.getExpression() == null) {
            LOGGER.warn("No valueExpression in filter in {}. Returning original filter", shortDesc);
            InOidFilter inOidFilter = (InOidFilter) filter;
            // No expression: keep the filter if it already has OIDs, otherwise it can match nothing.
            if (inOidFilter.getOids() != null && !inOidFilter.getOids().isEmpty()) {
                return filter.clone();
            }
            return NoneFilter.createNone();
        }
        ExpressionType valueExpression = getExpression(expressionWrapper, shortDesc);
        try {
            Collection<String> expressionResult = evaluateStringExpression(variables, prismContext,
                    valueExpression, expressionFactory, shortDesc, task, result);
            if (expressionResult == null || expressionResult.isEmpty()) {
                LOGGER.debug("Result of search filter expression was null or empty. Expression: {}", valueExpression);
                return createFilterForNoValue(filter, valueExpression);
            }
            // TODO: log more context
            LOGGER.trace("Search filter expression in the rule for {} evaluated to {}.", shortDesc, expressionResult);
            InOidFilter evaluatedFilter = (InOidFilter) filter.clone();
            evaluatedFilter.setOids(expressionResult);
            evaluatedFilter.setExpression(null); // expression consumed; clone must not be re-evaluated
            if (LOGGER.isTraceEnabled()) {
                LOGGER.trace("Transformed filter to:\n{}", evaluatedFilter.debugDump());
            }
            return evaluatedFilter;
        } catch (Exception ex) {
            throw new ExpressionEvaluationException(ex);
        }
    } else if (filter instanceof FullTextFilter) {
        ExpressionWrapper expressionWrapper = ((FullTextFilter) filter).getExpression();
        if (expressionMissing(expressionWrapper, filter, shortDesc)) {
            return filter.clone();
        }
        ExpressionType valueExpression = getExpression(expressionWrapper, shortDesc);
        try {
            Collection<String> expressionResult = evaluateStringExpression(variables, prismContext,
                    valueExpression, expressionFactory, shortDesc, task, result);
            if (expressionResult == null || expressionResult.isEmpty()) {
                LOGGER.debug("Result of search filter expression was null or empty. Expression: {}", valueExpression);
                return createFilterForNoValue(filter, valueExpression);
            }
            // TODO: log more context
            LOGGER.trace("Search filter expression in the rule for {} evaluated to {}.", shortDesc, expressionResult);
            FullTextFilter evaluatedFilter = (FullTextFilter) filter.clone();
            evaluatedFilter.setValues(expressionResult);
            evaluatedFilter.setExpression(null);
            if (LOGGER.isTraceEnabled()) {
                LOGGER.trace("Transformed filter to:\n{}", evaluatedFilter.debugDump());
            }
            return evaluatedFilter;
        } catch (Exception ex) {
            throw new ExpressionEvaluationException(ex);
        }
    } else if (filter instanceof LogicalFilter) {
        // Rebuild AND/OR/NOT with each sub-filter evaluated recursively.
        List<ObjectFilter> conditions = ((LogicalFilter) filter).getConditions();
        LogicalFilter evaluatedFilter = ((LogicalFilter) filter).cloneEmpty();
        for (ObjectFilter condition : conditions) {
            ObjectFilter evaluatedSubFilter = evaluateFilterExpressionsInternal(condition, variables,
                    expressionFactory, prismContext, shortDesc, task, result);
            evaluatedFilter.addCondition(evaluatedSubFilter);
        }
        return evaluatedFilter;
    } else if (filter instanceof ValueFilter) {
        ValueFilter valueFilter = (ValueFilter) filter;
        if (valueFilter.getValues() != null && !valueFilter.getValues().isEmpty()) {
            // We have value. Nothing to evaluate.
            return valueFilter.clone();
        }
        ExpressionWrapper expressionWrapper = valueFilter.getExpression();
        if (expressionMissing(expressionWrapper, filter, shortDesc)) {
            return valueFilter.clone();
        }
        ExpressionType valueExpression = getExpression(expressionWrapper, shortDesc);
        try {
            PrismValue expressionResult = evaluateExpression(variables, prismContext, valueExpression, filter,
                    expressionFactory, shortDesc, task, result);
            if (expressionResult == null || expressionResult.isEmpty()) {
                LOGGER.debug("Result of search filter expression was null or empty. Expression: {}", valueExpression);
                return createFilterForNoValue(valueFilter, valueExpression);
            }
            // TODO: log more context
            LOGGER.trace("Search filter expression in the rule for {} evaluated to {}.",
                    new Object[] { shortDesc, expressionResult });
            ValueFilter evaluatedFilter = valueFilter.clone();
            evaluatedFilter.setValue(expressionResult);
            evaluatedFilter.setExpression(null);
            // }
            if (LOGGER.isTraceEnabled()) {
                LOGGER.trace("Transformed filter to:\n{}", evaluatedFilter.debugDump());
            }
            return evaluatedFilter;
        } catch (RuntimeException ex) {
            LoggingUtils.logException(LOGGER,
                    "Couldn't evaluate expression " + PrettyPrinter.prettyPrint(valueExpression) + ".", ex);
            throw new SystemException(
                    "Couldn't evaluate expression" + PrettyPrinter.prettyPrint(valueExpression) + ": " + ex.getMessage(), ex);
        } catch (SchemaException ex) {
            LoggingUtils.logException(LOGGER,
                    "Couldn't evaluate expression " + PrettyPrinter.prettyPrint(valueExpression) + ".", ex);
            throw new SchemaException(
                    "Couldn't evaluate expression" + PrettyPrinter.prettyPrint(valueExpression) + ": " + ex.getMessage(), ex);
        } catch (ObjectNotFoundException ex) {
            LoggingUtils.logException(LOGGER,
                    "Couldn't evaluate expression " + PrettyPrinter.prettyPrint(valueExpression) + ".", ex);
            throw new ObjectNotFoundException(
                    "Couldn't evaluate expression" + PrettyPrinter.prettyPrint(valueExpression) + ": " + ex.getMessage(), ex);
        } catch (ExpressionEvaluationException ex) {
            LoggingUtils.logException(LOGGER,
                    "Couldn't evaluate expression " + PrettyPrinter.prettyPrint(valueExpression) + ".", ex);
            throw new ExpressionEvaluationException(
                    "Couldn't evaluate expression " + PrettyPrinter.prettyPrint(valueExpression) + ": " + ex.getMessage(), ex);
        }
    } else if (filter instanceof ExistsFilter) {
        ExistsFilter evaluatedFilter = ((ExistsFilter) filter).cloneEmpty();
        ObjectFilter evaluatedSubFilter = evaluateFilterExpressionsInternal(((ExistsFilter) filter).getFilter(),
                variables, expressionFactory, prismContext, shortDesc, task, result);
        evaluatedFilter.setFilter(evaluatedSubFilter);
        return evaluatedFilter;
    } else if (filter instanceof TypeFilter) {
        TypeFilter evaluatedFilter = ((TypeFilter) filter).cloneEmpty();
        ObjectFilter evaluatedSubFilter = evaluateFilterExpressionsInternal(((TypeFilter) filter).getFilter(),
                variables, expressionFactory, prismContext, shortDesc, task, result);
        evaluatedFilter.setFilter(evaluatedSubFilter);
        return evaluatedFilter;
    } else if (filter instanceof OrgFilter) {
        return filter;
    } else if (filter instanceof AllFilter || filter instanceof NoneFilter || filter instanceof UndefinedFilter) {
        return filter;
    } else {
        throw new IllegalStateException("Unsupported filter type: " + filter.getClass());
    }
}

/** Returns true (and logs) when the filter carries no usable expression. */
private static boolean expressionMissing(ExpressionWrapper expressionWrapper, ObjectFilter filter, String shortDesc) {
    if (expressionWrapper == null || expressionWrapper.getExpression() == null) {
        LOGGER.debug("No valueExpression in filter {} in {}. Returning original filter", filter, shortDesc);
        return true;
    }
    return false;
}

/** Unwraps the ExpressionType from a wrapper, failing on any other payload type. */
private static ExpressionType getExpression(ExpressionWrapper expressionWrapper, String shortDesc)
        throws SchemaException {
    if (!(expressionWrapper.getExpression() instanceof ExpressionType)) {
        throw new SchemaException("Unexpected expression type " + expressionWrapper.getExpression().getClass()
                + " in filter in " + shortDesc);
    }
    return (ExpressionType) expressionWrapper.getExpression();
}

/**
 * Maps a filter whose expression evaluated to "no value" onto a replacement filter,
 * according to the expression's queryInterpretationOfNoValue policy
 * (default: FILTER_EQUAL_NULL).
 */
private static ObjectFilter createFilterForNoValue(ObjectFilter filter, ExpressionType valueExpression)
        throws ExpressionEvaluationException {
    QueryInterpretationOfNoValueType queryInterpretationOfNoValue = valueExpression.getQueryInterpretationOfNoValue();
    if (queryInterpretationOfNoValue == null) {
        queryInterpretationOfNoValue = QueryInterpretationOfNoValueType.FILTER_EQUAL_NULL;
    }
    switch (queryInterpretationOfNoValue) {
        case FILTER_UNDEFINED:
            return UndefinedFilter.createUndefined();
        case FILTER_NONE:
            return NoneFilter.createNone();
        case FILTER_ALL:
            return AllFilter.createAll();
        case FILTER_EQUAL_NULL:
            if (filter instanceof ValueFilter) {
                // Keep the filter but strip the expression — it then matches "value is null".
                ValueFilter evaluatedFilter = (ValueFilter) filter.clone();
                evaluatedFilter.setExpression(null);
                return evaluatedFilter;
            } else if (filter instanceof InOidFilter) {
                return NoneFilter.createNone();
            } else if (filter instanceof FullTextFilter) {
                return NoneFilter.createNone(); // because full text search for 'no value' is meaningless
            } else {
                throw new IllegalArgumentException("Unknown filter to evaluate: " + filter);
            }
        case ERROR:
            throw new ExpressionEvaluationException("Expression " + valueExpression + " evaluated to no value");
        default:
            throw new IllegalArgumentException("Unknown value " + queryInterpretationOfNoValue
                    + " in queryInterpretationOfNoValue in " + valueExpression);
    }
}

/**
 * Evaluates a filter's expression into a single prism value, using the value filter's
 * item definition when available and falling back to a string property definition.
 */
private static <V extends PrismValue> V evaluateExpression(ExpressionVariables variables,
        PrismContext prismContext, ExpressionType expressionType, ObjectFilter filter,
        ExpressionFactory expressionFactory, String shortDesc, Task task, OperationResult parentResult)
        throws SchemaException, ObjectNotFoundException, ExpressionEvaluationException, CommunicationException,
        ConfigurationException, SecurityViolationException {
    // TODO refactor after new query engine is implemented
    ItemDefinition outputDefinition = null;
    if (filter instanceof ValueFilter) {
        outputDefinition = ((ValueFilter) filter).getDefinition();
    }
    if (outputDefinition == null) {
        outputDefinition = new PrismPropertyDefinitionImpl(ExpressionConstants.OUTPUT_ELEMENT_NAME,
                DOMUtil.XSD_STRING, prismContext);
    }
    return (V) evaluateExpression(variables, outputDefinition, expressionType, expressionFactory, shortDesc,
            task, parentResult);
    // String expressionResult =
    // expressionHandler.evaluateExpression(currentShadow, valueExpression,
    // shortDesc, result);
}

/**
 * Evaluates an expression and returns its single output value (null when the output
 * triple is empty; fails if the expression yields more than one non-negative value).
 */
public static <V extends PrismValue, D extends ItemDefinition> V evaluateExpression(
        ExpressionVariables variables, D outputDefinition, ExpressionType expressionType,
        ExpressionFactory expressionFactory, String shortDesc, Task task, OperationResult parentResult)
        throws SchemaException, ExpressionEvaluationException, ObjectNotFoundException, CommunicationException,
        ConfigurationException, SecurityViolationException {
    Expression<V, D> expression = expressionFactory.makeExpression(expressionType, outputDefinition,
            shortDesc, task, parentResult);
    ExpressionEvaluationContext context = new ExpressionEvaluationContext(null, variables, shortDesc, task,
            parentResult);
    PrismValueDeltaSetTriple<V> outputTriple = expression.evaluate(context);
    LOGGER.trace("Result of the expression evaluation: {}", outputTriple);
    return getExpressionOutputValue(outputTriple, shortDesc);
}

/**
 * Extracts the single non-negative value from an output triple; null when there is none,
 * ExpressionEvaluationException when there is more than one.
 */
public static <V extends PrismValue> V getExpressionOutputValue(PrismValueDeltaSetTriple<V> outputTriple,
        String shortDesc) throws ExpressionEvaluationException {
    if (outputTriple == null) {
        return null;
    }
    Collection<V> nonNegativeValues = outputTriple.getNonNegativeValues();
    if (nonNegativeValues == null || nonNegativeValues.isEmpty()) {
        return null;
    }
    if (nonNegativeValues.size() > 1) {
        throw new ExpressionEvaluationException("Expression returned more than one value ("
                + nonNegativeValues.size() + ") in " + shortDesc);
    }
    return nonNegativeValues.iterator().next();
}

/**
 * Evaluates an expression as a multi-valued string and returns the real (unwrapped)
 * values of its non-negative output, or null when empty.
 */
public static Collection<String> evaluateStringExpression(ExpressionVariables variables,
        PrismContext prismContext, ExpressionType expressionType, ExpressionFactory expressionFactory,
        String shortDesc, Task task, OperationResult parentResult)
        throws SchemaException, ExpressionEvaluationException, ObjectNotFoundException, CommunicationException,
        ConfigurationException, SecurityViolationException {
    PrismPropertyDefinitionImpl<String> outputDefinition = new PrismPropertyDefinitionImpl(
            ExpressionConstants.OUTPUT_ELEMENT_NAME, DOMUtil.XSD_STRING, prismContext);
    outputDefinition.setMaxOccurs(-1); // unbounded: allow the expression to return many strings
    Expression<PrismPropertyValue<String>, PrismPropertyDefinition<String>> expression = expressionFactory
            .makeExpression(expressionType, outputDefinition, shortDesc, task, parentResult);
    ExpressionEvaluationContext context = new ExpressionEvaluationContext(null, variables, shortDesc, task,
            parentResult);
    PrismValueDeltaSetTriple<PrismPropertyValue<String>> outputTriple = expression.evaluate(context);
    LOGGER.trace("Result of the expression evaluation: {}", outputTriple);
    if (outputTriple == null) {
        return null;
    }
    Collection<PrismPropertyValue<String>> nonNegativeValues = outputTriple.getNonNegativeValues();
    if (nonNegativeValues == null || nonNegativeValues.isEmpty()) {
        return null;
    }
    return PrismValue.getRealValuesOfCollection((Collection) nonNegativeValues);
    // return nonNegativeValues.iterator().next();
}

/** Evaluates an expression as a single boolean condition value. */
public static PrismPropertyValue<Boolean> evaluateCondition(ExpressionVariables variables,
        ExpressionType expressionType, ExpressionFactory expressionFactory, String shortDesc, Task task,
        OperationResult parentResult)
        throws SchemaException, ExpressionEvaluationException, ObjectNotFoundException, CommunicationException,
        ConfigurationException, SecurityViolationException {
    ItemDefinition outputDefinition = new PrismPropertyDefinitionImpl(ExpressionConstants.OUTPUT_ELEMENT_NAME,
            DOMUtil.XSD_BOOLEAN, expressionFactory.getPrismContext());
    return (PrismPropertyValue<Boolean>) evaluateExpression(variables, outputDefinition, expressionType,
            expressionFactory, shortDesc, task, parentResult);
}

/** Null-safe unwrap of a boolean condition output; null value/holder counts as false. */
public static boolean getBooleanConditionOutput(PrismPropertyValue<Boolean> conditionOutput) {
    if (conditionOutput == null) {
        return false;
    }
    Boolean value = conditionOutput.getValue();
    if (value == null) {
        return false;
    }
    return value;
}

/**
 * Merges the context's variables and sources into one map keyed by QName.
 * Sources are added after variables, so a source with the same name wins.
 */
public static Map<QName, Object> compileVariablesAndSources(ExpressionEvaluationContext params) {
    Map<QName, Object> variablesAndSources = new HashMap<>();
    if (params.getVariables() != null) {
        for (Entry<QName, Object> entry : params.getVariables().entrySet()) {
            variablesAndSources.put(entry.getKey(), entry.getValue());
        }
    }
    if (params.getSources() != null) {
        for (Source<?, ?> source : params.getSources()) {
            variablesAndSources.put(source.getName(), source);
        }
    }
    return variablesAndSources;
}

/** Returns true when any of the mappings declares an explicit target. */
public static boolean hasExplicitTarget(List<MappingType> mappingTypes) {
    for (MappingType mappingType : mappingTypes) {
        if (hasExplicitTarget(mappingType)) {
            return true;
        }
    }
    return false;
}

private static boolean hasExplicitTarget(MappingType mappingType) {
    return mappingType.getTarget() != null;
}

/**
 * Condition-combining semantics: any TRUE wins; otherwise any FALSE yields false;
 * empty/absent values default to true (except the fully-empty collection, which is false).
 */
public static boolean computeConditionResult(
        Collection<PrismPropertyValue<Boolean>> booleanPropertyValues) {
    if (booleanPropertyValues == null || booleanPropertyValues.isEmpty()) {
        // No value means false
        return false;
    }
    boolean hasFalse = false;
    for (PrismPropertyValue<Boolean> pval : booleanPropertyValues) {
        Boolean value = pval.getValue();
        if (Boolean.TRUE.equals(value)) {
            return true;
        }
        if (Boolean.FALSE.equals(value)) {
            hasFalse = true;
        }
    }
    if (hasFalse) {
        return false;
    }
    // No value or all values null. Return default.
    return true;
}

/**
 * Maps an old/new condition pair onto a delta-set mode:
 * true→true = ZERO, false→true = PLUS, true→false = MINUS, false→false = null (no relevance).
 */
public static PlusMinusZero computeConditionResultMode(boolean condOld, boolean condNew) {
    if (condOld && condNew) {
        return PlusMinusZero.ZERO;
    }
    if (!condOld && !condNew) {
        return null;
    }
    if (condOld && !condNew) {
        return PlusMinusZero.MINUS;
    }
    if (!condOld && condNew) {
        return PlusMinusZero.PLUS;
    }
    throw new IllegalStateException("notreached");
}

/**
 * Populates the 'actor' script variable from the current security principal.
 * Leaves an existing value untouched; stores null when unauthenticated or when the
 * principal cannot be read (the failure is logged, never propagated).
 */
public static void addActorVariable(ExpressionVariables scriptVariables,
        SecurityContextManager securityContextManager) {
    // There can already be a value, because for mappings, we create the
    // variable before parsing sources.
    // For other scripts we do it just before the execution, to catch all
    // possible places where scripts can be executed.
    PrismObject<UserType> oldActor = (PrismObject<UserType>) scriptVariables.get(ExpressionConstants.VAR_ACTOR);
    if (oldActor != null) {
        return;
    }
    PrismObject<UserType> actor = null;
    try {
        if (securityContextManager != null) {
            if (!securityContextManager.isAuthenticated()) {
                // This is most likely evaluation of role
                // condition before
                // the authentication is complete.
                scriptVariables.addVariableDefinition(ExpressionConstants.VAR_ACTOR, null);
                return;
            }
            MidPointPrincipal principal = securityContextManager.getPrincipal();
            if (principal != null) {
                UserType principalUser = principal.getUser();
                if (principalUser != null) {
                    actor = principalUser.asPrismObject();
                }
            }
        }
        if (actor == null) {
            LOGGER.debug("Couldn't get principal information - the 'actor' variable is set to null");
        }
    } catch (SecurityViolationException e) {
        LoggingUtils.logUnexpectedException(LOGGER,
                "Couldn't get principal information - the 'actor' variable is set to null", e);
    }
    scriptVariables.addVariableDefinition(ExpressionConstants.VAR_ACTOR, actor);
}

/**
 * Converts a Long to the output definition's type: int/long directly, anything else via
 * the String overload (which handles ProtectedString and other XSD-convertible types).
 */
public static <D extends ItemDefinition> Object convertToOutputValue(Long longValue, D outputDefinition,
        Protector protector) throws ExpressionEvaluationException, SchemaException {
    if (longValue == null) {
        return null;
    }
    QName outputType = outputDefinition.getTypeName();
    if (outputType.equals(DOMUtil.XSD_INT)) {
        return longValue.intValue();
    } else if (outputType.equals(DOMUtil.XSD_LONG)) {
        return longValue;
    } else {
        return convertToOutputValue(longValue.toString(), outputDefinition, protector);
    }
}

/**
 * Converts a String to the output definition's type: plain string, encrypted
 * ProtectedString, or any XmlTypeConverter-supported XSD type. Unsupported types fail.
 */
public static <D extends ItemDefinition> Object convertToOutputValue(String stringValue, D outputDefinition,
        Protector protector) throws ExpressionEvaluationException, SchemaException {
    if (stringValue == null) {
        return null;
    }
    QName outputType = outputDefinition.getTypeName();
    if (outputType.equals(DOMUtil.XSD_STRING)) {
        return stringValue;
    } else if (outputType.equals(ProtectedStringType.COMPLEX_TYPE)) {
        try {
            return protector.encryptString(stringValue);
        } catch (EncryptionException e) {
            throw new ExpressionEvaluationException("Crypto error: " + e.getMessage(), e);
        }
    } else if (XmlTypeConverter.canConvert(outputType)) {
        Class<?> outputJavaType = XsdTypeMapper.toJavaType(outputType);
        try {
            return XmlTypeConverter.toJavaValue(stringValue, outputJavaType, true);
        } catch (NumberFormatException e) {
            throw new SchemaException("Cannot convert string '" + stringValue + "' to data type " + outputType
                    + ": invalid number format", e);
        } catch (IllegalArgumentException e) {
            throw new SchemaException("Cannot convert string '" + stringValue + "' to data type " + outputType
                    + ": " + e.getMessage(), e);
        }
    } else {
        throw new IllegalArgumentException(
                "Expression cannot generate values for properties of type " + outputType);
    }
}

/** Returns true for null, empty String, or empty PolyString. */
public static <T> boolean isEmpty(T val) {
    if (val == null) {
        return true;
    }
    if (val instanceof String && ((String) val).isEmpty()) {
        return true;
    }
    if (val instanceof PolyString && ((PolyString) val).isEmpty()) {
        return true;
    }
    return false;
}

/**
 * Wraps a real value into the prism value matching its target definition:
 * reference value, adopted container value, or plain property value.
 */
public static <T, V extends PrismValue> V convertToPrismValue(T value, ItemDefinition definition,
        String contextDescription, PrismContext prismContext) throws ExpressionEvaluationException {
    if (definition instanceof PrismReferenceDefinition) {
        return (V) ((ObjectReferenceType) value).asReferenceValue();
    } else if (definition instanceof PrismContainerDefinition) {
        try {
            prismContext.adopt((Containerable) value);
            ((Containerable) value).asPrismContainerValue().applyDefinition(definition);
        } catch (SchemaException e) {
            throw new ExpressionEvaluationException(e.getMessage() + " " + contextDescription, e);
        }
        return (V) ((Containerable) value).asPrismContainerValue();
    } else {
        return (V) new PrismPropertyValue<>(value);
    }
}

/** Builds a boolean-output condition expression from its bean definition. */
public static Expression<PrismPropertyValue<Boolean>, PrismPropertyDefinition<Boolean>> createCondition(
        ExpressionType conditionExpressionType, ExpressionFactory expressionFactory, String shortDesc, Task task,
        OperationResult result) throws SchemaException, ObjectNotFoundException {
    return expressionFactory.makeExpression(conditionExpressionType,
            createConditionOutputDefinition(expressionFactory.getPrismContext()), shortDesc, task, result);
}

/**
 * Returns a converter that coerces various reference representations
 * (Referencable, PrismReferenceValue, bare OID String) into ObjectReferenceType.
 * Unrecognized inputs pass through unchanged for downstream validation.
 */
public static Function<Object, Object> createRefConvertor(QName defaultType) {
    return (o) -> {
        if (o == null || o instanceof ObjectReferenceType) {
            return o;
        } else if (o instanceof Referencable) {
            ObjectReferenceType rv = new ObjectReferenceType();
            rv.setupReferenceValue(((Referencable) o).asReferenceValue());
            return rv;
        } else if (o instanceof PrismReferenceValue) {
            ObjectReferenceType rv = new ObjectReferenceType();
            rv.setupReferenceValue((PrismReferenceValue) o);
            return rv;
        } else if (o instanceof String) {
            return new ObjectReferenceType().oid((String) o).type(defaultType);
        } else {
            //throw new IllegalArgumentException("The value couldn't be converted to an object reference: " + o);
            return o; // let someone else complain at this
        }
    };
}

/** Standard single-valued boolean output definition used for condition expressions. */
public static PrismPropertyDefinition<Boolean> createConditionOutputDefinition(PrismContext prismContext) {
    return new PrismPropertyDefinitionImpl<>(ExpressionConstants.OUTPUT_ELEMENT_NAME, DOMUtil.XSD_BOOLEAN,
            prismContext);
}

}
/** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.master.handler; import java.io.IOException; import java.util.List; import java.util.concurrent.ExecutorService; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.CoordinatedStateException; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.TableNotEnabledException; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.constraint.ConstraintException; import org.apache.hadoop.hbase.executor.EventHandler; import org.apache.hadoop.hbase.executor.EventType; import org.apache.hadoop.hbase.master.AssignmentManager; import org.apache.hadoop.hbase.master.BulkAssigner; import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.master.MasterCoprocessorHost; import org.apache.hadoop.hbase.master.RegionStates; import org.apache.hadoop.hbase.master.TableLockManager; import org.apache.hadoop.hbase.master.RegionState.State; import 
org.apache.hadoop.hbase.master.TableLockManager.TableLock; import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos; import org.apache.htrace.Trace; /** * Handler to run disable of a table. */ @InterfaceAudience.Private public class DisableTableHandler extends EventHandler { private static final Log LOG = LogFactory.getLog(DisableTableHandler.class); private final TableName tableName; private final AssignmentManager assignmentManager; private final TableLockManager tableLockManager; private final boolean skipTableStateCheck; private TableLock tableLock; public DisableTableHandler(Server server, TableName tableName, AssignmentManager assignmentManager, TableLockManager tableLockManager, boolean skipTableStateCheck) { super(server, EventType.C_M_DISABLE_TABLE); this.tableName = tableName; this.assignmentManager = assignmentManager; this.tableLockManager = tableLockManager; this.skipTableStateCheck = skipTableStateCheck; } public DisableTableHandler prepare() throws TableNotFoundException, TableNotEnabledException, IOException { if(tableName.equals(TableName.META_TABLE_NAME)) { throw new ConstraintException("Cannot disable catalog table"); } //acquire the table write lock, blocking this.tableLock = this.tableLockManager.writeLock(tableName, EventType.C_M_DISABLE_TABLE.toString()); this.tableLock.acquire(); boolean success = false; try { // Check if table exists if (!MetaTableAccessor.tableExists(this.server.getConnection(), tableName)) { throw new TableNotFoundException(tableName); } // There could be multiple client requests trying to disable or enable // the table at the same time. Ensure only the first request is honored // After that, no other requests can be accepted until the table reaches // DISABLED or ENABLED. 
//TODO: reevaluate this since we have table locks now if (!skipTableStateCheck) { try { if (!this.assignmentManager.getTableStateManager().setTableStateIfInStates( this.tableName, ZooKeeperProtos.Table.State.DISABLING, ZooKeeperProtos.Table.State.ENABLED)) { LOG.info("Table " + tableName + " isn't enabled; skipping disable"); throw new TableNotEnabledException(this.tableName); } } catch (CoordinatedStateException e) { throw new IOException("Unable to ensure that the table will be" + " disabling because of a coordination engine issue", e); } } success = true; } finally { if (!success) { releaseTableLock(); } } return this; } @Override public String toString() { String name = "UnknownServerName"; if(server != null && server.getServerName() != null) { name = server.getServerName().toString(); } return getClass().getSimpleName() + "-" + name + "-" + getSeqid() + "-" + tableName; } @Override public void process() { try { LOG.info("Attempting to disable table " + this.tableName); MasterCoprocessorHost cpHost = ((HMaster) this.server) .getMasterCoprocessorHost(); if (cpHost != null) { cpHost.preDisableTableHandler(this.tableName); } handleDisableTable(); if (cpHost != null) { cpHost.postDisableTableHandler(this.tableName); } } catch (IOException e) { LOG.error("Error trying to disable table " + this.tableName, e); } catch (CoordinatedStateException e) { LOG.error("Error trying to disable table " + this.tableName, e); } finally { releaseTableLock(); } } private void releaseTableLock() { if (this.tableLock != null) { try { this.tableLock.release(); } catch (IOException ex) { LOG.warn("Could not release the table lock", ex); } } } private void handleDisableTable() throws IOException, CoordinatedStateException { // Set table disabling flag up in zk. this.assignmentManager.getTableStateManager().setTableState(this.tableName, ZooKeeperProtos.Table.State.DISABLING); boolean done = false; while (true) { // Get list of online regions that are of this table. 
Regions that are // already closed will not be included in this list; i.e. the returned // list is not ALL regions in a table, its all online regions according // to the in-memory state on this master. final List<HRegionInfo> regions = this.assignmentManager .getRegionStates().getRegionsOfTable(tableName); if (regions.size() == 0) { done = true; break; } LOG.info("Offlining " + regions.size() + " regions."); BulkDisabler bd = new BulkDisabler(this.server, regions); try { if (bd.bulkAssign()) { done = true; break; } } catch (InterruptedException e) { LOG.warn("Disable was interrupted"); // Preserve the interrupt. Thread.currentThread().interrupt(); break; } } // Flip the table to disabled if success. if (done) this.assignmentManager.getTableStateManager().setTableState(this.tableName, ZooKeeperProtos.Table.State.DISABLED); LOG.info("Disabled table, " + this.tableName + ", is done=" + done); } /** * Run bulk disable. */ class BulkDisabler extends BulkAssigner { private final List<HRegionInfo> regions; BulkDisabler(final Server server, final List<HRegionInfo> regions) { super(server); this.regions = regions; } @Override protected void populatePool(ExecutorService pool) { RegionStates regionStates = assignmentManager.getRegionStates(); for (HRegionInfo region: regions) { if (regionStates.isRegionInTransition(region) && !regionStates.isRegionInState(region, State.FAILED_CLOSE)) { continue; } final HRegionInfo hri = region; pool.execute(Trace.wrap("DisableTableHandler.BulkDisabler",new Runnable() { public void run() { assignmentManager.unassign(hri, true); } })); } } @Override protected boolean waitUntilDone(long timeout) throws InterruptedException { long startTime = System.currentTimeMillis(); long remaining = timeout; List<HRegionInfo> regions = null; long lastLogTime = startTime; while (!server.isStopped() && remaining > 0) { Thread.sleep(waitingTimeForEvents); regions = assignmentManager.getRegionStates().getRegionsOfTable(tableName); long now = 
System.currentTimeMillis(); // Don't log more than once every ten seconds. Its obnoxious. And only log table regions // if we are waiting a while for them to go down... if (LOG.isDebugEnabled() && ((now - lastLogTime) > 10000)) { lastLogTime = now; LOG.debug("Disable waiting until done; " + remaining + " ms remaining; " + regions); } if (regions.isEmpty()) break; remaining = timeout - (now - startTime); } return regions != null && regions.isEmpty(); } } }
/*
 * Copyright (C) 2014 Haruki Hasegawa
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.h6ah4i.android.media.test.classtest;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import junit.framework.TestSuite;

import com.h6ah4i.android.media.IBasicMediaPlayer;
import com.h6ah4i.android.media.IMediaPlayerFactory;
import com.h6ah4i.android.media.audiofx.IAudioEffect;
import com.h6ah4i.android.media.audiofx.IVirtualizer;
import com.h6ah4i.android.media.audiofx.IVirtualizer.Settings;
import com.h6ah4i.android.media.test.base.BasicMediaPlayerTestCaseBase;
import com.h6ah4i.android.media.test.utils.CompletionListenerObject;
import com.h6ah4i.android.media.test.utils.ErrorListenerObject;
import com.h6ah4i.android.media.test.utils.SeekCompleteListenerObject;
import com.h6ah4i.android.testing.ParameterizedTestArgs;
import com.h6ah4i.android.testing.ParameterizedTestSuiteBuilder;

/**
 * Parameterized test case for {@link IVirtualizer} implementations.
 * Most tests run once per {@code PlayerState} (except {@code End}); each
 * "testXxx" method is a thin wrapper that delegates to the matching
 * "checkXxx" method via {@link #checkWithNoPlayerErrors}, which also
 * asserts that no player error callback fired during the check.
 */
public class VirtualizerTestCase extends BasicMediaPlayerTestCaseBase {
    // Expected factory-default strength value of a freshly created virtualizer.
    private static final short DEFAULT_STRENGTH = 750;

    /** Test parameters: media player factory class plus the player state to test in. */
    private static final class TestParams extends BasicTestParams {
        private final PlayerState mPlayerState;

        public TestParams(
                Class<? extends IMediaPlayerFactory> factoryClass,
                PlayerState playerState) {
            super(factoryClass);
            mPlayerState = playerState;
        }

        public PlayerState getPlayerState() {
            return mPlayerState;
        }

        @Override
        public String toString() {
            return super.toString() + ", " + mPlayerState;
        }
    }

    /**
     * Builds the suite: every check runs across all player states except
     * {@code End}; {@code testPlayerStateTransition} runs once, unparameterized.
     */
    public static TestSuite buildTestSuite(
            Class<? extends IMediaPlayerFactory> factoryClazz) {
        TestSuite suite = new TestSuite();

        // parameterized tests
        ParameterizedTestSuiteBuilder.Filter filter =
                ParameterizedTestSuiteBuilder.notMatches("testPlayerStateTransition");
        List<TestParams> params = new ArrayList<TestParams>();
        List<PlayerState> playerStates = new ArrayList<PlayerState>();

        playerStates.addAll(Arrays.asList(PlayerState.values()));
        playerStates.remove(PlayerState.End);

        for (PlayerState playerState : playerStates) {
            params.add(new TestParams(factoryClazz, playerState));
        }
        suite.addTest(ParameterizedTestSuiteBuilder.buildDetail(
                VirtualizerTestCase.class, params, filter, false));

        // not parameterized tests
        suite.addTest(makeSingleBasicTest(
                VirtualizerTestCase.class, "testPlayerStateTransition", factoryClazz));

        return suite;
    }

    public VirtualizerTestCase(ParameterizedTestArgs args) {
        super(args);
    }

    //
    // Exposed test cases
    //

    public void testDefaultParameters() throws Throwable {
        TestParams params = (TestParams) getTestParams();

        checkWithNoPlayerErrors(
                params,
                new BasicMediaPlayerTestRunnable() {
                    @Override
                    public void run(IBasicMediaPlayer player, Object args) throws Throwable {
                        checkDefaultParameters(player);
                    }
                });
    }

    public void testSetAndGetEnabled() throws Throwable {
        TestParams params = (TestParams) getTestParams();

        checkWithNoPlayerErrors(
                params,
                new BasicMediaPlayerTestRunnable() {
                    @Override
                    public void run(IBasicMediaPlayer player, Object args) throws Throwable {
                        checkSetAndGetEnabled(player);
                    }
                });
    }

    public void testStrengthParamWithValidRange() throws Throwable {
        TestParams params = (TestParams) getTestParams();

        checkWithNoPlayerErrors(
                params,
                new BasicMediaPlayerTestRunnable() {
                    @Override
                    public void run(IBasicMediaPlayer player, Object args) throws Throwable {
                        checkStrengthParamWithValidRange(player);
                    }
                });
    }

    public void testPropertiesCompatWithNullSettings() throws Throwable {
        TestParams params = (TestParams) getTestParams();

        checkWithNoPlayerErrors(
                params,
                new BasicMediaPlayerTestRunnable() {
                    @Override
                    public void run(IBasicMediaPlayer player, Object args) throws Throwable {
                        checkPropertiesCompatWithNullSettings(player);
                    }
                });
    }

    public void testStrengthParamWithInvalidRange() throws Throwable {
        TestParams params = (TestParams) getTestParams();

        checkWithNoPlayerErrors(
                params,
                new BasicMediaPlayerTestRunnable() {
                    @Override
                    public void run(IBasicMediaPlayer player, Object args) throws Throwable {
                        checkStrengthParamWithInvalidRange(player);
                    }
                });
    }

    public void testAfterRelease() throws Throwable {
        TestParams params = (TestParams) getTestParams();

        checkWithNoPlayerErrors(
                params,
                new BasicMediaPlayerTestRunnable() {
                    @Override
                    public void run(IBasicMediaPlayer player, Object args) throws Throwable {
                        checkAfterRelease(player);
                    }
                });
    }

    public void testHasControl() throws Throwable {
        TestParams params = (TestParams) getTestParams();

        checkWithNoPlayerErrors(
                params,
                new BasicMediaPlayerTestRunnable() {
                    @Override
                    public void run(IBasicMediaPlayer player, Object args) throws Throwable {
                        checkHasControl(player);
                    }
                });
    }

    public void testAfterControlLost() throws Throwable {
        TestParams params = (TestParams) getTestParams();

        checkWithNoPlayerErrors(
                params,
                new BasicMediaPlayerTestRunnable() {
                    @Override
                    public void run(IBasicMediaPlayer player, Object args) throws Throwable {
                        checkAfterControlLost(player);
                    }
                });
    }

    public void testMultiInstanceBehavior() throws Throwable {
        TestParams params = (TestParams) getTestParams();

        checkWithNoPlayerErrors(
                params,
                new BasicMediaPlayerTestRunnable() {
                    @Override
                    public void run(IBasicMediaPlayer player, Object args) throws Throwable {
                        checkMultiInstanceBehavior(player);
                    }
                });
    }

    public void testPlayerReleasedBeforeEffect() throws Throwable {
        TestParams params = (TestParams) getTestParams();

        checkWithNoPlayerErrors(
                params,
                new BasicMediaPlayerTestRunnable() {
                    @Override
                    public void run(IBasicMediaPlayer player, Object args) throws Throwable {
                        checkPlayerReleasedBeforeEffect(player);
                    }
                });
    }

    /**
     * Verifies that enabled state and strength settings are preserved across
     * every player state transition (idle through end).
     */
    public void testPlayerStateTransition() throws Exception {
        IBasicMediaPlayer player = null;
        IVirtualizer effect = null;

        try {
            // check effect settings are preserved along player state transition
            Object waitObj = new Object();
            CompletionListenerObject comp = new CompletionListenerObject(waitObj);
            SeekCompleteListenerObject seekComp = new SeekCompleteListenerObject(waitObj);

            player = createWrappedPlayerInstance();
            effect = getFactory().createVirtualizer(unwrap(player));

            player.setOnCompletionListener(comp);
            player.setOnSeekCompleteListener(seekComp);

            // configure
            assertEquals(IAudioEffect.SUCCESS, effect.setEnabled(true));
            effect.setStrength((short) 123);

            final IVirtualizer.Settings expectedSettings = effect.getProperties();

            // player: idle

            // player: initialized
            setDataSourceForCommonTests(player, null);

            assertTrue(effect.getEnabled());
            assertEquals(expectedSettings, effect.getProperties());

            // player: prepared
            player.prepare();

            assertTrue(effect.getEnabled());
            assertEquals(expectedSettings, effect.getProperties());

            // player: started
            player.start();

            assertTrue(effect.getEnabled());
            assertEquals(expectedSettings, effect.getProperties());

            // player: paused
            player.pause();

            assertTrue(effect.getEnabled());
            assertEquals(expectedSettings, effect.getProperties());

            // player: playback completed
            player.seekTo(player.getDuration());

            if (!seekComp.await(DEFAULT_EVENT_WAIT_DURATION)) {
                fail();
            }

            player.start();

            if (!comp.await(SHORT_EVENT_WAIT_DURATION)) {
                // XXX This assertion fails on Android 5.0 with
                // StandardMediaPlayer
                fail();
            }

            assertTrue(effect.getEnabled());
            assertEquals(expectedSettings, effect.getProperties());

            // player: stop
            player.stop();

            assertTrue(effect.getEnabled());
            assertEquals(expectedSettings, effect.getProperties());

            // player: idle
            player.reset();

            assertTrue(effect.getEnabled());
            assertEquals(expectedSettings, effect.getProperties());

            // player: end
            player.release();
            player = null;

            assertTrue(effect.getEnabled());
            assertEquals(expectedSettings, effect.getProperties());
        } finally {
            releaseQuietly(player);
            releaseQuietly(effect);
        }
    }

    /**
     * A new virtualizer starts in the default state, and modified parameters
     * do not leak into subsequently created instances after release.
     */
    private void checkDefaultParameters(IBasicMediaPlayer player) {
        IVirtualizer effect = null;

        try {
            effect = getFactory().createVirtualizer(player);

            // check
            checkIsDefaultState(effect);

            // modify parameters
            effect.setEnabled(true);
            effect.setStrength((short) 123);

            // release
            effect.release();
            effect = null;

            // re-confirm with new instance
            effect = getFactory().createVirtualizer(player);
            checkIsDefaultState(effect);
        } finally {
            releaseQuietly(effect);
        }
    }

    /** setEnabled(boolean) is reflected by getEnabled(). */
    private void checkSetAndGetEnabled(IBasicMediaPlayer player) {
        IVirtualizer effect = null;

        try {
            effect = getFactory().createVirtualizer(player);

            assertEquals(false, effect.getEnabled());

            effect.setEnabled(false);
            assertEquals(false, effect.getEnabled());

            effect.setEnabled(true);
            assertEquals(true, effect.getEnabled());
        } finally {
            releaseQuietly(effect);
        }
    }

    /** Valid strengths (0..1000) are accepted both when disabled and enabled. */
    private void checkStrengthParamWithValidRange(IBasicMediaPlayer player) {
        IVirtualizer effect = null;

        try {
            effect = getFactory().createVirtualizer(player);

            // when not enabled
            effect.setEnabled(false);
            setAndCheckVaildStrength(effect);

            // when enabled
            effect.setEnabled(true);
            setAndCheckVaildStrength(effect);
        } finally {
            releaseQuietly(effect);
        }
    }

    /** setProperties(null) must raise IllegalArgumentException. */
    private void checkPropertiesCompatWithNullSettings(IBasicMediaPlayer player) {
        IVirtualizer effect = null;

        try {
            effect = getFactory().createVirtualizer(player);

            try {
                effect.setProperties(null);
                fail();
            } catch (IllegalArgumentException e) {
                // expected
            }
        } finally {
            releaseQuietly(effect);
        }
    }

    /** Out-of-range strengths are rejected both when disabled and enabled. */
    private void checkStrengthParamWithInvalidRange(IBasicMediaPlayer player) {
        IVirtualizer effect = null;

        try {
            effect = getFactory().createVirtualizer(player);

            // when not enabled
            effect.setEnabled(false);
            setAndCheckInvalidStrength(effect);

            // when enabled
            effect.setEnabled(true);
            setAndCheckInvalidStrength(effect);
        } finally {
            releaseQuietly(effect);
        }
    }

    /**
     * After release(), every method must throw IllegalStateException, with the
     * one exception of getStrengthSupported() which must remain callable.
     */
    private void checkAfterRelease(IBasicMediaPlayer player) {
        try {
            createReleasedVirtualizer(player).getId();
            fail();
        } catch (IllegalStateException e) {
            // expected
        }

        try {
            createReleasedVirtualizer(player).getEnabled();
            fail();
        } catch (IllegalStateException e) {
            // expected
        }

        try {
            createReleasedVirtualizer(player).hasControl();
            fail();
        } catch (IllegalStateException e) {
            // expected
        }

        try {
            createReleasedVirtualizer(player).setEnabled(true);
            fail();
        } catch (IllegalStateException e) {
            // expected
        }

        try {
            createReleasedVirtualizer(player).setEnabled(false);
            fail();
        } catch (IllegalStateException e) {
            // expected
        }

        try {
            createReleasedVirtualizer(player).getStrengthSupported();
            // this method should not raise any exceptions
        } catch (IllegalStateException e) {
            fail();
        }

        try {
            createReleasedVirtualizer(player).getRoundedStrength();
            fail();
        } catch (IllegalStateException e) {
            // expected
        }

        try {
            createReleasedVirtualizer(player).setStrength((short) 0);
            fail();
        } catch (IllegalStateException e) {
            // expected
        }

        try {
            createReleasedVirtualizer(player).getProperties();
            fail();
        } catch (IllegalStateException e) {
            // expected
        }

        try {
            createReleasedVirtualizer(player).setProperties(createSettings((short) 0));
            fail();
        } catch (IllegalStateException e) {
            // expected
        }
    }

    /**
     * Control ownership: the most recently created instance has control;
     * releasing it hands control back to the next most recent survivor.
     * Non-controlling instances get ERROR_INVALID_OPERATION from setEnabled().
     */
    private void checkHasControl(IBasicMediaPlayer player) {
        IVirtualizer effect1 = null, effect2 = null, effect3 = null;

        try {
            // create instance 1
            // NOTE: [1]: has control, [2] not created, [3] not created
            effect1 = getFactory().createVirtualizer(player);

            assertTrue(effect1.hasControl());

            // create instance 2
            // NOTE: [1]: lost control, [2] has control, [3] not created
            effect2 = getFactory().createVirtualizer(player);

            assertFalse(effect1.hasControl());
            assertTrue(effect2.hasControl());

            assertEquals(
                    IAudioEffect.ERROR_INVALID_OPERATION, effect1.setEnabled(false));
            assertEquals(
                    IAudioEffect.ERROR_INVALID_OPERATION, effect1.setEnabled(true));
            assertEquals(
                    IAudioEffect.SUCCESS, effect2.setEnabled(true));
            assertEquals(
                    IAudioEffect.SUCCESS, effect2.setEnabled(false));

            // create instance 3
            // NOTE: [1]: lost control, [2] lost control, [3] has control
            effect3 = getFactory().createVirtualizer(player);

            assertFalse(effect1.hasControl());
            assertFalse(effect2.hasControl());
            assertTrue(effect3.hasControl());

            assertEquals(
                    IAudioEffect.ERROR_INVALID_OPERATION, effect1.setEnabled(true));
            assertEquals(
                    IAudioEffect.ERROR_INVALID_OPERATION, effect2.setEnabled(true));
            assertEquals(
                    IAudioEffect.SUCCESS, effect3.setEnabled(true));
            assertEquals(
                    IAudioEffect.SUCCESS, effect3.setEnabled(false));

            // release the instance 3
            // NOTE: [1]: lost control, [2] has control, [3] released
            effect3.release();
            effect3 = null;

            assertFalse(effect1.hasControl());
            assertTrue(effect2.hasControl());

            assertEquals(
                    IAudioEffect.ERROR_INVALID_OPERATION, effect1.setEnabled(true));
            assertEquals(
                    IAudioEffect.SUCCESS, effect2.setEnabled(true));
            assertEquals(
                    IAudioEffect.SUCCESS, effect2.setEnabled(false));

            // release the instance 2
            // NOTE: [1]: has control, [2] released, [3] released
            effect2.release();
            effect2 = null;

            // XXX This assertion may be fail when using StandardMediaPlayer
            assertTrue(effect1.hasControl());
            assertEquals(
                    IAudioEffect.SUCCESS, effect1.setEnabled(true));
            assertEquals(
                    IAudioEffect.SUCCESS, effect1.setEnabled(false));
        } finally {
            releaseQuietly(effect1);
            effect1 = null;

            releaseQuietly(effect2);
            effect2 = null;

            releaseQuietly(effect3);
            effect3 = null;
        }
    }

    /**
     * Once control is lost: getters still work and mirror the controlling
     * instance, setEnabled() reports ERROR_INVALID_OPERATION, and mutating
     * setters throw UnsupportedOperationException without changing state.
     */
    private void checkAfterControlLost(IBasicMediaPlayer player) {
        IVirtualizer effect1 = null, effect2 = null;

        try {
            effect1 = getFactory().createVirtualizer(player);
            effect2 = getFactory().createVirtualizer(player);

            final boolean initialEnabledState = effect2.getEnabled();
            final IVirtualizer.Settings initialSettings = effect2.getProperties();

            assertFalse(effect1.hasControl());
            assertTrue(effect2.hasControl());

            // no state changing methods should not raise any errors
            assertEquals(effect2.getEnabled(), effect1.getEnabled());
            assertEquals(effect2.getId(), effect1.getId());
            assertEquals(effect2.getStrengthSupported(), effect1.getStrengthSupported());
            assertEquals(effect2.getRoundedStrength(), effect1.getRoundedStrength());
            assertEquals(effect2.getProperties(), effect1.getProperties());

            // setEnabled() should return IAudioEffect.ERROR_INVALID_OPERATION
            assertEquals(IAudioEffect.ERROR_INVALID_OPERATION, effect1.setEnabled(false));
            assertEquals(IAudioEffect.ERROR_INVALID_OPERATION, effect1.setEnabled(true));

            // state changing methods should raise UnsupportedOperationException
            try {
                effect1.setStrength(DEFAULT_STRENGTH);
                fail();
            } catch (UnsupportedOperationException e) {
                // expected
            }

            try {
                effect1.setProperties(createSettings((short) 100));
                fail();
            } catch (UnsupportedOperationException e) {
                // expected
            }

            // confirm object state
            assertEquals(initialEnabledState, effect1.getEnabled());
            assertEquals(initialSettings, effect1.getProperties());
        } finally {
            releaseQuietly(effect1);
            effect1 = null;

            releaseQuietly(effect2);
            effect2 = null;
        }
    }

    /**
     * Two instances on one player share effect state: changes made through the
     * controlling instance are visible from the non-controlling one, and the
     * survivor regains control after the controller is released.
     */
    private void checkMultiInstanceBehavior(IBasicMediaPlayer player) {
        IVirtualizer effect1 = null, effect2 = null;

        try {
            effect1 = getFactory().createVirtualizer(player);
            effect2 = getFactory().createVirtualizer(player);

            // check pre. conditions
            assertFalse(effect1.hasControl());
            assertTrue(effect2.hasControl());

            assertFalse(effect1.getEnabled());
            assertFalse(effect2.getEnabled());

            assertEquals(DEFAULT_STRENGTH, effect1.getRoundedStrength());
            assertEquals(DEFAULT_STRENGTH, effect2.getRoundedStrength());

            // check effect 1 lost controls
            assertEquals(IAudioEffect.ERROR_INVALID_OPERATION, effect1.setEnabled(false));

            try {
                effect1.setStrength((short) 123);
                fail();
            } catch (UnsupportedOperationException e) {
                // expected
            }
            assertEquals(DEFAULT_STRENGTH, effect1.getRoundedStrength());

            try {
                effect1.setProperties(createSettings((short) 123));
                fail();
            } catch (UnsupportedOperationException e) {
                // expected
            }
            assertEquals(DEFAULT_STRENGTH, effect1.getRoundedStrength());

            // change states
            assertEquals(IAudioEffect.SUCCESS, effect2.setEnabled(true));
            effect2.setStrength((short) 123);

            // check post conditions
            assertFalse(effect1.hasControl());
            assertTrue(effect2.hasControl());

            assertTrue(effect1.getEnabled());
            assertTrue(effect2.getEnabled());

            assertEquals((short) 123, effect1.getRoundedStrength());
            assertEquals((short) 123, effect2.getRoundedStrength());

            // release effect 2
            effect2.release();
            effect2 = null;

            // check effect 1 gains control
            // XXX This assertion may be fail when using StandardMediaPlayer
            assertTrue(effect1.hasControl());
            assertEquals(IAudioEffect.SUCCESS, effect1.setEnabled(false));
        } finally {
            releaseQuietly(effect1);
            releaseQuietly(effect2);
        }
    }

    /**
     * The effect must stay usable (control, state, setEnabled) even after the
     * player it was attached to has been released.
     */
    private void checkPlayerReleasedBeforeEffect(IBasicMediaPlayer player) {
        IVirtualizer effect = null;

        try {
            effect = getFactory().createVirtualizer(player);

            // pre. check
            assertTrue(effect.hasControl());
            assertEquals(IAudioEffect.SUCCESS, effect.setEnabled(true));

            // release player
            player.release();
            player = null;

            // post check
            assertTrue(effect.hasControl());
            assertEquals(true, effect.getEnabled());
            assertEquals(IAudioEffect.SUCCESS, effect.setEnabled(false));

            // release effect
            effect.release();
            effect = null;
        } finally {
            releaseQuietly(effect);
            effect = null;
        }
    }

    /** Exercises boundary and midpoint strengths (0, 500, 1000) via both setters. */
    private void setAndCheckVaildStrength(IVirtualizer virtualizer) {
        // by setStrength()
        virtualizer.setStrength((short) 0);
        assertStrengthEquals((short) 0, virtualizer);

        virtualizer.setStrength((short) 500);
        assertStrengthEquals((short) 500, virtualizer);

        virtualizer.setStrength((short) 1000);
        assertStrengthEquals((short) 1000, virtualizer);

        // by setProperties()
        virtualizer.setProperties(createSettings((short) 0));
        assertStrengthEquals((short) 0, virtualizer);

        virtualizer.setProperties(createSettings((short) 500));
        assertStrengthEquals((short) 500, virtualizer);

        virtualizer.setProperties(createSettings((short) 1000));
        assertStrengthEquals((short) 1000, virtualizer);
    }

    /**
     * Out-of-range strengths (-1, 1001) must throw IllegalArgumentException
     * via both setters, leaving the previously set strength untouched.
     */
    private void setAndCheckInvalidStrength(IVirtualizer virtualizer) {
        short expected = 123;

        // set pre. condition
        virtualizer.setStrength(expected);
        assertStrengthEquals(expected, virtualizer);

        try {
            virtualizer.setStrength((short) -1);
            fail();
        } catch (IllegalArgumentException e) {
            // expected
        }
        assertStrengthEquals(expected, virtualizer);

        try {
            virtualizer.setStrength((short) 1001);
            fail();
        } catch (IllegalArgumentException e) {
            // expected
        }
        assertStrengthEquals(expected, virtualizer);

        // by setProperties()
        try {
            virtualizer.setProperties(createSettings((short) -1));
            fail();
        } catch (IllegalArgumentException e) {
            // expected
        }
        assertStrengthEquals(expected, virtualizer);

        try {
            virtualizer.setProperties(createSettings((short) 1001));
            fail();
        } catch (IllegalArgumentException e) {
            // expected
        }
        assertStrengthEquals(expected, virtualizer);
    }

    //
    // Utilities
    //

    /** Asserts the strength both via getRoundedStrength() and getProperties(). */
    static void assertStrengthEquals(short expected, IVirtualizer virtualizer) {
        assertEquals(expected, virtualizer.getRoundedStrength());
        assertEquals(expected, virtualizer.getProperties().strength);
    }

    /** Builds a Settings object carrying only the given strength. */
    static IVirtualizer.Settings createSettings(short strength) {
        IVirtualizer.Settings settings = new Settings();
        settings.strength = strength;
        return settings;
    }

    /** Creates a virtualizer and immediately releases it (for after-release tests). */
    private IVirtualizer createReleasedVirtualizer(IBasicMediaPlayer player) {
        IVirtualizer virtualizer = getFactory().createVirtualizer(player);
        virtualizer.release();
        return virtualizer;
    }

    /** Callback invoked with the player already in the parameterized state. */
    private static interface BasicMediaPlayerTestRunnable {
        public void run(IBasicMediaPlayer player, Object args) throws Throwable;
    }

    /**
     * Creates a player, transitions it to the state requested by the test
     * params, runs the check, and asserts no player error callback fired.
     */
    private void checkWithNoPlayerErrors(
            TestParams params, BasicMediaPlayerTestRunnable checkProcess) throws Throwable {
        IBasicMediaPlayer player = null;

        try {
            player = createWrappedPlayerInstance();
            transitState(params.getPlayerState(), player, null);

            Object sharedSyncObj = new Object();
            ErrorListenerObject err = new ErrorListenerObject(sharedSyncObj, false);

            // set callbacks
            player.setOnErrorListener(err);

            // check
            checkProcess.run(unwrap(player), null);

            // expects no errors
            assertFalse(err.occurred());
        } finally {
            releaseQuietly(player);
        }
    }

    /** Asserts the factory-default state: disabled, strength == DEFAULT_STRENGTH. */
    private void checkIsDefaultState(IVirtualizer effect) {
        assertEquals(false, effect.getEnabled());
        // XXX Default Strength
        // XXX HTC Evo 3D
        assertEquals(DEFAULT_STRENGTH, effect.getRoundedStrength());
    }

    /** Compares two Settings by their string form (Settings lacks equals()). */
    private static void assertEquals(IVirtualizer.Settings expected,
            IVirtualizer.Settings actual) {
        assertEquals(expected.toString(), actual.toString());
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.aggregations.bucket.histogram; import org.apache.lucene.document.Document; import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.search.aggregations.AggregatorTestCase; import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.function.Consumer; public class DateHistogramAggregatorTests extends AggregatorTestCase { private static final String DATE_FIELD = "date"; private static final String INSTANT_FIELD = "instant"; private static final List<String> dataset = Arrays.asList( "2010-03-12T01:07:45", "2010-04-27T03:43:34", "2012-05-18T04:11:00", "2013-05-29T05:11:31", "2013-10-31T08:24:05", "2015-02-13T13:09:32", 
"2015-06-24T13:47:43", "2015-11-13T16:14:34", "2016-03-04T17:09:50", "2017-12-12T22:55:46"); public void testMatchNoDocs() throws IOException { testBothCases(new MatchNoDocsQuery(), dataset, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(DATE_FIELD), histogram -> assertEquals(0, histogram.getBuckets().size()) ); } public void testMatchAllDocs() throws IOException { Query query = new MatchAllDocsQuery(); testSearchCase(query, dataset, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(DATE_FIELD), histogram -> assertEquals(6, histogram.getBuckets().size()) ); testSearchAndReduceCase(query, dataset, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(DATE_FIELD), histogram -> assertEquals(8, histogram.getBuckets().size()) ); testBothCases(query, dataset, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(DATE_FIELD).minDocCount(1L), histogram -> assertEquals(6, histogram.getBuckets().size()) ); } public void testNoDocs() throws IOException { Query query = new MatchNoDocsQuery(); List<String> dates = Collections.emptyList(); Consumer<DateHistogramAggregationBuilder> aggregation = agg -> agg.dateHistogramInterval(DateHistogramInterval.YEAR).field(DATE_FIELD); testSearchCase(query, dates, aggregation, histogram -> assertEquals(0, histogram.getBuckets().size()) ); testSearchAndReduceCase(query, dates, aggregation, histogram -> assertNull(histogram) ); } public void testAggregateWrongField() throws IOException { testBothCases(new MatchAllDocsQuery(), dataset, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field("wrong_field"), histogram -> assertEquals(0, histogram.getBuckets().size()) ); } public void testIntervalYear() throws IOException { testBothCases(LongPoint.newRangeQuery(INSTANT_FIELD, asLong("2015-01-01"), asLong("2017-12-31")), dataset, aggregation -> 
aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(DATE_FIELD), histogram -> { List<? extends Histogram.Bucket> buckets = histogram.getBuckets(); assertEquals(3, buckets.size()); Histogram.Bucket bucket = buckets.get(0); assertEquals("2015-01-01T00:00:00.000Z", bucket.getKeyAsString()); assertEquals(3, bucket.getDocCount()); bucket = buckets.get(1); assertEquals("2016-01-01T00:00:00.000Z", bucket.getKeyAsString()); assertEquals(1, bucket.getDocCount()); bucket = buckets.get(2); assertEquals("2017-01-01T00:00:00.000Z", bucket.getKeyAsString()); assertEquals(1, bucket.getDocCount()); } ); } public void testIntervalMonth() throws IOException { testBothCases(new MatchAllDocsQuery(), Arrays.asList("2017-01-01", "2017-02-02", "2017-02-03", "2017-03-04", "2017-03-05", "2017-03-06"), aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.MONTH).field(DATE_FIELD), histogram -> { List<? extends Histogram.Bucket> buckets = histogram.getBuckets(); assertEquals(3, buckets.size()); Histogram.Bucket bucket = buckets.get(0); assertEquals("2017-01-01T00:00:00.000Z", bucket.getKeyAsString()); assertEquals(1, bucket.getDocCount()); bucket = buckets.get(1); assertEquals("2017-02-01T00:00:00.000Z", bucket.getKeyAsString()); assertEquals(2, bucket.getDocCount()); bucket = buckets.get(2); assertEquals("2017-03-01T00:00:00.000Z", bucket.getKeyAsString()); assertEquals(3, bucket.getDocCount()); } ); } public void testIntervalDay() throws IOException { testBothCases(new MatchAllDocsQuery(), Arrays.asList( "2017-02-01", "2017-02-02", "2017-02-02", "2017-02-03", "2017-02-03", "2017-02-03", "2017-02-05" ), aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.DAY).field(DATE_FIELD).minDocCount(1L), histogram -> { List<? 
extends Histogram.Bucket> buckets = histogram.getBuckets(); // continuation: verification lambda of the test method opened on an earlier line
        assertEquals(4, buckets.size());
        Histogram.Bucket bucket = buckets.get(0);
        assertEquals("2017-02-01T00:00:00.000Z", bucket.getKeyAsString());
        assertEquals(1, bucket.getDocCount());
        bucket = buckets.get(1);
        assertEquals("2017-02-02T00:00:00.000Z", bucket.getKeyAsString());
        assertEquals(2, bucket.getDocCount());
        bucket = buckets.get(2);
        assertEquals("2017-02-03T00:00:00.000Z", bucket.getKeyAsString());
        assertEquals(3, bucket.getDocCount());
        bucket = buckets.get(3);
        assertEquals("2017-02-05T00:00:00.000Z", bucket.getKeyAsString());
        assertEquals(1, bucket.getDocCount());
    }
    );
}

/**
 * Hour-granularity buckets: timestamps in the same hour collapse into one bucket
 * keyed at the top of the hour; hours with no documents are omitted (minDocCount=1).
 */
public void testIntervalHour() throws IOException {
    testBothCases(new MatchAllDocsQuery(),
        Arrays.asList(
            "2017-02-01T09:02:00.000Z",
            "2017-02-01T09:35:00.000Z",
            "2017-02-01T10:15:00.000Z",
            "2017-02-01T13:06:00.000Z",
            "2017-02-01T14:04:00.000Z",
            "2017-02-01T14:05:00.000Z",
            "2017-02-01T15:59:00.000Z",
            "2017-02-01T16:06:00.000Z",
            "2017-02-01T16:48:00.000Z",
            "2017-02-01T16:59:00.000Z"
        ),
        aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.HOUR).field(DATE_FIELD).minDocCount(1L),
        histogram -> {
            List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
            assertEquals(6, buckets.size());
            Histogram.Bucket bucket = buckets.get(0);
            assertEquals("2017-02-01T09:00:00.000Z", bucket.getKeyAsString());
            assertEquals(2, bucket.getDocCount());
            bucket = buckets.get(1);
            assertEquals("2017-02-01T10:00:00.000Z", bucket.getKeyAsString());
            assertEquals(1, bucket.getDocCount());
            bucket = buckets.get(2);
            assertEquals("2017-02-01T13:00:00.000Z", bucket.getKeyAsString());
            assertEquals(1, bucket.getDocCount());
            bucket = buckets.get(3);
            assertEquals("2017-02-01T14:00:00.000Z", bucket.getKeyAsString());
            assertEquals(2, bucket.getDocCount());
            bucket = buckets.get(4);
            assertEquals("2017-02-01T15:00:00.000Z", bucket.getKeyAsString());
            assertEquals(1, bucket.getDocCount());
            bucket = buckets.get(5);
            assertEquals("2017-02-01T16:00:00.000Z", bucket.getKeyAsString());
            assertEquals(3, bucket.getDocCount());
        }
    );
}

/** Minute-granularity buckets; bucket keys are truncated to the start of the minute. */
public void testIntervalMinute() throws IOException {
    testBothCases(new MatchAllDocsQuery(),
        Arrays.asList(
            "2017-02-01T09:02:35.000Z",
            "2017-02-01T09:02:59.000Z",
            "2017-02-01T09:15:37.000Z",
            "2017-02-01T09:16:04.000Z",
            "2017-02-01T09:16:42.000Z"
        ),
        aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.MINUTE).field(DATE_FIELD).minDocCount(1L),
        histogram -> {
            List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
            assertEquals(3, buckets.size());
            Histogram.Bucket bucket = buckets.get(0);
            assertEquals("2017-02-01T09:02:00.000Z", bucket.getKeyAsString());
            assertEquals(2, bucket.getDocCount());
            bucket = buckets.get(1);
            assertEquals("2017-02-01T09:15:00.000Z", bucket.getKeyAsString());
            assertEquals(1, bucket.getDocCount());
            bucket = buckets.get(2);
            assertEquals("2017-02-01T09:16:00.000Z", bucket.getKeyAsString());
            assertEquals(2, bucket.getDocCount());
        }
    );
}

/**
 * Second-granularity buckets; note the input timestamps are deliberately NOT in
 * chronological order within a second — bucketing must still group them correctly.
 */
public void testIntervalSecond() throws IOException {
    testBothCases(new MatchAllDocsQuery(),
        Arrays.asList(
            "2017-02-01T00:00:05.015Z",
            "2017-02-01T00:00:11.299Z",
            "2017-02-01T00:00:11.074Z",
            "2017-02-01T00:00:37.688Z",
            "2017-02-01T00:00:37.210Z",
            "2017-02-01T00:00:37.380Z"
        ),
        aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.SECOND).field(DATE_FIELD).minDocCount(1L),
        histogram -> {
            List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
            assertEquals(3, buckets.size());
            Histogram.Bucket bucket = buckets.get(0);
            assertEquals("2017-02-01T00:00:05.000Z", bucket.getKeyAsString());
            assertEquals(1, bucket.getDocCount());
            bucket = buckets.get(1);
            assertEquals("2017-02-01T00:00:11.000Z", bucket.getKeyAsString());
            assertEquals(2, bucket.getDocCount());
            bucket = buckets.get(2);
            assertEquals("2017-02-01T00:00:37.000Z", bucket.getKeyAsString());
            assertEquals(3, bucket.getDocCount());
        }
    );
}

/**
 * minDocCount behavior (reduce phase only): with minDocCount=0 empty buckets inside
 * the query range are materialized with a zero count; with minDocCount=3 buckets
 * below the threshold are dropped entirely.
 */
public void testMinDocCount() throws IOException {
    Query query = LongPoint.newRangeQuery(INSTANT_FIELD, asLong("2017-02-01T00:00:00.000Z"), asLong("2017-02-01T00:00:30.000Z"));
    List<String> timestamps = Arrays.asList(
        "2017-02-01T00:00:05.015Z",
        "2017-02-01T00:00:11.299Z",
        "2017-02-01T00:00:11.074Z",
        "2017-02-01T00:00:13.688Z",
        "2017-02-01T00:00:21.380Z"
    );
    // 5 sec interval with minDocCount = 0
    testSearchAndReduceCase(query, timestamps,
        aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)).field(DATE_FIELD).minDocCount(0L),
        histogram -> {
            List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
            assertEquals(4, buckets.size());
            Histogram.Bucket bucket = buckets.get(0);
            assertEquals("2017-02-01T00:00:05.000Z", bucket.getKeyAsString());
            assertEquals(1, bucket.getDocCount());
            bucket = buckets.get(1);
            assertEquals("2017-02-01T00:00:10.000Z", bucket.getKeyAsString());
            assertEquals(3, bucket.getDocCount());
            bucket = buckets.get(2);
            // empty bucket, present only because minDocCount == 0
            assertEquals("2017-02-01T00:00:15.000Z", bucket.getKeyAsString());
            assertEquals(0, bucket.getDocCount());
            bucket = buckets.get(3);
            assertEquals("2017-02-01T00:00:20.000Z", bucket.getKeyAsString());
            assertEquals(1, bucket.getDocCount());
        }
    );
    // 5 sec interval with minDocCount = 3
    testSearchAndReduceCase(query, timestamps,
        aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)).field(DATE_FIELD).minDocCount(3L),
        histogram -> {
            List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
            assertEquals(1, buckets.size());
            Histogram.Bucket bucket = buckets.get(0);
            assertEquals("2017-02-01T00:00:10.000Z", bucket.getKeyAsString());
            assertEquals(3, bucket.getDocCount());
        }
    );
}

/** Runs the aggregation in the single-shard (no reduce) mode. */
private void testSearchCase(Query query, List<String> dataset,
                            Consumer<DateHistogramAggregationBuilder> configure,
                            Consumer<Histogram> verify) throws IOException {
    executeTestCase(false, query, dataset, configure, verify);
}

/** Runs the aggregation through the shard-search + reduce pipeline. */
private void testSearchAndReduceCase(Query query, List<String> dataset,
                                     Consumer<DateHistogramAggregationBuilder> configure,
                                     Consumer<Histogram> verify) throws IOException {
    executeTestCase(true, query, dataset, configure, verify);
}

/** Convenience: asserts the same expectations hold with and without the reduce phase. */
private void testBothCases(Query query, List<String> dataset,
                           Consumer<DateHistogramAggregationBuilder> configure,
                           Consumer<Histogram> verify) throws IOException {
    testSearchCase(query, dataset, configure, verify);
    testSearchAndReduceCase(query, dataset, configure, verify);
}

/**
 * Indexes one document per timestamp (doc-values + point for the range query),
 * builds a date_histogram over DATE_FIELD, runs it in either search or
 * search+reduce mode, and hands the resulting histogram to {@code verify}.
 */
private void executeTestCase(boolean reduced, Query query, List<String> dataset,
                             Consumer<DateHistogramAggregationBuilder> configure,
                             Consumer<Histogram> verify) throws IOException {
    try (Directory directory = newDirectory()) {
        try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
            Document document = new Document();
            for (String date : dataset) {
                // NOTE(review): frequently() appears to randomize intermediate commits
                // so documents end up spread over several segments — confirm against the
                // test-framework docs.
                if (frequently()) {
                    indexWriter.commit();
                }
                long instant = asLong(date);
                document.add(new SortedNumericDocValuesField(DATE_FIELD, instant));
                document.add(new LongPoint(INSTANT_FIELD, instant));
                indexWriter.addDocument(document);
                document.clear();
            }
        }
        try (IndexReader indexReader = DirectoryReader.open(directory)) {
            IndexSearcher indexSearcher = newSearcher(indexReader, true, true);
            DateHistogramAggregationBuilder aggregationBuilder = new DateHistogramAggregationBuilder("_name");
            if (configure != null) {
                configure.accept(aggregationBuilder);
            }
            DateFieldMapper.Builder builder = new DateFieldMapper.Builder("_name");
            DateFieldMapper.DateFieldType fieldType = builder.fieldType();
            fieldType.setHasDocValues(true);
            fieldType.setName(aggregationBuilder.field());
            InternalDateHistogram histogram;
            if (reduced) {
                histogram = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType);
            } else {
                histogram = search(indexSearcher, query, aggregationBuilder, fieldType);
            }
            verify.accept(histogram);
        }
    }
}

/** Parses an ISO-8601 timestamp with the default date formatter into epoch millis. */
private static long asLong(String dateTime) {
    return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime(dateTime).getMillis();
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.asterix.replication.storage; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.FilenameFilter; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; import org.apache.asterix.common.cluster.ClusterPartition; import org.apache.asterix.common.config.MetadataProperties; import org.apache.asterix.common.replication.IReplicaResourcesManager; import org.apache.asterix.common.utils.StorageConstants; import org.apache.asterix.common.utils.StoragePathUtil; import org.apache.asterix.transaction.management.resource.PersistentLocalResourceRepository; import org.apache.commons.io.FileUtils; import org.apache.hyracks.api.exceptions.HyracksDataException; import org.apache.hyracks.api.io.FileReference; import org.apache.hyracks.storage.common.ILocalResourceRepository; import 
org.apache.hyracks.storage.common.LocalResource; public class ReplicaResourcesManager implements IReplicaResourcesManager { private static final Logger LOGGER = Logger.getLogger(ReplicaResourcesManager.class.getName()); public final static String LSM_COMPONENT_MASK_SUFFIX = "_mask"; private final static String REPLICA_INDEX_LSN_MAP_NAME = ".LSN_MAP"; public static final long REPLICA_INDEX_CREATION_LSN = -1; private final PersistentLocalResourceRepository localRepository; private final Map<String, ClusterPartition[]> nodePartitions; public ReplicaResourcesManager(ILocalResourceRepository localRepository, MetadataProperties metadataProperties) { this.localRepository = (PersistentLocalResourceRepository) localRepository; nodePartitions = metadataProperties.getNodePartitions(); } public void deleteIndexFile(LSMIndexFileProperties afp) throws HyracksDataException { String indexPath = getIndexPath(afp); if (indexPath != null) { if (afp.isLSMComponentFile()) { //delete index file String indexFilePath = indexPath + File.separator + afp.getFileName(); File destFile = new File(indexFilePath); FileUtils.deleteQuietly(destFile); } else { //delete index directory FileUtils.deleteQuietly(new File(indexPath)); } } } public String getIndexPath(LSMIndexFileProperties fileProperties) throws HyracksDataException { final FileReference indexPath = localRepository.getIndexPath(Paths.get(fileProperties.getFilePath())); if (!indexPath.getFile().exists()) { indexPath.getFile().mkdirs(); } return indexPath.toString(); } public void initializeReplicaIndexLSNMap(String indexPath, long currentLSN) throws IOException { HashMap<Long, Long> lsnMap = new HashMap<Long, Long>(); lsnMap.put(REPLICA_INDEX_CREATION_LSN, currentLSN); updateReplicaIndexLSNMap(indexPath, lsnMap); } public void createRemoteLSMComponentMask(LSMComponentProperties lsmComponentProperties) throws IOException { String maskPath = lsmComponentProperties.getMaskPath(this); Path path = Paths.get(maskPath); if (!Files.exists(path)) 
{ File maskFile = new File(maskPath); maskFile.createNewFile(); } } public void markLSMComponentReplicaAsValid(LSMComponentProperties lsmComponentProperties) throws IOException { //remove mask to mark component as valid String maskPath = lsmComponentProperties.getMaskPath(this); Path path = Paths.get(maskPath); Files.deleteIfExists(path); //add component LSN to the index LSNs map Map<Long, Long> lsnMap = getReplicaIndexLSNMap(lsmComponentProperties.getReplicaComponentPath(this)); lsnMap.put(lsmComponentProperties.getOriginalLSN(), lsmComponentProperties.getReplicaLSN()); //update map on disk updateReplicaIndexLSNMap(lsmComponentProperties.getReplicaComponentPath(this), lsnMap); } public Set<File> getReplicaIndexes(String replicaId) throws HyracksDataException { Set<File> remoteIndexesPaths = new HashSet<File>(); ClusterPartition[] partitions = nodePartitions.get(replicaId); for (ClusterPartition partition : partitions) { remoteIndexesPaths.addAll(localRepository.getPartitionIndexes(partition.getPartitionId())); } return remoteIndexesPaths; } @Override public long getPartitionsMinLSN(Set<Integer> partitions) throws HyracksDataException { long minRemoteLSN = Long.MAX_VALUE; for (Integer partition : partitions) { //for every index in replica Set<File> remoteIndexes = localRepository.getPartitionIndexes(partition); for (File indexFolder : remoteIndexes) { //read LSN map try { //get max LSN per index long remoteIndexMaxLSN = getReplicaIndexMaxLSN(indexFolder); //get min of all maximums minRemoteLSN = Math.min(minRemoteLSN, remoteIndexMaxLSN); } catch (IOException e) { LOGGER.log(Level.INFO, indexFolder.getAbsolutePath() + " Couldn't read LSN map for index " + indexFolder); continue; } } } return minRemoteLSN; } public Map<Long, String> getLaggingReplicaIndexesId2PathMap(String replicaId, long targetLSN) throws IOException { Map<Long, String> laggingReplicaIndexes = new HashMap<Long, String>(); try { //for every index in replica Set<File> remoteIndexes = 
getReplicaIndexes(replicaId); for (File indexFolder : remoteIndexes) { if (getReplicaIndexMaxLSN(indexFolder) < targetLSN) { File localResource = new File( indexFolder + File.separator + StorageConstants.METADATA_FILE_NAME); LocalResource resource = PersistentLocalResourceRepository.readLocalResource(localResource); laggingReplicaIndexes.put(resource.getId(), indexFolder.getAbsolutePath()); } } } catch (HyracksDataException e) { e.printStackTrace(); } return laggingReplicaIndexes; } private long getReplicaIndexMaxLSN(File indexFolder) throws IOException { long remoteIndexMaxLSN = 0; //get max LSN per index Map<Long, Long> lsnMap = getReplicaIndexLSNMap(indexFolder.getAbsolutePath()); if (lsnMap != null) { for (Long lsn : lsnMap.values()) { remoteIndexMaxLSN = Math.max(remoteIndexMaxLSN, lsn); } } return remoteIndexMaxLSN; } public void cleanInvalidLSMComponents(String replicaId) { //for every index in replica Set<File> remoteIndexes = null; try { remoteIndexes = getReplicaIndexes(replicaId); } catch (HyracksDataException e) { throw new IllegalStateException(e); } for (File remoteIndexFile : remoteIndexes) { //search for any mask File[] masks = remoteIndexFile.listFiles(LSM_COMPONENTS_MASKS_FILTER); for (File mask : masks) { //delete all files belonging to this mask deleteLSMComponentFilesForMask(mask); //delete the mask itself mask.delete(); } } } private static void deleteLSMComponentFilesForMask(File maskFile) { String lsmComponentTimeStamp = maskFile.getName().substring(0, maskFile.getName().length() - LSM_COMPONENT_MASK_SUFFIX.length()); File indexFolder = maskFile.getParentFile(); File[] lsmComponentsFiles = indexFolder.listFiles(LSM_COMPONENTS_NON_MASKS_FILTER); for (File lsmComponentFile : lsmComponentsFiles) { if (lsmComponentFile.getName().contains(lsmComponentTimeStamp)) { //match based on time stamp lsmComponentFile.delete(); } } } @SuppressWarnings({ "unchecked" }) public synchronized Map<Long, Long> getReplicaIndexLSNMap(String indexPath) throws 
IOException { try (FileInputStream fis = new FileInputStream(indexPath + File.separator + REPLICA_INDEX_LSN_MAP_NAME); ObjectInputStream oisFromFis = new ObjectInputStream(fis)) { Map<Long, Long> lsnMap = null; try { lsnMap = (Map<Long, Long>) oisFromFis.readObject(); } catch (ClassNotFoundException e) { e.printStackTrace(); } return lsnMap; } } public synchronized void updateReplicaIndexLSNMap(String indexPath, Map<Long, Long> lsnMap) throws IOException { try (FileOutputStream fos = new FileOutputStream(indexPath + File.separator + REPLICA_INDEX_LSN_MAP_NAME); ObjectOutputStream oosToFos = new ObjectOutputStream(fos)) { oosToFos.writeObject(lsnMap); oosToFos.flush(); } } /** * @param partition * @return Absolute paths to all partition files */ public List<String> getPartitionIndexesFiles(int partition, boolean relativePath) throws HyracksDataException { List<String> partitionFiles = new ArrayList<String>(); Set<File> partitionIndexes = localRepository.getPartitionIndexes(partition); for (File indexDir : partitionIndexes) { if (indexDir.isDirectory()) { File[] indexFiles = indexDir.listFiles(LSM_INDEX_FILES_FILTER); if (indexFiles != null) { for (File file : indexFiles) { if (!relativePath) { partitionFiles.add(file.getAbsolutePath()); } else { partitionFiles.add(StoragePathUtil.getIndexFileRelativePath(file.getAbsolutePath())); } } } } } return partitionFiles; } private static final FilenameFilter LSM_COMPONENTS_MASKS_FILTER = new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.endsWith(LSM_COMPONENT_MASK_SUFFIX); } }; private static final FilenameFilter LSM_COMPONENTS_NON_MASKS_FILTER = new FilenameFilter() { @Override public boolean accept(File dir, String name) { return !name.endsWith(LSM_COMPONENT_MASK_SUFFIX); } }; private static final FilenameFilter LSM_INDEX_FILES_FILTER = new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.equalsIgnoreCase(StorageConstants.METADATA_FILE_NAME) 
|| !name.startsWith("."); } }; }
package org.zstack.image; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.transaction.annotation.Transactional; import org.zstack.core.Platform; import org.zstack.core.asyncbatch.AsyncBatchRunner; import org.zstack.core.asyncbatch.LoopAsyncBatch; import org.zstack.core.cloudbus.*; import org.zstack.core.componentloader.PluginRegistry; import org.zstack.core.config.GlobalConfig; import org.zstack.core.config.GlobalConfigUpdateExtensionPoint; import org.zstack.core.db.DatabaseFacade; import org.zstack.core.db.SimpleQuery; import org.zstack.core.db.SimpleQuery.Op; import org.zstack.core.defer.Defer; import org.zstack.core.defer.Deferred; import org.zstack.core.errorcode.ErrorFacade; import org.zstack.core.thread.CancelablePeriodicTask; import org.zstack.core.thread.ThreadFacade; import org.zstack.core.workflow.FlowChainBuilder; import org.zstack.core.workflow.ShareFlow; import org.zstack.header.AbstractService; import org.zstack.header.core.AsyncLatch; import org.zstack.header.core.NoErrorCompletion; import org.zstack.header.core.workflow.*; import org.zstack.header.errorcode.ErrorCode; import org.zstack.header.errorcode.ErrorCodeList; import org.zstack.header.errorcode.SysErrors; import org.zstack.header.exception.CloudRuntimeException; import org.zstack.header.identity.*; import org.zstack.header.image.*; import org.zstack.header.image.APICreateRootVolumeTemplateFromVolumeSnapshotEvent.Failure; import org.zstack.header.image.ImageConstant.ImageMediaType; import org.zstack.header.image.ImageDeletionPolicyManager.ImageDeletionPolicy; import org.zstack.header.managementnode.ManagementNodeReadyExtensionPoint; import org.zstack.header.message.APIMessage; import org.zstack.header.message.Message; import org.zstack.header.message.MessageReply; import org.zstack.header.message.NeedQuotaCheckMessage; import org.zstack.header.rest.RESTFacade; import org.zstack.header.search.SearchOp; import org.zstack.header.storage.backup.*; import 
org.zstack.header.storage.primary.PrimaryStorageVO; // completes the "import" statement begun on the previous line
import org.zstack.header.storage.primary.PrimaryStorageVO_;
import org.zstack.header.storage.snapshot.*;
import org.zstack.header.vm.CreateTemplateFromVmRootVolumeMsg;
import org.zstack.header.vm.CreateTemplateFromVmRootVolumeReply;
import org.zstack.header.vm.VmInstanceConstant;
import org.zstack.header.volume.*;
import org.zstack.identity.AccountManager;
import org.zstack.identity.QuotaUtil;
import org.zstack.search.SearchQuery;
import org.zstack.tag.TagManager;
import org.zstack.utils.CollectionUtils;
import org.zstack.utils.ObjectUtils;
import org.zstack.utils.RunOnce;
import org.zstack.utils.Utils;
import org.zstack.utils.data.SizeUnit;
import org.zstack.utils.function.ForEachFunction;
import org.zstack.utils.function.Function;
import org.zstack.utils.gson.JSONObjectUtil;
import org.zstack.utils.logging.CLogger;
import javax.persistence.Tuple;
import javax.persistence.TypedQuery;
import java.sql.Timestamp;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
import static org.zstack.utils.CollectionDSL.list;

/**
 * Service entry point for image management: routes image-related messages and
 * implements the API handlers for adding images and creating templates from
 * volumes and volume snapshots.
 */
public class ImageManagerImpl extends AbstractService implements ImageManager,
        ManagementNodeReadyExtensionPoint, ReportQuotaExtensionPoint, ResourceOwnerPreChangeExtensionPoint {
    private static final CLogger logger = Utils.getLogger(ImageManagerImpl.class);

    // Spring-injected facades/collaborators
    @Autowired
    private CloudBus bus;
    @Autowired
    private PluginRegistry pluginRgty;
    @Autowired
    private DatabaseFacade dbf;
    @Autowired
    private AccountManager acntMgr;
    @Autowired
    private ErrorFacade errf;
    @Autowired
    private TagManager tagMgr;
    @Autowired
    private ThreadFacade thdf;
    @Autowired
    private ResourceDestinationMaker destMaker;
    @Autowired
    private ImageDeletionPolicyManager deletionPolicyMgr;
    @Autowired
    protected RESTFacade restf;

    // image type name -> factory; initializer continues on the next line
    private Map<String, ImageFactory> imageFactories =
Collections.synchronizedMap(new HashMap<>()); private static final Set<Class> allowedMessageAfterDeletion = new HashSet<>(); private Future<Void> expungeTask; static { allowedMessageAfterDeletion.add(ImageDeletionMsg.class); } @Override @MessageSafe public void handleMessage(Message msg) { if (msg instanceof ImageMessage) { passThrough((ImageMessage) msg); } else if (msg instanceof APIMessage) { handleApiMessage(msg); } else { handleLocalMessage(msg); } } private void handleLocalMessage(Message msg) { bus.dealWithUnknownMessage(msg); } private void handleApiMessage(Message msg) { if (msg instanceof APIAddImageMsg) { handle((APIAddImageMsg) msg); } else if (msg instanceof APIListImageMsg) { handle((APIListImageMsg) msg); } else if (msg instanceof APISearchImageMsg) { handle((APISearchImageMsg) msg); } else if (msg instanceof APIGetImageMsg) { handle((APIGetImageMsg) msg); } else if (msg instanceof APICreateRootVolumeTemplateFromRootVolumeMsg) { handle((APICreateRootVolumeTemplateFromRootVolumeMsg) msg); } else if (msg instanceof APICreateRootVolumeTemplateFromVolumeSnapshotMsg) { handle((APICreateRootVolumeTemplateFromVolumeSnapshotMsg) msg); } else if (msg instanceof APICreateDataVolumeTemplateFromVolumeMsg) { handle((APICreateDataVolumeTemplateFromVolumeMsg) msg); } else { bus.dealWithUnknownMessage(msg); } } private void handle(final APICreateDataVolumeTemplateFromVolumeMsg msg) { final APICreateDataVolumeTemplateFromVolumeEvent evt = new APICreateDataVolumeTemplateFromVolumeEvent(msg.getId()); FlowChain chain = FlowChainBuilder.newShareFlowChain(); chain.setName(String.format("create-data-volume-template-from-volume-%s", msg.getVolumeUuid())); chain.then(new ShareFlow() { List<BackupStorageInventory> backupStorage = new ArrayList<>(); ImageVO image; long actualSize; @Override public void setup() { flow(new NoRollbackFlow() { String __name__ = "get-actual-size-of-data-volume"; @Override public void run(final FlowTrigger trigger, Map data) { SyncVolumeSizeMsg smsg 
= new SyncVolumeSizeMsg(); smsg.setVolumeUuid(msg.getVolumeUuid()); bus.makeTargetServiceIdByResourceUuid(smsg, VolumeConstant.SERVICE_ID, msg.getVolumeUuid()); bus.send(smsg, new CloudBusCallBack(trigger) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { trigger.fail(reply.getError()); return; } SyncVolumeSizeReply sr = reply.castReply(); actualSize = sr.getActualSize(); trigger.next(); } }); } }); flow(new Flow() { String __name__ = "create-image-in-database"; @Override public void run(FlowTrigger trigger, Map data) { SimpleQuery<VolumeVO> q = dbf.createQuery(VolumeVO.class); q.select(VolumeVO_.format, VolumeVO_.size); q.add(VolumeVO_.uuid, Op.EQ, msg.getVolumeUuid()); Tuple t = q.findTuple(); String format = t.get(0, String.class); long size = t.get(1, Long.class); final ImageVO vo = new ImageVO(); vo.setUuid(msg.getResourceUuid() == null ? Platform.getUuid() : msg.getResourceUuid()); vo.setName(msg.getName()); vo.setDescription(msg.getDescription()); vo.setType(ImageConstant.ZSTACK_IMAGE_TYPE); vo.setMediaType(ImageMediaType.DataVolumeTemplate); vo.setSize(size); vo.setActualSize(actualSize); vo.setState(ImageState.Enabled); vo.setStatus(ImageStatus.Creating); vo.setFormat(format); vo.setUrl(String.format("volume://%s", msg.getVolumeUuid())); image = dbf.persistAndRefresh(vo); acntMgr.createAccountResourceRef(msg.getSession().getAccountUuid(), vo.getUuid(), ImageVO.class); tagMgr.createTagsFromAPICreateMessage(msg, vo.getUuid(), ImageVO.class.getSimpleName()); trigger.next(); } @Override public void rollback(FlowRollback trigger, Map data) { if (image != null) { dbf.remove(image); } trigger.rollback(); } }); flow(new Flow() { String __name__ = "select-backup-storage"; @Override public void run(final FlowTrigger trigger, Map data) { final String zoneUuid = new Callable<String>() { @Override @Transactional(readOnly = true) public String call() { String sql = "select ps.zoneUuid" + " from PrimaryStorageVO ps, VolumeVO vol" + " where 
vol.primaryStorageUuid = ps.uuid" + " and vol.uuid = :volUuid"; TypedQuery<String> q = dbf.getEntityManager().createQuery(sql, String.class); q.setParameter("volUuid", msg.getVolumeUuid()); return q.getSingleResult(); } }.call(); if (msg.getBackupStorageUuids() == null) { AllocateBackupStorageMsg amsg = new AllocateBackupStorageMsg(); amsg.setRequiredZoneUuid(zoneUuid); amsg.setSize(actualSize); bus.makeLocalServiceId(amsg, BackupStorageConstant.SERVICE_ID); bus.send(amsg, new CloudBusCallBack(trigger) { @Override public void run(MessageReply reply) { if (reply.isSuccess()) { backupStorage.add(((AllocateBackupStorageReply) reply).getInventory()); trigger.next(); } else { trigger.fail(errf.stringToOperationError("cannot find proper backup storage", reply.getError())); } } }); } else { List<AllocateBackupStorageMsg> amsgs = CollectionUtils.transformToList(msg.getBackupStorageUuids(), new Function<AllocateBackupStorageMsg, String>() { @Override public AllocateBackupStorageMsg call(String arg) { AllocateBackupStorageMsg amsg = new AllocateBackupStorageMsg(); amsg.setRequiredZoneUuid(zoneUuid); amsg.setSize(actualSize); amsg.setBackupStorageUuid(arg); bus.makeLocalServiceId(amsg, BackupStorageConstant.SERVICE_ID); return amsg; } }); bus.send(amsgs, new CloudBusListCallBack(trigger) { @Override public void run(List<MessageReply> replies) { List<ErrorCode> errs = new ArrayList<>(); for (MessageReply r : replies) { if (r.isSuccess()) { backupStorage.add(((AllocateBackupStorageReply) r).getInventory()); } else { errs.add(r.getError()); } } if (backupStorage.isEmpty()) { trigger.fail(errf.stringToOperationError(String.format("failed to allocate all backup storage[uuid:%s], a list of error: %s", msg.getBackupStorageUuids(), JSONObjectUtil.toJsonString(errs)))); } else { trigger.next(); } } }); } } @Override public void rollback(FlowRollback trigger, Map data) { if (!backupStorage.isEmpty()) { List<ReturnBackupStorageMsg> rmsgs = CollectionUtils.transformToList(backupStorage, 
new Function<ReturnBackupStorageMsg, BackupStorageInventory>() { @Override public ReturnBackupStorageMsg call(BackupStorageInventory arg) { ReturnBackupStorageMsg rmsg = new ReturnBackupStorageMsg(); rmsg.setBackupStorageUuid(arg.getUuid()); rmsg.setSize(actualSize); bus.makeLocalServiceId(rmsg, BackupStorageConstant.SERVICE_ID); return rmsg; } }); bus.send(rmsgs, new CloudBusListCallBack(null) { @Override public void run(List<MessageReply> replies) { for (MessageReply r : replies) { BackupStorageInventory bs = backupStorage.get(replies.indexOf(r)); logger.warn(String.format("failed to return %s bytes to backup storage[uuid:%s]", acntMgr, bs.getUuid())); } } }); } trigger.rollback(); } }); flow(new NoRollbackFlow() { String __name__ = "create-data-volume-template-from-volume"; @Override public void run(final FlowTrigger trigger, Map data) { List<CreateDataVolumeTemplateFromDataVolumeMsg> cmsgs = CollectionUtils.transformToList(backupStorage, new Function<CreateDataVolumeTemplateFromDataVolumeMsg, BackupStorageInventory>() { @Override public CreateDataVolumeTemplateFromDataVolumeMsg call(BackupStorageInventory bs) { CreateDataVolumeTemplateFromDataVolumeMsg cmsg = new CreateDataVolumeTemplateFromDataVolumeMsg(); cmsg.setVolumeUuid(msg.getVolumeUuid()); cmsg.setBackupStorageUuid(bs.getUuid()); cmsg.setImageUuid(image.getUuid()); bus.makeTargetServiceIdByResourceUuid(cmsg, VolumeConstant.SERVICE_ID, msg.getVolumeUuid()); return cmsg; } }); bus.send(cmsgs, new CloudBusListCallBack(msg) { @Override public void run(List<MessageReply> replies) { int fail = 0; String mdsum = null; ErrorCode err = null; String format = null; for (MessageReply r : replies) { BackupStorageInventory bs = backupStorage.get(replies.indexOf(r)); if (!r.isSuccess()) { logger.warn(String.format("failed to create data volume template from volume[uuid:%s] on backup storage[uuid:%s], %s", msg.getVolumeUuid(), bs.getUuid(), r.getError())); fail++; err = r.getError(); continue; } 
CreateDataVolumeTemplateFromDataVolumeReply reply = r.castReply(); ImageBackupStorageRefVO ref = new ImageBackupStorageRefVO(); ref.setBackupStorageUuid(bs.getUuid()); ref.setStatus(ImageStatus.Ready); ref.setImageUuid(image.getUuid()); ref.setInstallPath(reply.getInstallPath()); dbf.persist(ref); if (mdsum == null) { mdsum = reply.getMd5sum(); } if (reply.getFormat() != null) { format = reply.getFormat(); } } int backupStorageNum = msg.getBackupStorageUuids() == null ? 1 : msg.getBackupStorageUuids().size(); if (fail == backupStorageNum) { ErrorCode errCode = errf.instantiateErrorCode(SysErrors.OPERATION_ERROR, String.format("failed to create data volume template from volume[uuid:%s] on all backup storage%s. See cause for one of errors", msg.getVolumeUuid(), msg.getBackupStorageUuids()), err ); trigger.fail(errCode); } else { image = dbf.reload(image); if (format != null) { image.setFormat(format); } image.setMd5Sum(mdsum); image.setStatus(ImageStatus.Ready); image = dbf.updateAndRefresh(image); trigger.next(); } } }); } }); done(new FlowDoneHandler(msg) { @Override public void handle(Map data) { evt.setInventory(ImageInventory.valueOf(image)); bus.publish(evt); } }); error(new FlowErrorHandler(msg) { @Override public void handle(ErrorCode errCode, Map data) { evt.setError(errCode); bus.publish(evt); } }); } }).start(); } private void handle(final APICreateRootVolumeTemplateFromVolumeSnapshotMsg msg) { final APICreateRootVolumeTemplateFromVolumeSnapshotEvent evt = new APICreateRootVolumeTemplateFromVolumeSnapshotEvent(msg.getId()); SimpleQuery<VolumeSnapshotVO> q = dbf.createQuery(VolumeSnapshotVO.class); q.select(VolumeSnapshotVO_.format); q.add(VolumeSnapshotVO_.uuid, Op.EQ, msg.getSnapshotUuid()); String format = q.findValue(); final ImageVO vo = new ImageVO(); if (msg.getResourceUuid() != null) { vo.setUuid(msg.getResourceUuid()); } else { vo.setUuid(Platform.getUuid()); } vo.setName(msg.getName()); vo.setSystem(msg.isSystem()); 
vo.setDescription(msg.getDescription()); vo.setPlatform(ImagePlatform.valueOf(msg.getPlatform())); vo.setGuestOsType(vo.getGuestOsType()); vo.setStatus(ImageStatus.Creating); vo.setState(ImageState.Enabled); vo.setFormat(format); vo.setMediaType(ImageMediaType.RootVolumeTemplate); vo.setType(ImageConstant.ZSTACK_IMAGE_TYPE); vo.setUrl(String.format("volumeSnapshot://%s", msg.getSnapshotUuid())); dbf.persist(vo); acntMgr.createAccountResourceRef(msg.getSession().getAccountUuid(), vo.getUuid(), ImageVO.class); tagMgr.createTagsFromAPICreateMessage(msg, vo.getUuid(), ImageVO.class.getSimpleName()); SimpleQuery<VolumeSnapshotVO> sq = dbf.createQuery(VolumeSnapshotVO.class); sq.select(VolumeSnapshotVO_.volumeUuid, VolumeSnapshotVO_.treeUuid); sq.add(VolumeSnapshotVO_.uuid, Op.EQ, msg.getSnapshotUuid()); Tuple t = sq.findTuple(); String volumeUuid = t.get(0, String.class); String treeUuid = t.get(1, String.class); List<CreateTemplateFromVolumeSnapshotMsg> cmsgs = msg.getBackupStorageUuids().stream().map(bsUuid -> { CreateTemplateFromVolumeSnapshotMsg cmsg = new CreateTemplateFromVolumeSnapshotMsg(); cmsg.setSnapshotUuid(msg.getSnapshotUuid()); cmsg.setImageUuid(vo.getUuid()); cmsg.setVolumeUuid(volumeUuid); cmsg.setTreeUuid(treeUuid); cmsg.setBackupStorageUuid(bsUuid); String resourceUuid = volumeUuid != null ? 
volumeUuid : treeUuid; bus.makeTargetServiceIdByResourceUuid(cmsg, VolumeSnapshotConstant.SERVICE_ID, resourceUuid); return cmsg; }).collect(Collectors.toList()); List<Failure> failures = new ArrayList<>(); AsyncLatch latch = new AsyncLatch(cmsgs.size(), new NoErrorCompletion(msg) { @Override public void done() { if (failures.size() == cmsgs.size()) { // failed on all ErrorCodeList error = errf.stringToOperationError(String.format("failed to create template from" + " the volume snapshot[uuid:%s] on backup storage[uuids:%s]", msg.getSnapshotUuid(), msg.getBackupStorageUuids()), failures.stream().map(f -> f.error).collect(Collectors.toList())); evt.setError(error); dbf.remove(vo); } else { ImageVO imvo = dbf.reload(vo); evt.setInventory(ImageInventory.valueOf(imvo)); logger.debug(String.format("successfully created image[uuid:%s, name:%s] from volume snapshot[uuid:%s]", imvo.getUuid(), imvo.getName(), msg.getSnapshotUuid())); } if (!failures.isEmpty()) { evt.setFailuresOnBackupStorage(failures); } bus.publish(evt); } }); RunOnce once = new RunOnce(); for (CreateTemplateFromVolumeSnapshotMsg cmsg : cmsgs) { bus.send(cmsg, new CloudBusCallBack(latch) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { synchronized (failures) { Failure failure = new Failure(); failure.error = reply.getError(); failure.backupStorageUuid = cmsg.getBackupStorageUuid(); failures.add(failure); } } else { CreateTemplateFromVolumeSnapshotReply cr = reply.castReply(); ImageBackupStorageRefVO ref = new ImageBackupStorageRefVO(); ref.setBackupStorageUuid(cr.getBackupStorageUuid()); ref.setInstallPath(cr.getBackupStorageInstallPath()); ref.setStatus(ImageStatus.Ready); ref.setImageUuid(vo.getUuid()); dbf.persist(ref); once.run(() -> { vo.setSize(cr.getSize()); vo.setActualSize(cr.getActualSize()); vo.setStatus(ImageStatus.Ready); dbf.update(vo); }); } latch.ack(); } }); } } private void passThrough(ImageMessage msg) { ImageVO vo = dbf.findByUuid(msg.getImageUuid(), 
ImageVO.class);
// Deleted images only answer a whitelist of messages; resurrect the VO from the EO for those.
if (vo == null && allowedMessageAfterDeletion.contains(msg.getClass())) {
    ImageEO eo = dbf.findByUuid(msg.getImageUuid(), ImageEO.class);
    vo = ObjectUtils.newAndCopy(eo, ImageVO.class);
}
if (vo == null) {
    String err = String.format("Cannot find image[uuid:%s], it may have been deleted", msg.getImageUuid());
    logger.warn(err);
    bus.replyErrorByMessageType((Message) msg, errf.instantiateErrorCode(SysErrors.RESOURCE_NOT_FOUND, err));
    return;
}
// Delegate to the type-specific Image aggregate.
ImageFactory factory = getImageFacotry(ImageType.valueOf(vo.getType()));
Image img = factory.getImage(vo);
img.handleMessage((Message) msg);
}

/**
 * Creates a root-volume template from an existing root volume via a share-flow chain:
 * sync volume size -> create image row -> allocate backup storage -> create template.
 * Each flow has a rollback so a failure undoes earlier steps.
 */
private void handle(final APICreateRootVolumeTemplateFromRootVolumeMsg msg) {
    FlowChain chain = FlowChainBuilder.newShareFlowChain();
    chain.setName(String.format("create-template-from-root-volume-%s", msg.getRootVolumeUuid()));
    chain.then(new ShareFlow() {
        ImageVO imageVO;
        VolumeInventory rootVolume;
        Long imageActualSize;
        List<BackupStorageInventory> targetBackupStorages = new ArrayList<>();
        String zoneUuid;

        // Instance initializer: resolve the root volume and its zone before the flows run.
        {
            VolumeVO rootvo = dbf.findByUuid(msg.getRootVolumeUuid(), VolumeVO.class);
            rootVolume = VolumeInventory.valueOf(rootvo);
            SimpleQuery<PrimaryStorageVO> q = dbf.createQuery(PrimaryStorageVO.class);
            q.select(PrimaryStorageVO_.zoneUuid);
            q.add(PrimaryStorageVO_.uuid, Op.EQ, rootVolume.getPrimaryStorageUuid());
            zoneUuid = q.findValue();
        }

        @Override
        public void setup() {
            flow(new NoRollbackFlow() {
                String __name__ = "get-volume-actual-size";

                @Override
                public void run(final FlowTrigger trigger, Map data) {
                    // NOTE: this local 'msg' shadows the outer API message.
                    SyncVolumeSizeMsg msg = new SyncVolumeSizeMsg();
                    msg.setVolumeUuid(rootVolume.getUuid());
                    bus.makeTargetServiceIdByResourceUuid(msg, VolumeConstant.SERVICE_ID, rootVolume.getPrimaryStorageUuid());
                    bus.send(msg, new CloudBusCallBack(trigger) {
                        @Override
                        public void run(MessageReply reply) {
                            if (!reply.isSuccess()) {
                                trigger.fail(reply.getError());
                                return;
                            }
                            SyncVolumeSizeReply sr = reply.castReply();
                            imageActualSize = sr.getActualSize();
                            trigger.next();
                        }
                    });
                }
            });

            flow(new Flow() {
                String __name__ = "create-image-in-database";

                // Creates the ImageVO in Downloading status; rollback deletes it.
                public void run(FlowTrigger trigger, Map data) {
                    SimpleQuery<VolumeVO> q = dbf.createQuery(VolumeVO.class);
                    q.add(VolumeVO_.uuid, Op.EQ, msg.getRootVolumeUuid());
                    final VolumeVO volvo = q.find();
                    String accountUuid = acntMgr.getOwnerAccountUuidOfResource(volvo.getUuid());
                    final ImageVO imvo = new ImageVO();
                    if (msg.getResourceUuid() != null) {
                        imvo.setUuid(msg.getResourceUuid());
                    } else {
                        imvo.setUuid(Platform.getUuid());
                    }
                    imvo.setDescription(msg.getDescription());
                    imvo.setMediaType(ImageMediaType.RootVolumeTemplate);
                    imvo.setState(ImageState.Enabled);
                    imvo.setGuestOsType(msg.getGuestOsType());
                    imvo.setFormat(volvo.getFormat());
                    imvo.setName(msg.getName());
                    imvo.setSystem(msg.isSystem());
                    imvo.setPlatform(ImagePlatform.valueOf(msg.getPlatform()));
                    imvo.setStatus(ImageStatus.Downloading);
                    imvo.setType(ImageConstant.ZSTACK_IMAGE_TYPE);
                    imvo.setUrl(String.format("volume://%s", msg.getRootVolumeUuid()));
                    imvo.setSize(volvo.getSize());
                    imvo.setActualSize(imageActualSize);
                    dbf.persist(imvo);
                    // Template is owned by the volume's owner, not necessarily the caller.
                    acntMgr.createAccountResourceRef(accountUuid, imvo.getUuid(), ImageVO.class);
                    tagMgr.createTagsFromAPICreateMessage(msg, imvo.getUuid(), ImageVO.class.getSimpleName());
                    imageVO = imvo;
                    trigger.next();
                }

                @Override
                public void rollback(FlowRollback trigger, Map data) {
                    if (imageVO != null) {
                        dbf.remove(imageVO);
                    }
                    trigger.rollback();
                }
            });

            flow(new Flow() {
                String __name__ = String.format("select-backup-storage");

                @Override
                public void run(final FlowTrigger trigger, Map data) {
                    if (msg.getBackupStorageUuids() == null) {
                        // No explicit target: let the allocator pick one in the volume's zone.
                        AllocateBackupStorageMsg abmsg = new AllocateBackupStorageMsg();
                        abmsg.setRequiredZoneUuid(zoneUuid);
                        abmsg.setSize(imageActualSize);
                        bus.makeLocalServiceId(abmsg, BackupStorageConstant.SERVICE_ID);
                        bus.send(abmsg, new CloudBusCallBack(trigger) {
                            @Override
                            public void run(MessageReply reply) {
                                if (reply.isSuccess()) {
                                    targetBackupStorages.add(((AllocateBackupStorageReply) reply).getInventory());
                                    trigger.next();
                                } else {
                                    trigger.fail(reply.getError());
                                }
                            }
                        });
                    } else {
                        // Explicit targets: try to reserve capacity on each; succeed if any works.
                        List<AllocateBackupStorageMsg> amsgs = CollectionUtils.transformToList(msg.getBackupStorageUuids(), new Function<AllocateBackupStorageMsg, String>() {
                            @Override
                            public AllocateBackupStorageMsg call(String arg) {
                                AllocateBackupStorageMsg abmsg = new AllocateBackupStorageMsg();
                                abmsg.setSize(imageActualSize);
                                abmsg.setBackupStorageUuid(arg);
                                bus.makeLocalServiceId(abmsg, BackupStorageConstant.SERVICE_ID);
                                return abmsg;
                            }
                        });
                        bus.send(amsgs, new CloudBusListCallBack(trigger) {
                            @Override
                            public void run(List<MessageReply> replies) {
                                List<ErrorCode> errs = new ArrayList<>();
                                for (MessageReply r : replies) {
                                    if (r.isSuccess()) {
                                        targetBackupStorages.add(((AllocateBackupStorageReply) r).getInventory());
                                    } else {
                                        errs.add(r.getError());
                                    }
                                }
                                if (targetBackupStorages.isEmpty()) {
                                    trigger.fail(errf.stringToOperationError(String.format("unable to allocate backup storage specified by uuids%s, list errors are: %s",
                                            msg.getBackupStorageUuids(), JSONObjectUtil.toJsonString(errs))));
                                } else {
                                    trigger.next();
                                }
                            }
                        });
                    }
                }

                @Override
                public void rollback(final FlowRollback trigger, Map data) {
                    if (targetBackupStorages.isEmpty()) {
                        trigger.rollback();
                        return;
                    }
                    // Give reserved capacity back on every storage we allocated.
                    List<ReturnBackupStorageMsg> rmsgs = CollectionUtils.transformToList(targetBackupStorages, new Function<ReturnBackupStorageMsg, BackupStorageInventory>() {
                        @Override
                        public ReturnBackupStorageMsg call(BackupStorageInventory arg) {
                            ReturnBackupStorageMsg rmsg = new ReturnBackupStorageMsg();
                            rmsg.setBackupStorageUuid(arg.getUuid());
                            rmsg.setSize(imageActualSize);
                            bus.makeLocalServiceId(rmsg, BackupStorageConstant.SERVICE_ID);
                            return rmsg;
                        }
                    });
                    bus.send(rmsgs, new CloudBusListCallBack(trigger) {
                        @Override
                        public void run(List<MessageReply> replies) {
                            for (MessageReply r : replies) {
                                if (!r.isSuccess()) {
                                    BackupStorageInventory bs = targetBackupStorages.get(replies.indexOf(r));
                                    logger.warn(String.format("failed to return capacity[%s] to backup storage[uuid:%s], because %s",
                                            imageActualSize, bs.getUuid(), r.getError()));
                                }
                            }
                            trigger.rollback();
                        }
                    });
                }
            });

            flow(new NoRollbackFlow() {
                String __name__ = String.format("start-creating-template");

                @Override
                public void run(final FlowTrigger trigger, Map data) {
                    List<CreateTemplateFromVmRootVolumeMsg> cmsgs = CollectionUtils.transformToList(targetBackupStorages, new Function<CreateTemplateFromVmRootVolumeMsg, BackupStorageInventory>() {
                        @Override
                        public CreateTemplateFromVmRootVolumeMsg call(BackupStorageInventory arg) {
                            CreateTemplateFromVmRootVolumeMsg cmsg = new CreateTemplateFromVmRootVolumeMsg();
                            cmsg.setRootVolumeInventory(rootVolume);
                            cmsg.setBackupStorageUuid(arg.getUuid());
                            cmsg.setImageInventory(ImageInventory.valueOf(imageVO));
                            bus.makeTargetServiceIdByResourceUuid(cmsg, VmInstanceConstant.SERVICE_ID, rootVolume.getVmInstanceUuid());
                            return cmsg;
                        }
                    });
                    bus.send(cmsgs, new CloudBusListCallBack(trigger) {
                        @Override
                        public void run(List<MessageReply> replies) {
                            // Succeed if the template landed on at least one backup storage.
                            boolean success = false;
                            ErrorCode err = null;
                            for (MessageReply r : replies) {
                                BackupStorageInventory bs = targetBackupStorages.get(replies.indexOf(r));
                                if (!r.isSuccess()) {
                                    logger.warn(String.format("failed to create image from root volume[uuid:%s] on backup storage[uuid:%s], because %s",
                                            msg.getRootVolumeUuid(), bs.getUuid(), r.getError()));
                                    err = r.getError();
                                    continue;
                                }
                                CreateTemplateFromVmRootVolumeReply reply = (CreateTemplateFromVmRootVolumeReply) r;
                                ImageBackupStorageRefVO ref = new ImageBackupStorageRefVO();
                                ref.setBackupStorageUuid(bs.getUuid());
                                ref.setStatus(ImageStatus.Ready);
                                ref.setImageUuid(imageVO.getUuid());
                                ref.setInstallPath(reply.getInstallPath());
                                dbf.persist(ref);
                                imageVO.setStatus(ImageStatus.Ready);
                                if (reply.getFormat() != null) {
                                    imageVO.setFormat(reply.getFormat());
                                }
                                dbf.update(imageVO);
                                imageVO = dbf.reload(imageVO);
                                success = true;
                                logger.debug(String.format("successfully created image[uuid:%s] from root volume[uuid:%s] on backup storage[uuid:%s]",
                                        imageVO.getUuid(), msg.getRootVolumeUuid(), bs.getUuid()));
                            }
                            if (success) {
                                trigger.next();
                            } else {
                                trigger.fail(errf.instantiateErrorCode(SysErrors.OPERATION_ERROR,
                                        String.format("failed to create image from root volume[uuid:%s] on all backup storage, see cause for one of errors",
                                                msg.getRootVolumeUuid()), err));
                            }
                        }
                    });
                }
            });

            flow(new Flow() {
                String __name__ = "copy-system-tag-to-image";

                public void run(FlowTrigger trigger, Map data) {
                    // find the rootimage and create some systemtag if it has
                    SimpleQuery<VolumeVO> q = dbf.createQuery(VolumeVO.class);
                    q.add(VolumeVO_.uuid, SimpleQuery.Op.EQ, msg.getRootVolumeUuid());
                    q.select(VolumeVO_.vmInstanceUuid);
                    String vmInstanceUuid = q.findValue();
                    // Propagate the qemu-ga injection tag from the source VM to the template.
                    if (tagMgr.hasSystemTag(vmInstanceUuid, ImageSystemTags.IMAGE_INJECT_QEMUGA.getTagFormat())) {
                        tagMgr.createNonInherentSystemTag(imageVO.getUuid(),
                                ImageSystemTags.IMAGE_INJECT_QEMUGA.getTagFormat(),
                                ImageVO.class.getSimpleName());
                    }
                    trigger.next();
                }

                @Override
                public void rollback(FlowRollback trigger, Map data) {
                    trigger.rollback();
                }
            });

            done(new FlowDoneHandler(msg) {
                @Override
                public void handle(Map data) {
                    APICreateRootVolumeTemplateFromRootVolumeEvent evt = new APICreateRootVolumeTemplateFromRootVolumeEvent(msg.getId());
                    imageVO = dbf.reload(imageVO);
                    ImageInventory iinv = ImageInventory.valueOf(imageVO);
                    evt.setInventory(iinv);
                    // NOTE(review): success is logged at warn level; debug/info seems intended.
                    logger.warn(String.format("successfully create template[uuid:%s] from root volume[uuid:%s]",
                            iinv.getUuid(), msg.getRootVolumeUuid()));
                    bus.publish(evt);
                }
            });

            error(new FlowErrorHandler(msg) {
                @Override
                public void handle(ErrorCode errCode, Map data) {
                    APICreateRootVolumeTemplateFromRootVolumeEvent evt = new APICreateRootVolumeTemplateFromRootVolumeEvent(msg.getId());
                    evt.setError(errCode);
                    logger.warn(String.format("failed to create template from root volume[uuid:%s], because %s",
                            msg.getRootVolumeUuid(), errCode));
                    bus.publish(evt);
                }
            });
        }
    }).start();
}

/**
 * Returns a single image (account-scoped) as JSON. (Continues on the next line.)
 */
private void handle(APIGetImageMsg msg) {
    SearchQuery<ImageInventory> sq = new SearchQuery(ImageInventory.class);
sq.addAccountAsAnd(msg);
sq.add("uuid", SearchOp.AND_EQ, msg.getUuid());
List<ImageInventory> invs = sq.list();
APIGetImageReply reply = new APIGetImageReply();
if (!invs.isEmpty()) {
    reply.setInventory(JSONObjectUtil.toJsonString(invs.get(0)));
}
bus.reply(msg, reply);
}

/**
 * Full-text search over images, scoped to the caller's account.
 */
private void handle(APISearchImageMsg msg) {
    SearchQuery<ImageInventory> sq = SearchQuery.create(msg, ImageInventory.class);
    sq.addAccountAsAnd(msg);
    String content = sq.listAsString();
    APISearchImageReply reply = new APISearchImageReply();
    reply.setContent(content);
    bus.reply(msg, reply);
}

/**
 * Lists every image in the database (no account filtering here).
 */
private void handle(APIListImageMsg msg) {
    List<ImageVO> vos = dbf.listAll(ImageVO.class);
    List<ImageInventory> invs = ImageInventory.valueOf(vos);
    APIListImageReply reply = new APIListImageReply();
    reply.setInventories(invs);
    bus.reply(msg, reply);
}

/**
 * Adds a new image and downloads it to each requested backup storage in parallel.
 * The image record survives as long as at least one download succeeds; otherwise
 * the Defer.guard / done() cleanup removes it.
 */
@Deferred
private void handle(final APIAddImageMsg msg) {
    String imageType = msg.getType();
    imageType = imageType == null ? DefaultImageFactory.type.toString() : imageType;
    final APIAddImageEvent evt = new APIAddImageEvent(msg.getId());
    ImageVO vo = new ImageVO();
    if (msg.getResourceUuid() != null) {
        vo.setUuid(msg.getResourceUuid());
    } else {
        vo.setUuid(Platform.getUuid());
    }
    vo.setName(msg.getName());
    vo.setDescription(msg.getDescription());
    // ISO format forces the ISO media type regardless of the requested media type.
    if (msg.getFormat().equals(ImageConstant.ISO_FORMAT_STRING)) {
        vo.setMediaType(ImageMediaType.ISO);
    } else {
        vo.setMediaType(ImageMediaType.valueOf(msg.getMediaType()));
    }
    vo.setType(imageType);
    vo.setSystem(msg.isSystem());
    vo.setGuestOsType(msg.getGuestOsType());
    vo.setFormat(msg.getFormat());
    vo.setStatus(ImageStatus.Downloading);
    vo.setState(ImageState.Enabled);
    vo.setUrl(msg.getUrl());
    // NOTE(review): setDescription is called twice (also a few lines above); redundant.
    vo.setDescription(msg.getDescription());
    vo.setPlatform(ImagePlatform.valueOf(msg.getPlatform()));
    ImageFactory factory = getImageFacotry(ImageType.valueOf(imageType));
    final ImageVO ivo = factory.createImage(vo, msg);
    acntMgr.createAccountResourceRef(msg.getSession().getAccountUuid(), vo.getUuid(), ImageVO.class);
    tagMgr.createTagsFromAPICreateMessage(msg, vo.getUuid(), ImageVO.class.getSimpleName());
    // If this handler exits abnormally, remove the just-created image row.
    Defer.guard(() -> dbf.remove(ivo));
    final ImageInventory inv = ImageInventory.valueOf(ivo);

    for (AddImageExtensionPoint ext : pluginRgty.getExtensionList(AddImageExtensionPoint.class)) {
        ext.preAddImage(inv);
    }

    // One DownloadImageMsg per target backup storage.
    final List<DownloadImageMsg> dmsgs = CollectionUtils.transformToList(msg.getBackupStorageUuids(), new Function<DownloadImageMsg, String>() {
        @Override
        public DownloadImageMsg call(String arg) {
            DownloadImageMsg dmsg = new DownloadImageMsg(inv);
            dmsg.setBackupStorageUuid(arg);
            dmsg.setFormat(msg.getFormat());
            dmsg.setSystemTags(msg.getSystemTags());
            bus.makeTargetServiceIdByResourceUuid(dmsg, BackupStorageConstant.SERVICE_ID, arg);
            return dmsg;
        }
    });

    CollectionUtils.safeForEach(pluginRgty.getExtensionList(AddImageExtensionPoint.class), new ForEachFunction<AddImageExtensionPoint>() {
        @Override
        public void run(AddImageExtensionPoint ext) {
            ext.beforeAddImage(inv);
        }
    });

    new LoopAsyncBatch<DownloadImageMsg>(msg) {
        // Set once by the first successful download; guards the image-level update.
        AtomicBoolean success = new AtomicBoolean(false);

        @Override
        protected Collection<DownloadImageMsg> collect() {
            return dmsgs;
        }

        @Override
        protected AsyncBatchRunner forEach(DownloadImageMsg dmsg) {
            return new AsyncBatchRunner() {
                @Override
                public void run(NoErrorCompletion completion) {
                    ImageBackupStorageRefVO ref = new ImageBackupStorageRefVO();
                    ref.setImageUuid(ivo.getUuid());
                    ref.setInstallPath("");
                    ref.setBackupStorageUuid(dmsg.getBackupStorageUuid());
                    ref.setStatus(ImageStatus.Downloading);
                    dbf.persist(ref);
                    bus.send(dmsg, new CloudBusCallBack(completion) {
                        @Override
                        public void run(MessageReply reply) {
                            if (!reply.isSuccess()) {
                                errors.add(reply.getError());
                                dbf.remove(ref);
                            } else {
                                DownloadImageReply re = reply.castReply();
                                ref.setStatus(ImageStatus.Ready);
                                ref.setInstallPath(re.getInstallPath());
                                dbf.update(ref);
                                if (success.compareAndSet(false, true)) {
                                    // In case 'Platform' etc. is changed.
                                    ImageVO vo = dbf.reload(ivo);
                                    vo.setMd5Sum(re.getMd5sum());
                                    vo.setSize(re.getSize());
                                    vo.setActualSize(re.getActualSize());
                                    vo.setStatus(ImageStatus.Ready);
                                    dbf.update(vo);
                                }
                                logger.debug(String.format("successfully downloaded image[uuid:%s, name:%s] to backup storage[uuid:%s]",
                                        inv.getUuid(), inv.getName(), dmsg.getBackupStorageUuid()));
                            }
                            completion.done();
                        }
                    });
                }
            };
        }

        @Override
        protected void done() {
            // TODO: check if the database still has the record of the image
            // if there is no record, that means user delete the image during the downloading,
            // then we need to cleanup
            if (success.get()) {
                ImageVO vo = dbf.reload(ivo);
                final ImageInventory einv = ImageInventory.valueOf(vo);
                CollectionUtils.safeForEach(pluginRgty.getExtensionList(AddImageExtensionPoint.class), new ForEachFunction<AddImageExtensionPoint>() {
                    @Override
                    public void run(AddImageExtensionPoint ext) {
                        ext.afterAddImage(einv);
                    }
                });
                evt.setInventory(einv);
            } else {
                final ErrorCode err = errf.instantiateErrorCode(SysErrors.CREATE_RESOURCE_ERROR,
                        String.format("Failed to download image[name:%s] on all backup storage%s.",
                                inv.getName(), msg.getBackupStorageUuids()), errors);
                CollectionUtils.safeForEach(pluginRgty.getExtensionList(AddImageExtensionPoint.class), new ForEachFunction<AddImageExtensionPoint>() {
                    @Override
                    public void run(AddImageExtensionPoint ext) {
                        ext.failedToAddImage(inv, err);
                    }
                });
                dbf.remove(ivo);
                evt.setError(err);
            }
            bus.publish(evt);
        }
    }.start();
}

@Override
public String getId() {
    return bus.makeLocalServiceId(ImageConstant.SERVICE_ID);
}

// Indexes ImageFactory plugins by type; duplicate registrations are a configuration error.
private void populateExtensions() {
    for (ImageFactory f : pluginRgty.getExtensionList(ImageFactory.class)) {
        ImageFactory old = imageFactories.get(f.getType().toString());
        if (old != null) {
            throw new CloudRuntimeException(String.format("duplicate ImageFactory[%s, %s] for type[%s]",
                    f.getClass().getName(), old.getClass().getName(), f.getType()));
        }
        imageFactories.put(f.getType().toString(), f);
    }
}

@Override
public boolean start() {
populateExtensions(); installGlobalConfigUpdater(); return true; } private void installGlobalConfigUpdater() { ImageGlobalConfig.DELETION_POLICY.installUpdateExtension(new GlobalConfigUpdateExtensionPoint() { @Override public void updateGlobalConfig(GlobalConfig oldConfig, GlobalConfig newConfig) { startExpungeTask(); } }); ImageGlobalConfig.EXPUNGE_INTERVAL.installUpdateExtension(new GlobalConfigUpdateExtensionPoint() { @Override public void updateGlobalConfig(GlobalConfig oldConfig, GlobalConfig newConfig) { startExpungeTask(); } }); ImageGlobalConfig.EXPUNGE_PERIOD.installUpdateExtension(new GlobalConfigUpdateExtensionPoint() { @Override public void updateGlobalConfig(GlobalConfig oldConfig, GlobalConfig newConfig) { startExpungeTask(); } }); } private void startExpungeTask() { if (expungeTask != null) { expungeTask.cancel(true); } expungeTask = thdf.submitCancelablePeriodicTask(new CancelablePeriodicTask() { private List<Tuple> getDeletedImageManagedByUs() { int qun = 1000; SimpleQuery q = dbf.createQuery(ImageBackupStorageRefVO.class); q.add(ImageBackupStorageRefVO_.status, Op.EQ, ImageStatus.Deleted); long amount = q.count(); int times = (int) (amount / qun) + (amount % qun != 0 ? 
1 : 0); int start = 0; List<Tuple> ret = new ArrayList<Tuple>(); for (int i = 0; i < times; i++) { q = dbf.createQuery(ImageBackupStorageRefVO.class); q.select(ImageBackupStorageRefVO_.imageUuid, ImageBackupStorageRefVO_.lastOpDate, ImageBackupStorageRefVO_.backupStorageUuid); q.add(ImageBackupStorageRefVO_.status, Op.EQ, ImageStatus.Deleted); q.setLimit(qun); q.setStart(start); List<Tuple> ts = q.listTuple(); start += qun; for (Tuple t : ts) { String imageUuid = t.get(0, String.class); if (!destMaker.isManagedByUs(imageUuid)) { continue; } ret.add(t); } } return ret; } @Override public boolean run() { final List<Tuple> images = getDeletedImageManagedByUs(); if (images.isEmpty()) { logger.debug("[Image Expunge Task]: no images to expunge"); return false; } for (Tuple t : images) { String imageUuid = t.get(0, String.class); Timestamp date = t.get(1, Timestamp.class); String bsUuid = t.get(2, String.class); final Timestamp current = dbf.getCurrentSqlTime(); if (current.getTime() >= date.getTime() + TimeUnit.SECONDS.toMillis(ImageGlobalConfig.EXPUNGE_PERIOD.value(Long.class))) { ImageDeletionPolicy deletionPolicy = deletionPolicyMgr.getDeletionPolicy(imageUuid); if (ImageDeletionPolicy.Never == deletionPolicy) { logger.debug(String.format("the deletion policy[Never] is set for the image[uuid:%s] on the backup storage[uuid:%s]," + "don't expunge it", images, bsUuid)); continue; } ExpungeImageMsg msg = new ExpungeImageMsg(); msg.setImageUuid(imageUuid); msg.setBackupStorageUuid(bsUuid); bus.makeTargetServiceIdByResourceUuid(msg, ImageConstant.SERVICE_ID, imageUuid); bus.send(msg, new CloudBusCallBack(null) { @Override public void run(MessageReply reply) { if (!reply.isSuccess()) { //TODO logger.warn(String.format("failed to expunge the image[uuid:%s], %s", images, reply.getError())); } } }); } } return false; } @Override public TimeUnit getTimeUnit() { return TimeUnit.SECONDS; } @Override public long getInterval() { return 
ImageGlobalConfig.EXPUNGE_INTERVAL.value(Long.class); } @Override public String getName() { return "expunge-image"; } }); } @Override public boolean stop() { return true; } private ImageFactory getImageFacotry(ImageType type) { ImageFactory factory = imageFactories.get(type.toString()); if (factory == null) { throw new CloudRuntimeException(String.format("Unable to find ImageFactory with type[%s]", type)); } return factory; } @Override public void managementNodeReady() { startExpungeTask(); } @Override public List<Quota> reportQuota() { Quota.QuotaOperator checker = new Quota.QuotaOperator() { @Override public void checkQuota(APIMessage msg, Map<String, Quota.QuotaPair> pairs) { if (!new QuotaUtil().isAdminAccount(msg.getSession().getAccountUuid())) { if (msg instanceof APIAddImageMsg) { check((APIAddImageMsg) msg, pairs); } else if (msg instanceof APIRecoverImageMsg) { check((APIRecoverImageMsg) msg, pairs); } else if (msg instanceof APIChangeResourceOwnerMsg) { check((APIChangeResourceOwnerMsg) msg, pairs); } } else { if (msg instanceof APIChangeResourceOwnerMsg) { check((APIChangeResourceOwnerMsg) msg, pairs); } } } @Override public void checkQuota(NeedQuotaCheckMessage msg, Map<String, Quota.QuotaPair> pairs) { } @Override public List<Quota.QuotaUsage> getQuotaUsageByAccount(String accountUuid) { List<Quota.QuotaUsage> usages = new ArrayList<>(); ImageQuotaUtil.ImageQuota imageQuota = new ImageQuotaUtil().getUsed(accountUuid); Quota.QuotaUsage usage = new Quota.QuotaUsage(); usage.setName(ImageConstant.QUOTA_IMAGE_NUM); usage.setUsed(imageQuota.imageNum); usages.add(usage); usage = new Quota.QuotaUsage(); usage.setName(ImageConstant.QUOTA_IMAGE_SIZE); usage.setUsed(imageQuota.imageSize); usages.add(usage); return usages; } @Transactional(readOnly = true) private void check(APIChangeResourceOwnerMsg msg, Map<String, Quota.QuotaPair> pairs) { String currentAccountUuid = msg.getSession().getAccountUuid(); String resourceTargetOwnerAccountUuid = 
msg.getAccountUuid();
            // Admin target accounts are not subject to quota.
            if (new QuotaUtil().isAdminAccount(resourceTargetOwnerAccountUuid)) {
                return;
            }
            SimpleQuery<AccountResourceRefVO> q = dbf.createQuery(AccountResourceRefVO.class);
            q.add(AccountResourceRefVO_.resourceUuid, Op.EQ, msg.getResourceUuid());
            AccountResourceRefVO accResRefVO = q.find();
            // Only image resources are validated here; other types are handled elsewhere.
            if (accResRefVO.getResourceType().equals(ImageVO.class.getSimpleName())) {
                long imageNumQuota = pairs.get(ImageConstant.QUOTA_IMAGE_NUM).getValue();
                long imageSizeQuota = pairs.get(ImageConstant.QUOTA_IMAGE_SIZE).getValue();
                long imageNumUsed = new ImageQuotaUtil().getUsedImageNum(resourceTargetOwnerAccountUuid);
                long imageSizeUsed = new ImageQuotaUtil().getUsedImageSize(resourceTargetOwnerAccountUuid);
                ImageVO image = dbf.getEntityManager().find(ImageVO.class, msg.getResourceUuid());
                long imageNumAsked = 1;
                long imageSizeAsked = image.getSize();
                QuotaUtil.QuotaCompareInfo quotaCompareInfo;
                // Image-count quota for the target owner.
                {
                    quotaCompareInfo = new QuotaUtil.QuotaCompareInfo();
                    quotaCompareInfo.currentAccountUuid = currentAccountUuid;
                    quotaCompareInfo.resourceTargetOwnerAccountUuid = resourceTargetOwnerAccountUuid;
                    quotaCompareInfo.quotaName = ImageConstant.QUOTA_IMAGE_NUM;
                    quotaCompareInfo.quotaValue = imageNumQuota;
                    quotaCompareInfo.currentUsed = imageNumUsed;
                    quotaCompareInfo.request = imageNumAsked;
                    new QuotaUtil().CheckQuota(quotaCompareInfo);
                }
                // Aggregate image-size quota for the target owner.
                {
                    quotaCompareInfo = new QuotaUtil.QuotaCompareInfo();
                    quotaCompareInfo.currentAccountUuid = currentAccountUuid;
                    quotaCompareInfo.resourceTargetOwnerAccountUuid = resourceTargetOwnerAccountUuid;
                    quotaCompareInfo.quotaName = ImageConstant.QUOTA_IMAGE_SIZE;
                    quotaCompareInfo.quotaValue = imageSizeQuota;
                    quotaCompareInfo.currentUsed = imageSizeUsed;
                    quotaCompareInfo.request = imageSizeAsked;
                    new QuotaUtil().CheckQuota(quotaCompareInfo);
                }
            }
        }

        // Quota check for recovering a deleted image: it counts against the owner's
        // image-number and image-size quotas again.
        @Transactional(readOnly = true)
        private void check(APIRecoverImageMsg msg, Map<String, Quota.QuotaPair> pairs) {
            String currentAccountUuid = msg.getSession().getAccountUuid();
            String resourceTargetOwnerAccountUuid = new QuotaUtil().getResourceOwnerAccountUuid(msg.getImageUuid());
            long imageNumQuota = pairs.get(ImageConstant.QUOTA_IMAGE_NUM).getValue();
            long imageSizeQuota = pairs.get(ImageConstant.QUOTA_IMAGE_SIZE).getValue();
            long imageNumUsed = new ImageQuotaUtil().getUsedImageNum(resourceTargetOwnerAccountUuid);
            long imageSizeUsed = new ImageQuotaUtil().getUsedImageSize(resourceTargetOwnerAccountUuid);
            ImageVO image = dbf.getEntityManager().find(ImageVO.class, msg.getImageUuid());
            long imageNumAsked = 1;
            long imageSizeAsked = image.getSize();
            QuotaUtil.QuotaCompareInfo quotaCompareInfo;
            {
                quotaCompareInfo = new QuotaUtil.QuotaCompareInfo();
                quotaCompareInfo.currentAccountUuid = currentAccountUuid;
                quotaCompareInfo.resourceTargetOwnerAccountUuid = resourceTargetOwnerAccountUuid;
                quotaCompareInfo.quotaName = ImageConstant.QUOTA_IMAGE_NUM;
                quotaCompareInfo.quotaValue = imageNumQuota;
                quotaCompareInfo.currentUsed = imageNumUsed;
                quotaCompareInfo.request = imageNumAsked;
                new QuotaUtil().CheckQuota(quotaCompareInfo);
            }
            {
                quotaCompareInfo = new QuotaUtil.QuotaCompareInfo();
                quotaCompareInfo.currentAccountUuid = currentAccountUuid;
                quotaCompareInfo.resourceTargetOwnerAccountUuid = resourceTargetOwnerAccountUuid;
                quotaCompareInfo.quotaName = ImageConstant.QUOTA_IMAGE_SIZE;
                quotaCompareInfo.quotaValue = imageSizeQuota;
                quotaCompareInfo.currentUsed = imageSizeUsed;
                quotaCompareInfo.request = imageSizeAsked;
                new QuotaUtil().CheckQuota(quotaCompareInfo);
            }
        }

        // Quota check for adding an image: only the count quota is checked directly;
        // the size quota is validated with an HTTP HEAD on the image URL below.
        @Transactional(readOnly = true)
        private void check(APIAddImageMsg msg, Map<String, Quota.QuotaPair> pairs) {
            String currentAccountUuid = msg.getSession().getAccountUuid();
            String resourceTargetOwnerAccountUuid = msg.getSession().getAccountUuid();
            long imageNumQuota = pairs.get(ImageConstant.QUOTA_IMAGE_NUM).getValue();
            long imageNumUsed = new ImageQuotaUtil().getUsedImageNum(resourceTargetOwnerAccountUuid);
            long imageNumAsked = 1;
            QuotaUtil.QuotaCompareInfo quotaCompareInfo;
            {
                quotaCompareInfo = new QuotaUtil.QuotaCompareInfo();
                quotaCompareInfo.currentAccountUuid = currentAccountUuid;
                quotaCompareInfo.resourceTargetOwnerAccountUuid = resourceTargetOwnerAccountUuid;
                quotaCompareInfo.quotaName = ImageConstant.QUOTA_IMAGE_NUM;
                quotaCompareInfo.quotaValue = imageNumQuota;
                quotaCompareInfo.currentUsed = imageNumUsed;
                quotaCompareInfo.request = imageNumAsked;
                new QuotaUtil().CheckQuota(quotaCompareInfo);
            }
            new ImageQuotaUtil().checkImageSizeQuotaUseHttpHead(msg, pairs);
        }
    };

    Quota quota = new Quota();
    quota.setOperator(checker);
    quota.addMessageNeedValidation(APIAddImageMsg.class);
    quota.addMessageNeedValidation(APIRecoverImageMsg.class);
    quota.addMessageNeedValidation(APIChangeResourceOwnerMsg.class);
    // Defaults: 20 images, 10 TB aggregate size per account.
    Quota.QuotaPair p = new Quota.QuotaPair();
    p.setName(ImageConstant.QUOTA_IMAGE_NUM);
    p.setValue(20);
    quota.addPair(p);
    p = new Quota.QuotaPair();
    p.setName(ImageConstant.QUOTA_IMAGE_SIZE);
    p.setValue(SizeUnit.TERABYTE.toByte(10));
    quota.addPair(p);
    return list(quota);
}

@Override
@Transactional(readOnly = true)
public void resourceOwnerPreChange(AccountResourceRefInventory ref, String newOwnerUuid) {
}
}
package com.intellij.compiler.options; import com.intellij.compiler.CompilerConfiguration; import com.intellij.ide.util.ElementsChooser; import com.intellij.openapi.fileChooser.FileChooser; import com.intellij.openapi.fileChooser.FileChooserDescriptor; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleManager; import com.intellij.openapi.options.Configurable; import com.intellij.openapi.options.ConfigurationException; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.TextFieldWithBrowseButton; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.ui.TableUtil; import com.intellij.util.containers.HashMap; import com.intellij.util.ui.ItemRemovable; import com.intellij.util.ui.Table; import org.jetbrains.annotations.NotNull; import javax.swing.*; import javax.swing.border.TitledBorder; import javax.swing.event.ListSelectionEvent; import javax.swing.event.ListSelectionListener; import javax.swing.table.AbstractTableModel; import javax.swing.table.JTableHeader; import javax.swing.table.TableCellEditor; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.ItemEvent; import java.awt.event.ItemListener; import java.io.File; import java.util.*; /** * @author Eugene Zhuravlev * Date: Oct 5, 2009 */ public class AnnotationProcessorsConfigurable implements Configurable{ private ElementsChooser<Module> myModulesChooser; private final Project myProject; private JRadioButton myRbClasspath; private JRadioButton myRbProcessorsPath; private TextFieldWithBrowseButton myProcessorPathField; private ProcessorTableModel myProcessorsModel; private JCheckBox myCbEnableProcessing; private JButton myRemoveButton; private Table myProcessorTable; private JButton myAddButton; public AnnotationProcessorsConfigurable(final Project project) { myProject = project; } public String getDisplayName() { return "Annotation 
Processors"; } public Icon getIcon() { return null; } public String getHelpTopic() { return null; } public JComponent createComponent() { final JPanel mainPanel = new JPanel(new GridBagLayout()); myCbEnableProcessing = new JCheckBox("Enable annotation processing"); myRbClasspath = new JRadioButton("Obtain processors from project classpath"); myRbProcessorsPath = new JRadioButton("Processor path:"); ButtonGroup group = new ButtonGroup(); group.add(myRbClasspath); group.add(myRbProcessorsPath); myProcessorPathField = new TextFieldWithBrowseButton(new ActionListener() { public void actionPerformed(ActionEvent e) { final VirtualFile[] files = FileChooser.chooseFiles(myProcessorPathField, new FileChooserDescriptor(true, true, true, true, false, true)); if (files.length > 0) { final StringBuilder builder = new StringBuilder(); for (VirtualFile file : files) { if (builder.length() > 0) { builder.append(File.pathSeparator); } builder.append(FileUtil.toSystemDependentName(file.getPath())); } myProcessorPathField.setText(builder.toString()); } } }); final JPanel processorTablePanel = new JPanel(new BorderLayout()); myProcessorsModel = new ProcessorTableModel(); processorTablePanel.setBorder(new TitledBorder("Annotation Processors")); myProcessorTable = new Table(myProcessorsModel); processorTablePanel.add(new JScrollPane(myProcessorTable), BorderLayout.CENTER); final JPanel buttons = new JPanel(new GridBagLayout()); myAddButton = new JButton("Add"); buttons.add(myAddButton, new GridBagConstraints(0, GridBagConstraints.RELATIVE, 1, 1, 1.0, 0.0, GridBagConstraints.NORTH, GridBagConstraints.HORIZONTAL, new Insets(0, 5, 0, 0), 0, 0)); myRemoveButton = new JButton("Remove"); buttons.add(myRemoveButton, new GridBagConstraints(0, GridBagConstraints.RELATIVE, 1, 1, 1.0, 1.0, GridBagConstraints.NORTH, GridBagConstraints.HORIZONTAL, new Insets(5, 5, 0, 0), 0, 0)); processorTablePanel.add(buttons, BorderLayout.EAST); processorTablePanel.setPreferredSize(new 
Dimension(processorTablePanel.getPreferredSize().width, 50)); myModulesChooser = new ElementsChooser<Module>(true) { protected String getItemText(@NotNull Module module) { return module.getName() + " (" + FileUtil.toSystemDependentName(module.getModuleFilePath()) + ")"; } protected Icon getItemIcon(Module module) { return module.getModuleType().getNodeIcon(false); } }; myModulesChooser.setBorder(BorderFactory.createTitledBorder("Processed Modules")); mainPanel.add(myCbEnableProcessing, new GridBagConstraints(0, GridBagConstraints.RELATIVE, 2, 1, 1.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE, new Insets(5, 0, 0, 0), 0, 0)); mainPanel.add(myRbClasspath, new GridBagConstraints(0, GridBagConstraints.RELATIVE, 2, 1, 1.0, 0.0, GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(10, 0, 0, 0), 0, 0)); mainPanel.add(myRbProcessorsPath, new GridBagConstraints(0, GridBagConstraints.RELATIVE, 1, 1, 0.0, 0.0, GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(5, 0, 0, 0), 0, 0)); mainPanel.add(myProcessorPathField, new GridBagConstraints(1, GridBagConstraints.RELATIVE, 1, 1, 1.0, 0.0, GridBagConstraints.WEST, GridBagConstraints.HORIZONTAL, new Insets(5, 5, 0, 0), 0, 0)); mainPanel.add(processorTablePanel, new GridBagConstraints(0, GridBagConstraints.RELATIVE, 2, 1, 1.0, 1.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, new Insets(10, 0, 0, 0), 0, 0)); mainPanel.add(myModulesChooser, new GridBagConstraints(0, GridBagConstraints.RELATIVE, 2, 1, 1.0, 1.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, new Insets(10, 0, 0, 0), 0, 0)); myRbClasspath.addItemListener(new ItemListener() { public void itemStateChanged(ItemEvent e) { updateEnabledState(); } }); myProcessorTable.getSelectionModel().addListSelectionListener(new ListSelectionListener() { public void valueChanged(ListSelectionEvent e) { if (!e.getValueIsAdjusting()) { updateEnabledState(); } } }); myAddButton.addActionListener(new ActionListener() { public void 
actionPerformed(ActionEvent e) { final TableCellEditor cellEditor = myProcessorTable.getCellEditor(); if (cellEditor != null) { cellEditor.stopCellEditing(); } final ProcessorTableModel model = (ProcessorTableModel)myProcessorTable.getModel(); final int inserdedIndex = model.addRow(); TableUtil.editCellAt(myProcessorTable, inserdedIndex, ProcessorTableRow.NAME_COLUMN); } }); myRemoveButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { TableUtil.removeSelectedItems(myProcessorTable); } }); myCbEnableProcessing.addItemListener(new ItemListener() { public void itemStateChanged(ItemEvent e) { updateEnabledState(); } }); updateEnabledState(); return mainPanel; } private void updateEnabledState() { final boolean enabled = myCbEnableProcessing.isSelected(); final boolean useProcessorpath = !myRbClasspath.isSelected(); myRbClasspath.setEnabled(enabled); myRbProcessorsPath.setEnabled(enabled); myProcessorPathField.setEnabled(enabled && useProcessorpath); myRemoveButton.setEnabled(enabled && myProcessorTable.getSelectedRow() >= 0); myAddButton.setEnabled(enabled); myProcessorTable.setEnabled(enabled); final JTableHeader header = myProcessorTable.getTableHeader(); if (header != null) { header.repaint(); } myModulesChooser.setEnabled(enabled); } public boolean isModified() { final CompilerConfiguration config = CompilerConfiguration.getInstance(myProject); if (config.isAnnotationProcessorsEnabled() != myCbEnableProcessing.isSelected()) { return true; } if (config.isObtainProcessorsFromClasspath() != myRbClasspath.isSelected()) { return true; } if (!FileUtil.pathsEqual(config.getProcessorPath(), FileUtil.toSystemIndependentName(myProcessorPathField.getText().trim()))) { return true; } final Map<String, String> map = myProcessorsModel.exportToMap(); if (!map.equals(config.getAnnotationProcessorsMap())) { return true; } if (!getExcludedModules().equals(config.getExcludedModules())) { return true; } return false; } public void apply() throws 
ConfigurationException { final CompilerConfiguration config = CompilerConfiguration.getInstance(myProject); config.setAnnotationProcessorsEnabled(myCbEnableProcessing.isSelected()); config.setObtainProcessorsFromClasspath(myRbClasspath.isSelected()); config.setProcessorsPath(FileUtil.toSystemIndependentName(myProcessorPathField.getText().trim())); config.setAnnotationProcessorsMap(myProcessorsModel.exportToMap()); config.setExcludedModules(getExcludedModules()); } private Set<Module> getExcludedModules() { final Set<Module> excludedModules = new HashSet<Module>(Arrays.asList(ModuleManager.getInstance(myProject).getModules())); excludedModules.removeAll(new HashSet<Module>(myModulesChooser.getMarkedElements())); return excludedModules; } public void reset() { final CompilerConfiguration config = CompilerConfiguration.getInstance(myProject); myCbEnableProcessing.setSelected(config.isAnnotationProcessorsEnabled()); final boolean obtainFromClasspath = config.isObtainProcessorsFromClasspath(); if (obtainFromClasspath) { myRbClasspath.setSelected(true); } else { myRbProcessorsPath.setSelected(true); } myProcessorPathField.setText(FileUtil.toSystemDependentName(config.getProcessorPath())); myProcessorsModel.setProcessorMap(config.getAnnotationProcessorsMap()); // excludes final Set<Module> excludedModules = new HashSet<Module>(config.getExcludedModules()); myModulesChooser.removeAllElements(); for (final Module module : ModuleManager.getInstance(myProject).getModules()) { myModulesChooser.addElement(module, !excludedModules.contains(module)); } myModulesChooser.sort(new Comparator<Module>() { public int compare(Module o1, Module o2) { return o1.getName().compareToIgnoreCase(o2.getName()); } }); } public void disposeUIResources() { } private static class ProcessorTableModel extends AbstractTableModel implements ItemRemovable{ private final java.util.List<ProcessorTableRow> myRows = new ArrayList<ProcessorTableRow>(); public String getColumnName(int column) { switch 
(column) { case ProcessorTableRow.NAME_COLUMN: return "Processor FQ Name"; case ProcessorTableRow.OPTIONS_COLUMN : return "Processor Run Options"; } return super.getColumnName(column); } public Class<?> getColumnClass(int columnIndex) { return String.class; } public int getRowCount() { return myRows.size(); } public int getColumnCount() { return 2; } public boolean isCellEditable(int rowIndex, int columnIndex) { return columnIndex == ProcessorTableRow.NAME_COLUMN || columnIndex == ProcessorTableRow.OPTIONS_COLUMN; } public Object getValueAt(int rowIndex, int columnIndex) { final ProcessorTableRow row = myRows.get(rowIndex); switch (columnIndex) { case ProcessorTableRow.NAME_COLUMN: return row.name; case ProcessorTableRow.OPTIONS_COLUMN : return row.options; } return null; } public void setValueAt(Object aValue, int rowIndex, int columnIndex) { if (aValue != null) { final ProcessorTableRow row = myRows.get(rowIndex); switch (columnIndex) { case ProcessorTableRow.NAME_COLUMN: row.name = (String)aValue; break; case ProcessorTableRow.OPTIONS_COLUMN: row.options = (String)aValue; break; } } } public void removeRow(int idx) { myRows.remove(idx); fireTableRowsDeleted(idx, idx); } public int addRow() { myRows.add(new ProcessorTableRow()); final int inserted = myRows.size() - 1; fireTableRowsInserted(inserted, inserted); return inserted; } public void setProcessorMap(Map<String, String> processorMap) { clear(); if (processorMap.size() > 0) { for (Map.Entry<String, String> entry : processorMap.entrySet()) { myRows.add(new ProcessorTableRow(entry.getKey(), entry.getValue())); } Collections.sort(myRows, new Comparator<ProcessorTableRow>() { public int compare(ProcessorTableRow o1, ProcessorTableRow o2) { return o1.name.compareToIgnoreCase(o2.name); } }); fireTableRowsInserted(0, processorMap.size()-1); } } public void clear() { final int count = myRows.size(); if (count > 0) { myRows.clear(); fireTableRowsDeleted(0, count-1); } } public Map<String, String> exportToMap() { 
final Map<String, String> map = new HashMap<String, String>(); for (ProcessorTableRow row : myRows) { if (row.name != null) { final String name = row.name.trim(); if (name.length() > 0 && !map.containsKey(name)) { map.put(name, row.options); } } } return map; } } private static final class ProcessorTableRow { public static final int NAME_COLUMN = 0; public static final int OPTIONS_COLUMN = 1; public String name = ""; public String options = ""; public ProcessorTableRow() { } public ProcessorTableRow(String name, String options) { this.name = name != null? name : ""; this.options = options != null? options : ""; } } }
/* * Copyright (c) 2007, intarsys consulting GmbH * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * - Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * - Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * - Neither the name of intarsys nor the names of its contributors may be used * to endorse or promote products derived from this software without specific * prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ package de.intarsys.tools.currency; /** * http://www.iso.org/iso/en/prods-services/popstds/currencycodeslist.html * */ public class Iso4217 { public static final int COLUMN_ENTITY = 0; public static final int COLUMN_CURRENCY = 1; public static final int COLUMN_ALPHABETIC = 2; public static final int COLUMN_NUMERIC = 3; public static final String[][] table = new String[][] { // Entity, Currency, Alphabetic-Code, Numeric-Code { "AFGHANISTAN", "Afghani", "AFN", "971" }, { "ALBANIA", "Lek", "ALL", "008" }, { "ALGERIA", "Algerian Dinar", "DZD", "012" }, { "AMERICAN SAMOA", "US Dollar", "USD", "840" }, { "ANDORRA", "Euro", "EUR", "978" }, { "ANGOLA", "Kwanza", "AOA", "973" }, { "ANGUILLA", "East Caribbean Dollar", "XCD", "951" }, { "ANTIGUA AND BARBUDA", "East Caribbean Dollar", "XCD", "951" }, { "ARGENTINA", "Argentine Peso", "ARS", "032" }, { "ARMENIA", "Armenian Dram", "AMD", "051" }, { "ARUBA", "Aruban Guilder", "AWG", "533" }, { "AUSTRALIA", "Australian Dollar", "AUD", "036" }, { "AUSTRIA", "Euro", "EUR", "978" }, { "AZERBAIJAN", "Azerbaijanian Manat", "AZN", "944" }, { "BAHAMAS", "Bahamian Dollar", "BSD", "044" }, { "BAHRAIN", "Bahraini Dinar", "BHD", "048" }, { "BANGLADESH", "Taka", "BDT", "050" }, { "BARBADOS", "Barbados Dollar", "BBD", "052" }, { "BELARUS", "Belarussian Ruble", "BYR", "974" }, { "BELGIUM", "Euro", "EUR", "978" }, { "BELIZE", "Belize Dollar", "BZD", "084" }, { "BENIN", "CFA Franc BCEAO", "XOF", "952" }, { "BERMUDA", "Bermudian Dollar", "BMD", "060" }, { "BHUTAN", "Indian Rupee", "INR", "356" }, { "BHUTAN", "Ngultrum", "BTN", "064" }, { "BOLIVIA", "Boliviano", "BOB", "068" }, { "BOLIVIA", "Mvdol", "BOV", "984" }, { "BOSNIA & HERZEGOVINA", "Convertible Marks", "BAM", "977" }, { "BOTSWANA", "Pula", "BWP", "072" }, { "BOUVET ISLAND", "Norwegian Krone", "NOK", "578" }, { "BRAZIL", "Brazilian Real", "BRL", "986" }, { "BRITISH INDIAN OCEAN TERRITORY", "US Dollar", "USD", "840" }, { "BRUNEI DARUSSALAM", "Brunei Dollar", "BND", "096" }, { "BULGARIA", 
"Bulgarian Lev", "BGN", "975" }, { "BURKINA FASO", "CFA Franc BCEAO", "XOF", "952" }, { "BURUNDI", "Burundi Franc", "BIF", "108" }, { "CAMBODIA", "Riel", "KHR", "116" }, { "CAMEROON", "CFA Franc BEAC", "XAF", "950" }, { "CANADA", "Canadian Dollar", "CAD", "124" }, { "CAPE VERDE", "Cape Verde Escudo", "CVE", "132" }, { "CAYMAN ISLANDS", "Cayman Islands Dollar", "KYD", "136" }, { "CENTRAL AFRICAN REPUBLIC", "CFA Franc BEAC", "XAF", "950" }, { "CHAD", "CFA Franc BEAC", "XAF", "950" }, { "CHILE", "Chilean Peso", "CLP", "152" }, { "CHILE", "Unidades de formento", "CLF", "990" }, { "CHINA", "Yuan Renminbi", "CNY", "156" }, { "CHRISTMAS ISLAND", "Australian Dollar", "AUD", "036" }, { "COCOS (KEELING) ISLANDS", "Australian Dollar", "AUD", "036" }, { "COLOMBIA", "Colombian Peso", "COP", "170" }, { "COLOMBIA", "Unidad de Valor Real", "COU", "970" }, { "COMOROS", "Comoro Franc", "KMF", "174" }, { "CONGO", "CFA Franc BEAC", "XAF", "950" }, { "CONGO THE DEMOCRATIC REPUBLIC OF", "Franc Congolais", "CDF", "976" }, { "COOK ISLANDS", "New Zealand Dollar", "NZD", "554" }, { "COSTA RICA", "Costa Rican Colon", "CRC", "188" }, { "C&Ocirc;TE D'IVOIRE", "CFA Franc BCEAO", "XOF", "952" }, { "CROATIA", "Croatian Kuna", "HRK", "191" }, { "CUBA", "Cuban Peso", "CUP", "192" }, { "CYPRUS", "Cyprus Pound", "CYP", "196" }, { "CZECH REPUBLIC", "Czech Koruna", "CZK", "203" }, { "DENMARK", "Danish Krone", "DKK", "208" }, { "DJIBOUTI", "Djibouti Franc", "DJF", "262" }, { "DOMINICA", "East Caribbean Dollar", "XCD", "951" }, { "DOMINICAN REPUBLIC", "Dominican Peso", "DOP", "214" }, { "ECUADOR", "US Dollar", "USD", "840" }, { "EGYPT", "Egyptian Pound", "EGP", "818" }, { "EL SALVADOR", "El Salvador Colon", "SVC", "222" }, { "EL SALVADOR", "US Dollar", "USD", "840" }, { "EQUATORIAL GUINEA", "CFA Franc BEAC", "XAF", "950" }, { "ERITREA", "Nakfa", "ERN", "232" }, { "ESTONIA", "Kroon", "EEK", "233" }, { "ETHIOPIA", "Ethiopian Birr", "ETB", "230" }, { "FALKLAND ISLANDS (MALVINAS)", "Falkland Islands Pound", 
"FKP", "238" }, { "FAROE ISLANDS", "Danish Krone", "DKK", "208" }, { "FIJI", "Fiji Dollar", "FJD", "242" }, { "FINLAND", "Euro", "EUR", "978" }, { "FRANCE", "Euro", "EUR", "978" }, { "FRENCH GUIANA", "Euro", "EUR", "978" }, { "FRENCH POLYNESIA", "CFP Franc", "XPF", "953" }, { "FRENCH SOUTHERN TERRITORIES", "Euro", "EUR", "978" }, { "GABON", "CFA Franc BEAC", "XAF", "950" }, { "GAMBIA", "Dalasi", "GMD", "270" }, { "GEORGIA", "Lari", "GEL", "981" }, { "GERMANY", "Euro", "EUR", "978" }, { "GHANA", "Cedi", "GHC", "288" }, { "GIBRALTAR", "Gibraltar Pound", "GIP", "292" }, { "GREECE", "Euro", "EUR", "978" }, { "GREENLAND", "Danish Krone", "DKK", "208" }, { "GRENADA", "East Caribbean Dollar", "XCD", "951" }, { "GUADELOUPE", "Euro", "EUR", "978" }, { "GUAM", "US Dollar", "USD", "840" }, { "GUATEMALA", "Quetzal", "GTQ", "320" }, { "GUINEA", "Guinea Franc", "GNF", "324" }, { "GUINEA-BISSAU", "Guinea-Bissau Peso", "GWP", "624" }, { "GUINEA-BISSAU", "CFA Franc BCEAO", "XOF", "952" }, { "GUYANA", "Guyana Dollar", "GYD", "328" }, { "HAITI", "Gourde", "HTG", "332" }, { "HAITI", "US Dollar", "USD", "840" }, { "HEARD ISLAND AND McDONALD ISLANDS", "Australian Dollar", "AUD", "036" }, { "HOLY SEE (VATICAN CITY STATE)", "Euro", "EUR", "978" }, { "HONDURAS", "Lempira", "HNL", "340" }, { "HONG KONG", "Hong Kong Dollar", "HKD", "344" }, { "HUNGARY", "Forint", "HUF", "348" }, { "ICELAND", "Iceland Krona", "ISK", "352" }, { "INDIA", "Indian Rupee", "INR", "356" }, { "INDONESIA", "Rupiah", "IDR", "360" }, { "INTERNATIONAL MONETARY FUND (I.M.F)", "SDR", "XDR", "960" }, { "IRAN (ISLAMIC REPUBLIC OF)", "Iranian Rial", "IRR", "364" }, { "IRAQ", "Iraqi Dinar", "IQD", "368" }, { "IRELAND", "Euro", "EUR", "978" }, { "ISRAEL", "New Israeli Sheqel", "ILS", "376" }, { "ITALY", "Euro", "EUR", "978" }, { "JAMAICA", "Jamaican Dollar", "JMD", "388" }, { "JAPAN", "Yen", "JPY", "392" }, { "JORDAN", "Jordanian Dinar", "JOD", "400" }, { "KAZAKHSTAN", "Tenge", "KZT", "398" }, { "KENYA", "Kenyan Shilling", 
"KES", "404" }, { "KIRIBATI", "Australian Dollar", "AUD", "036" }, { "KOREA DEMOCRATIC PEOPLE'S REPUBLIC OF", "North Korean Won", "KPW", "408" }, { "KOREA REPUBLIC OF", "Won", "KRW", "410" }, { "KUWAIT", "Kuwaiti Dinar", "KWD", "414" }, { "KYRGYZSTAN", "Som", "KGS", "417" }, { "LAO PEOPLE'S DEMOCRATIC REPUBLIC", "Kip", "LAK", "418" }, { "LATVIA", "Latvian Lats", "LVL", "428" }, { "LEBANON", "Lebanese Pound", "LBP", "422" }, { "LESOTHO", "Rand", "ZAR", "710" }, { "LESOTHO", "Loti", "LSL", "426" }, { "LIBERIA", "Liberian Dollar", "LRD", "430" }, { "LIBYAN ARAB JAMAHIRIYA", "Libyan Dinar", "LYD", "434" }, { "LIECHTENSTEIN", "Swiss Franc", "CHF", "756" }, { "LITHUANIA", "Lithuanian Litas", "LTL", "440" }, { "LUXEMBOURG", "Euro", "EUR", "978" }, { "MACAO", "Pataca", "MOP", "446" }, { "MACEDONIA THE FORMER YUGOSLAV REPUBLIC OF", "Denar", "MKD", "807" }, { "MADAGASCAR", "Malagascy Ariary", "MGA", "969" }, { "MALAWI", "Kwacha", "MWK", "454" }, { "MALAYSIA", "Malaysian Ringgit", "MYR", "458" }, { "MALDIVES", "Rufiyaa", "MVR", "462" }, { "MALI", "CFA Franc BCEAO", "XOF", "952" }, { "MALTA", "Maltese Lira", "MTL", "470" }, { "MARSHALL ISLANDS", "US Dollar", "USD", "840" }, { "MARTINIQUE", "Euro", "EUR", "978" }, { "MAURITANIA", "Ouguiya", "MRO", "478" }, { "MAURITIUS", "Mauritius Rupee", "MUR", "480" }, { "MAYOTTE", "Euro", "EUR", "978" }, { "MEXICO", "Mexican Peso", "MXN", "484" }, { "MEXICO", "Mexican Unidad de Inversion (UID)", "MXV", "979" }, { "MICRONESIA (FEDERATED STATES OF)", "US Dollar", "USD", "840" }, { "MOLDOVA REPUBLIC OF", "Moldovan Leu", "MDL", "498" }, { "MONACO", "Euro", "EUR", "978" }, { "MONGOLIA", "Tugrik", "MNT", "496" }, { "MONTENEGRO", "Euro", "EUR", "978" }, { "MONTSERRAT", "East Caribbean Dollar", "XCD", "951" }, { "MOROCCO", "Moroccan Dirham", "MAD", "504" }, { "MOZAMBIQUE", "Metical", "MZN", "943" }, { "MYANMAR", "Kyat", "MMK", "104" }, { "NAMIBIA", "Rand", "ZAR", "710" }, { "NAMIBIA", "Namibian Dollar", "NAD", "516" }, { "NAURU", "Australian 
Dollar", "AUD", "036" }, { "NEPAL", "Nepalese Rupee", "NPR", "524" }, { "NETHERLANDS", "Euro", "EUR", "978" }, { "NETHERLANDS ANTILLES", "Netherlands Antillian Guilder", "ANG", "532" }, { "NEW CALEDONIA", "CFP Franc", "XPF", "953" }, { "NEW ZEALAND", "New Zealand Dollar", "NZD", "554" }, { "NICARAGUA", "Cordoba Oro", "NIO", "558" }, { "NIGER", "CFA Franc BCEAO", "XOF", "952" }, { "NIGERIA", "Naira", "NGN", "566" }, { "NIUE", "New Zealand Dollar", "NZD", "554" }, { "NORFOLK ISLAND", "Australian Dollar", "AUD", "036" }, { "NORTHERN MARIANA ISLANDS", "US Dollar", "USD", "840" }, { "NORWAY", "Norwegian Krone", "NOK", "578" }, { "OMAN", "Rial Omani", "OMR", "512" }, { "PAKISTAN", "Pakistan Rupee", "PKR", "586" }, { "PALAU", "US Dollar", "USD", "840" }, { "PANAMA", "Balboa", "PAB", "590" }, { "PANAMA", "US Dollar", "USD", "840" }, { "PAPUA NEW GUINEA", "Kina", "PGK", "598" }, { "PARAGUAY", "Guarani", "PYG", "600" }, { "PERU", "Nuevo Sol", "PEN", "604" }, { "PHILIPPINES", "Philippine Peso", "PHP", "608" }, { "PITCAIRN", "New Zealand Dollar", "NZD", "554" }, { "POLAND", "Zloty", "PLN", "985" }, { "PORTUGAL", "Euro", "EUR", "978" }, { "PUERTO RICO", "US Dollar", "USD", "840" }, { "QATAR", "Qatari Rial", "QAR", "634" }, { "R&Eacute;UNION", "Euro", "EUR", "978" }, { "ROMANIA", "Old Leu", "ROL", "642" }, { "ROMANIA", "New Leu", "RON", "946" }, { "RUSSIAN FEDERATION", "Russian Ruble", "RUB", "643" }, { "RWANDA", "Rwanda Franc", "RWF", "646" }, { "SAINT HELENA", "Saint Helena Pound", "SHP", "654" }, { "SAINT KITTS AND NEVIS", "East Caribbean Dollar", "XCD", "951" }, { "SAINT LUCIA", "East Caribbean Dollar", "XCD", "951" }, { "SAINT PIERRE AND MIQUELON", "Euro", "EUR", "978" }, { "SAINT VINCENT AND THE GRENADINES", "East Caribbean Dollar", "XCD", "951" }, { "SAMOA", "Tala", "WST", "882" }, { "SAN MARINO", "Euro", "EUR", "978" }, { "S&Atilde;O TOME AND PRINCIPE", "Dobra", "STD", "678" }, { "SAUDI ARABIA", "Saudi Riyal", "SAR", "682" }, { "SENEGAL", "CFA Franc BCEAO", "XOF", "952" 
}, { "SERBIA", "Serbian Dinar", "RSD", "941" }, { "SEYCHELLES", "Seychelles Rupee", "SCR", "690" }, { "SIERRA LEONE", "Leone", "SLL", "694" }, { "SINGAPORE", "Singapore Dollar", "SGD", "702" }, { "SLOVAKIA", "Slovak Koruna", "SKK", "703" }, { "SLOVENIA", "Tolar", "SIT", "705" }, { "SOLOMON ISLANDS", "Solomon Islands Dollar", "SBD", "090" }, { "SOMALIA", "Somali Shilling", "SOS", "706" }, { "SOUTH AFRICA", "Rand", "ZAR", "710" }, { "SPAIN", "Euro", "EUR", "978" }, { "SRI LANKA", "Sri Lanka Rupee", "LKR", "144" }, { "SUDAN", "Sudanese Dinar", "SDG", "938" }, { "SURINAME", "Surinam Dollar", "SRD", "968" }, { "SVALBARD AND JAN MAYEN", "Norwegian Krone", "NOK", "578" }, { "SWAZILAND", "Lilangeni", "SZL", "748" }, { "SWEDEN", "Swedish Krona", "SEK", "752" }, { "SWITZERLAND", "Swiss Franc", "CHF", "756" }, { "SWITZERLAND", "WIR Franc", "CHW", "948" }, { "SWITZERLAND", "WIR Euro", "CHE", "947" }, { "SYRIAN ARAB REPUBLIC", "Syrian Pound", "SYP", "760" }, { "TAIWAN PROVINCE OF CHINA", "New Taiwan Dollar", "TWD", "901" }, { "TAJIKISTAN", "Somoni", "TJS", "972" }, { "TANZANIA UNITED REPUBLIC OF", "Tanzanian Shilling", "TZS", "834" }, { "THAILAND", "Baht", "THB", "764" }, { "TIMOR-LESTE", "US Dollar", "USD", "840" }, { "TOGO", "CFA Franc BCEAO", "XOF", "952" }, { "TOKELAU", "New Zealand Dollar", "NZD", "554" }, { "TONGA", "Pa'anga", "TOP", "776" }, { "TRINIDAD AND TOBAGO", "Trinidad and Tobago Dollar", "TTD", "780" }, { "TUNISIA", "Tunisian Dinar", "TND", "788" }, { "TURKEY", "New Turkish Lira", "TRY", "949" }, { "TURKMENISTAN", "Manat", "TMM", "795" }, { "TURKS AND CAICOS ISLANDS", "US Dollar", "USD", "840" }, { "TUVALU", "Australian Dollar", "AUD", "036" }, { "UGANDA", "Uganda Shilling", "UGX", "800" }, { "UKRAINE", "Hryvnia", "UAH", "980" }, { "UNITED ARAB EMIRATES", "UAE Dirham", "AED", "784" }, { "UNITED KINGDOM", "Pound Sterling", "GBP", "826" }, { "UNITED STATES", "US Dollar", "USD", "840" }, { "UNITED STATES", "US Dollar (Same day)", "USS", "998" }, { "UNITED STATES", 
"US Dollar(Next day)", "USN", "997" }, { "UNITED STATES MINOR OUTLYING ISLANDS", "US Dollar", "USD", "840" }, { "URUGUAY", "Peso Uruguayo", "UYU", "858" }, { "URUGUAY", "Uruguay Peso en Unidades Indexadas", "UYI", "940" }, { "UZBEKISTAN", "Uzbekistan Sum", "UZS", "860" }, { "VANUATU", "Vatu", "VUV", "548" }, { "VENEZUELA", "Bolivar", "VEB", "862" }, { "VIET NAM", "Dong", "VND", "704" }, { "VIRGIN ISLANDS (BRITISH)", "US Dollar", "USD", "840" }, { "VIRGIN ISLANDS (US)", "US Dollar", "USD", "840" }, { "WALLIS AND FUTUNA", "CFP Franc", "XPF", "953" }, { "WESTERN SAHARA", "Moroccan Dirham", "MAD", "504" }, { "YEMEN", "Yemeni Rial", "YER", "886" }, { "ZAMBIA", "Kwacha", "ZMK", "894" }, { "ZIMBABWE", "Zimbabwe Dollar", "ZWD", "716" }, { "-", "Gold", "XAU", "959" }, { "-", "Bond Markets Units European Composite Unit (EURCO)", "XBA", "955" }, { "-", "European Monetary Unit (E.M.U.-6)", "XBB", "956" }, { "-", "European Unit of Account 9(E.U.A.-9)", "XBC", "957" }, { "-", "European Unit of Account 17(E.U.A.-17)", "XBD", "958" }, { "-", "Palladium", "XPD", "964" }, { "-", "Platinum", "XPT", "962" }, { "-", "Silver", "XAG", "961" }, { "-", "UIC-Franc", "XFU", "-1" }, { "-", "Gold-Franc", "XFO", "-1" }, { "-", "Codes specifically reserved for testing purposes", "XTS", "963" }, { "-", "The codes assigned for transactions where no currency is involved are:", "XXX", "999" } }; public static String Numeric2Alphabetic(int numeric) { for (int row = 0; row < table.length; row++) { String numericStr = table[row][COLUMN_NUMERIC]; if (numeric == Integer.parseInt(numericStr)) { return table[row][COLUMN_ALPHABETIC]; } } return ""; } }
/*
 * [The "BSD license"]
 *  Copyright (c) 2012 Terence Parr
 *  Copyright (c) 2012 Sam Harwell
 *  All rights reserved.
 *
 *  Redistribution and use in source and binary forms, with or without
 *  modification, are permitted provided that the following conditions
 *  are met:
 *
 *  1. Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *  2. Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in the
 *     documentation and/or other materials provided with the distribution.
 *  3. The name of the author may not be used to endorse or promote products
 *     derived from this software without specific prior written permission.
 *
 *  THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 *  IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 *  OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 *  IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 *  INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 *  NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 *  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 *  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 *  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 *  THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.antlr.v4.automata;

import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.ATNState;
import org.antlr.v4.runtime.misc.NotNull;
import org.antlr.v4.tool.ast.ActionAST;
import org.antlr.v4.tool.ast.BlockAST;
import org.antlr.v4.tool.ast.GrammarAST;
import org.antlr.v4.tool.ast.PredAST;
import org.antlr.v4.tool.ast.TerminalAST;

import java.util.List;

/**
 * Factory contract for translating a grammar AST into an ATN. Each method
 * returns a {@link Handle} — a (left, right) pair of entry/exit states for the
 * sub-machine built for that grammar construct; the builder stitches these
 * handles together to form the full ATN.
 */
public interface ATNFactory {
	/** A pair of states pointing to the left/right (start and end) states of a
	 *  state submachine.  Used to build ATNs.
	 */
	public static class Handle {
		// NOTE(review): fields are intentionally public and mutable; builder code
		// rewires them while assembling the ATN.
		public ATNState left;
		public ATNState right;

		public Handle(ATNState left, ATNState right) {
			this.left = left;
			this.right = right;
		}

		@Override
		public String toString() {
			return "("+left+","+right+")";
		}
	}

	/** Build and return the ATN for the grammar processed by this factory. */
	@NotNull
	ATN createATN();

	/** Set the name of the rule currently being translated. */
	void setCurrentRuleName(@NotNull String name);

	/** Set the (1-based, presumably) number of the alternative being translated. */
	void setCurrentOuterAlt(int alt);

	/** Wrap the machine built for a rule body {@code blk} into the rule's start/stop states. */
	@NotNull
	Handle rule(@NotNull GrammarAST ruleAST, @NotNull String name, @NotNull Handle blk);

	/** Allocate a fresh ATN state. */
	@NotNull
	ATNState newState();

	@NotNull
	Handle label(@NotNull Handle t);

	@NotNull
	Handle listLabel(@NotNull Handle t);

	/** Build an atom matching the single token referenced by {@code node}. */
	@NotNull
	Handle tokenRef(@NotNull TerminalAST node);

	/** Build a set transition over {@code alts}, optionally inverted. */
	@NotNull
	Handle set(@NotNull GrammarAST associatedAST, @NotNull List<GrammarAST> alts, boolean invert);

	/** Build a transition for a lexer character-set literal such as [a-z]. */
	@NotNull
	Handle charSetLiteral(@NotNull GrammarAST charSetAST);

	/** Build a transition matching the character range {@code a}..{@code b}. */
	@NotNull
	Handle range(@NotNull GrammarAST a, @NotNull GrammarAST b);

	/** For a non-lexer, just build a simple token reference atom.
	 *  For a lexer, a string is a sequence of char to match.  That is,
	 *  "fog" is treated as 'f' 'o' 'g' not as a single transition in
	 *  the DFA.  Machine== o-'f'-&gt;o-'o'-&gt;o-'g'-&gt;o and has n+1 states
	 *  for n characters.
	 */
	@NotNull
	Handle stringLiteral(@NotNull TerminalAST stringLiteralAST);

	/** For reference to rule r, build
	 *
	 *  o-e-&gt;(r)  o
	 *
	 *  where (r) is the start of rule r and the trailing o is not linked
	 *  to from rule ref state directly (it's done thru the transition(0)
	 *  RuleClosureTransition.
	 *
	 *  If the rule r is just a list of tokens, it's block will be just
	 *  a set on an edge o-&gt;o-&gt;o-set-&gt;o-&gt;o-&gt;o, could inline it rather than doing
	 *  the rule reference, but i'm not doing this yet as I'm not sure
	 *  it would help much in the ATN-&gt;DFA construction.
	 *
	 *  TODO add to codegen: collapse alt blks that are sets into single matchSet
	 *  @param node
	 */
	@NotNull
	Handle ruleRef(@NotNull GrammarAST node);

	/** From an empty alternative build Grip o-e-&gt;o */
	@NotNull
	Handle epsilon(@NotNull GrammarAST node);

	/** Build what amounts to an epsilon transition with a semantic
	 *  predicate action.  The pred is a pointer into the AST of
	 *  the SEMPRED token.
	 */
	@NotNull
	Handle sempred(@NotNull PredAST pred);

	/** Build what amounts to an epsilon transition with an action.
	 *  The action goes into ATN though it is ignored during analysis.
	 */
	@NotNull
	Handle action(@NotNull ActionAST action);

	@NotNull
	Handle action(@NotNull String action);

	/** Chain the element handles {@code els} into a single alternative. */
	@NotNull
	Handle alt(@NotNull List<Handle> els);

	/** From A|B|..|Z alternative block build
	 *
	 *  o-&gt;o-A-&gt;o-&gt;o (last ATNState is blockEndATNState pointed to by all alts)
	 *  |          ^
	 *  o-&gt;o-B-&gt;o--|
	 *  |          |
	 *  ...        |
	 *  |          |
	 *  o-&gt;o-Z-&gt;o--|
	 *
	 *  So every alternative gets begin ATNState connected by epsilon
	 *  and every alt right side points at a block end ATNState.  There is a
	 *  new ATNState in the ATNState in the Grip for each alt plus one for the
	 *  end ATNState.
	 *
	 *  Special case: only one alternative: don't make a block with alt
	 *  begin/end.
	 *
	 *  Special case: if just a list of tokens/chars/sets, then collapse
	 *  to a single edge'd o-set-&gt;o graph.
	 *
	 *  Set alt number (1..n) in the left-Transition ATNState.
	 */
	@NotNull
	Handle block(@NotNull BlockAST blockAST, @NotNull GrammarAST ebnfRoot, @NotNull List<Handle> alternativeGrips);

//	Handle notBlock(GrammarAST blockAST, Handle set);

	/** From (A)? build either:
	 *
	 *  o--A-&gt;o
	 *  |     ^
	 *  o----&gt;|
	 *
	 *  or, if A is a block, just add an empty alt to the end of the block
	 */
	@NotNull
	Handle optional(@NotNull GrammarAST optAST, @NotNull Handle blk);

	/** From (A)+ build
	 *
	 *     |---|    (Transition 2 from A.right points at alt 1)
	 *     v   |    (follow of loop is Transition 1)
	 *  o-&gt;o-A-o-&gt;o
	 *
	 *  Meaning that the last ATNState in A points back to A's left Transition ATNState
	 *  and we add a new begin/end ATNState.  A can be single alternative or
	 *  multiple.
	 *
	 *  During analysis we'll call the follow link (transition 1) alt n+1 for
	 *  an n-alt A block.
	 */
	@NotNull
	Handle plus(@NotNull GrammarAST plusAST, @NotNull Handle blk);

	/** From (A)* build
	 *
	 *     |---|
	 *     v   |
	 *  o-&gt;o-A-o--o (Transition 2 from block end points at alt 1; follow is Transition 1)
	 *  |         ^
	 *  o---------| (optional branch is 2nd alt of optional block containing A+)
	 *
	 *  Meaning that the last (end) ATNState in A points back to A's
	 *  left side ATNState and we add 3 new ATNStates (the
	 *  optional branch is built just like an optional subrule).
	 *  See the Aplus() method for more on the loop back Transition.
	 *  The new node on right edge is set to RIGHT_EDGE_OF_CLOSURE so we
	 *  can detect nested (A*)* loops and insert an extra node.  Previously,
	 *  two blocks shared same EOB node.
	 *
	 *  There are 2 or 3 decision points in a A*.  If A is not a block (i.e.,
	 *  it only has one alt), then there are two decisions: the optional bypass
	 *  and then loopback.  If A is a block of alts, then there are three
	 *  decisions: bypass, loopback, and A's decision point.
	 *
	 *  Note that the optional bypass must be outside the loop as (A|B)* is
	 *  not the same thing as (A|B|)+.
	 *
	 *  This is an accurate ATN representation of the meaning of (A)*, but
	 *  for generating code, I don't need a DFA for the optional branch by
	 *  virtue of how I generate code.  The exit-loopback-branch decision
	 *  is sufficient to let me make an appropriate enter, exit, loop
	 *  determination.  See codegen.g
	 */
	@NotNull
	Handle star(@NotNull GrammarAST starAST, @NotNull Handle blk);

	/** Build an atom with all possible values in its label */
	@NotNull
	Handle wildcard(@NotNull GrammarAST associatedAST);

	@NotNull
	Handle lexerAltCommands(@NotNull Handle alt, @NotNull Handle cmds);

	@NotNull
	Handle lexerCallCommand(@NotNull GrammarAST ID, @NotNull GrammarAST arg);

	@NotNull
	Handle lexerCommand(@NotNull GrammarAST ID);
}
package example.com.powerinterview.activities; import android.app.AlertDialog; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.os.Bundle; import android.util.Base64; import android.util.Log; import android.view.View; import android.widget.EditText; import android.widget.LinearLayout; import com.loopj.android.http.AsyncHttpResponseHandler; import com.loopj.android.http.JsonHttpResponseHandler; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.math.BigInteger; import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; import java.util.Arrays; import java.util.Iterator; import java.util.Map; import javax.crypto.KeyGenerator; import javax.crypto.SecretKey; import butterknife.BindView; import butterknife.ButterKnife; import butterknife.Unbinder; import cz.msebera.android.httpclient.Header; import example.com.powerinterview.R; import example.com.powerinterview.core.BaseInterviewController; import example.com.powerinterview.core.BaseQuestionController; import example.com.powerinterview.core.InterviewLogger; import example.com.powerinterview.core.PowerInterviewApp; import example.com.powerinterview.exceptions.ConvertException; import example.com.powerinterview.exceptions.EncryptionException; import example.com.powerinterview.exceptions.FactoryException; import example.com.powerinterview.exceptions.InterviewElementNotFoundException; import example.com.powerinterview.interfaces.IPIWidgetsFactory; import example.com.powerinterview.interfaces.InterviewProvider; import example.com.powerinterview.managers.AccountManager; import example.com.powerinterview.managers.InterviewsTemplatesManager; import example.com.powerinterview.model.Interview; import example.com.powerinterview.model.Variable; import 
example.com.powerinterview.network.InterviewClient; import example.com.powerinterview.ui.CustomToast; import example.com.powerinterview.utils.Converter; import example.com.powerinterview.utils.Encrypt; import example.com.powerinterview.utils.Validator; public class InterviewActivity extends BaseWorkerActivity implements InterviewProvider { private AccountManager accountManager; private Unbinder unbinder; private InterviewsTemplatesManager interviewsTemplatesManager; private BaseInterviewController controller; private long interviewID; private InterviewClient interviewClient; private Interview interview; @BindView(R.id.interviewArea) LinearLayout interviewArea; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_interview); File file = (File) getIntent().getSerializableExtra("template"); interviewID = getIntent().getLongExtra("id", -1); if(interviewID == -1) finish(); interviewsTemplatesManager = ((PowerInterviewApp) getApplication()).getInterviewComponent().getInterviewManager(); accountManager = ((PowerInterviewApp) getApplication()).getAuthComponent().accountManager(); interviewClient = ((PowerInterviewApp) getApplication()).getInterviewComponent().getInterviewClient(); unbinder = ButterKnife.bind(this); try { interview = interviewsTemplatesManager.loadInterviewByFile(file); IPIWidgetsFactory factory = ((PowerInterviewApp) getApplication()).getInterviewComponent().getWidgetsFactory(); controller = new BaseInterviewController(this, new BaseQuestionController(factory)); controller.initInterview(interview); } catch (Exception e) { e.printStackTrace(); handleException(e); } //clear log InterviewLogger.clearLog(); } @Override protected void onDestroy() { super.onDestroy(); unbinder.unbind(); } @Override public void displayViews(View[] views) { interviewArea.removeAllViews(); for (View view: views) { interviewArea.addView(view); } } @Override public void endInterview() { final EditText 
respondentEmail = new EditText(this); respondentEmail.setHint(getString(R.string.respondent_email_string)); respondentEmail.setMaxEms(20); AlertDialog alertDialog = new AlertDialog.Builder(this) .setMessage("Interview is over") .setPositiveButton("Send results", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { InterviewLogger.writeToInterviewLog("Interview is over"); InterviewLogger.writeToInterviewLog("Variables:"); Iterator it = interview.getVariables().entrySet().iterator(); while (it.hasNext()) { Map.Entry<String, Variable> pair = (Map.Entry)it.next(); InterviewLogger.writeToInterviewLog(pair.getKey() + "---- " + pair.getValue().getValue()); } String email = respondentEmail.getText().toString(); if(email.isEmpty() || !Validator.checkEmail(email)) { InterviewActivity.this.showToast("Incorrect respondent's email", CustomToast.ToastType.TOAST_ALERT); dialog.dismiss(); return; } sendInterview(email); } }) .setNegativeButton("Cancel", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { dialog.dismiss(); } }).create(); alertDialog.setView(respondentEmail); alertDialog.show(); } @Override public void handleException(Exception ex) { super.handleException(ex); } private void sendInterview(String respondentEmail) { String aes = null; try { aes = Encrypt.generateRandomAESKey(); } catch (EncryptionException e) { e.printStackTrace(); return; } try { showProgressDialog(getString(R.string.uploading)); interviewClient.sendInterviewResults(accountManager.getToken(), interviewID, respondentEmail, new ByteArrayInputStream(InterviewLogger.getResults(aes)),null, aes, new JsonHttpResponseHandler() { @Override public void onSuccess(int statusCode, Header[] headers, JSONObject response) { dismissProgressDialog(); try { if(displayResult(response)) { showSuccessDialog(response.getString("key")); } } catch (JSONException e) { e.printStackTrace(); } } @Override public void 
onFailure(int statusCode, Header[] headers, String responseString, Throwable throwable) { dismissProgressDialog(); writeDebugLog("Interview loader", "exception from server, status code: " + statusCode + "message: " + responseString); } }); } catch (Exception e) { dismissProgressDialog(); e.printStackTrace(); } } private void showSuccessDialog(String key) { AlertDialog alertDialog = new AlertDialog.Builder(this) .setMessage(getString(R.string.inrerview_upload_success) + key) .setPositiveButton("OK", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { dialog.dismiss(); finish(); } }) .create(); alertDialog.show(); } @Override public Context getContext() { return this; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.directory.api.ldap.model.message; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.directory.api.ldap.model.entry.Attribute; import org.apache.directory.api.ldap.model.entry.DefaultAttribute; import org.apache.directory.api.ldap.model.entry.DefaultModification; import org.apache.directory.api.ldap.model.entry.Modification; import org.apache.directory.api.ldap.model.entry.ModificationOperation; import org.apache.directory.api.ldap.model.exception.LdapException; import org.apache.directory.api.ldap.model.exception.LdapInvalidAttributeValueException; import org.apache.directory.api.ldap.model.name.Dn; import org.junit.Test; import org.junit.runner.RunWith; import com.mycila.junit.concurrent.Concurrency; import com.mycila.junit.concurrent.ConcurrentJunitRunner; /** * Test case for the ModifyRequestImpl class. 
* * @author <a href="mailto:dev@directory.apache.org"> Apache Directory Project</a> */ @RunWith(ConcurrentJunitRunner.class) @Concurrency() public class ModifyRequestImplTest { private static final Map<String, Control> EMPTY_CONTROL_MAP = new HashMap<String, Control>(); /** * Builds a ModifyRequest for testing purposes. * * @return the ModifyRequest to use for tests */ private ModifyRequestImpl getRequest() throws LdapException { // Construct the Modify request to test ModifyRequestImpl req = new ModifyRequestImpl(); req.setMessageId( 45 ); try { req.setName( new Dn( "cn=admin,dc=apache,dc=org" ) ); } catch ( LdapException ne ) { // do nothing } Attribute attr = new DefaultAttribute( "attr0" ); attr.add( "val0" ); attr.add( "val1" ); attr.add( "val2" ); Modification item = new DefaultModification( ModificationOperation.ADD_ATTRIBUTE, attr ); req.addModification( item ); attr = new DefaultAttribute( "attr1" ); attr.add( "val3" ); item = new DefaultModification( ModificationOperation.REMOVE_ATTRIBUTE, attr ); req.addModification( item ); attr = new DefaultAttribute( "attr2" ); attr.add( "val4" ); attr.add( "val5" ); item = new DefaultModification( ModificationOperation.REPLACE_ATTRIBUTE, attr ); req.addModification( item ); return req; } /** * Tests the same object reference for equality. */ @Test public void testEqualsSameObj() throws LdapException { ModifyRequestImpl req = getRequest(); assertTrue( req.equals( req ) ); } /** * Tests for equality using exact copies. */ @Test public void testEqualsExactCopy() throws LdapException { ModifyRequestImpl req0 = getRequest(); ModifyRequestImpl req1 = getRequest(); assertTrue( req0.equals( req1 ) ); } /** * Tests the same object reference for equal hashCode. */ @Test public void testHashCodeSameObj() throws LdapException { ModifyRequestImpl req = getRequest(); assertTrue( req.hashCode() == req.hashCode() ); } /** * Tests for equal hashCode using exact copies. 
*/ @Test public void testHashCodeExactCopy() throws LdapException { ModifyRequestImpl req0 = getRequest(); ModifyRequestImpl req1 = getRequest(); assertTrue( req0.hashCode() == req1.hashCode() ); } /** * Test for inequality when only the IDs are different. */ @Test public void testNotEqualDiffId() { ModifyRequestImpl req0 = new ModifyRequestImpl(); req0.setMessageId( 7 ); ModifyRequestImpl req1 = new ModifyRequestImpl(); req1.setMessageId( 5 ); assertFalse( req0.equals( req1 ) ); } /** * Test for inequality when only the Dn names are different. */ @Test public void testNotEqualDiffName() { try { ModifyRequestImpl req0 = getRequest(); req0.setName( new Dn( "cn=admin,dc=example,dc=com" ) ); ModifyRequestImpl req1 = getRequest(); req1.setName( new Dn( "cn=admin,dc=apache,dc=org" ) ); assertFalse( req0.equals( req1 ) ); } catch ( LdapException ine ) { // do nothing } } /** * Test for inequality when only the mods ops are different. */ @Test public void testNotEqualDiffModOps() throws LdapException { ModifyRequestImpl req0 = getRequest(); Attribute attr = new DefaultAttribute( "attr3" ); attr.add( "val0" ); attr.add( "val1" ); attr.add( "val2" ); Modification item = new DefaultModification( ModificationOperation.ADD_ATTRIBUTE, attr ); req0.addModification( item ); ModifyRequestImpl req1 = getRequest(); attr = new DefaultAttribute( "attr3" ); attr.add( "val0" ); attr.add( "val1" ); attr.add( "val2" ); item = new DefaultModification( ModificationOperation.REMOVE_ATTRIBUTE, attr ); req0.addModification( item ); assertFalse( req0.equals( req1 ) ); assertFalse( req1.equals( req0 ) ); } /** * Test for inequality when only the number of mods are different. 
*/ @Test public void testNotEqualDiffModCount() throws LdapException { ModifyRequestImpl req0 = getRequest(); Attribute attr = new DefaultAttribute( "attr3" ); attr.add( "val0" ); attr.add( "val1" ); attr.add( "val2" ); Modification item = new DefaultModification( ModificationOperation.ADD_ATTRIBUTE, attr ); req0.addModification( item ); ModifyRequestImpl req1 = getRequest(); assertFalse( req0.equals( req1 ) ); assertFalse( req1.equals( req0 ) ); } /** * Test for inequality when only the mods attribute Id's are different. */ @Test public void testNotEqualDiffModIds() throws LdapException { ModifyRequestImpl req0 = getRequest(); Attribute attr = new DefaultAttribute( "attr3" ); attr.add( "val0" ); attr.add( "val1" ); attr.add( "val2" ); Modification item = new DefaultModification( ModificationOperation.ADD_ATTRIBUTE, attr ); req0.addModification( item ); ModifyRequestImpl req1 = getRequest(); attr = new DefaultAttribute( "attr4" ); attr.add( "val0" ); attr.add( "val1" ); attr.add( "val2" ); item = new DefaultModification( ModificationOperation.ADD_ATTRIBUTE, attr ); req0.addModification( item ); assertFalse( req0.equals( req1 ) ); assertFalse( req1.equals( req0 ) ); } /** * Test for inequality when only the mods attribute values are different. 
*/ @Test public void testNotEqualDiffModValues() throws LdapException { ModifyRequestImpl req0 = getRequest(); Attribute attr = new DefaultAttribute( "attr3" ); attr.add( "val0" ); attr.add( "val1" ); attr.add( "val2" ); Modification item = new DefaultModification( ModificationOperation.ADD_ATTRIBUTE, attr ); req0.addModification( item ); ModifyRequestImpl req1 = getRequest(); attr = new DefaultAttribute( "attr3" ); attr.add( "val0" ); attr.add( "val1" ); attr.add( "val2" ); attr.add( "val3" ); item = new DefaultModification( ModificationOperation.ADD_ATTRIBUTE, attr ); req0.addModification( item ); assertFalse( req0.equals( req1 ) ); assertFalse( req1.equals( req0 ) ); } /** * Tests for equality even when another BindRequest implementation is used. */ @Test public void testEqualsDiffImpl() throws LdapException { ModifyRequest req0 = new ModifyRequest() { public Collection<Modification> getModifications() { List<Modification> list = new ArrayList<Modification>(); try { Attribute attr = new DefaultAttribute( "attr0" ); attr.add( "val0" ); attr.add( "val1" ); attr.add( "val2" ); Modification item = new DefaultModification( ModificationOperation.ADD_ATTRIBUTE, attr ); list.add( item ); attr = new DefaultAttribute( "attr1" ); attr.add( "val3" ); item = new DefaultModification( ModificationOperation.REMOVE_ATTRIBUTE, attr ); list.add( item ); attr = new DefaultAttribute( "attr2" ); attr.add( "val4" ); attr.add( "val5" ); item = new DefaultModification( ModificationOperation.REPLACE_ATTRIBUTE, attr ); list.add( item ); } catch ( LdapInvalidAttributeValueException liave ) { // Can't happen } return list; } public ModifyRequest addModification( Modification mod ) { return this; } public ModifyRequest removeModification( Modification mod ) { return this; } public Dn getName() { try { return new Dn( "cn=admin,dc=apache,dc=org" ); } catch ( Exception e ) { //do nothing return null; } } public ModifyRequest setName( Dn name ) { return this; } public MessageTypeEnum 
getResponseType() { return MessageTypeEnum.MODIFY_RESPONSE; } public boolean hasResponse() { return true; } public MessageTypeEnum getType() { return MessageTypeEnum.MODIFY_REQUEST; } public Map<String, Control> getControls() { return EMPTY_CONTROL_MAP; } public ModifyRequest addControl( Control a_control ) { return this; } public ModifyRequest removeControl( Control a_control ) { return this; } public int getMessageId() { return 45; } public Object get( Object a_key ) { return null; } public Object put( Object a_key, Object a_value ) { return null; } public void abandon() { } public boolean isAbandoned() { return false; } public ModifyRequest addAbandonListener( AbandonListener listener ) { return this; } public ModifyResponse getResultResponse() { return null; } public ModifyRequest addAllControls( Control[] controls ) { return this; } public boolean hasControl( String oid ) { return false; } public Control getControl( String oid ) { return null; } public ModifyRequest setMessageId( int messageId ) { return this; } public ModifyRequest addModification( Attribute attr, ModificationOperation modOp ) { return this; } public ModifyRequest replace( String attributeName ) { return this; } public ModifyRequest replace( String attributeName, String... attributeValue ) { return this; } public ModifyRequest replace( String attributeName, byte[]... attributeValue ) { return this; } public ModifyRequest replace( Attribute attr ) { return this; } public ModifyRequest add( String attributeName, String... attributeValue ) { return this; } public ModifyRequest add( String attributeName, byte[]... attributeValue ) { return this; } public ModifyRequest add( Attribute attr ) { return this; } public ModifyRequest remove( String attributeName, String... attributeValue ) { return this; } public ModifyRequest remove( String attributeName, byte[]... 
attributeValue ) { return this; } public ModifyRequest remove( Attribute attr ) { return this; } }; ModifyRequestImpl req1 = getRequest(); assertTrue( req1.equals( req0 ) ); } }
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.packages;

import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Interner;
import com.google.devtools.build.lib.cmdline.RepositoryName;
import com.google.devtools.build.lib.concurrent.BlazeInterners;
import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable;
import com.google.devtools.build.lib.skyframe.serialization.autocodec.AutoCodec;
import com.google.devtools.build.lib.syntax.StarlarkThread.Extension;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.vfs.RootedPath;
import com.google.devtools.build.skyframe.SkyFunctionName;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.SkyValue;
import java.util.Map;
import java.util.Objects;

/**
 * A SkyValue that contains the result of the parsing of one part of the WORKSPACE file. The parsing
 * of the WORKSPACE file is split before each series of load statement because we need to resolve
 * repositories before being able to load from those repositories.
 */
public class WorkspaceFileValue implements SkyValue {

  public static final SkyFunctionName WORKSPACE_FILE =
      SkyFunctionName.createHermetic("WORKSPACE_FILE");

  /** Argument for the SkyKey to request a WorkspaceFileValue. */
  @Immutable
  @AutoCodec
  public static class WorkspaceFileKey implements SkyKey {
    // Weak interner so that structurally-equal keys share one canonical instance.
    private static final Interner<WorkspaceFileKey> interner = BlazeInterners.newWeakInterner();

    private final RootedPath path;
    private final int idx;

    // Private: instances are only created via the interning factory below.
    private WorkspaceFileKey(RootedPath path, int idx) {
      this.path = path;
      this.idx = idx;
    }

    // Also used by AutoCodec deserialization (@Instantiator), so deserialized
    // keys go through the interner as well.
    @AutoCodec.VisibleForSerialization
    @AutoCodec.Instantiator
    static WorkspaceFileKey create(RootedPath path, int idx) {
      return interner.intern(new WorkspaceFileKey(path, idx));
    }

    public RootedPath getPath() {
      return path;
    }

    public int getIndex() {
      return idx;
    }

    @Override
    public SkyFunctionName functionName() {
      return WORKSPACE_FILE;
    }

    @Override
    public boolean equals(Object obj) {
      if (this == obj) {
        return true;
      }
      if (!(obj instanceof WorkspaceFileKey)) {
        return false;
      }
      WorkspaceFileKey other = (WorkspaceFileKey) obj;
      return Objects.equals(path, other.path) && idx == other.idx;
    }

    @Override
    public int hashCode() {
      return Objects.hash(path.hashCode(), idx);
    }

    @Override
    public String toString() {
      return path + ", " + idx;
    }
  }

  // Package aggregated from all WORKSPACE chunks up to and including this one.
  private final Package pkg;
  // Index of this chunk within the split WORKSPACE file (0-based).
  private final int idx;
  private final RootedPath path;
  // Whether another chunk follows this one.
  private final boolean hasNext;
  private final ImmutableMap<String, Object> bindings;
  private final ImmutableMap<String, Extension> importMap;
  private final ImmutableMap<String, Integer> importToChunkMap;
  // Derived from pkg in the constructor, not passed in separately.
  private final ImmutableMap<RepositoryName, ImmutableMap<RepositoryName, RepositoryName>>
      repositoryMapping;
  // Mapping of the relative paths of the incrementally updated managed directories
  // to the managing external repositories
  private final ImmutableMap<PathFragment, RepositoryName> managedDirectories;
  // Directories to be excluded from symlinking to the execroot.
  private final ImmutableSortedSet<String> doNotSymlinkInExecrootPaths;

  /**
   * Create a WorkspaceFileValue containing the various values necessary to compute the split
   * WORKSPACE file.
   *
   * @param pkg Package built by agreggating all parts of the split WORKSPACE file up to this one.
   * @param importMap List of imports (i.e., load statements) present in all parts of the split
   *     WORKSPACE file up to this one.
   * @param importToChunkMap Map of all load statements encountered so far to the chunk they
   *     initially appeared in.
   * @param bindings List of top-level variable bindings from the all parts of the split WORKSPACE
   *     file up to this one. The key is the name of the bindings and the value is the actual
   *     object.
   * @param path The rooted path to workspace file to parse.
   * @param idx The index of this part of the split WORKSPACE file (0 for the first one, 1 for the
   *     second one and so on).
   * @param hasNext Is there a next part in the WORKSPACE file or this part the last one?
   * @param managedDirectories Mapping of the relative paths of the incrementally updated managed
   * @param doNotSymlinkInExecrootPaths directories to be excluded from symlinking to the execroot
   */
  public WorkspaceFileValue(
      Package pkg,
      Map<String, Extension> importMap,
      Map<String, Integer> importToChunkMap,
      Map<String, Object> bindings,
      RootedPath path,
      int idx,
      boolean hasNext,
      ImmutableMap<PathFragment, RepositoryName> managedDirectories,
      ImmutableSortedSet<String> doNotSymlinkInExecrootPaths) {
    this.pkg = Preconditions.checkNotNull(pkg);
    this.idx = idx;
    this.path = path;
    this.hasNext = hasNext;
    // Defensive immutable snapshots of the caller-supplied maps.
    this.bindings = ImmutableMap.copyOf(bindings);
    this.importMap = ImmutableMap.copyOf(importMap);
    this.importToChunkMap = ImmutableMap.copyOf(importToChunkMap);
    this.repositoryMapping = pkg.getExternalPackageRepositoryMappings();
    this.managedDirectories = managedDirectories;
    this.doNotSymlinkInExecrootPaths = doNotSymlinkInExecrootPaths;
  }

  /**
   * Returns the package. This package may contain errors, in which case the caller should throw
   * a {@link BuildFileContainsErrorsException}.
   */
  public Package getPackage() {
    return pkg;
  }

  @Override
  public String toString() {
    return "<WorkspaceFileValue path=" + path + " idx=" + idx + ">";
  }

  /**
   * Creates a Key for the WorkspaceFileFunction. The path to the workspace file is specified by
   * {@code path}. This key will ask WorkspaceFileFunction to get the {@code idx+1}-th part of the
   * workspace file (so idx = 0 represents the first part, idx = 1, the second part, etc...).
   */
  public static WorkspaceFileKey key(RootedPath path, int idx) {
    return WorkspaceFileKey.create(path, idx);
  }

  /** Key for the first chunk (idx 0) of the workspace file at {@code path}. */
  public static WorkspaceFileKey key(RootedPath path) {
    return key(path, 0);
  }

  /**
   * Get the key for the next WorkspaceFileValue or null if this value is the last part of the
   * workspace file.
   */
  public SkyKey next() {
    if (hasNext) {
      return key(path, idx + 1);
    } else {
      return null;
    }
  }

  /**
   * The workspace file parsing is cut in several parts and this function returns the index of the
   * part of the workspace file that this value holds. For the first part, this index will be 0, for
   * the second part, it will be 1 and so on.
   */
  public int getIndex() {
    return idx;
  }

  /**
   * The workspace file parsing is cut in several parts and this function returns true if there is
   * a part following the part holds by this value (or false if this is the last part of the
   * WORKSPACE file.
   *
   * <p>This method is public for serialization of the WorkspaceFileValue, #next() should be used
   * to iterate instead of this method.
   */
  public boolean hasNext() {
    return hasNext;
  }

  public RootedPath getPath() {
    return path;
  }

  public ImmutableMap<String, Object> getBindings() {
    return bindings;
  }

  public ImmutableMap<String, Extension> getImportMap() {
    return importMap;
  }

  public ImmutableMap<String, Integer> getImportToChunkMap() {
    return importToChunkMap;
  }

  public ImmutableMap<RepositoryName, ImmutableMap<RepositoryName, RepositoryName>>
      getRepositoryMapping() {
    return repositoryMapping;
  }

  public ImmutableMap<PathFragment, RepositoryName> getManagedDirectories() {
    return managedDirectories;
  }

  public ImmutableSortedSet<String> getDoNotSymlinkInExecrootPaths() {
    return doNotSymlinkInExecrootPaths;
  }
}
/*
 * Copyright 2014 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.apiman.manager.api.gateway.rest;

import io.apiman.common.util.AesEncrypter;
import io.apiman.common.util.ApimanStrLookup;
import io.apiman.common.util.crypt.CurrentDataEncrypter;
import io.apiman.common.util.crypt.DataEncryptionContext;
import io.apiman.gateway.engine.beans.Api;
import io.apiman.gateway.engine.beans.ApiEndpoint;
import io.apiman.gateway.engine.beans.Client;
import io.apiman.gateway.engine.beans.SystemStatus;
import io.apiman.gateway.engine.beans.exceptions.PublishingException;
import io.apiman.gateway.engine.beans.exceptions.RegistrationException;
import io.apiman.manager.api.beans.gateways.GatewayBean;
import io.apiman.manager.api.beans.gateways.RestGatewayConfigBean;
import io.apiman.manager.api.gateway.GatewayAuthenticationException;
import io.apiman.manager.api.gateway.IGatewayLink;
import io.apiman.manager.api.gateway.i18n.Messages;

import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;

import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang3.text.StrLookup;
import org.apache.commons.lang3.text.StrSubstitutor;
import org.apache.http.HttpException;
import org.apache.http.HttpRequest;
import org.apache.http.HttpRequestInterceptor;
import org.apache.http.conn.ssl.NoopHostnameVerifier;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.conn.ssl.TrustStrategy;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.protocol.HttpContext;
import org.apache.http.ssl.SSLContextBuilder;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;

/**
 * An implementation of a Gateway Link that uses the Gateway's simple REST
 * API to publish APIs.
 *
 * @author eric.wittmann@redhat.com
 */
public class RestGatewayLink implements IGatewayLink {

    // Resolves ${...} property references found in the gateway configuration.
    private static StrLookup LOOKUP = new ApimanStrLookup();
    private static StrSubstitutor PROPERTY_SUBSTITUTOR = new StrSubstitutor(LOOKUP);
    static {
        // ':' separates a property name from its default value, e.g. ${name:default}.
        PROPERTY_SUBSTITUTOR.setValueDelimiter(':');
    }
    private static final ObjectMapper mapper = new ObjectMapper();

    private static SSLConnectionSocketFactory sslConnectionFactory;
    static {
        try {
            SSLContextBuilder builder = new SSLContextBuilder();
            // NOTE(review): this TrustStrategy accepts *every* certificate chain and the
            // hostname verifier below is a no-op, so TLS to the gateway is unauthenticated.
            // Presumably intentional for self-signed gateway certs — confirm before reuse.
            builder.loadTrustMaterial(null, new TrustStrategy() {
                @Override
                public boolean isTrusted(X509Certificate[] chain, String authType) throws CertificateException {
                    return true;
                }
            });
            sslConnectionFactory = new SSLConnectionSocketFactory(builder.build(), NoopHostnameVerifier.INSTANCE);
        } catch (Exception e) {
            // Static init failure is unrecoverable; fail class loading.
            throw new RuntimeException(e);
        }
    }

    @SuppressWarnings("unused")
    private GatewayBean gateway;
    private CloseableHttpClient httpClient;
    // Lazily created by getClient().
    private GatewayClient gatewayClient;
    private RestGatewayConfigBean config;

    /**
     * Constructor.  Decrypts and property-substitutes the gateway's JSON
     * configuration, decrypts the stored password, and builds an HTTP client
     * that adds BASIC auth to every request.
     * @param gateway the gateway
     */
    public RestGatewayLink(final GatewayBean gateway) {
        try {
            this.gateway = gateway;
            String cfg = gateway.getConfiguration();
            // Order matters: decrypt the stored blob first, then expand ${...}
            // properties, then parse as JSON.
            cfg = CurrentDataEncrypter.instance.decrypt(cfg, new DataEncryptionContext());
            cfg = PROPERTY_SUBSTITUTOR.replace(cfg);
            setConfig((RestGatewayConfigBean) mapper.reader(RestGatewayConfigBean.class).readValue(cfg));
            // Password is stored AES-encrypted inside the (already decrypted) config.
            getConfig().setPassword(AesEncrypter.decrypt(getConfig().getPassword()));
            httpClient = HttpClientBuilder.create()
                    .setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE)
                    .setSSLSocketFactory(sslConnectionFactory)
                    .addInterceptorFirst(new HttpRequestInterceptor() {
                        @Override
                        public void process(HttpRequest request, HttpContext context) throws HttpException, IOException {
                            configureBasicAuth(request);
                        }
                    }).build();
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * @see io.apiman.manager.api.gateway.IGatewayLink#close()
     */
    @Override
    public void close() {
        try {
            httpClient.close();
        } catch (IOException e) {
            // TODO log the error?
        }
    }

    /**
     * Checks that the gateway is up.
     */
    private boolean isGatewayUp() throws GatewayAuthenticationException {
        SystemStatus status = getClient().getStatus();
        return status.isUp();
    }

    /**
     * @see io.apiman.manager.api.gateway.IGatewayLink#getStatus()
     */
    @Override
    public SystemStatus getStatus() throws GatewayAuthenticationException {
        return getClient().getStatus();
    }

    /**
     * @see io.apiman.manager.api.gateway.IGatewayLink#getApiEndpoint(java.lang.String, java.lang.String, java.lang.String)
     */
    @Override
    public ApiEndpoint getApiEndpoint(String organizationId, String apiId, String version)
            throws GatewayAuthenticationException {
        return getClient().getApiEndpoint(organizationId, apiId, version);
    }

    /**
     * @see io.apiman.manager.api.gateway.IGatewayLink#publishApi(io.apiman.gateway.engine.beans.Api)
     */
    @Override
    public void publishApi(Api api) throws PublishingException, GatewayAuthenticationException {
        // Fail fast with a clear message rather than letting the publish call time out.
        if (!isGatewayUp()) {
            throw new PublishingException(Messages.i18n.format("RestGatewayLink.GatewayNotRunning")); //$NON-NLS-1$
        }
        getClient().publish(api);
    }

    /**
     * @see io.apiman.manager.api.gateway.IGatewayLink#retireApi(io.apiman.gateway.engine.beans.Api)
     */
    @Override
    public void retireApi(Api api) throws PublishingException, GatewayAuthenticationException {
        if (!isGatewayUp()) {
            throw new PublishingException(Messages.i18n.format("RestGatewayLink.GatewayNotRunning")); //$NON-NLS-1$
        }
        getClient().retire(api.getOrganizationId(), api.getApiId(), api.getVersion());
    }

    /**
     * @see io.apiman.manager.api.gateway.IGatewayLink#registerClient(io.apiman.gateway.engine.beans.Client)
     */
    @Override
    public void registerClient(Client client) throws RegistrationException, GatewayAuthenticationException {
        if (!isGatewayUp()) {
            throw new RegistrationException(Messages.i18n.format("RestGatewayLink.GatewayNotRunning")); //$NON-NLS-1$
        }
        getClient().register(client);
    }

    /**
     * @see io.apiman.manager.api.gateway.IGatewayLink#unregisterClient(io.apiman.gateway.engine.beans.Client)
     */
    @Override
    public void unregisterClient(Client client) throws RegistrationException, GatewayAuthenticationException {
        if (!isGatewayUp()) {
            throw new RegistrationException(Messages.i18n.format("RestGatewayLink.GatewayNotRunning")); //$NON-NLS-1$
        }
        getClient().unregister(client.getOrganizationId(), client.getClientId(), client.getVersion());
    }

    /**
     * Configures BASIC authentication for the request.
     * @param request
     */
    protected void configureBasicAuth(HttpRequest request) {
        try {
            String username = getConfig().getUsername();
            String password = getConfig().getPassword();
            String up = username + ":" + password; //$NON-NLS-1$
            String base64 = new String(Base64.encodeBase64(up.getBytes("UTF-8"))); //$NON-NLS-1$
            String authHeader = "Basic " + base64; //$NON-NLS-1$
            request.setHeader("Authorization", authHeader); //$NON-NLS-1$
        } catch (UnsupportedEncodingException e) {
            // Cannot happen: UTF-8 is a guaranteed charset.
            throw new RuntimeException(e);
        }
    }

    /**
     * @return the gateway client
     */
    protected GatewayClient getClient() {
        if (gatewayClient == null) {
            gatewayClient = createClient();
        }
        return gatewayClient;
    }

    /**
     * @return a newly created rest gateway client
     */
    private GatewayClient createClient() {
        String gatewayEndpoint = getConfig().getEndpoint();
        return new GatewayClient(gatewayEndpoint, httpClient);
    }

    /**
     * @return the config
     */
    public RestGatewayConfigBean getConfig() {
        return config;
    }

    /**
     * @param config the config to set
     */
    public void setConfig(RestGatewayConfigBean config) {
        this.config = config;
    }
}
/** * Opensec OVAL - https://nakamura5akihito.github.io/ * Copyright (C) 2015 Akihito Nakamura * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.opensec.oval.model.unix; import io.opensec.oval.model.ComponentType; import io.opensec.oval.model.Family; import io.opensec.oval.model.sc.EntityItemStringType; import io.opensec.oval.model.sc.ItemType; import io.opensec.oval.model.sc.StatusEnumeration; /** * * @author Akihito Nakamura, AIST * @see <a href="http://oval.mitre.org/language/">OVAL Language</a> * @deprecated Deprecated as of version 5.10: * The Source Code Control System (SCCS) is obsolete. * The sccs_test may be removed in a future version of the language. */ @Deprecated public class SccsItem extends ItemType { //{0..1} private EntityItemStringType filepath; private EntityItemStringType path; private EntityItemStringType filename; private EntityItemStringType module_name; private EntityItemStringType module_type; private EntityItemStringType release; private EntityItemStringType level; private EntityItemStringType branch; private EntityItemStringType sequence; private EntityItemStringType what_string; /** * Constructor. 
*/ public SccsItem() { this( 0 ); } public SccsItem( final int id ) { this( id, null ); } public SccsItem( final int id, final StatusEnumeration status ) { super( id, status ); _oval_family = Family.UNIX; _oval_component = ComponentType.SCCS; } /** */ public void setFilepath( final EntityItemStringType filepath ) { this.filepath = filepath; } public EntityItemStringType getFilepath() { return filepath; } /** */ public void setPath( final EntityItemStringType path ) { this.path = path; } public EntityItemStringType getPath() { return path; } /** */ public void setFilename( final EntityItemStringType filename ) { this.filename = filename; } public EntityItemStringType getFilename() { return filename; } /** */ public void setModuleName( final EntityItemStringType module_name ) { this.module_name = module_name; } public EntityItemStringType getModuleName() { return module_name; } /** */ public void setModuleType( final EntityItemStringType module_type ) { this.module_type = module_type; } public EntityItemStringType getModuleType() { return module_type; } /** */ public void setRelease( final EntityItemStringType release ) { this.release = release; } public EntityItemStringType getRelease() { return release; } /** */ public void setLevel( final EntityItemStringType level ) { this.level = level; } public EntityItemStringType getLevel() { return level; } /** */ public void setBranch( final EntityItemStringType branch ) { this.branch = branch; } public EntityItemStringType getBranch() { return branch; } /** */ public void setSequence( final EntityItemStringType sequence ) { this.sequence = sequence; } public EntityItemStringType getSequence() { return sequence; } /** */ public void setWhatString( final EntityItemStringType what_string ) { this.what_string = what_string; } public EntityItemStringType getWhatString() { return what_string; } //************************************************************** // java.lang.Object 
//************************************************************** @Override public int hashCode() { return super.hashCode(); } @Override public boolean equals( final Object obj ) { if (!(obj instanceof SccsItem)) { return false; } return super.equals( obj ); } @Override public String toString() { return "sccs_item[" + super.toString() + ", filepath=" + getFilepath() + ", path=" + getPath() + ", filename=" + getFilename() + ", module_name=" + getModuleName() + ", module_type=" + getModuleType() + ", release=" + getRelease() + ", level=" + getLevel() + ", branch=" + getBranch() + ", sequence=" + getSequence() + ", what_string=" + getWhatString() + "]"; } } //
/* * Copyright 2000-2013 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.jetbrains.python.edu; import com.google.common.collect.Sets; import com.intellij.codeInsight.CodeInsightSettings; import com.intellij.codeInsight.intention.IntentionActionBean; import com.intellij.codeInsight.intention.IntentionManager; import com.intellij.execution.Executor; import com.intellij.execution.ExecutorRegistryImpl; import com.intellij.execution.executors.DefaultDebugExecutor; import com.intellij.ide.AppLifecycleListener; import com.intellij.ide.GeneralSettings; import com.intellij.ide.SelectInTarget; import com.intellij.ide.projectView.impl.AbstractProjectViewPane; import com.intellij.ide.scopeView.ScopeViewPane; import com.intellij.ide.ui.UISettings; import com.intellij.ide.ui.customization.ActionUrl; import com.intellij.ide.ui.customization.CustomActionsSchema; import com.intellij.ide.ui.customization.CustomizationUtil; import com.intellij.ide.util.PropertiesComponent; import com.intellij.ide.util.TipAndTrickBean; import com.intellij.notification.EventLog; import com.intellij.openapi.actionSystem.ActionManager; import com.intellij.openapi.actionSystem.AnAction; import com.intellij.openapi.actionSystem.DefaultActionGroup; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.editor.colors.EditorColorsManager; import com.intellij.openapi.editor.colors.EditorColorsScheme; import 
com.intellij.openapi.editor.ex.EditorSettingsExternalizable; import com.intellij.openapi.extensions.ExtensionPoint; import com.intellij.openapi.extensions.Extensions; import com.intellij.openapi.extensions.ExtensionsArea; import com.intellij.openapi.fileChooser.impl.FileChooserUtil; import com.intellij.openapi.fileTypes.FileTypeManager; import com.intellij.openapi.keymap.Keymap; import com.intellij.openapi.keymap.ex.KeymapManagerEx; import com.intellij.openapi.keymap.impl.KeymapImpl; import com.intellij.openapi.keymap.impl.ui.Group; import com.intellij.openapi.project.DumbAwareRunnable; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ProjectManager; import com.intellij.openapi.project.ProjectManagerListener; import com.intellij.openapi.project.ex.ProjectManagerEx; import com.intellij.openapi.startup.StartupManager; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.Ref; import com.intellij.openapi.vfs.VfsUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.wm.*; import com.intellij.platform.DirectoryProjectConfigurator; import com.intellij.platform.PlatformProjectViewOpener; import com.intellij.profile.codeInspection.InspectionProjectProfileManager; import com.intellij.projectImport.ProjectAttachProcessor; import com.intellij.psi.PsiDirectory; import com.intellij.psi.PsiManager; import com.intellij.psi.codeStyle.CodeStyleSettings; import com.intellij.psi.codeStyle.CodeStyleSettingsManager; import com.intellij.ui.treeStructure.Tree; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.messages.MessageBus; import com.intellij.util.messages.MessageBusConnection; import com.intellij.util.ui.tree.TreeUtil; import com.jetbrains.python.PythonLanguage; import com.jetbrains.python.codeInsight.PyCodeInsightSettings; import com.jetbrains.python.inspections.PyPep8Inspection; import org.jetbrains.annotations.NonNls; import 
org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreeNode;
import javax.swing.tree.TreePath;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

/**
 * One-time IDE configuration for PyCharm Edu: hides unrelated actions, tool
 * windows, tips and extensions, and applies education-friendly defaults.
 * Each configuration wave is guarded by a CONFIGURED* property so it runs
 * at most once per installation.
 *
 * @author traff
 */
@SuppressWarnings({"UtilityClassWithoutPrivateConstructor", "UtilityClassWithPublicConstructor"})
public class PyCharmEduInitialConfigurator {
  // Property guards: each marks a configuration wave as already applied.
  @NonNls private static final String DISPLAYED_PROPERTY = "PyCharmEDU.initialConfigurationShown";
  @NonNls private static final String CONFIGURED = "PyCharmEDU.InitialConfiguration";
  @NonNls private static final String CONFIGURED_V1 = "PyCharmEDU.InitialConfiguration.V1";
  @NonNls private static final String CONFIGURED_V2 = "PyCharmEDU.InitialConfiguration.V2";
  @NonNls private static final String CONFIGURED_V3 = "PyCharmEDU.InitialConfiguration.V3";
  @NonNls private static final String CONFIGURED_V4 = "PyCharmEDU.InitialConfiguration.V4";

  // Tip-of-the-day files that do not apply to the Edu edition.
  private static final Set<String> UNRELATED_TIPS = Sets.newHashSet("LiveTemplatesDjango.html", "TerminalOpen.html",
                                                                    "Terminal.html", "ConfiguringTerminal.html");
  // Action IDs removed from the customized actions schema (see patchMainMenu).
  private static final Set<String> HIDDEN_ACTIONS = ContainerUtil.newHashSet("CopyAsPlainText", "CopyAsRichText", "EditorPasteSimple",
                                                                     "Folding", "Generate", "CompareClipboardWithSelection",
                                                                     "ChangeFileEncodingAction", "CloseAllUnmodifiedEditors",
                                                                     "CloseAllUnpinnedEditors", "CloseAllEditorsButActive", "CopyReference",
                                                                     "MoveTabRight", "MoveTabDown", "External Tools",
                                                                     "MoveEditorToOppositeTabGroup", "OpenEditorInOppositeTabGroup",
                                                                     "ChangeSplitOrientation", "PinActiveTab", "Tabs Placement",
                                                                     "TabsAlphabeticalMode", "AddNewTabToTheEndMode", "NextTab",
                                                                     "PreviousTab", "Add to Favorites", "Add All To Favorites",
                                                                     "ValidateXml", "NewHtmlFile", "CleanPyc", "Images.ShowThumbnails",
                                                                     "CompareFileWithEditor", "SynchronizeCurrentFile",
                                                                     "Mark Directory As", "CompareTwoFiles", "ShowFilePath",
                                                                     "ChangesView.ApplyPatch", "TemplateProjectProperties",
                                                                     "ExportToHTML", "SaveAll", "Export/Import Actions",
                                                                     "Synchronize", "Line Separators", "ToggleReadOnlyAttribute",
                                                                     "Macros", "EditorToggleCase", "EditorJoinLines", "FillParagraph",
                                                                     "Convert Indents", "TemplateParametersNavigation", "EscapeEntities",
                                                                     "QuickDefinition", "ExpressionTypeInfo", "EditorContextInfo",
                                                                     "ShowErrorDescription", "RecentChanges", "CompareActions",
                                                                     "GotoCustomRegion", "JumpToLastChange", "JumpToNextChange",
                                                                     "SelectIn", "GotoTypeDeclaration", "QuickChangeScheme",
                                                                     "GotoTest", "GotoRelated", "Hierarchy Actions", "Bookmarks",
                                                                     "Goto Error/Bookmark Actions", "GoToEditPointGroup",
                                                                     "Change Navigation Actions", "Method Navigation Actions",
                                                                     "EvaluateExpression", "Pause", "ViewBreakpoints",
                                                                     "XDebugger.MuteBreakpoints", "SaveAs",
                                                                     "XDebugger.SwitchWatchesInVariables");

  /**
   * Early-initialization hook: patches root-area extension points before the
   * rest of the configurator runs.
   */
  public static class First {
    public First() {
      patchRootAreaExtensions();
    }
  }

  /**
   * Applies the one-time configuration waves (guarded by the CONFIGURED*
   * properties) and subscribes to application/project lifecycle events for
   * the remaining per-session patching.
   *
   * @noinspection UnusedParameters
   */
  public PyCharmEduInitialConfigurator(MessageBus bus,
                                       CodeInsightSettings codeInsightSettings,
                                       final PropertiesComponent propertiesComponent,
                                       FileTypeManager fileTypeManager,
                                       final ProjectManagerEx projectManager) {
    final UISettings uiSettings = UISettings.getInstance();
    if (!propertiesComponent.getBoolean(CONFIGURED_V4)) {
      propertiesComponent.setValue(CONFIGURED_V4, true);
    }
    if (!propertiesComponent.getBoolean(CONFIGURED_V2)) {
      EditorSettingsExternalizable editorSettings = EditorSettingsExternalizable.getInstance();
      editorSettings.setEnsureNewLineAtEOF(true);
      propertiesComponent.setValue(CONFIGURED_V2, true);
    }
    if (!propertiesComponent.getBoolean(CONFIGURED_V1)) {
      patchMainMenu();
      uiSettings.setShowNavigationBar(false);
      propertiesComponent.setValue(CONFIGURED_V1, true);
      propertiesComponent.setValue("ShowDocumentationInToolWindow", true);
    }
    if (!propertiesComponent.getBoolean(CONFIGURED)) {
      // First-ever run: apply the education-oriented editor/UI defaults.
      propertiesComponent.setValue(CONFIGURED, "true");
      propertiesComponent.setValue("toolwindow.stripes.buttons.info.shown", "true");
      uiSettings.setHideToolStripes(false);
      uiSettings.setShowMemoryIndicator(false);
      uiSettings.setShowDirectoryForNonUniqueFilenames(true);
      uiSettings.setShowMainToolbar(false);
      codeInsightSettings.REFORMAT_ON_PASTE = CodeInsightSettings.NO_REFORMAT;
      GeneralSettings.getInstance().setShowTipsOnStartup(false);
      EditorSettingsExternalizable.getInstance().setVirtualSpace(false);
      EditorSettingsExternalizable.getInstance().getOptions().ARE_LINE_NUMBERS_SHOWN = true;
      final CodeStyleSettings settings = CodeStyleSettingsManager.getInstance().getCurrentSettings();
      settings.getCommonSettings(PythonLanguage.getInstance()).ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true;
      uiSettings.setShowDirectoryForNonUniqueFilenames(true);
      uiSettings.setShowMemoryIndicator(false);
      // Hide compiled-python artifacts; the write action must run on the EDT.
      final String ignoredFilesList = fileTypeManager.getIgnoredFilesList();
      ApplicationManager.getApplication().invokeLater(() -> ApplicationManager.getApplication().runWriteAction(() ->
        FileTypeManager.getInstance().setIgnoredFilesList(ignoredFilesList + ";*$py.class")));
      PyCodeInsightSettings.getInstance().SHOW_IMPORT_POPUP = false;
    }
    final EditorColorsScheme editorColorsScheme =
      EditorColorsManager.getInstance().getScheme(EditorColorsScheme.DEFAULT_SCHEME_NAME);
    editorColorsScheme.setEditorFontSize(14);

    MessageBusConnection connection = bus.connect();
    connection.subscribe(AppLifecycleListener.TOPIC, new AppLifecycleListener() {
      @Override
      public void welcomeScreenDisplayed() {
        // Keymap patching is deferred to the first welcome screen and only
        // ever performed once (DISPLAYED_PROPERTY double-checked after the
        // invokeLater hop).
        if (!propertiesComponent.isValueSet(DISPLAYED_PROPERTY)) {
          ApplicationManager.getApplication().invokeLater(() -> {
            if (!propertiesComponent.isValueSet(DISPLAYED_PROPERTY)) {
              GeneralSettings.getInstance().setShowTipsOnStartup(false);
              patchKeymap();
              propertiesComponent.setValue(DISPLAYED_PROPERTY, "true");
            }
          });
        }
      }

      @Override
      public void appFrameCreated(String[] commandLineArgs, @NotNull Ref<Boolean> willOpenProject) {
        if (!propertiesComponent.isValueSet(CONFIGURED_V3)) {
          propertiesComponent.setValue(CONFIGURED_V3, "true");
        }
      }
    });
    connection.subscribe(ProjectManager.TOPIC, new ProjectManagerListener() {
      @Override
      public void projectOpened(final Project project) {
        if (FileChooserUtil.getLastOpenedFile(project) == null) {
          FileChooserUtil.setLastOpenedFile(project, VfsUtil.getUserHomeDir());
        }
        patchProjectAreaExtensions(project);
        StartupManager.getInstance(project).runWhenProjectIsInitialized(new DumbAwareRunnable() {
          @Override
          public void run() {
            if (project.isDisposed()) return;
            updateInspectionsProfile();
            openProjectStructure();
          }

          private void openProjectStructure() {
            ToolWindowManager.getInstance(project).invokeLater(new Runnable() {
              int count = 0;

              @Override
              public void run() {
                if (project.isDisposed()) return;
                if (count++ < 3) { // we need to call this after ToolWindowManagerImpl.registerToolWindowsFromBeans
                  ToolWindowManager.getInstance(project).invokeLater(this);
                  return;
                }
                ToolWindow toolWindow = ToolWindowManager.getInstance(project).getToolWindow("Project");
                if (toolWindow != null && toolWindow.getType() != ToolWindowType.SLIDING) {
                  toolWindow.activate(null);
                }
              }
            });
          }

          private void updateInspectionsProfile() {
            // Suppress selected PEP8 warnings for the project's base directory.
            final String[] codes = new String[]{"W29", "E501"};
            final VirtualFile baseDir = project.getBaseDir();
            final PsiDirectory directory = PsiManager.getInstance(project).findDirectory(baseDir);
            if (directory != null) {
              InspectionProjectProfileManager.getInstance(project).getCurrentProfile().modifyToolSettings(
                Key.<PyPep8Inspection>create(PyPep8Inspection.INSPECTION_SHORT_NAME), directory,
                inspection -> Collections.addAll(inspection.ignoredErrors, codes)
              );
            }
          }
        });
      }
    });
  }

  /**
   * Builds a customized actions schema that hides Edu-irrelevant entries from
   * the main menu, then installs it globally.
   */
  private static void patchMainMenu() {
    final CustomActionsSchema schema = new CustomActionsSchema();

    final JTree actionsTree = new Tree();
    Group rootGroup = new Group("root", null, null);
    final DefaultMutableTreeNode root = new DefaultMutableTreeNode(rootGroup);
    DefaultTreeModel model = new DefaultTreeModel(root);
    actionsTree.setModel(model);

    schema.fillActionGroups(root);
    for (int i = 0; i < root.getChildCount(); i++) {
      final DefaultMutableTreeNode treeNode = (DefaultMutableTreeNode)root.getChildAt(i);
      if ("Main menu".equals(getItemId(treeNode))) {
        hideActionFromMainMenu(root, schema, treeNode);
      }
      hideActions(schema, root, treeNode, HIDDEN_ACTIONS);
    }
    CustomActionsSchema.getInstance().copyFrom(schema);
  }

  /** Hides whole top-level menus (Tools, VCS, ...) from the main menu node. */
  private static void hideActionFromMainMenu(@NotNull final DefaultMutableTreeNode root,
                                             @NotNull final CustomActionsSchema schema, DefaultMutableTreeNode mainMenu){
    final HashSet<String> menuItems = ContainerUtil.newHashSet("Tools", "VCS", "Refactor", "Window", "Run");
    hideActions(schema, root, mainMenu, menuItems);
  }

  /**
   * Recursively walks the action-group tree, recording a DELETED ActionUrl in
   * the schema for every child whose ID is in {@code items}.
   */
  private static void hideActions(@NotNull CustomActionsSchema schema, @NotNull DefaultMutableTreeNode root,
                                  @NotNull final TreeNode actionGroup, Set<String> items) {
    for(int i = 0; i < actionGroup.getChildCount(); i++){
      final DefaultMutableTreeNode child = (DefaultMutableTreeNode)actionGroup.getChildAt(i);
      final int childCount = child.getChildCount();
      final String childId = getItemId(child);
      if (childId != null && items.contains(childId)){
        final TreePath treePath = TreeUtil.getPath(root, child);
        final ActionUrl url = CustomizationUtil.getActionUrl(treePath, ActionUrl.DELETED);
        schema.addAction(url);
      }
      else if (childCount > 0) {
        hideActions(schema, child, child, items);
      }
    }
  }

  /** Returns the action ID for a tree node's user object (String or Group), or null. */
  @Nullable
  private static String getItemId(@NotNull final DefaultMutableTreeNode child) {
    final Object userObject = child.getUserObject();
    if (userObject instanceof String) return (String)userObject;
    return userObject instanceof Group ? ((Group)userObject).getName() : null;
  }

  /**
   * Unregisters application-level extensions that do not apply to the Edu
   * edition: selected tool windows, the platform project-view opener,
   * unrelated tips, the regex-check intention, and all project-attach
   * processors.
   */
  private static void patchRootAreaExtensions() {
    ExtensionsArea rootArea = Extensions.getArea(null);
    rootArea.unregisterExtensionPoint("com.intellij.runLineMarkerContributor");

    for (ToolWindowEP ep : Extensions.getExtensions(ToolWindowEP.EP_NAME)) {
      if (ToolWindowId.FAVORITES_VIEW.equals(ep.id) || ToolWindowId.TODO_VIEW.equals(ep.id) || EventLog.LOG_TOOL_WINDOW_ID.equals(ep.id)
          || ToolWindowId.STRUCTURE_VIEW.equals(ep.id)) {
        rootArea.getExtensionPoint(ToolWindowEP.EP_NAME).unregisterExtension(ep);
      }
    }

    for (DirectoryProjectConfigurator ep : Extensions.getExtensions(DirectoryProjectConfigurator.EP_NAME)) {
      if (ep instanceof PlatformProjectViewOpener) {
        rootArea.getExtensionPoint(DirectoryProjectConfigurator.EP_NAME).unregisterExtension(ep);
      }
    }

    // unregister unrelated tips
    for (TipAndTrickBean tip : Extensions.getExtensions(TipAndTrickBean.EP_NAME)) {
      if (UNRELATED_TIPS.contains(tip.fileName)) {
        rootArea.getExtensionPoint(TipAndTrickBean.EP_NAME).unregisterExtension(tip);
      }
    }
    for (IntentionActionBean ep : Extensions.getExtensions(IntentionManager.EP_INTENTION_ACTIONS)) {
      if ("org.intellij.lang.regexp.intention.CheckRegExpIntentionAction".equals(ep.className)) {
        rootArea.getExtensionPoint(IntentionManager.EP_INTENTION_ACTIONS).unregisterExtension(ep);
      }
    }

    final ExtensionPoint<ProjectAttachProcessor> point = Extensions.getRootArea().getExtensionPoint(ProjectAttachProcessor.EP_NAME);
    for (ProjectAttachProcessor attachProcessor : Extensions.getExtensions(ProjectAttachProcessor.EP_NAME)) {
      point.unregisterExtension(attachProcessor);
    }
  }

  /**
   * Per-project patching: removes the debug executor actions and unregisters
   * Favorites/Structure select-in targets and the scope project-view pane.
   */
  private static void patchProjectAreaExtensions(@NotNull final Project project) {
    Executor debugExecutor = DefaultDebugExecutor.getDebugExecutorInstance();
    unregisterAction(debugExecutor.getId(), ExecutorRegistryImpl.RUNNERS_GROUP);
    unregisterAction(debugExecutor.getContextActionId(), ExecutorRegistryImpl.RUN_CONTEXT_GROUP);

    ExtensionsArea projectArea = Extensions.getArea(project);
    for (SelectInTarget target : Extensions.getExtensions(SelectInTarget.EP_NAME, project)) {
      if (ToolWindowId.FAVORITES_VIEW.equals(target.getToolWindowId()) ||
          ToolWindowId.STRUCTURE_VIEW.equals(target.getToolWindowId())) {
        projectArea.getExtensionPoint(SelectInTarget.EP_NAME).unregisterExtension(target);
      }
    }

    for (AbstractProjectViewPane pane : Extensions.getExtensions(AbstractProjectViewPane.EP_NAME, project)) {
      if (pane.getId().equals(ScopeViewPane.ID)) {
        Disposer.dispose(pane);
        projectArea.getExtensionPoint(AbstractProjectViewPane.EP_NAME).unregisterExtension(pane);
      }
    }
  }

  /**
   * Removes an action from the given group and unregisters it, if both the
   * action and a DefaultActionGroup with that ID exist.
   */
  private static void unregisterAction(String actionId, String groupId) {
    ActionManager actionManager = ActionManager.getInstance();
    AnAction action = actionManager.getAction(actionId);
    if (action != null) {
      AnAction actionGroup = actionManager.getAction(groupId);
      if (actionGroup instanceof DefaultActionGroup) {
        ((DefaultActionGroup)actionGroup).remove(action);
        actionManager.unregisterAction(actionId);
      }
    }
  }

  /**
   * Clears shortcuts for dropped actions in every bundled (non-modifiable)
   * keymap.
   */
  private static void patchKeymap() {
    Set<String> droppedActions = ContainerUtil.newHashSet(
      "AddToFavoritesPopup",
      "DatabaseView.ImportDataSources",
      "CompileDirty", "Compile",
      // hidden
      "AddNewFavoritesList", "EditFavorites", "RenameFavoritesList", "RemoveFavoritesList");
    KeymapManagerEx keymapManager = KeymapManagerEx.getInstanceEx();

    for (Keymap keymap : keymapManager.getAllKeymaps()) {
      if (keymap.canModify()) continue;

      KeymapImpl keymapImpl = (KeymapImpl)keymap;

      for (String id : keymapImpl.getOwnActionIds()) {
        if (droppedActions.contains(id)) keymapImpl.clearOwnActionsId(id);
      }
    }
  }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.cluster.routing; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.ESAllocationTestCase; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.NodeRoles; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; import org.junit.Before; import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import static java.util.Collections.singleton; import static 
org.elasticsearch.cluster.routing.DelayedAllocationService.CLUSTER_UPDATE_TASK_SOURCE;
import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;
import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;

/**
 * Tests for {@code DelayedAllocationService}: verifies when a delayed-reroute
 * task is (and is not) scheduled after nodes leave the cluster, using a mocked
 * ClusterService and nano-time overrides for deterministic timing.
 */
public class DelayedAllocationServiceTests extends ESAllocationTestCase {

    private TestDelayAllocationService delayedAllocationService;
    private MockAllocationService allocationService;
    private ClusterService clusterService;
    private ThreadPool threadPool;

    /**
     * Wires a TestDelayAllocationService to a mocked ClusterService and a mock
     * gateway allocator, and verifies the constructor's registration calls.
     */
    @Before
    public void createDelayedAllocationService() {
        threadPool = new TestThreadPool(getTestName());
        clusterService = mock(ClusterService.class);
        allocationService = createAllocationService(Settings.EMPTY, new DelayedShardsMockGatewayAllocator());
        when(clusterService.getSettings()).thenReturn(NodeRoles.masterOnlyNode());
        delayedAllocationService = new TestDelayAllocationService(threadPool, clusterService, allocationService);
        verify(clusterService).addListener(delayedAllocationService);
        verify(clusterService).getSettings();
    }

    @After
    public void shutdownThreadPool() throws Exception {
        terminate(threadPool);
    }

    /**
     * With a zero delay setting, a node leaving must never produce a delayed
     * reroute task, whether or not a replacement node is available.
     */
    public void testNoDelayedUnassigned() throws Exception {
        Metadata metadata = Metadata.builder()
            .put(IndexMetadata.builder("test").settings(settings(Version.CURRENT)
                .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "0"))
                .numberOfShards(1).numberOfReplicas(1))
            .build();
        ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY))
            .metadata(metadata)
            .routingTable(RoutingTable.builder().addAsNew(metadata.index("test")).build()).build();
        clusterState = ClusterState.builder(clusterState)
            .nodes(DiscoveryNodes.builder().add(newNode("node1")).add(newNode("node2")).localNodeId("node1").masterNodeId("node1"))
            .build();
        clusterState = allocationService.reroute(clusterState, "reroute");
        // starting primaries
        clusterState = startInitializingShardsAndReroute(allocationService, clusterState);
        // starting replicas
        clusterState = startInitializingShardsAndReroute(allocationService, clusterState);
        assertThat(clusterState.getRoutingNodes().unassigned().size() > 0, equalTo(false));
        ClusterState prevState = clusterState;
        // remove node2 and reroute
        DiscoveryNodes.Builder nodes = DiscoveryNodes.builder(clusterState.nodes()).remove("node2");
        boolean nodeAvailableForAllocation = randomBoolean();
        if (nodeAvailableForAllocation) {
            nodes.add(newNode("node3"));
        }
        clusterState = ClusterState.builder(clusterState).nodes(nodes).build();
        clusterState = allocationService.disassociateDeadNodes(clusterState, true, "reroute");
        ClusterState newState = clusterState;
        List<ShardRouting> unassignedShards = newState.getRoutingTable().shardsWithState(ShardRoutingState.UNASSIGNED);
        if (nodeAvailableForAllocation) {
            assertThat(unassignedShards.size(), equalTo(0));
        } else {
            assertThat(unassignedShards.size(), equalTo(1));
            assertThat(unassignedShards.get(0).unassignedInfo().isDelayed(), equalTo(false));
        }
        delayedAllocationService.clusterChanged(new ClusterChangedEvent("test", newState, prevState));
        // no interaction with the cluster service and no scheduled task expected
        verifyNoMoreInteractions(clusterService);
        assertNull(delayedAllocationService.delayedRerouteTask.get());
    }

    /**
     * With a non-zero delay, losing the node holding a replica must schedule a
     * delayed reroute task with the remaining delay; running it removes the
     * delay, and replaying the resulting state schedules nothing new.
     */
    public void testDelayedUnassignedScheduleReroute() throws Exception {
        TimeValue delaySetting = timeValueMillis(100);
        Metadata metadata = Metadata.builder()
            .put(IndexMetadata.builder("test").settings(settings(Version.CURRENT)
                .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delaySetting))
                .numberOfShards(1).numberOfReplicas(1))
            .build();
        ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY))
            .metadata(metadata)
            .routingTable(RoutingTable.builder().addAsNew(metadata.index("test")).build()).build();
        clusterState = ClusterState.builder(clusterState)
            .nodes(DiscoveryNodes.builder().add(newNode("node1")).add(newNode("node2")).localNodeId("node1").masterNodeId("node1"))
            .build();
        final long baseTimestampNanos = System.nanoTime();
        allocationService.setNanoTimeOverride(baseTimestampNanos);
        clusterState = allocationService.reroute(clusterState, "reroute");
        // starting primaries
        clusterState = startInitializingShardsAndReroute(allocationService, clusterState);
        // starting replicas
        clusterState = startInitializingShardsAndReroute(allocationService, clusterState);
        assertFalse("no shards should be unassigned", clusterState.getRoutingNodes().unassigned().size() > 0);
        String nodeId = null;
        final List<ShardRouting> allShards = clusterState.getRoutingTable().allShards("test");
        // we need to find the node with the replica otherwise we will not reroute
        for (ShardRouting shardRouting : allShards) {
            if (shardRouting.primary() == false) {
                nodeId = shardRouting.currentNodeId();
                break;
            }
        }
        assertNotNull(nodeId);
        // remove node that has replica and reroute
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).remove(nodeId)).build();
        clusterState = allocationService.disassociateDeadNodes(clusterState, true, "reroute");
        ClusterState stateWithDelayedShard = clusterState;
        // make sure the replica is marked as delayed (i.e. not reallocated)
        assertEquals(1, UnassignedInfo.getNumberOfDelayedUnassigned(stateWithDelayedShard));
        ShardRouting delayedShard = stateWithDelayedShard.getRoutingNodes().unassigned().iterator().next();
        assertEquals(baseTimestampNanos, delayedShard.unassignedInfo().getUnassignedTimeInNanos());
        // mock ClusterService.submitStateUpdateTask() method
        CountDownLatch latch = new CountDownLatch(1);
        AtomicReference<ClusterStateUpdateTask> clusterStateUpdateTask = new AtomicReference<>();
        doAnswer(invocationOnMock -> {
            clusterStateUpdateTask.set((ClusterStateUpdateTask)invocationOnMock.getArguments()[1]);
            latch.countDown();
            return null;
        }).when(clusterService).submitStateUpdateTask(eq(CLUSTER_UPDATE_TASK_SOURCE), any(ClusterStateUpdateTask.class));
        assertNull(delayedAllocationService.delayedRerouteTask.get());
        // deliver the node-left event part-way into the delay window
        long delayUntilClusterChangeEvent = TimeValue.timeValueNanos(randomInt((int)delaySetting.nanos() - 1)).nanos();
        long clusterChangeEventTimestampNanos = baseTimestampNanos + delayUntilClusterChangeEvent;
        delayedAllocationService.setNanoTimeOverride(clusterChangeEventTimestampNanos);
        delayedAllocationService.clusterChanged(new ClusterChangedEvent("fake node left", stateWithDelayedShard, clusterState));
        // check that delayed reroute task was created and registered with the proper settings
        DelayedAllocationService.DelayedRerouteTask delayedRerouteTask = delayedAllocationService.delayedRerouteTask.get();
        assertNotNull(delayedRerouteTask);
        assertFalse(delayedRerouteTask.cancelScheduling.get());
        assertThat(delayedRerouteTask.baseTimestampNanos, equalTo(clusterChangeEventTimestampNanos));
        assertThat(delayedRerouteTask.nextDelay.nanos(),
            equalTo(delaySetting.nanos() - (clusterChangeEventTimestampNanos - baseTimestampNanos)));
        // check that submitStateUpdateTask() was invoked on the cluster service mock
        assertTrue(latch.await(30, TimeUnit.SECONDS));
        verify(clusterService).submitStateUpdateTask(eq(CLUSTER_UPDATE_TASK_SOURCE), eq(clusterStateUpdateTask.get()));
        // advance the time on the allocation service to a timestamp that happened after the delayed scheduling
        long nanoTimeForReroute = clusterChangeEventTimestampNanos + delaySetting.nanos() + timeValueMillis(randomInt(200)).nanos();
        allocationService.setNanoTimeOverride(nanoTimeForReroute);
        // apply cluster state
        ClusterState stateWithRemovedDelay = clusterStateUpdateTask.get().execute(stateWithDelayedShard);
        // check that shard is not delayed anymore
        assertEquals(0, UnassignedInfo.getNumberOfDelayedUnassigned(stateWithRemovedDelay));
        // check that task is now removed
        assertNull(delayedAllocationService.delayedRerouteTask.get());
        // simulate calling listener (cluster change event)
        delayedAllocationService.setNanoTimeOverride(nanoTimeForReroute + timeValueMillis(randomInt(200)).nanos());
        delayedAllocationService.clusterChanged(
            new ClusterChangedEvent(CLUSTER_UPDATE_TASK_SOURCE, stateWithRemovedDelay, stateWithDelayedShard));
        // check that no new task is scheduled
        assertNull(delayedAllocationService.delayedRerouteTask.get());
        // check that no further cluster state update was submitted
        verifyNoMoreInteractions(clusterService);
    }

    /**
     * This tests that a new delayed reroute is scheduled right after a delayed reroute was run
     */
    public void testDelayedUnassignedScheduleRerouteAfterDelayedReroute() throws Exception {
        // two indices with different delays so one reroute leaves the other still delayed
        TimeValue shortDelaySetting = timeValueMillis(100);
        TimeValue longDelaySetting = TimeValue.timeValueSeconds(1);
        Metadata metadata = Metadata.builder()
            .put(IndexMetadata.builder("short_delay")
                .settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(),
                    shortDelaySetting))
                .numberOfShards(1).numberOfReplicas(1))
            .put(IndexMetadata.builder("long_delay")
                .settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(),
                    longDelaySetting))
                .numberOfShards(1).numberOfReplicas(1))
            .build();
        ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metadata(metadata)
            .routingTable(RoutingTable.builder().addAsNew(metadata.index("short_delay")).addAsNew(metadata.index("long_delay")).build())
            .nodes(DiscoveryNodes.builder()
                .add(newNode("node0", singleton(DiscoveryNodeRole.MASTER_ROLE))).localNodeId("node0").masterNodeId("node0")
                .add(newNode("node1")).add(newNode("node2")).add(newNode("node3")).add(newNode("node4"))).build();
        // allocate shards
        clusterState = allocationService.reroute(clusterState, "reroute");
        // start primaries
        clusterState = startInitializingShardsAndReroute(allocationService, clusterState);
        // start replicas
        clusterState = startInitializingShardsAndReroute(allocationService, clusterState);
        assertThat("all shards should be started", clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(4));
        // find replica of short_delay
        ShardRouting shortDelayReplica = null;
        for (ShardRouting shardRouting : clusterState.getRoutingTable().allShards("short_delay")) {
            if (shardRouting.primary() == false) {
                shortDelayReplica = shardRouting;
                break;
            }
        }
        assertNotNull(shortDelayReplica);
        // find replica of long_delay
        ShardRouting longDelayReplica = null;
        for (ShardRouting shardRouting : clusterState.getRoutingTable().allShards("long_delay")) {
            if (shardRouting.primary() == false) {
                longDelayReplica = shardRouting;
                break;
            }
        }
        assertNotNull(longDelayReplica);
        final long baseTimestampNanos = System.nanoTime();
        // remove node of shortDelayReplica and node of longDelayReplica and reroute
        ClusterState clusterStateBeforeNodeLeft = clusterState;
        clusterState = ClusterState.builder(clusterState)
            .nodes(DiscoveryNodes.builder(clusterState.nodes())
                .remove(shortDelayReplica.currentNodeId())
                .remove(longDelayReplica.currentNodeId()))
            .build();
        // make sure both replicas are marked as delayed (i.e. not reallocated)
        allocationService.setNanoTimeOverride(baseTimestampNanos);
        clusterState = allocationService.disassociateDeadNodes(clusterState, true, "reroute");
        final ClusterState stateWithDelayedShards = clusterState;
        assertEquals(2, UnassignedInfo.getNumberOfDelayedUnassigned(stateWithDelayedShards));
        RoutingNodes.UnassignedShards.UnassignedIterator iter = stateWithDelayedShards.getRoutingNodes().unassigned().iterator();
        assertEquals(baseTimestampNanos, iter.next().unassignedInfo().getUnassignedTimeInNanos());
        assertEquals(baseTimestampNanos, iter.next().unassignedInfo().getUnassignedTimeInNanos());
        // mock ClusterService.submitStateUpdateTask() method
        CountDownLatch latch1 = new CountDownLatch(1);
        AtomicReference<ClusterStateUpdateTask> clusterStateUpdateTask1 = new AtomicReference<>();
        doAnswer(invocationOnMock -> {
            clusterStateUpdateTask1.set((ClusterStateUpdateTask)invocationOnMock.getArguments()[1]);
            latch1.countDown();
            return null;
        }).when(clusterService).submitStateUpdateTask(eq(CLUSTER_UPDATE_TASK_SOURCE), any(ClusterStateUpdateTask.class));
        assertNull(delayedAllocationService.delayedRerouteTask.get());
        // deliver the node-left event part-way into the SHORT delay window
        long delayUntilClusterChangeEvent = TimeValue.timeValueNanos(randomInt((int)shortDelaySetting.nanos() - 1)).nanos();
        long clusterChangeEventTimestampNanos = baseTimestampNanos + delayUntilClusterChangeEvent;
        delayedAllocationService.setNanoTimeOverride(clusterChangeEventTimestampNanos);
        delayedAllocationService.clusterChanged(
            new ClusterChangedEvent("fake node left", stateWithDelayedShards, clusterStateBeforeNodeLeft));
        // check that delayed reroute task was created and registered with the proper settings
        DelayedAllocationService.DelayedRerouteTask firstDelayedRerouteTask = delayedAllocationService.delayedRerouteTask.get();
        assertNotNull(firstDelayedRerouteTask);
        assertFalse(firstDelayedRerouteTask.cancelScheduling.get());
        assertThat(firstDelayedRerouteTask.baseTimestampNanos, equalTo(clusterChangeEventTimestampNanos));
assertThat(firstDelayedRerouteTask.nextDelay.nanos(), equalTo(UnassignedInfo.findNextDelayedAllocation(clusterChangeEventTimestampNanos, stateWithDelayedShards))); assertThat(firstDelayedRerouteTask.nextDelay.nanos(), equalTo(shortDelaySetting.nanos() - (clusterChangeEventTimestampNanos - baseTimestampNanos))); // check that submitStateUpdateTask() was invoked on the cluster service mock assertTrue(latch1.await(30, TimeUnit.SECONDS)); verify(clusterService).submitStateUpdateTask(eq(CLUSTER_UPDATE_TASK_SOURCE), eq(clusterStateUpdateTask1.get())); // advance the time on the allocation service to a timestamp that happened after the delayed scheduling long nanoTimeForReroute = clusterChangeEventTimestampNanos + shortDelaySetting.nanos() + timeValueMillis(randomInt(50)).nanos(); allocationService.setNanoTimeOverride(nanoTimeForReroute); // apply cluster state ClusterState stateWithOnlyOneDelayedShard = clusterStateUpdateTask1.get().execute(stateWithDelayedShards); // check that shard is not delayed anymore assertEquals(1, UnassignedInfo.getNumberOfDelayedUnassigned(stateWithOnlyOneDelayedShard)); // check that task is now removed assertNull(delayedAllocationService.delayedRerouteTask.get()); // mock ClusterService.submitStateUpdateTask() method again CountDownLatch latch2 = new CountDownLatch(1); AtomicReference<ClusterStateUpdateTask> clusterStateUpdateTask2 = new AtomicReference<>(); doAnswer(invocationOnMock -> { clusterStateUpdateTask2.set((ClusterStateUpdateTask)invocationOnMock.getArguments()[1]); latch2.countDown(); return null; }).when(clusterService).submitStateUpdateTask(eq(CLUSTER_UPDATE_TASK_SOURCE), any(ClusterStateUpdateTask.class)); // simulate calling listener (cluster change event) delayUntilClusterChangeEvent = timeValueMillis(randomInt(50)).nanos(); clusterChangeEventTimestampNanos = nanoTimeForReroute + delayUntilClusterChangeEvent; delayedAllocationService.setNanoTimeOverride(clusterChangeEventTimestampNanos); 
delayedAllocationService.clusterChanged( new ClusterChangedEvent(CLUSTER_UPDATE_TASK_SOURCE, stateWithOnlyOneDelayedShard, stateWithDelayedShards)); // check that new delayed reroute task was created and registered with the proper settings DelayedAllocationService.DelayedRerouteTask secondDelayedRerouteTask = delayedAllocationService.delayedRerouteTask.get(); assertNotNull(secondDelayedRerouteTask); assertFalse(secondDelayedRerouteTask.cancelScheduling.get()); assertThat(secondDelayedRerouteTask.baseTimestampNanos, equalTo(clusterChangeEventTimestampNanos)); assertThat(secondDelayedRerouteTask.nextDelay.nanos(), equalTo(UnassignedInfo.findNextDelayedAllocation(clusterChangeEventTimestampNanos, stateWithOnlyOneDelayedShard))); assertThat(secondDelayedRerouteTask.nextDelay.nanos(), equalTo(longDelaySetting.nanos() - (clusterChangeEventTimestampNanos - baseTimestampNanos))); // check that submitStateUpdateTask() was invoked on the cluster service mock assertTrue(latch2.await(30, TimeUnit.SECONDS)); verify(clusterService).submitStateUpdateTask(eq(CLUSTER_UPDATE_TASK_SOURCE), eq(clusterStateUpdateTask2.get())); // advance the time on the allocation service to a timestamp that happened after the delayed scheduling nanoTimeForReroute = clusterChangeEventTimestampNanos + longDelaySetting.nanos() + timeValueMillis(randomInt(50)).nanos(); allocationService.setNanoTimeOverride(nanoTimeForReroute); // apply cluster state ClusterState stateWithNoDelayedShards = clusterStateUpdateTask2.get().execute(stateWithOnlyOneDelayedShard); // check that shard is not delayed anymore assertEquals(0, UnassignedInfo.getNumberOfDelayedUnassigned(stateWithNoDelayedShards)); // check that task is now removed assertNull(delayedAllocationService.delayedRerouteTask.get()); // simulate calling listener (cluster change event) delayedAllocationService.setNanoTimeOverride(nanoTimeForReroute + timeValueMillis(randomInt(50)).nanos()); delayedAllocationService.clusterChanged( new 
ClusterChangedEvent(CLUSTER_UPDATE_TASK_SOURCE, stateWithNoDelayedShards, stateWithOnlyOneDelayedShard)); // check that no new task is scheduled assertNull(delayedAllocationService.delayedRerouteTask.get()); // check that no further cluster state update was submitted verifyNoMoreInteractions(clusterService); } public void testDelayedUnassignedScheduleRerouteRescheduledOnShorterDelay() throws Exception { TimeValue delaySetting = timeValueSeconds(30); TimeValue shorterDelaySetting = timeValueMillis(100); Metadata metadata = Metadata.builder() .put(IndexMetadata.builder("foo").settings(settings(Version.CURRENT) .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delaySetting)) .numberOfShards(1).numberOfReplicas(1)) .put(IndexMetadata.builder("bar").settings(settings(Version.CURRENT) .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), shorterDelaySetting)) .numberOfShards(1).numberOfReplicas(1)) .build(); ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) .metadata(metadata) .routingTable(RoutingTable.builder() .addAsNew(metadata.index("foo")) .addAsNew(metadata.index("bar")) .build()).build(); clusterState = ClusterState.builder(clusterState) .nodes(DiscoveryNodes.builder() .add(newNode("node1")).add(newNode("node2")).add(newNode("node3")).add(newNode("node4")) .localNodeId("node1").masterNodeId("node1")) .build(); final long nodeLeftTimestampNanos = System.nanoTime(); allocationService.setNanoTimeOverride(nodeLeftTimestampNanos); clusterState = allocationService.reroute(clusterState, "reroute"); // starting primaries clusterState = startInitializingShardsAndReroute(allocationService, clusterState); // starting replicas clusterState = startInitializingShardsAndReroute(allocationService, clusterState); assertFalse("no shards should be unassigned", clusterState.getRoutingNodes().unassigned().size() > 0); String nodeIdOfFooReplica = null; for (ShardRouting shardRouting : 
clusterState.getRoutingTable().allShards("foo")) { if (shardRouting.primary() == false) { nodeIdOfFooReplica = shardRouting.currentNodeId(); break; } } assertNotNull(nodeIdOfFooReplica); // remove node that has replica and reroute clusterState = ClusterState.builder(clusterState).nodes( DiscoveryNodes.builder(clusterState.nodes()).remove(nodeIdOfFooReplica)).build(); clusterState = allocationService.disassociateDeadNodes(clusterState, true, "fake node left"); ClusterState stateWithDelayedShard = clusterState; // make sure the replica is marked as delayed (i.e. not reallocated) assertEquals(1, UnassignedInfo.getNumberOfDelayedUnassigned(stateWithDelayedShard)); ShardRouting delayedShard = stateWithDelayedShard.getRoutingNodes().unassigned().iterator().next(); assertEquals(nodeLeftTimestampNanos, delayedShard.unassignedInfo().getUnassignedTimeInNanos()); assertNull(delayedAllocationService.delayedRerouteTask.get()); long delayUntilClusterChangeEvent = TimeValue.timeValueNanos(randomInt((int)shorterDelaySetting.nanos() - 1)).nanos(); long clusterChangeEventTimestampNanos = nodeLeftTimestampNanos + delayUntilClusterChangeEvent; delayedAllocationService.setNanoTimeOverride(clusterChangeEventTimestampNanos); delayedAllocationService.clusterChanged(new ClusterChangedEvent("fake node left", stateWithDelayedShard, clusterState)); // check that delayed reroute task was created and registered with the proper settings DelayedAllocationService.DelayedRerouteTask delayedRerouteTask = delayedAllocationService.delayedRerouteTask.get(); assertNotNull(delayedRerouteTask); assertFalse(delayedRerouteTask.cancelScheduling.get()); assertThat(delayedRerouteTask.baseTimestampNanos, equalTo(clusterChangeEventTimestampNanos)); assertThat(delayedRerouteTask.nextDelay.nanos(), equalTo(delaySetting.nanos() - (clusterChangeEventTimestampNanos - nodeLeftTimestampNanos))); if (randomBoolean()) { // update settings with shorter delay ClusterState stateWithShorterDelay = 
ClusterState.builder(stateWithDelayedShard).metadata(Metadata.builder( stateWithDelayedShard.metadata()).updateSettings(Settings.builder().put( UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), shorterDelaySetting).build(), "foo")).build(); delayedAllocationService.setNanoTimeOverride(clusterChangeEventTimestampNanos); delayedAllocationService.clusterChanged( new ClusterChangedEvent("apply shorter delay", stateWithShorterDelay, stateWithDelayedShard)); } else { // node leaves with replica shard of index bar that has shorter delay String nodeIdOfBarReplica = null; for (ShardRouting shardRouting : stateWithDelayedShard.getRoutingTable().allShards("bar")) { if (shardRouting.primary() == false) { nodeIdOfBarReplica = shardRouting.currentNodeId(); break; } } assertNotNull(nodeIdOfBarReplica); // remove node that has replica and reroute clusterState = ClusterState.builder(stateWithDelayedShard).nodes( DiscoveryNodes.builder(stateWithDelayedShard.nodes()).remove(nodeIdOfBarReplica)).build(); ClusterState stateWithShorterDelay = allocationService.disassociateDeadNodes(clusterState, true, "fake node left"); delayedAllocationService.setNanoTimeOverride(clusterChangeEventTimestampNanos); delayedAllocationService.clusterChanged( new ClusterChangedEvent("fake node left", stateWithShorterDelay, stateWithDelayedShard)); } // check that delayed reroute task was replaced by shorter reroute task DelayedAllocationService.DelayedRerouteTask shorterDelayedRerouteTask = delayedAllocationService.delayedRerouteTask.get(); assertNotNull(shorterDelayedRerouteTask); assertNotEquals(shorterDelayedRerouteTask, delayedRerouteTask); assertTrue(delayedRerouteTask.cancelScheduling.get()); // existing task was cancelled assertFalse(shorterDelayedRerouteTask.cancelScheduling.get()); assertThat(delayedRerouteTask.baseTimestampNanos, equalTo(clusterChangeEventTimestampNanos)); assertThat(shorterDelayedRerouteTask.nextDelay.nanos(), equalTo(shorterDelaySetting.nanos() - 
(clusterChangeEventTimestampNanos - nodeLeftTimestampNanos))); } private static class TestDelayAllocationService extends DelayedAllocationService { private volatile long nanoTimeOverride = -1L; private TestDelayAllocationService(ThreadPool threadPool, ClusterService clusterService, AllocationService allocationService) { super(threadPool, clusterService, allocationService); } @Override protected void assertClusterOrMasterStateThread() { // do not check this in the unit tests } public void setNanoTimeOverride(long nanoTime) { this.nanoTimeOverride = nanoTime; } @Override protected long currentNanoTime() { return nanoTimeOverride == -1L ? super.currentNanoTime() : nanoTimeOverride; } } }
package de.test.antennapod.ui;

import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import android.test.ActivityInstrumentationTestCase2;
import android.test.FlakyTest;
import android.view.View;
import android.widget.ListView;
import com.robotium.solo.Solo;
import com.robotium.solo.Timeout;
import java.util.List;
import de.danoeh.antennapod.R;
import de.danoeh.antennapod.activity.MainActivity;
import de.danoeh.antennapod.core.feed.FeedItem;
import de.danoeh.antennapod.core.preferences.UserPreferences;
import de.danoeh.antennapod.core.service.playback.PlaybackService;
import de.danoeh.antennapod.core.service.playback.PlayerStatus;
import de.danoeh.antennapod.core.storage.DBReader;
import de.danoeh.antennapod.core.storage.DBWriter;
import de.danoeh.antennapod.core.storage.PodDBAdapter;

/**
 * test cases for starting and ending playback from the MainActivity and AudioPlayerActivity
 *
 * <p>Robotium-driven instrumentation tests. Each test starts from a freshly wiped
 * database (see {@link #setUp()}) and drives playback through the navigation drawer.
 * Timing is controlled via {@code solo.waitForCondition(...)} with Robotium timeouts;
 * the ordering of clicks and waits is significant and intentionally left unchanged.
 */
public class PlaybackTest extends ActivityInstrumentationTestCase2<MainActivity> {

    private static final String TAG = PlaybackTest.class.getSimpleName();

    // Positions of the drawer entries clicked by startLocalPlayback()/
    // startLocalPlaybackFromQueue(). NOTE(review): these indices assume a fixed
    // drawer layout ('Queue' first, 'Episodes' second) — confirm against the
    // nav_list adapter if the drawer order ever changes.
    public static final int EPISODES_DRAWER_LIST_INDEX = 1;
    public static final int QUEUE_DRAWER_LIST_INDEX = 0;

    private Solo solo;              // Robotium driver bound to the MainActivity instance
    private UITestUtils uiTestUtils; // test fixture helper (same package, no import needed)
    private Context context;        // instrumentation target context

    public PlaybackTest() {
        super(MainActivity.class);
    }

    /**
     * Wipes the database, disables headset-related auto-pause/unpause preferences
     * (so playback state is driven only by the test), and sets up local feed fixtures.
     */
    @Override
    public void setUp() throws Exception {
        super.setUp();
        context = getInstrumentation().getTargetContext();
        PodDBAdapter.init(context);
        PodDBAdapter.deleteDatabase();
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        prefs.edit()
                .clear()
                .putBoolean(UserPreferences.PREF_UNPAUSE_ON_HEADSET_RECONNECT, false)
                .putBoolean(UserPreferences.PREF_PAUSE_ON_HEADSET_DISCONNECT, false)
                .commit();
        solo = new Solo(getInstrumentation(), getActivity());
        uiTestUtils = new UITestUtils(context);
        uiTestUtils.setup();
        // create database
        PodDBAdapter adapter = PodDBAdapter.getInstance();
        adapter.open();
        adapter.close();
    }

    /**
     * Closes all opened activities and shuts down the playback service so state
     * does not leak into the next test.
     */
    @Override
    public void tearDown() throws Exception {
        solo.finishOpenedActivities();
        uiTestUtils.tearDown();
        // shut down playback service
        skipEpisode();
        context.sendBroadcast(new Intent(PlaybackService.ACTION_SHUTDOWN_PLAYBACK_SERVICE));
        super.tearDown();
    }

    /** Opens the navigation drawer by clicking the first image button (the hamburger icon). */
    private void openNavDrawer() {
        solo.clickOnImageButton(0);
        getInstrumentation().waitForIdleSync();
    }

    /** Sets the "follow queue" (continuous playback) preference. */
    private void setContinuousPlaybackPreference(boolean value) {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        prefs.edit().putBoolean(UserPreferences.PREF_FOLLOW_QUEUE, value).commit();
    }

    /** Broadcasts a skip request to the playback service. */
    private void skipEpisode() {
        Intent skipIntent = new Intent(PlaybackService.ACTION_SKIP_CURRENT_EPISODE);
        context.sendBroadcast(skipIntent);
    }

    /**
     * Navigates drawer -> Episodes -> all episodes, starts the most recently
     * published episode via its secondary-action button, and asserts that exactly
     * that episode's media becomes the currently playing media.
     */
    private void startLocalPlayback() {
        openNavDrawer();
        // if we try to just click on plain old text then
        // we might wind up clicking on the fragment title and not
        // the drawer element like we want.
        ListView drawerView = (ListView)solo.getView(R.id.nav_list);
        // this should be 'Episodes'
        View targetView = drawerView.getChildAt(EPISODES_DRAWER_LIST_INDEX);
        solo.waitForView(targetView);
        solo.clickOnView(targetView);
        solo.waitForText(solo.getString(R.string.all_episodes_short_label));
        solo.clickOnText(solo.getString(R.string.all_episodes_short_label));
        final List<FeedItem> episodes = DBReader.getRecentlyPublishedEpisodes(10);
        assertTrue(solo.waitForView(solo.getView(R.id.butSecondaryAction)));
        solo.clickOnView(solo.getView(R.id.butSecondaryAction));
        // episodes.get(0) is assumed to be the item whose butSecondaryAction was
        // clicked (most recent episode shown first) — verified by the media id check.
        long mediaId = episodes.get(0).getMedia().getId();
        boolean playing = solo.waitForCondition(() -> {
            if (uiTestUtils.getCurrentMedia(getActivity()) != null) {
                return uiTestUtils.getCurrentMedia(getActivity()).getId() == mediaId;
            } else {
                return false;
            }
        }, Timeout.getSmallTimeout());
        assertTrue(playing);
    }

    /**
     * Navigates drawer -> Queue and starts the first queued episode, asserting
     * that its media becomes the currently playing media.
     */
    private void startLocalPlaybackFromQueue() {
        openNavDrawer();
        // if we try to just click on plain old text then
        // we might wind up clicking on the fragment title and not
        // the drawer element like we want.
        ListView drawerView = (ListView)solo.getView(R.id.nav_list);
        // this should be 'Queue'
        View targetView = drawerView.getChildAt(QUEUE_DRAWER_LIST_INDEX);
        solo.waitForView(targetView);
        solo.clickOnView(targetView);
        assertTrue(solo.waitForView(solo.getView(R.id.butSecondaryAction)));
        final List<FeedItem> queue = DBReader.getQueue();
        solo.clickOnImageButton(1);
        assertTrue(solo.waitForView(solo.getView(R.id.butPlay)));
        long mediaId = queue.get(0).getMedia().getId();
        boolean playing = solo.waitForCondition(() -> {
            if(uiTestUtils.getCurrentMedia(getActivity()) != null) {
                return uiTestUtils.getCurrentMedia(getActivity()).getId() == mediaId;
            } else {
                return false;
            }
        }, Timeout.getSmallTimeout());
        assertTrue(playing);
    }

    /** Smoke test: playback of a local episode starts at all. */
    public void testStartLocal() throws Exception {
        uiTestUtils.addLocalFeedData(true);
        DBWriter.clearQueue().get();
        startLocalPlayback();
    }

    /** With continuous playback off, a single episode plays without follow-up. */
    public void testContinousPlaybackOffSingleEpisode() throws Exception {
        setContinuousPlaybackPreference(false);
        uiTestUtils.addLocalFeedData(true);
        DBWriter.clearQueue().get();
        startLocalPlayback();
    }

    /**
     * With continuous playback off, playback must stop after the first queued
     * episode instead of advancing to the next one.
     */
    @FlakyTest(tolerance = 3)
    public void testContinousPlaybackOffMultipleEpisodes() throws Exception {
        setContinuousPlaybackPreference(false);
        uiTestUtils.addLocalFeedData(true);
        List<FeedItem> queue = DBReader.getQueue();
        final FeedItem first = queue.get(0);
        startLocalPlaybackFromQueue();
        // "stopped" also covers the case where playback advanced to a different
        // media (would be a failure of the preference) — the status assertion
        // below catches a still-playing player either way.
        boolean stopped = solo.waitForCondition(() -> {
            if (uiTestUtils.getPlaybackController(getActivity()).getStatus() != PlayerStatus.PLAYING) {
                return true;
            } else if (uiTestUtils.getCurrentMedia(getActivity()) != null) {
                return uiTestUtils.getCurrentMedia(getActivity()).getId() != first.getMedia().getId();
            } else {
                return true;
            }
        }, Timeout.getSmallTimeout());
        assertTrue(stopped);
        Thread.sleep(1000); // give the service a moment to settle before the final status check
        PlayerStatus status = uiTestUtils.getPlaybackController(getActivity()).getStatus();
        assertFalse(status.equals(PlayerStatus.PLAYING));
    }

    /**
     * With continuous playback on, the second queued episode must start playing
     * after the first one finishes.
     */
    @FlakyTest(tolerance = 3)
    public void testContinuousPlaybackOnMultipleEpisodes() throws Exception {
        setContinuousPlaybackPreference(true);
        uiTestUtils.addLocalFeedData(true);
        List<FeedItem> queue = DBReader.getQueue();
        final FeedItem first = queue.get(0);
        final FeedItem second = queue.get(1);
        startLocalPlaybackFromQueue();
        boolean firstPlaying = solo.waitForCondition(() -> {
            if (uiTestUtils.getCurrentMedia(getActivity()) != null) {
                return uiTestUtils.getCurrentMedia(getActivity()).getId() == first.getMedia().getId();
            } else {
                return false;
            }
        }, Timeout.getSmallTimeout());
        assertTrue(firstPlaying);
        boolean secondPlaying = solo.waitForCondition(() -> {
            if (uiTestUtils.getCurrentMedia(getActivity()) != null) {
                return uiTestUtils.getCurrentMedia(getActivity()).getId() == second.getMedia().getId();
            } else {
                return false;
            }
        }, Timeout.getLargeTimeout());
        assertTrue(secondPlaying);
    }

    /**
     * Check if an episode can be played twice without problems.
     *
     * @param followQueue value for the continuous-playback preference during the check
     */
    private void replayEpisodeCheck(boolean followQueue) throws Exception {
        setContinuousPlaybackPreference(followQueue);
        uiTestUtils.addLocalFeedData(true);
        DBWriter.clearQueue().get();
        final List<FeedItem> episodes = DBReader.getRecentlyPublishedEpisodes(10);
        startLocalPlayback();
        long mediaId = episodes.get(0).getMedia().getId();
        boolean startedPlaying = solo.waitForCondition(() -> {
            if (uiTestUtils.getCurrentMedia(getActivity()) != null) {
                return uiTestUtils.getCurrentMedia(getActivity()).getId() == mediaId;
            } else {
                return false;
            }
        }, Timeout.getSmallTimeout());
        assertTrue(startedPlaying);
        // wait until the episode finished (or was replaced) before replaying it
        boolean stoppedPlaying = solo.waitForCondition(() ->
                uiTestUtils.getCurrentMedia(getActivity()) == null
                || uiTestUtils.getCurrentMedia(getActivity()).getId() != mediaId, Timeout.getLargeTimeout());
        assertTrue(stoppedPlaying);
        startLocalPlayback();
        boolean startedReplay = solo.waitForCondition(() -> {
            if(uiTestUtils.getCurrentMedia(getActivity()) != null) {
                return uiTestUtils.getCurrentMedia(getActivity()).getId() == mediaId;
            } else {
                return false;
            }
        }, Timeout.getLargeTimeout());
        assertTrue(startedReplay);
    }

    /** Replay works with continuous playback enabled. */
    public void testReplayEpisodeContinuousPlaybackOn() throws Exception {
        replayEpisodeCheck(true);
    }

    /** Replay works with continuous playback disabled. */
    public void testReplayEpisodeContinuousPlaybackOff() throws Exception {
        replayEpisodeCheck(false);
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast; import java.io.IOException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashMap; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinLongHashMap; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinLongHashTable; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.HashTableKeyType; import org.apache.hadoop.hive.serde2.binarysortable.fast.BinarySortableDeserializeRead; import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.io.BytesWritable; import org.apache.hive.common.util.HashCodeUtil; import org.apache.tez.runtime.library.api.KeyValueReader; import com.google.common.annotations.VisibleForTesting; /* * An single long value map optimized for vector map join. 
*/ public abstract class VectorMapJoinFastLongHashTable extends VectorMapJoinFastHashTable implements VectorMapJoinLongHashTable { public static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinFastLongHashTable.class); private transient final boolean isLogDebugEnabled = LOG.isDebugEnabled(); private final HashTableKeyType hashTableKeyType; private final boolean isOuterJoin; private final BinarySortableDeserializeRead keyBinarySortableDeserializeRead; private final boolean useMinMax; private long min; private long max; @Override public boolean useMinMax() { return useMinMax; } @Override public long min() { return min; } @Override public long max() { return max; } @Override public void putRow(BytesWritable currentKey, BytesWritable currentValue) throws HiveException, IOException { byte[] keyBytes = currentKey.getBytes(); int keyLength = currentKey.getLength(); keyBinarySortableDeserializeRead.set(keyBytes, 0, keyLength); try { if (!keyBinarySortableDeserializeRead.readNextField()) { return; } } catch (Exception e) { throw new HiveException( "\nDeserializeRead details: " + keyBinarySortableDeserializeRead.getDetailedReadPositionString() + "\nException: " + e.toString()); } long key = VectorMapJoinFastLongHashUtil.deserializeLongKey( keyBinarySortableDeserializeRead, hashTableKeyType); add(key, currentValue); } protected abstract void assignSlot(int slot, long key, boolean isNewKey, BytesWritable currentValue); public void add(long key, BytesWritable currentValue) { if (resizeThreshold <= keysAssigned) { expandAndRehash(); } long hashCode = HashCodeUtil.calculateLongHashCode(key); int intHashCode = (int) hashCode; int slot = (intHashCode & logicalHashBucketMask); long probeSlot = slot; int i = 0; boolean isNewKey; while (true) { int pairIndex = 2 * slot; long valueRef = slotPairs[pairIndex]; if (valueRef == 0) { // LOG.debug("VectorMapJoinFastLongHashTable add key " + key + " slot " + slot + " pairIndex " + pairIndex + " empty slot (i = " + i + ")"); isNewKey = 
true; break; } long tableKey = slotPairs[pairIndex + 1]; if (key == tableKey) { // LOG.debug("VectorMapJoinFastLongHashTable add key " + key + " slot " + slot + " pairIndex " + pairIndex + " found key (i = " + i + ")"); isNewKey = false; break; } ++metricPutConflict; // Some other key (collision) - keep probing. probeSlot += (++i); slot = (int)(probeSlot & logicalHashBucketMask); } if (largestNumberOfSteps < i) { if (isLogDebugEnabled) { LOG.debug("Probed " + i + " slots (the longest so far) to find space"); } largestNumberOfSteps = i; // debugDumpKeyProbe(keyOffset, keyLength, hashCode, slot); } // LOG.debug("VectorMapJoinFastLongHashTable add slot " + slot + " hashCode " + Long.toHexString(hashCode)); assignSlot(slot, key, isNewKey, currentValue); if (isNewKey) { keysAssigned++; if (useMinMax) { if (key < min) { min = key; } if (key > max) { max = key; } } } } private void expandAndRehash() { // We allocate pairs, so we cannot go above highest Integer power of 2 / 4. if (logicalHashBucketCount > ONE_QUARTER_LIMIT) { throwExpandError(ONE_QUARTER_LIMIT, "Long"); } int newLogicalHashBucketCount = logicalHashBucketCount * 2; int newLogicalHashBucketMask = newLogicalHashBucketCount - 1; int newMetricPutConflict = 0; int newLargestNumberOfSteps = 0; int newSlotPairArraySize = newLogicalHashBucketCount * 2; long[] newSlotPairs = new long[newSlotPairArraySize]; for (int slot = 0; slot < logicalHashBucketCount; slot++) { int pairIndex = slot * 2; long valueRef = slotPairs[pairIndex]; if (valueRef != 0) { long tableKey = slotPairs[pairIndex + 1]; // Copy to new slot table. long hashCode = HashCodeUtil.calculateLongHashCode(tableKey); int intHashCode = (int) hashCode; int newSlot = intHashCode & newLogicalHashBucketMask; long newProbeSlot = newSlot; int newPairIndex; int i = 0; while (true) { newPairIndex = newSlot * 2; long newValueRef = newSlotPairs[newPairIndex]; if (newValueRef == 0) { break; } ++newMetricPutConflict; // Some other key (collision) - keep probing. 
newProbeSlot += (++i); newSlot = (int)(newProbeSlot & newLogicalHashBucketMask); } if (newLargestNumberOfSteps < i) { if (isLogDebugEnabled) { LOG.debug("Probed " + i + " slots (the longest so far) to find space"); } newLargestNumberOfSteps = i; // debugDumpKeyProbe(keyOffset, keyLength, hashCode, slot); } // Use old value reference word. // LOG.debug("VectorMapJoinFastLongHashTable expandAndRehash key " + tableKey + " slot " + newSlot + " newPairIndex " + newPairIndex + " empty slot (i = " + i + ")"); newSlotPairs[newPairIndex] = valueRef; newSlotPairs[newPairIndex + 1] = tableKey; } } slotPairs = newSlotPairs; logicalHashBucketCount = newLogicalHashBucketCount; logicalHashBucketMask = newLogicalHashBucketMask; metricPutConflict = newMetricPutConflict; largestNumberOfSteps = newLargestNumberOfSteps; resizeThreshold = (int)(logicalHashBucketCount * loadFactor); metricExpands++; // LOG.debug("VectorMapJoinFastLongHashTable expandAndRehash new logicalHashBucketCount " + logicalHashBucketCount + " resizeThreshold " + resizeThreshold + " metricExpands " + metricExpands); } protected long findReadSlot(long key, long hashCode) { int intHashCode = (int) hashCode; int slot = intHashCode & logicalHashBucketMask; long probeSlot = slot; int i = 0; while (true) { int pairIndex = 2 * slot; long valueRef = slotPairs[pairIndex]; if (valueRef == 0) { // Given that we do not delete, an empty slot means no match. // LOG.debug("VectorMapJoinFastLongHashTable findReadSlot key " + key + " slot " + slot + " pairIndex " + pairIndex + " empty slot (i = " + i + ")"); return -1; } long tableKey = slotPairs[pairIndex + 1]; if (key == tableKey) { // LOG.debug("VectorMapJoinFastLongHashTable findReadSlot key " + key + " slot " + slot + " pairIndex " + pairIndex + " found key (i = " + i + ")"); return slotPairs[pairIndex]; } // Some other key (collision) - keep probing. 
probeSlot += (++i); if (i > largestNumberOfSteps) { // LOG.debug("VectorMapJoinFastLongHashTable findReadSlot returning not found"); // We know we never went that far when we were inserting. // LOG.debug("VectorMapJoinFastLongHashTable findReadSlot key " + key + " slot " + slot + " pairIndex " + pairIndex + " largestNumberOfSteps " + largestNumberOfSteps + " (i = " + i + ")"); return -1; } slot = (int)(probeSlot & logicalHashBucketMask); } } /* * The hash table slots. For a long key hash table, each slot is 2 longs and the array is * 2X sized. * * The slot pair is 1) a non-zero reference word to the first value bytes and 2) the long value. */ protected long[] slotPairs; private void allocateBucketArray() { int slotPairArraySize = 2 * logicalHashBucketCount; slotPairs = new long[slotPairArraySize]; } public VectorMapJoinFastLongHashTable( boolean minMaxEnabled, boolean isOuterJoin, HashTableKeyType hashTableKeyType, int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { super(initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); this.isOuterJoin = isOuterJoin; this.hashTableKeyType = hashTableKeyType; PrimitiveTypeInfo[] primitiveTypeInfos = { hashTableKeyType.getPrimitiveTypeInfo() }; keyBinarySortableDeserializeRead = new BinarySortableDeserializeRead( primitiveTypeInfos, /* useExternalBuffer */ false); allocateBucketArray(); useMinMax = minMaxEnabled; min = Long.MAX_VALUE; max = Long.MIN_VALUE; } }
package org.odata4j.producer;

import java.util.ArrayList;
import java.util.List;

import org.odata4j.expression.EntitySimpleProperty;
import org.odata4j.expression.ExpressionParser;

/**
 * Helps producers determine if a property is $selected and/or $expanded.
 *
 * <p>Note on recursive extensions:
 * The idea here is that when one has an object graph that is a tree of like
 * nodes (such as a class hierarchy), it should be possible to specify a $expand
 * that is applied recursively.
 *
 * <p>Two new custom options are proposed:
 *
 * <p>expandR and selectR
 *
 * <p>ABNF:
 * <pre>
 * expandRQueryOp = "expandR=" recursiveExpandClause *("," recursiveExpandClause)
 * recursiveExpandClause = entityNavProperty "/" expandDepth
 * expandDepth = integer
 *
 * selectRQueryOp = "selectR=" recursiveSelectClause *("," recursiveSelectClause)
 * recursiveSelectClause = rSelectItem *("," recursiveSelectClause)
 * rSelectItem = selectedNavProperty "/" rPropItem
 * rPropItem = "*" / selectedProperty
 * </pre>
 *
 * <p>expandDepth drives the number of traversal iterations. An expandDepth of 0 is
 * unlimited. During query processing, the max expandDepth of all recursiveExpandClauses
 * is computed and drives processing.
 *
 * <p>example:
 * expandR=SubTypes/0,Properties/1
 *
 * <p>This says that at each position in the object graph traversal
 * during query we will expand the SubTypes navigation property. At the first
 * level we will also expand the Properties navigation property
 * <pre>
 * selectR=SubTypes/Namespace,SubTypes/Type
 * </pre>
 * <p>This says that whenever we expand the SubTypes navigation property we will only
 * include Namespace and Type properties.
 */
public class PropertyPathHelper {

  public static final String OptionExpandR = "expandR";
  public static final String OptionSelectR = "selectR";

  /*
   * Our current path in the navigation. An empty path means we are currently
   * at the root object.
   */
  private PropertyPath currentNavPath = new PropertyPath("");

  protected List<PropertyPath> selectPaths;
  protected List<PropertyPath> expandPaths;
  protected List<PropertyPath> selectRPaths;
  protected List<RecursivePropertyPath> expandRPaths;

  public PropertyPathHelper() {
    setup((String) null, null, null, null);
  }

  public PropertyPathHelper(QueryInfo qi) {
    setup(qi == null ? null : qi.select, qi == null ? null : qi.expand, null, null);
  }

  public PropertyPathHelper(String select, String expand) {
    setup(select, expand, null, null);
  }

  public PropertyPathHelper(String select, String expand, String selectR, String expandR) {
    setup(select, expand, selectR, expandR);
  }

  public PropertyPathHelper(List<EntitySimpleProperty> select, List<EntitySimpleProperty> expand) {
    setup(select, expand, null, null);
  }

  public PropertyPathHelper(List<EntitySimpleProperty> select, List<EntitySimpleProperty> expand, String selectR, String expandR) {
    setup(select, expand, parseOption(selectR), parseOption(expandR));
  }

  /**
   * Parses a raw comma-delimited option value into simple properties.
   * Returns null (meaning "option absent") for a null or empty value.
   */
  private static List<EntitySimpleProperty> parseOption(String value) {
    return value != null && value.length() > 0 ? ExpressionParser.parseExpand(value) : null;
  }

  private void setup(String select, String expand, String selectR, String expandR) {
    setup(parseOption(select), parseOption(expand), parseOption(selectR), parseOption(expandR));
  }

  /**
   * Initializes the four path lists from the parsed option values.
   * Each list stays null when its option is absent.
   *
   * @throws IllegalArgumentException if a selectR/expandR clause is malformed
   */
  private void setup(List<EntitySimpleProperty> select, List<EntitySimpleProperty> expand,
      List<EntitySimpleProperty> selectR, List<EntitySimpleProperty> expandR) {

    if (select != null && !select.isEmpty()) {
      selectPaths = new ArrayList<PropertyPath>(select.size());
      for (EntitySimpleProperty p : select) {
        selectPaths.add(new PropertyPath(p.getPropertyName()));
      }
    }

    if (expand != null && !expand.isEmpty()) {
      expandPaths = new ArrayList<PropertyPath>(expand.size());
      for (EntitySimpleProperty p : expand) {
        expandPaths.add(new PropertyPath(p.getPropertyName()));
      }
    }

    if (selectR != null && !selectR.isEmpty()) {
      selectRPaths = new ArrayList<PropertyPath>(selectR.size());
      for (EntitySimpleProperty p : selectR) {
        PropertyPath path = new PropertyPath(p.getPropertyName());
        // ABNF: rSelectItem = selectedNavProperty "/" rPropItem — exactly 2 components
        if (path.getNComponents() != 2) {
          throw new IllegalArgumentException("selectR clause must have 2 components: " + p.getPropertyName());
        }
        selectRPaths.add(path);
      }
    }

    if (expandR != null && !expandR.isEmpty()) {
      expandRPaths = new ArrayList<RecursivePropertyPath>(expandR.size());
      for (EntitySimpleProperty p : expandR) {
        PropertyPath path = new PropertyPath(p.getPropertyName());
        // ABNF: recursiveExpandClause = entityNavProperty "/" expandDepth — exactly 2 components
        if (path.getNComponents() != 2) {
          throw new IllegalArgumentException("expandR clause must have 2 components: " + p.getPropertyName());
        }
        int depth;
        try {
          depth = Integer.parseInt(path.getLastComponent());
        } catch (NumberFormatException ex) {
          // keep the cause so the malformed option value is diagnosable
          throw new IllegalArgumentException(
              "2nd component of expandR clause must be the integer depth: " + p.getPropertyName(), ex);
        }
        expandRPaths.add(new RecursivePropertyPath(path.removeLastComponent(), depth));
      }
    }
  }

  /**
   * Returns true if the $select contains any limiting items on the current navPath.
   *
   * @return true if select is limited, false if not
   */
  protected boolean isSelectionLimited() {
    // selection is only limited if the $select explicitly says so.
    if (selectPaths == null) {
      return false;
    }

    // a match starts with navPath and has a single additional component.
    int nComponentsToMatch = currentNavPath.getNComponents() + 1;

    // we search the entire list, a wild match can occur anywhere and trumps
    // any other matches
    for (PropertyPath p : selectPaths) {
      if (p.getNComponents() == nComponentsToMatch) {
        // this is a candidate path that matches in length
        if (p.isWild()) {
          // wild card says don't limit selection.
          return false;
        } else if (p.startsWith(currentNavPath)) {
          // any match means selection is explicitly limited.
          return true;
        }
      }
    }
    return false;
  }

  /**
   * Returns true if the selectR option limits selection at the current
   * navigation position (matching on the last path component only).
   */
  protected boolean isSelectionLimitedRecursive() {
    // selection is only limited if the selectR explicitly says so.
    if (selectRPaths == null) {
      return false;
    }

    // empty current nav paths: see design notes. These are a problem.
    // for now we force them to use $select redundantly...not perfect..
    if (this.currentNavPath.isEmpty()) {
      return false;
    }

    for (PropertyPath p : selectRPaths) {
      // blat/<propname> matches foo/bar/blat matches
      if (p.getFirstComponent().equals(this.currentNavPath.getLastComponent())) {
        // this is a candidate path that matches in length
        if (p.isWild()) {
          // wild card says don't limit selection.
          return false;
        } else {
          return true;
        }
      }
    }
    return false;
  }

  /**
   * Determines if the given property is selected on the current navigation path.
   *
   * @param propName name of a regular property or a navigation property
   * @return true if property is selected, false if not
   */
  public boolean isSelected(String propName) {
    boolean limited = false;
    if (this.isSelectionLimited()) {
      limited = true;
      PropertyPath checkPath = currentNavPath.addComponent(propName);
      for (PropertyPath p : selectPaths) {
        if (p.equals(checkPath)) {
          return true;
        }
      }
    }

    // allow the selectR to override the $select limiters
    if (this.isSelectionLimitedRecursive()) {
      limited = true;
      for (PropertyPath p : this.selectRPaths) {
        // p of:
        //   blat/<propname>
        // matches current of:
        //   .../blat
        if (p.getLastComponent().equals(propName)
            && this.currentNavPath.getNComponents() >= 1
            && this.currentNavPath.getLastComponent().equals(p.getFirstComponent())) {
          return true;
        }
      }
    }

    if (limited) {
      // found one or more limiters but did not find a match
      return false;
    } else {
      // no limiters found, must be selected
      return true;
    }
  }

  /** determines if the given navigation property is expanded on the current navigation path */
  protected boolean isExpandedExplicit(String navPropName) {
    // expand paths don't have wildcarding...hmmh...why not?
    if (expandPaths == null) {
      return false;
    }
    PropertyPath checkPath = currentNavPath.addComponent(navPropName);
    for (PropertyPath p : expandPaths) {
      if (p.equals(checkPath)) {
        return true;
      }
    }
    return false;
  }

  /** determines if the given navigation property is expanded via expandR at the current depth */
  protected boolean isExpandedRecursive(String navPropName) {
    if (expandRPaths == null) {
      return false;
    }
    // recursive expansion doesn't care about the current navigation path.
    for (RecursivePropertyPath p : this.expandRPaths) {
      if (p.getFirstComponent().equals(navPropName) && p.isValidAtDepth(this.getCurrentDepth())) {
        return true;
      }
    }
    return false;
  }

  /** determines if the given navigation property is expanded, explicitly or recursively */
  public boolean isExpanded(String navPropName) {
    return isExpandedExplicit(navPropName) || isExpandedRecursive(navPropName);
  }

  /** descends one level into the given property */
  public void navigate(String propName) {
    this.currentNavPath = this.currentNavPath.addComponent(propName);
  }

  /** ascends one level back toward the root */
  public void popPath() {
    this.currentNavPath = this.currentNavPath.removeLastComponent();
  }

  public PropertyPath getCurrentNavPath() {
    return currentNavPath;
  }

  public int getCurrentDepth() {
    // depth numbered from 1
    return currentNavPath.getNComponents() + 1;
  }

  /** true when either recursive option (selectR/expandR) is in effect */
  public boolean isRecursive() {
    return selectRPaths != null || expandRPaths != null;
  }

  @Override
  public String toString() {
    return currentNavPath.toString();
  }
}
/*
 * Copyright (c) 2007-2018 Siemens AG
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 */

package com.siemens.ct.exi.core.datatype;

import java.io.IOException;

import com.siemens.ct.exi.core.exceptions.EXIException;
import com.siemens.ct.exi.core.io.channel.DecoderChannel;
import com.siemens.ct.exi.core.io.channel.EncoderChannel;
import com.siemens.ct.exi.core.types.TypeDecoder;
import com.siemens.ct.exi.core.types.TypeEncoder;
import com.siemens.ct.exi.core.types.TypedTypeDecoder;
import com.siemens.ct.exi.core.types.TypedTypeEncoder;
import com.siemens.ct.exi.core.values.IntegerValue;
import com.siemens.ct.exi.core.values.StringValue;
import com.siemens.ct.exi.core.values.Value;

/**
 * Round-trip tests for n-bit unsigned integer encoding/decoding over both the
 * bit-aligned and byte-aligned channels.
 */
public class NBitUnsignedIntegerCoreTest extends AbstractCoreTestCase {

	static final int log2CeilValues[] = new int[64];

	// Cache values of log2Ceil(n) for n in 0..63
	static {
		log2CeilValues[0] = 1; // by definition

		for (int j = 1; j < 64; j++) {
			for (int i = 31; i >= 0; i--) {
				if ((j >>> i) > 0) {
					log2CeilValues[j] = i + 1;
					break;
				}
			}
		}
	}

	/**
	 * Returns the least number of bits that is needed to represent the int
	 * {@code n}. Returns 1 if {@code n} is 0.
	 *
	 * @param n
	 *            Integer value. If {@code n} is negative it is interpreted as
	 *            an unsigned int. Thus, for every n &lt; 0 we have
	 *            log2Ceil(n) = 32.
	 */
	public static int numberOfBitsToRepresent(int n) {
		// Fast path via the precomputed table for small non-negative values.
		if (0 <= n && n < log2CeilValues.length) {
			return log2CeilValues[n];
		}
		// Find the highest set bit of the unsigned interpretation.
		for (int i = 31; i >= 0; i--) {
			if ((n >>> i) > 0) {
				return i + 1;
			}
		}
		return 1;
	}

	/**
	 * Returns the least number of bits that is needed to represent the long
	 * {@code l}, interpreting negative values as unsigned (so any l &lt; 0
	 * yields 64). Returns 1 if {@code l} is 0.
	 */
	public static int numberOfBitsToRepresent(long l) {
		// BUGFIX: only delegate to the int overload for values that fit
		// non-negatively in an int. The previous unconditional
		// (l <= Integer.MAX_VALUE) check routed negative longs through the
		// int path and reported 32 bits, while the unsigned interpretation
		// of a negative long requires 64 (as the loop below computes).
		if (0 <= l && l <= Integer.MAX_VALUE) {
			return numberOfBitsToRepresent((int) l);
		}
		for (int i = 63; i >= 0; i--) {
			if ((l >>> i) > 0) {
				return i + 1;
			}
		}
		return 1;
	}

	public NBitUnsignedIntegerCoreTest(String testName) {
		super(testName);
	}

	/** Value 0 in 1 bit, both channel types. */
	public void testNBitUnsignedInteger0_1() throws IOException {
		int value = 0;
		int nbits = 1;

		// Bit
		EncoderChannel bitEC = getBitEncoder();
		bitEC.encodeNBitUnsignedInteger(value, nbits);
		bitEC.flush();
		assertTrue(getBitDecoder().decodeNBitUnsignedInteger(nbits) == value);
		// Byte
		getByteEncoder().encodeNBitUnsignedInteger(value, nbits);
		assertTrue(getByteDecoder().decodeNBitUnsignedInteger(nbits) == value);
	}

	/** Value 1 in 1 bit, both channel types. */
	public void testNBitUnsignedInteger1_1() throws IOException {
		int value = 1;
		int nbits = 1;

		// Bit
		EncoderChannel bitEC = getBitEncoder();
		bitEC.encodeNBitUnsignedInteger(value, nbits);
		bitEC.flush();
		assertTrue(getBitDecoder().decodeNBitUnsignedInteger(nbits) == value);
		// Byte
		getByteEncoder().encodeNBitUnsignedInteger(value, nbits);
		assertTrue(getByteDecoder().decodeNBitUnsignedInteger(nbits) == value);
	}

	/** Value 8 in 4 bits, both channel types. */
	public void testNBitUnsignedInteger8_4() throws IOException {
		int value = 8;
		int nbits = 4;

		// Bit
		EncoderChannel bitEC = getBitEncoder();
		bitEC.encodeNBitUnsignedInteger(value, nbits);
		bitEC.flush();
		assertTrue(getBitDecoder().decodeNBitUnsignedInteger(nbits) == value);
		// Byte
		getByteEncoder().encodeNBitUnsignedInteger(value, nbits);
		assertTrue(getByteDecoder().decodeNBitUnsignedInteger(nbits) == value);
	}

	/** Value 33 in 9 bits (crosses a byte boundary), both channel types. */
	public void testNBitUnsignedInteger33_9() throws IOException {
		int value = 33;
		int nbits = 9;

		// Bit
		EncoderChannel bitEC = getBitEncoder();
		bitEC.encodeNBitUnsignedInteger(value, nbits);
		bitEC.flush();
		assertTrue(getBitDecoder().decodeNBitUnsignedInteger(nbits) == value);
		// Byte
		getByteEncoder().encodeNBitUnsignedInteger(value, nbits);
		assertTrue(getByteDecoder().decodeNBitUnsignedInteger(nbits) == value);
	}

	/** Value 78935 in 20 bits, both channel types. */
	public void testNBitUnsignedInteger78935_20() throws IOException {
		int value = 78935;
		int nbits = 20;

		// Bit
		EncoderChannel bitEC = getBitEncoder();
		bitEC.encodeNBitUnsignedInteger(value, nbits);
		bitEC.flush();
		assertTrue(getBitDecoder().decodeNBitUnsignedInteger(nbits) == value);
		// Byte
		getByteEncoder().encodeNBitUnsignedInteger(value, nbits);
		assertTrue(getByteDecoder().decodeNBitUnsignedInteger(nbits) == value);
	}

	/** Value 8448 in 20 bits, both channel types. */
	public void testNBitUnsignedInteger8448_20() throws IOException {
		int value = 8448;
		int nbits = 20;

		// Bit
		EncoderChannel bitEC = getBitEncoder();
		bitEC.encodeNBitUnsignedInteger(value, nbits);
		bitEC.flush();
		assertTrue(getBitDecoder().decodeNBitUnsignedInteger(nbits) == value);
		// Byte
		getByteEncoder().encodeNBitUnsignedInteger(value, nbits);
		assertTrue(getByteDecoder().decodeNBitUnsignedInteger(nbits) == value);
	}

	/** A negative value must be rejected by the bit channel. */
	public void testNBitUnsignedIntegerFailureBit() throws IOException {
		int value = -3;
		int nbits = 5;

		try {
			getBitEncoder().encodeNBitUnsignedInteger(value, nbits);
			fail("Negative values accepted");
		} catch (RuntimeException e) {
			// ok
		}
	}

	/** A negative value must be rejected by the byte channel. */
	public void testNBitUnsignedIntegerFailureByte() throws IOException {
		int value = -3;
		int nbits = 5;

		try {
			getByteEncoder().encodeNBitUnsignedInteger(value, nbits);
			fail("Negative values accepted");
		} catch (RuntimeException e) {
			// ok
		}
	}

	/** A value outside the facet range [2, 10) must be invalid. */
	public void testNBitUnsignedIntegerFacet1() throws IOException, EXIException {
		// String schemaAsString = "<xs:schema xmlns:xs='http://www.w3.org/2001/XMLSchema'>"
		// 		+ "  <xs:simpleType name='NBit'>"
		// 		+ "    <xs:restriction base='xs:integer'>"
		// 		+ "      <xs:minInclusive value='2' />"
		// 		+ "      <xs:maxExclusive value='10'/>"
		// 		+ "    </xs:restriction>"
		// 		+ "  </xs:simpleType>"
		// 		+ "</xs:schema>";
		//
		// Datatype datatype = DatatypeMappingTest.getSimpleDatatypeFor(
		// 		schemaAsString, "NBit", "");
		Datatype datatype = new NBitUnsignedIntegerDatatype(
				IntegerValue.parse("2"), IntegerValue.parse("10"), null);
		TypeEncoder typeEncoder = new TypedTypeEncoder();

		// try to validate
		assertFalse(typeEncoder.isValid(datatype, new StringValue("12")));
	}

	/** Round-trips a value in the negative facet range [-200, -10). */
	public void testNBitUnsignedIntegerFacet2() throws IOException, EXIException {
		// String schemaAsString = "<xs:schema xmlns:xs='http://www.w3.org/2001/XMLSchema'>"
		// 		+ "  <xs:simpleType name='NBit'>"
		// 		+ "    <xs:restriction base='xs:long'>"
		// 		+ "      <xs:minInclusive value='-200' />"
		// 		+ "      <xs:maxExclusive value='-10'/>"
		// 		+ "    </xs:restriction>"
		// 		+ "  </xs:simpleType>"
		// 		+ "</xs:schema>";
		StringValue sValue = new StringValue("-12");

		// Datatype datatype = DatatypeMappingTest.getSimpleDatatypeFor(
		// 		schemaAsString, "NBit", "");
		Datatype datatype = new NBitUnsignedIntegerDatatype(
				IntegerValue.parse("-200"), IntegerValue.parse("-10"), null);
		TypeDecoder typeDecoder = new TypedTypeDecoder();
		TypeEncoder typeEncoder = new TypedTypeEncoder();

		// write (bit & byte )
		assertTrue(typeEncoder.isValid(datatype, sValue));
		// bit
		EncoderChannel bitEC = getBitEncoder();
		typeEncoder.writeValue(null, bitEC, null);
		bitEC.flush();
		// byte
		typeEncoder.writeValue(null, getByteEncoder(), null);

		// read
		Value sDecoded;
		// bit
		sDecoded = typeDecoder.readValue(datatype, null, getBitDecoder(), null);
		assertTrue(sValue + " != " + sDecoded, sDecoded.equals(sValue));
		// byte
		sDecoded = typeDecoder.readValue(datatype, null, getByteDecoder(), null);
		assertTrue(sValue + " != " + sDecoded, sDecoded.equals(sValue));
	}

	/** Same as Facet2 with an xs:int-based range. */
	public void testNBitUnsignedIntegerFacet3() throws IOException, EXIException {
		// String schemaAsString = "<xs:schema xmlns:xs='http://www.w3.org/2001/XMLSchema'>"
		// 		+ "  <xs:simpleType name='NBit'>"
		// 		+ "    <xs:restriction base='xs:int'>"
		// 		+ "      <xs:minInclusive value='-200' />"
		// 		+ "      <xs:maxExclusive value='-10'/>"
		// 		+ "    </xs:restriction>"
		// 		+ "  </xs:simpleType>"
		// 		+ "</xs:schema>";
		StringValue sValue = new StringValue("-12");
		Datatype datatype = new NBitUnsignedIntegerDatatype(
				IntegerValue.parse("-200"), IntegerValue.parse("-10"), null);
		TypeDecoder typeDecoder = new TypedTypeDecoder();
		TypeEncoder typeEncoder = new TypedTypeEncoder();
		// Datatype datatype = DatatypeMappingTest.getSimpleDatatypeFor(
		// 		schemaAsString, "NBit", "");

		// write (bit & byte )
		assertTrue(typeEncoder.isValid(datatype, sValue));
		// bit
		EncoderChannel bitEC = getBitEncoder();
		typeEncoder.writeValue(null, bitEC, null);
		bitEC.flush();
		// byte
		typeEncoder.writeValue(null, getByteEncoder(), null);

		// read
		Value sDecoded;
		// bit
		sDecoded = typeDecoder.readValue(datatype, null, getBitDecoder(), null);
		assertTrue(sValue + " != " + sDecoded, sDecoded.equals(sValue));
		// byte
		sDecoded = typeDecoder.readValue(datatype, null, getByteDecoder(), null);
		assertTrue(sValue + " != " + sDecoded, sDecoded.equals(sValue));
	}

	/** Round-trips a long ascending sequence with per-value minimal bit widths. */
	public void testNBitUnsignedIntegerSequence() throws IOException {
		// Bit / Byte
		EncoderChannel ecBit = getBitEncoder();
		EncoderChannel ecByte = getByteEncoder();
		for (int i = 0; i < 1000000; i++) {
			int value = i;
			int nbits = numberOfBitsToRepresent(value);
			ecBit.encodeNBitUnsignedInteger(value, nbits);
			ecByte.encodeNBitUnsignedInteger(value, nbits);
		}
		ecBit.flush();

		DecoderChannel dcBit = getBitDecoder();
		DecoderChannel dcByte = getByteDecoder();
		for (int i = 0; i < 1000000; i++) {
			int value = i;
			int nbits = numberOfBitsToRepresent(value);
			assertEquals(dcBit.decodeNBitUnsignedInteger(nbits), value);
			assertEquals(dcByte.decodeNBitUnsignedInteger(nbits), value);
		}
	}

}
/*
 * $Id$
 * This file is a part of the Arakhne Foundation Classes, http://www.arakhne.org/afc
 *
 * Copyright (c) 2000-2012 Stephane GALLAND.
 * Copyright (c) 2005-10, Multiagent Team, Laboratoire Systemes et Transports,
 *                        Universite de Technologie de Belfort-Montbeliard.
 * Copyright (c) 2013-2016 The original authors, and other authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.arakhne.afc.math.matrix;

import org.arakhne.afc.math.continous.object3d.Quaternion;
import org.arakhne.afc.math.continous.object3d.Vector3f;
import org.arakhne.afc.math.generic.Tuple3D;

/** A 3D transformation.
 * Is represented internally as a 4x4 floating point matrix. The
 * mathematical representation is row major, as in traditional
 * matrix mathematics.
 *
 * <p>The transformation matrix is:
 * <pre><code>
 * | r11 | r12 | r13 | Tx |
 * | r21 | r22 | r23 | Ty |
 * | r31 | r32 | r33 | Tz |
 * | 0   | 0   | 0   | 1  |
 * </code></pre>
 *
 * @author $Author: sgalland$
 * @version $FullVersion$
 * @mavengroupid $GroupId$
 * @mavenartifactid $ArtifactId$
 * @deprecated Replacement will be provided in Version 14.0
 */
@Deprecated
@SuppressWarnings("all")
public class Transform3D extends Matrix4d {

	private static final long serialVersionUID = -8427812783666663224L;

	/** This is the identity transformation.
	 * NOTE(review): this shared instance is mutable (the class exposes
	 * setters inherited and declared here); callers must not modify it.
	 */
	public static final Transform3D IDENTITY = new Transform3D();

	/**
	 * Constructs a new Transform3D object and sets it to the identity transformation.
	 */
	public Transform3D() {
		setIdentity();
	}

	/**
	 * Constructs and initializes a Matrix4f from the specified twelve values;
	 * the bottom row is fixed to [0 0 0 1].
	 *
	 * @param m00
	 *            the [0][0] element
	 * @param m01
	 *            the [0][1] element
	 * @param m02
	 *            the [0][2] element
	 * @param m03
	 *            the [0][3] element
	 * @param m10
	 *            the [1][0] element
	 * @param m11
	 *            the [1][1] element
	 * @param m12
	 *            the [1][2] element
	 * @param m13
	 *            the [1][3] element
	 * @param m20
	 *            the [2][0] element
	 * @param m21
	 *            the [2][1] element
	 * @param m22
	 *            the [2][2] element
	 * @param m23
	 *            the [2][3] element
	 */
	public Transform3D(float m00, float m01, float m02, float m03, float m10, float m11, float m12, float m13, float m20, float m21, float m22, float m23) {
		super(m00, m01, m02, m03, m10, m11, m12, m13, m20, m21, m22, m23, 0f, 0f, 0f, 1f);
	}

	/**
	 * Constructs a new Transform3D object and initializes it from the
	 * specified transform.
	 *
	 * @param t the transform to copy
	 */
	public Transform3D(Transform3D t) {
		super(t);
	}

	/**
	 * @param m the matrix whose 16 elements are copied into this transform
	 */
	public Transform3D(Matrix4d m) {
		super(m);
	}

	@Override
	public Transform3D clone() {
		return (Transform3D)super.clone();
	}

	/** Set the position.
	 * <p>
	 * This function changes only the elements of
	 * the matrix related to the translation (the last column).
	 * The scaling and the shearing are not changed.
	 * <p>
	 * After a call to this function, the matrix will
	 * contains (? means any value):
	 * <pre>
	 *          [ ?   ?   ?   x ]
	 *          [ ?   ?   ?   y ]
	 *          [ ?   ?   ?   z ]
	 *          [ ?   ?   ?   ? ]
	 * </pre>
	 *
	 * @param x
	 * @param y
	 * @param z
	 * @see #makeTranslationMatrix(float, float, float)
	 */
	public void setTranslation(float x, float y, float z) {
		this.m03 = x;
		this.m13 = y;
		this.m23 = z;
	}

	/** Set the position.
	 * <p>
	 * This function changes only the elements of
	 * the matrix related to the translation (the last column).
	 * The scaling and the shearing are not changed.
	 * <p>
	 * After a call to this function, the matrix will
	 * contains (? means any value):
	 * <pre>
	 *          [ ?   ?   ?   t.x ]
	 *          [ ?   ?   ?   t.y ]
	 *          [ ?   ?   ?   t.z ]
	 *          [ ?   ?   ?   ?   ]
	 * </pre>
	 *
	 * @param t
	 * @see #makeTranslationMatrix(float, float, float)
	 */
	public void setTranslation(Tuple3D<?> t) {
		this.m03 = t.getX();
		this.m13 = t.getY();
		this.m23 = t.getZ();
	}

	/** Translate the position.
	 * <p>
	 * NOTE(review): despite what the original documentation claimed, this is
	 * NOT a matrix multiplication; it simply adds the offsets to the
	 * translation column (m03, m13, m23), leaving every other element
	 * unchanged.
	 *
	 * @param dx
	 * @param dy
	 * @param dz
	 */
	public void translate(float dx, float dy, float dz) {
		this.m03 += dx;
		this.m13 += dy;
		this.m23 += dz;
	}

	/** Translate the position.
	 * <p>
	 * NOTE(review): despite what the original documentation claimed, this is
	 * NOT a matrix multiplication; it simply adds the vector components to
	 * the translation column (m03, m13, m23).
	 *
	 * @param t
	 */
	public void translate(Vector3f t) {
		this.m03 += t.getX();
		this.m13 += t.getY();
		this.m23 += t.getZ();
	}

	/** Replies the X translation.
	 *
	 * @return the amount
	 */
	public float getTranslationX() {
		return (float)this.m03;
	}

	/** Replies the Y translation.
	 *
	 * @return the amount
	 */
	public float getTranslationY() {
		return (float)this.m13;
	}

	/** Replies the Z translation.
	 *
	 * @return the amount
	 */
	public float getTranslationZ() {
		return (float)this.m23;
	}

	/** Replies the translation.
	 *
	 * @return the amount
	 */
	public Vector3f getTranslation() {
		return new Vector3f(this.m03, this.m13, this.m23);
	}

	/**
	 * Replies the rotation for the object, extracted from the upper-left
	 * 3x3 block of this matrix.
	 *
	 * @return the rotation as a quaternion
	 */
	public Quaternion getRotation() {
		Quaternion q = new Quaternion();
		q.setFromMatrix(this);
		return q;
	}

	/**
	 * Set the rotation for the object but do not change the translation.
	 * <p>
	 * This function changes only the elements of
	 * the matrix related to the rotation (the upper-left 3x3 block).
	 * The translation is not changed.
	 * <p>
	 * After a call to this function, the matrix will
	 * contains (? means any value, and r is the rotation
	 * of the quaternion as a 3x3 matrix):
	 * <pre>
	 *          [ r   r   r   ? ]
	 *          [ r   r   r   ? ]
	 *          [ r   r   r   ? ]
	 *          [ ?   ?   ?   ? ]
	 * </pre>
	 *
	 * @param rotation
	 * @see #makeRotationMatrix(Quaternion)
	 */
	public void setRotation(Quaternion rotation) {
		// Standard unit-quaternion to rotation-matrix expansion, column by column.
		this.m00 = (1.0f - 2.0f*rotation.getY()*rotation.getY() - 2.0f*rotation.getZ()*rotation.getZ());
		this.m10 = (2.0f*(rotation.getX()*rotation.getY() + rotation.getW()*rotation.getZ()));
		this.m20 = (2.0f*(rotation.getX()*rotation.getZ() - rotation.getW()*rotation.getY()));

		this.m01 = (2.0f*(rotation.getX()*rotation.getY() - rotation.getW()*rotation.getZ()));
		this.m11 = (1.0f - 2.0f*rotation.getX()*rotation.getX() - 2.0f*rotation.getZ()*rotation.getZ());
		this.m21 = (2.0f*(rotation.getY()*rotation.getZ() + rotation.getW()*rotation.getX()));

		this.m02 = (2.0f*(rotation.getX()*rotation.getZ() + rotation.getW()*rotation.getY()));
		this.m12 = (2.0f*(rotation.getY()*rotation.getZ() - rotation.getW()*rotation.getX()));
		this.m22 = (1.0f - 2.0f*rotation.getX()*rotation.getX() - 2.0f*rotation.getY()*rotation.getY());
	}

	/**
	 * Rotate the object.
	 * <p>
	 * This function is equivalent to (where r is the rotation
	 * of the quaternion as a 3x3 matrix):
	 * <pre>
	 *          this = this *  [ r   r   r   0 ]
	 *                         [ r   r   r   0 ]
	 *                         [ r   r   r   0 ]
	 *                         [ 0   0   0   1 ]
	 * </pre>
	 *
	 * @param rotation
	 */
	public void rotate(Quaternion rotation) {
		Transform3D m = new Transform3D();
		m.makeRotationMatrix(rotation);
		mul(m);
	}

	/**
	 * Sets the value of this matrix to a rotation matrix, and no translation.
	 * <p>
	 * This function changes all the elements of
	 * the matrix, including the translation.
	 * <p>
	 * After a call to this function, the matrix will
	 * contains (r is a value from the quaternion):
	 * <pre>
	 *          [ r   r   r   0 ]
	 *          [ r   r   r   0 ]
	 *          [ r   r   r   0 ]
	 *          [ 0   0   0   1 ]
	 * </pre>
	 *
	 * @param rotation
	 * @see #setRotation(Quaternion)
	 */
	public final void makeRotationMatrix(Quaternion rotation) {
		// Upper-left 3x3 block: unit-quaternion to rotation-matrix expansion.
		this.m00 = (1.0f - 2.0f*rotation.getY()*rotation.getY() - 2.0f*rotation.getZ()*rotation.getZ());
		this.m10 = (2.0f*(rotation.getX()*rotation.getY() + rotation.getW()*rotation.getZ()));
		this.m20 = (2.0f*(rotation.getX()*rotation.getZ() - rotation.getW()*rotation.getY()));

		this.m01 = (2.0f*(rotation.getX()*rotation.getY() - rotation.getW()*rotation.getZ()));
		this.m11 = (1.0f - 2.0f*rotation.getX()*rotation.getX() - 2.0f*rotation.getZ()*rotation.getZ());
		this.m21 = (2.0f*(rotation.getY()*rotation.getZ() + rotation.getW()*rotation.getX()));

		this.m02 = (2.0f*(rotation.getX()*rotation.getZ() + rotation.getW()*rotation.getY()));
		this.m12 = (2.0f*(rotation.getY()*rotation.getZ() - rotation.getW()*rotation.getX()));
		this.m22 = (1.0f - 2.0f*rotation.getX()*rotation.getX() - 2.0f*rotation.getY()*rotation.getY());

		// Clear the translation column and set the homogeneous bottom row.
		this.m03 = (float) 0.0;
		this.m13 = (float) 0.0;
		this.m23 = (float) 0.0;

		this.m30 = (float) 0.0;
		this.m31 = (float) 0.0;
		this.m32 = (float) 0.0;
		this.m33 = (float) 1.0;
	}

	/**
	 * Sets the value of this matrix to the given translation, without rotation.
	 * <p>
	 * This function changes all the elements of
	 * the matrix including the scaling and the shearing.
	 * <p>
	 * After a call to this function, the matrix will
	 * contains:
	 * <pre>
	 *          [ 1   0   0   dx ]
	 *          [ 0   1   0   dy ]
	 *          [ 0   0   1   dz ]
	 *          [ 0   0   0   1  ]
	 * </pre>
	 *
	 * @param dx is the position to put in the matrix.
	 * @param dy is the position to put in the matrix.
	 * @param dz is the position to put in the matrix.
	 * @see #setTranslation(float, float, float)
	 * @see #setTranslation(Tuple3D)
	 */
	public final void makeTranslationMatrix(float dx, float dy, float dz) {
		this.m00 = 1f;
		this.m01 = 0f;
		this.m02 = 0f;
		this.m03 = dx;

		this.m10 = 0f;
		this.m11 = 1f;
		this.m12 = 0f;
		this.m13 = dy;

		this.m20 = 0f;
		this.m21 = 0f;
		this.m22 = 1f;
		this.m23 = dz;

		this.m30 = 0f;
		this.m31 = 0f;
		this.m32 = 0f;
		this.m33 = 1f;
	}

	/**
	 * Multiply this matrix by the tuple t and place the result back into the
	 * tuple (t = this*t). The tuple is treated as a point (homogeneous w = 1),
	 * so the translation column is applied.
	 *
	 * @param t
	 *            the tuple to be multiplied by this matrix and then replaced
	 */
	public void transform(Tuple3D<?> t) {
		float x, y, z;
		x = (float)(this.m00 * t.getX() + this.m01 * t.getY() + this.m02 * t.getZ() + this.m03);
		y = (float)(this.m10 * t.getX() + this.m11 * t.getY() + this.m12 * t.getZ() + this.m13);
		z = (float)(this.m20 * t.getX() + this.m21 * t.getY() + this.m22 * t.getZ() + this.m23);
		t.set(x, y, z);
	}

	/**
	 * Multiply this matrix by the tuple t and place the result into the
	 * tuple "result" (result = this*t). The tuple is treated as a point
	 * (homogeneous w = 1), so the translation column is applied.
	 *
	 * @param t
	 *            the tuple to be multiplied by this matrix
	 * @param result
	 *            the tuple into which the product is placed
	 */
	public void transform(Tuple3D<?> t, Tuple3D<?> result) {
		result.set(
				(float)(this.m00 * t.getX() + this.m01 * t.getY() + this.m02 * t.getZ() + this.m03),
				(float)(this.m10 * t.getX() + this.m11 * t.getY() + this.m12 * t.getZ() + this.m13),
				(float)(this.m20 * t.getX() + this.m21 * t.getY() + this.m22 * t.getZ() + this.m23));
	}

	/**
	 * Set the components of the transformation; the bottom row is fixed to
	 * [0 0 0 1].
	 *
	 * @param m00
	 *            the [0][0] element
	 * @param m01
	 *            the [0][1] element
	 * @param m02
	 *            the [0][2] element
	 * @param m03
	 *            the [0][3] element
	 * @param m10
	 *            the [1][0] element
	 * @param m11
	 *            the [1][1] element
	 * @param m12
	 *            the [1][2] element
	 * @param m13
	 *            the [1][3] element
	 * @param m20
	 *            the [2][0] element
	 * @param m21
	 *            the [2][1] element
	 * @param m22
	 *            the [2][2] element
	 * @param m23
	 *            the [2][3] element
	 */
	public void set(float m00, float m01, float m02, float m03, float m10, float m11, float m12, float m13, float m20, float m21, float m22, float m23) {
		set(m00, m01, m02, m03, m10, m11, m12, m13, m20, m21, m22, m23, 0f, 0f, 0f, 1f);
	}

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.security;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeoutException;

import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.FakeTimer;
import org.junit.Before;
import org.junit.Test;
import com.google.common.base.Supplier;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.fail;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.security.Groups;
import org.apache.hadoop.security.ShellBasedUnixGroupsMapping;

// Tests for the Groups user-to-groups cache, driven by the controllable
// FakeGroupMapping provider defined below.
public class TestGroupsCaching {
  public static final Log LOG = LogFactory.getLog(TestGroupsCaching.class);
  // Groups returned for every non-blacklisted user by FakeGroupMapping.
  private static String[] myGroups = {"grp1", "grp2"};
  private Configuration conf;

  @Before
  public void setup() throws IOException {
    // Reset all static state shared between tests, then install the fake
    // mapping provider into a fresh Configuration.
    FakeGroupMapping.clearAll();
    ExceptionalGroupMapping.resetRequestCount();
    conf = new Configuration();
    conf.setClass(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
      FakeGroupMapping.class,
      ShellBasedUnixGroupsMapping.class);
  }

  // Controllable group-mapping provider: returns the statically registered
  // group set for every user, with knobs for blacklisting, delays, thrown
  // exceptions, and latch-based pausing. All state is static because Groups
  // instantiates the provider reflectively.
  public static class FakeGroupMapping extends ShellBasedUnixGroupsMapping {
    // any to n mapping
    private static Set<String> allGroups = new HashSet<String>();
    private static Set<String> blackList = new HashSet<String>();
    private static int requestCount = 0;
    private static long getGroupsDelayMs = 0;
    // whether to throw an exception during calls
    private static boolean throwException;
    private static volatile CountDownLatch latch = null;

    @Override
    public List<String> getGroups(String user) throws IOException {
      LOG.info("Getting groups for " + user);
      delayIfNecessary();

      requestCount++;

      if (throwException) {
        throw new IOException("For test");
      }

      if (blackList.contains(user)) {
        return new LinkedList<String>();
      }
      return new LinkedList<String>(allGroups);
    }

    /**
     * Delay returning on a latch or a specific amount of time.
     */
    private void delayIfNecessary() {
      // cause current method to pause
      // resume until get notified
      if (latch != null) {
        try {
          latch.await();
          return;
        // NOTE(review): InterruptedException is deliberately swallowed here
        // (falls through to the timed delay); the interrupt flag is not
        // restored.
        } catch (InterruptedException e) {}
      }

      if (getGroupsDelayMs > 0) {
        try {
          Thread.sleep(getGroupsDelayMs);
        } catch (InterruptedException e) {
          throw new RuntimeException(e);
        }
      }
    }

    @Override
    public void cacheGroupsRefresh() throws IOException {
      LOG.info("Cache is being refreshed.");
      clearBlackList();
      return;
    }

    public static void clearBlackList() throws IOException {
      LOG.info("Clearing the blacklist");
      blackList.clear();
    }

    // Resets every static knob back to its default; called from @Before.
    public static void clearAll() throws IOException {
      LOG.info("Resetting FakeGroupMapping");
      blackList.clear();
      allGroups.clear();
      requestCount = 0;
      getGroupsDelayMs = 0;
      throwException = false;
      latch = null;
    }

    @Override
    public void cacheGroupsAdd(List<String> groups) throws IOException {
      LOG.info("Adding " + groups + " to groups.");
      allGroups.addAll(groups);
    }

    public static void addToBlackList(String user) throws IOException {
      LOG.info("Adding " + user + " to the blacklist");
      blackList.add(user);
    }

    public static int getRequestCount() {
      return requestCount;
    }

    public static void resetRequestCount() {
      requestCount = 0;
    }

    public static void setGetGroupsDelayMs(long delayMs) {
      getGroupsDelayMs = delayMs;
    }

    public static void setThrowException(boolean throwIfTrue) {
      throwException = throwIfTrue;
    }

    /**
     * Hold on returning the group names unless being notified,
     * ensure this method is called before {@link #getGroups(String)}.
     * Call {@link #resume()} will resume the process.
     */
    public static void pause() {
      // Set a static latch, multiple background refresh threads
      // share this instance. So when await is called, all the
      // threads will pause until the it decreases the count of
      // the latch.
      latch = new CountDownLatch(1);
    }

    /**
     * Resume the background refresh thread and return the value
     * of group names.
     */
    public static void resume() {
      // if latch is null, it means pause was not called and it is
      // safe to ignore.
      if (latch != null) {
        latch.countDown();
      }
    }
  }

  // Provider that always fails, counting how many lookups were attempted.
  public static class ExceptionalGroupMapping extends ShellBasedUnixGroupsMapping {
    private static int requestCount = 0;

    @Override
    public List<String> getGroups(String user) throws IOException {
      requestCount++;
      throw new IOException("For test");
    }

    public static int getRequestCount() {
      return requestCount;
    }

    public static void resetRequestCount() {
      requestCount = 0;
    }
  }

  // Verifies positive caching: a cached lookup survives blacklisting, while a
  // user with no groups is not negatively cached (negative cache disabled).
  @Test
  public void testGroupsCaching() throws Exception {
    // Disable negative cache.
    conf.setLong(
        CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS, 0);
    Groups groups = new Groups(conf);
    groups.cacheGroupsAdd(Arrays.asList(myGroups));
    groups.refresh();
    FakeGroupMapping.clearBlackList();
    FakeGroupMapping.addToBlackList("user1");

    // regular entry
    assertTrue(groups.getGroups("me").size() == 2);

    // this must be cached. blacklisting should have no effect.
    FakeGroupMapping.addToBlackList("me");
    assertTrue(groups.getGroups("me").size() == 2);

    // ask for a negative entry
    try {
      LOG.error("We are not supposed to get here." + groups.getGroups("user1").toString());
      fail();
    } catch (IOException ioe) {
      if(!ioe.getMessage().startsWith("No groups found")) {
        LOG.error("Got unexpected exception: " + ioe.getMessage());
        fail();
      }
    }

    // this shouldn't be cached. remove from the black list and retry.
    FakeGroupMapping.clearBlackList();
    assertTrue(groups.getGroups("user1").size() == 2);
  }

  // Records whether a real (non-static-override) lookup reached the provider.
  public static class FakeunPrivilegedGroupMapping extends FakeGroupMapping {
    private static boolean invoked = false;
    @Override
    public List<String> getGroups(String user) throws IOException {
      invoked = true;
      return super.getGroups(user);
    }
  }

  /*
   * Group lookup should not happen for static users
   */
  @Test
  public void testGroupLookupForStaticUsers() throws Exception {
    conf.setClass(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
        FakeunPrivilegedGroupMapping.class, ShellBasedUnixGroupsMapping.class);
    conf.set(CommonConfigurationKeys.HADOOP_USER_GROUP_STATIC_OVERRIDES,
        "me=;user1=group1;user2=group1,group2");
    Groups groups = new Groups(conf);
    List<String> userGroups = groups.getGroups("me");
    assertTrue("non-empty groups for static user", userGroups.isEmpty());
    assertFalse("group lookup done for static user",
        FakeunPrivilegedGroupMapping.invoked);

    List<String> expected = new ArrayList<String>();
    expected.add("group1");

    FakeunPrivilegedGroupMapping.invoked = false;
    userGroups = groups.getGroups("user1");
    assertTrue("groups not correct", expected.equals(userGroups));
    assertFalse("group lookup done for unprivileged user",
        FakeunPrivilegedGroupMapping.invoked);

    expected.add("group2");
    FakeunPrivilegedGroupMapping.invoked = false;
    userGroups = groups.getGroups("user2");
    assertTrue("groups not correct", expected.equals(userGroups));
    assertFalse("group lookup done for unprivileged user",
        FakeunPrivilegedGroupMapping.invoked);
  }

  // NOTE(review): the next test method is truncated in this view; its body
  // continues past the end of this chunk.
  @Test
  public void
testNegativeGroupCaching() throws Exception { final String user = "negcache"; final String failMessage = "Did not throw IOException: "; conf.setLong( CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS, 2); FakeTimer timer = new FakeTimer(); Groups groups = new Groups(conf, timer); groups.cacheGroupsAdd(Arrays.asList(myGroups)); groups.refresh(); FakeGroupMapping.addToBlackList(user); // In the first attempt, the user will be put in the negative cache. try { groups.getGroups(user); fail(failMessage + "Failed to obtain groups from FakeGroupMapping."); } catch (IOException e) { // Expects to raise exception for the first time. But the user will be // put into the negative cache GenericTestUtils.assertExceptionContains("No groups found for user", e); } // The second time, the user is in the negative cache. try { groups.getGroups(user); fail(failMessage + "The user is in the negative cache."); } catch (IOException e) { GenericTestUtils.assertExceptionContains("No groups found for user", e); } // Brings back the backend user-group mapping service. FakeGroupMapping.clearBlackList(); // It should still get groups from the negative cache. try { groups.getGroups(user); fail(failMessage + "The user is still in the negative cache, even " + "FakeGroupMapping has resumed."); } catch (IOException e) { GenericTestUtils.assertExceptionContains("No groups found for user", e); } // Let the elements in the negative cache expire. timer.advance(4 * 1000); // The groups for the user is expired in the negative cache, a new copy of // groups for the user is fetched. assertEquals(Arrays.asList(myGroups), groups.getGroups(user)); } @Test public void testCachePreventsImplRequest() throws Exception { // Disable negative cache. 
conf.setLong( CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS, 0); Groups groups = new Groups(conf); groups.cacheGroupsAdd(Arrays.asList(myGroups)); groups.refresh(); FakeGroupMapping.clearBlackList(); assertEquals(0, FakeGroupMapping.getRequestCount()); // First call hits the wire assertTrue(groups.getGroups("me").size() == 2); assertEquals(1, FakeGroupMapping.getRequestCount()); // Second count hits cache assertTrue(groups.getGroups("me").size() == 2); assertEquals(1, FakeGroupMapping.getRequestCount()); } @Test public void testExceptionsFromImplNotCachedInNegativeCache() { conf.setClass(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING, ExceptionalGroupMapping.class, ShellBasedUnixGroupsMapping.class); conf.setLong(CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS, 10000); Groups groups = new Groups(conf); groups.cacheGroupsAdd(Arrays.asList(myGroups)); groups.refresh(); assertEquals(0, ExceptionalGroupMapping.getRequestCount()); // First call should hit the wire try { groups.getGroups("anything"); fail("Should have thrown"); } catch (IOException e) { // okay } assertEquals(1, ExceptionalGroupMapping.getRequestCount()); // Second call should hit the wire (no negative caching) try { groups.getGroups("anything"); fail("Should have thrown"); } catch (IOException e) { // okay } assertEquals(2, ExceptionalGroupMapping.getRequestCount()); } @Test public void testOnlyOneRequestWhenNoEntryIsCached() throws Exception { // Disable negative cache. 
conf.setLong( CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS, 0); final Groups groups = new Groups(conf); groups.cacheGroupsAdd(Arrays.asList(myGroups)); groups.refresh(); FakeGroupMapping.clearBlackList(); FakeGroupMapping.setGetGroupsDelayMs(100); ArrayList<Thread> threads = new ArrayList<Thread>(); for (int i = 0; i < 10; i++) { threads.add(new Thread() { public void run() { try { assertEquals(2, groups.getGroups("me").size()); } catch (IOException e) { fail("Should not happen"); } } }); } // We start a bunch of threads who all see no cached value for (Thread t : threads) { t.start(); } for (Thread t : threads) { t.join(); } // But only one thread should have made the request assertEquals(1, FakeGroupMapping.getRequestCount()); } @Test public void testOnlyOneRequestWhenExpiredEntryExists() throws Exception { conf.setLong( CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_SECS, 1); FakeTimer timer = new FakeTimer(); final Groups groups = new Groups(conf, timer); groups.cacheGroupsAdd(Arrays.asList(myGroups)); groups.refresh(); FakeGroupMapping.clearBlackList(); FakeGroupMapping.setGetGroupsDelayMs(100); // We make an initial request to populate the cache groups.getGroups("me"); int startingRequestCount = FakeGroupMapping.getRequestCount(); // Then expire that entry timer.advance(400 * 1000); Thread.sleep(100); ArrayList<Thread> threads = new ArrayList<Thread>(); for (int i = 0; i < 10; i++) { threads.add(new Thread() { public void run() { try { assertEquals(2, groups.getGroups("me").size()); } catch (IOException e) { fail("Should not happen"); } } }); } // We start a bunch of threads who all see the cached value for (Thread t : threads) { t.start(); } for (Thread t : threads) { t.join(); } // Only one extra request is made assertEquals(startingRequestCount + 1, FakeGroupMapping.getRequestCount()); } @Test public void testThreadNotBlockedWhenExpiredEntryExistsWithBackgroundRefresh() throws Exception { conf.setLong( 
CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_SECS, 1); conf.setBoolean( CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_BACKGROUND_RELOAD, true); FakeTimer timer = new FakeTimer(); final Groups groups = new Groups(conf, timer); groups.cacheGroupsAdd(Arrays.asList(myGroups)); groups.refresh(); FakeGroupMapping.clearBlackList(); // We make an initial request to populate the cache groups.getGroups("me"); // Further lookups will have a delay FakeGroupMapping.setGetGroupsDelayMs(100); // add another groups groups.cacheGroupsAdd(Arrays.asList("grp3")); int startingRequestCount = FakeGroupMapping.getRequestCount(); // Then expire that entry timer.advance(4 * 1000); // Now get the cache entry - it should return immediately // with the old value and the cache will not have completed // a request to getGroups yet. assertEquals(groups.getGroups("me").size(), 2); assertEquals(startingRequestCount, FakeGroupMapping.getRequestCount()); // Now sleep for over the delay time and the request count should // have completed Thread.sleep(110); assertEquals(startingRequestCount + 1, FakeGroupMapping.getRequestCount()); // Another call to get groups should give 3 groups instead of 2 assertEquals(groups.getGroups("me").size(), 3); } @Test public void testThreadBlockedWhenExpiredEntryExistsWithoutBackgroundRefresh() throws Exception { conf.setLong( CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_SECS, 1); conf.setBoolean( CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_BACKGROUND_RELOAD, false); FakeTimer timer = new FakeTimer(); final Groups groups = new Groups(conf, timer); groups.cacheGroupsAdd(Arrays.asList(myGroups)); groups.refresh(); FakeGroupMapping.clearBlackList(); // We make an initial request to populate the cache groups.getGroups("me"); // Further lookups will have a delay FakeGroupMapping.setGetGroupsDelayMs(100); // add another group groups.cacheGroupsAdd(Arrays.asList("grp3")); int startingRequestCount = FakeGroupMapping.getRequestCount(); // Then 
expire that entry timer.advance(4 * 1000); // Now get the cache entry - it should block and return the new // 3 group value assertEquals(groups.getGroups("me").size(), 3); assertEquals(startingRequestCount + 1, FakeGroupMapping.getRequestCount()); } @Test public void testExceptionOnBackgroundRefreshHandled() throws Exception { conf.setLong( CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_SECS, 1); conf.setBoolean( CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_BACKGROUND_RELOAD, true); FakeTimer timer = new FakeTimer(); final Groups groups = new Groups(conf, timer); groups.cacheGroupsAdd(Arrays.asList(myGroups)); groups.refresh(); FakeGroupMapping.clearBlackList(); // We make an initial request to populate the cache groups.getGroups("me"); // add another group groups.cacheGroupsAdd(Arrays.asList("grp3")); int startingRequestCount = FakeGroupMapping.getRequestCount(); // Arrange for an exception to occur only on the // second call FakeGroupMapping.setThrowException(true); // Then expire that entry timer.advance(4 * 1000); // Now get the cache entry - it should return immediately // with the old value and the cache will not have completed // a request to getGroups yet. assertEquals(groups.getGroups("me").size(), 2); assertEquals(startingRequestCount, FakeGroupMapping.getRequestCount()); // Now sleep for a short time and re-check the request count. 
It should have // increased, but the exception means the cache will not have updated Thread.sleep(50); FakeGroupMapping.setThrowException(false); assertEquals(startingRequestCount + 1, FakeGroupMapping.getRequestCount()); assertEquals(groups.getGroups("me").size(), 2); // Now sleep another short time - the 3rd call to getGroups above // will have kicked off another refresh that updates the cache Thread.sleep(50); assertEquals(startingRequestCount + 2, FakeGroupMapping.getRequestCount()); assertEquals(groups.getGroups("me").size(), 3); } @Test public void testEntriesExpireIfBackgroundRefreshFails() throws Exception { conf.setLong( CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_SECS, 1); conf.setBoolean( CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_BACKGROUND_RELOAD, true); FakeTimer timer = new FakeTimer(); final Groups groups = new Groups(conf, timer); groups.cacheGroupsAdd(Arrays.asList(myGroups)); groups.refresh(); FakeGroupMapping.clearBlackList(); // We make an initial request to populate the cache groups.getGroups("me"); // Now make all calls to the FakeGroupMapper throw exceptions FakeGroupMapping.setThrowException(true); // The cache entry expires for refresh after 1 second // It expires for eviction after 1 * 10 seconds after it was last written // So if we call getGroups repeatedly over 9 seconds, 9 refreshes should // be triggered which will fail to update the key, but the keys old value // will be retrievable until it is evicted after about 10 seconds. for(int i=0; i<9; i++) { assertEquals(groups.getGroups("me").size(), 2); timer.advance(1 * 1000); } // Wait until the 11th second. The call to getGroups should throw // an exception as the key will have been evicted and FakeGroupMapping // will throw IO Exception when it is asked for new groups. 
In this case // load must be called synchronously as there is no key present timer.advance(2 * 1000); try { groups.getGroups("me"); fail("Should have thrown an exception here"); } catch (Exception e) { // pass } // Finally check groups are retrieve again after FakeGroupMapping // stops throw exceptions FakeGroupMapping.setThrowException(false); assertEquals(groups.getGroups("me").size(), 2); } @Test public void testBackgroundRefreshCounters() throws IOException, InterruptedException { conf.setLong( CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_SECS, 1); conf.setBoolean( CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_BACKGROUND_RELOAD, true); conf.setInt( CommonConfigurationKeys. HADOOP_SECURITY_GROUPS_CACHE_BACKGROUND_RELOAD_THREADS, 2); FakeTimer timer = new FakeTimer(); final Groups groups = new Groups(conf, timer); groups.cacheGroupsAdd(Arrays.asList(myGroups)); groups.refresh(); FakeGroupMapping.clearBlackList(); // populate the cache String[] grps = {"one", "two", "three", "four", "five"}; for (String g: grps) { groups.getGroups(g); } // expire the cache timer.advance(2*1000); FakeGroupMapping.pause(); // Request all groups again, as there are 2 threads to process them // 3 should get queued and 2 should be running for (String g: grps) { groups.getGroups(g); } waitForGroupCounters(groups, 3, 2, 0, 0); FakeGroupMapping.resume(); // Once resumed, all results should be returned immediately waitForGroupCounters(groups, 0, 0, 5, 0); // Now run again, this time throwing exceptions but no delay timer.advance(2*1000); FakeGroupMapping.setGetGroupsDelayMs(0); FakeGroupMapping.setThrowException(true); for (String g: grps) { groups.getGroups(g); } waitForGroupCounters(groups, 0, 0, 5, 5); } private void waitForGroupCounters(final Groups groups, long expectedQueued, long expectedRunning, long expectedSuccess, long expectedExpection) throws InterruptedException { final long[] expected = {expectedQueued, expectedRunning, expectedSuccess, expectedExpection}; 
final long[] actual = new long[expected.length]; // wait for a certain time until the counters reach // to expected values. Check values in 20 ms interval. try { GenericTestUtils.waitFor(new Supplier<Boolean>() { @Override public Boolean get() { actual[0] = groups.getBackgroundRefreshQueued(); actual[1] = groups.getBackgroundRefreshRunning(); actual[2] = groups.getBackgroundRefreshSuccess(); actual[3] = groups.getBackgroundRefreshException(); return Arrays.equals(actual, expected); } }, 20, 1000); } catch (TimeoutException e) { fail("Excepted group counter values are not reached in given time," + " expecting (Queued, Running, Success, Exception) : " + Arrays.toString(expected) + " but actual : " + Arrays.toString(actual)); } } @Test public void testExceptionCallingLoadWithoutBackgroundRefreshReturnsOldValue() throws Exception { conf.setLong( CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_SECS, 1); conf.setBoolean( CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_BACKGROUND_RELOAD, false); FakeTimer timer = new FakeTimer(); final Groups groups = new Groups(conf, timer); groups.cacheGroupsAdd(Arrays.asList(myGroups)); groups.refresh(); FakeGroupMapping.clearBlackList(); // First populate the cash assertEquals(groups.getGroups("me").size(), 2); // Advance the timer so a refresh is required timer.advance(2 * 1000); // This call should throw an exception FakeGroupMapping.setThrowException(true); assertEquals(groups.getGroups("me").size(), 2); } @Test public void testCacheEntriesExpire() throws Exception { conf.setLong( CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_SECS, 1); FakeTimer timer = new FakeTimer(); final Groups groups = new Groups(conf, timer); groups.cacheGroupsAdd(Arrays.asList(myGroups)); groups.refresh(); FakeGroupMapping.clearBlackList(); // We make an entry groups.getGroups("me"); int startingRequestCount = FakeGroupMapping.getRequestCount(); timer.advance(20 * 1000); // Cache entry has expired so it results in a new fetch 
groups.getGroups("me"); assertEquals(startingRequestCount + 1, FakeGroupMapping.getRequestCount()); } @Test public void testNegativeCacheClearedOnRefresh() throws Exception { conf.setLong( CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS, 100); final Groups groups = new Groups(conf); groups.cacheGroupsAdd(Arrays.asList(myGroups)); groups.refresh(); FakeGroupMapping.clearBlackList(); FakeGroupMapping.addToBlackList("dne"); try { groups.getGroups("dne"); fail("Should have failed to find this group"); } catch (IOException e) { // pass } int startingRequestCount = FakeGroupMapping.getRequestCount(); groups.refresh(); FakeGroupMapping.addToBlackList("dne"); try { List<String> g = groups.getGroups("dne"); fail("Should have failed to find this group"); } catch (IOException e) { // pass } assertEquals(startingRequestCount + 1, FakeGroupMapping.getRequestCount()); } @Test public void testNegativeCacheEntriesExpire() throws Exception { conf.setLong( CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS, 2); FakeTimer timer = new FakeTimer(); // Ensure that stale entries are removed from negative cache every 2 seconds Groups groups = new Groups(conf, timer); groups.cacheGroupsAdd(Arrays.asList(myGroups)); groups.refresh(); // Add both these users to blacklist so that they // can be added to negative cache FakeGroupMapping.addToBlackList("user1"); FakeGroupMapping.addToBlackList("user2"); // Put user1 in negative cache. 
try { groups.getGroups("user1"); fail("Did not throw IOException : Failed to obtain groups" + " from FakeGroupMapping."); } catch (IOException e) { GenericTestUtils.assertExceptionContains("No groups found for user", e); } // Check if user1 exists in negative cache assertTrue(groups.getNegativeCache().contains("user1")); // Advance fake timer timer.advance(1000); // Put user2 in negative cache try { groups.getGroups("user2"); fail("Did not throw IOException : Failed to obtain groups" + " from FakeGroupMapping."); } catch (IOException e) { GenericTestUtils.assertExceptionContains("No groups found for user", e); } // Check if user2 exists in negative cache assertTrue(groups.getNegativeCache().contains("user2")); // Advance timer. Only user2 should be present in negative cache. timer.advance(1100); assertFalse(groups.getNegativeCache().contains("user1")); assertTrue(groups.getNegativeCache().contains("user2")); // Advance timer. Even user2 should not be present in negative cache. timer.advance(1000); assertFalse(groups.getNegativeCache().contains("user2")); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * @author Vladimir N. Molotkov
 * @version $Revision$
 */

package org.apache.harmony.security.tests.java.security.cert;

import java.security.InvalidAlgorithmParameterException;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertPathParameters;
import java.security.cert.CertStore;
import java.security.cert.CollectionCertStoreParameters;
import java.security.cert.PKIXCertPathChecker;
import java.security.cert.PKIXParameters;
import java.security.cert.TrustAnchor;
import java.security.cert.X509CertSelector;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.apache.harmony.security.tests.support.cert.TestUtils;

import junit.framework.TestCase;

/**
 * Tests for <code>PKIXParameters</code> fields and methods.
 *
 * Raw generic types were replaced with parameterized ones
 * (<code>Set&lt;TrustAnchor&gt;</code>, <code>List&lt;PKIXCertPathChecker&gt;</code>,
 * <code>Set&lt;String&gt;</code>) matching the <code>PKIXParameters</code> API;
 * behaviour of the tests is unchanged.
 */
public class PKIXParameters_ImplTest extends TestCase {

    /**
     * Test #1 for <code>PKIXParameters(KeyStore)</code> constructor<br>
     * Assertion: Creates an instance of <code>PKIXParameters</code>
     * that populates the set of most-trusted CAs from the trusted
     * certificate entries contained in the specified <code>KeyStore</code>
     * @throws InvalidAlgorithmParameterException
     * @throws KeyStoreException
     */
    public final void testPKIXParametersKeyStore01() throws Exception {
        KeyStore ks = TestUtils.getKeyStore(true, TestUtils.TRUSTED);
        if (ks == null) {
            fail(getName() + ": not performed (could not create test KeyStore)");
        }
        // use valid parameter - KeyStore containing
        // only trusted X.509 certificates
        CertPathParameters cpp = new PKIXParameters(ks);
        assertTrue(cpp instanceof PKIXParameters);
    }

    /**
     * Test #2 for <code>PKIXParameters(KeyStore)</code> constructor<br>
     * Assertion: Only keystore entries that contain trusted
     * <code>X509Certificates</code> are considered; all other
     * certificate types are ignored
     * @throws InvalidAlgorithmParameterException
     * @throws KeyStoreException
     */
    public final void testPKIXParametersKeyStore02() throws Exception {
        KeyStore ks = TestUtils.getKeyStore(true, TestUtils.TRUSTED_AND_UNTRUSTED);
        if (ks == null) {
            fail(getName() + ": not performed (could not create test KeyStore)");
        }
        // use valid parameter - KeyStore containing
        // both trusted and untrusted X.509 certificates;
        // only the single trusted entry becomes a trust anchor
        PKIXParameters cpp = new PKIXParameters(ks);
        assertEquals("size", 1, cpp.getTrustAnchors().size());
    }

    /**
     * Test #4 for <code>PKIXParameters(KeyStore)</code> constructor<br>
     * Assertion: <code>KeyStoreException</code> -
     * if the <code>keystore</code> has not been initialized
     */
    public final void testPKIXParametersKeyStore04() throws Exception {
        KeyStore ks = TestUtils.getKeyStore(false, 0);
        if (ks == null) {
            fail(getName() + ": not performed (could not create test KeyStore)");
        }
        try {
            // pass not initialized KeyStore
            new PKIXParameters(ks);
            fail("KeyStoreException expected");
        } catch (KeyStoreException e) {
            // expected
        }
    }

    /**
     * Test #5 for <code>PKIXParameters(KeyStore)</code> constructor<br>
     * Assertion: <code>InvalidAlgorithmParameterException</code> -
     * if the <code>keystore</code> does not contain at least one
     * trusted certificate entry
     */
    public final void testPKIXParametersKeyStore05() throws Exception {
        KeyStore ks = TestUtils.getKeyStore(true, TestUtils.UNTRUSTED);
        if (ks == null) {
            fail(getName() + ": not performed (could not create test KeyStore)");
        }
        try {
            // pass KeyStore that does not contain trusted certificates
            new PKIXParameters(ks);
            fail("InvalidAlgorithmParameterException expected");
        } catch (InvalidAlgorithmParameterException e) {
            // expected
        }
    }

    /**
     * Test #5 for <code>setTrustAnchors(Set)</code> method<br>
     * Assertion: <code>Set</code> is copied to protect against
     * subsequent modifications
     * @throws InvalidAlgorithmParameterException
     * @throws KeyStoreException
     */
    public final void testSetTrustAnchors05() throws Exception {
        // use several trusted certs in this test
        KeyStore ks = TestUtils.getKeyStore(true, TestUtils.TRUSTED);
        if (ks == null) {
            fail(getName() + ": not performed (could not create test KeyStore)");
        }
        PKIXParameters p = new PKIXParameters(ks);
        // prepare new Set (typed; was a raw HashSet)
        Set<TrustAnchor> newSet = new HashSet<TrustAnchor>(p.getTrustAnchors());
        Set<TrustAnchor> newSetCopy = new HashSet<TrustAnchor>(newSet);
        // set new Set
        p.setTrustAnchors(newSetCopy);
        // modify set - remove one element
        assertTrue("modified", newSetCopy.remove(newSetCopy.iterator().next()));
        // check that set maintained internally has
        // not been changed by the above modification
        assertEquals("isCopied", newSet, p.getTrustAnchors());
    }

    /**
     * Test #1 for <code>clone()</code> method<br>
     * Assertion: Makes a copy of this <code>PKIXParameters</code> object
     * @throws KeyStoreException
     * @throws InvalidAlgorithmParameterException
     * @throws NoSuchAlgorithmException
     */
    public final void testClone01() throws Exception {
        KeyStore ks = TestUtils.getKeyStore(true, TestUtils.TRUSTED);
        if (ks == null) {
            fail(getName() + ": not performed (could not create test KeyStore)");
        }
        PKIXParameters p1 = new PKIXParameters(ks);
        // set to some non-default values
        p1.setPolicyQualifiersRejected(false);
        p1.setAnyPolicyInhibited(true);
        p1.setExplicitPolicyRequired(true);
        p1.setPolicyMappingInhibited(true);
        p1.setRevocationEnabled(false);
        String sigProviderName = "Some Provider";
        p1.setSigProvider(sigProviderName);
        X509CertSelector x509cs = new X509CertSelector();
        p1.setTargetCertConstraints(x509cs);
        p1.setCertStores(TestUtils.getCollectionCertStoresList());
        PKIXCertPathChecker cpc = TestUtils.getTestCertPathChecker();
        List<PKIXCertPathChecker> l = new ArrayList<PKIXCertPathChecker>();
        assertTrue("addedOk", l.add(cpc));
        p1.setCertPathCheckers(l);
        p1.setDate(new Date(555L));
        Set<String> s = new HashSet<String>();
        s.add("1.2.3.4.5.6.7");
        s.add("1.2.3.4.5.6.8");
        p1.setInitialPolicies(s);
        // TrustAnchors already set

        PKIXParameters p2 = (PKIXParameters)p1.clone();

        // check that objects match
        assertEquals("check1", p1.getPolicyQualifiersRejected(),
                p2.getPolicyQualifiersRejected());
        assertEquals("check2", p1.isAnyPolicyInhibited(),
                p2.isAnyPolicyInhibited());
        assertEquals("check3", p1.isExplicitPolicyRequired(),
                p2.isExplicitPolicyRequired());
        assertEquals("check4", p1.isPolicyMappingInhibited(),
                p2.isPolicyMappingInhibited());
        assertEquals("check5", p1.isRevocationEnabled(),
                p2.isRevocationEnabled());
        assertEquals("check6", p1.getSigProvider(), p2.getSigProvider());
        // just check that not null
        assertNotNull("check7", p2.getTargetCertConstraints());
        assertEquals("check8", p1.getCertStores(), p2.getCertStores());
        // just check that not empty
        assertFalse("check9", p2.getCertPathCheckers().isEmpty());
        assertEquals("check10", p1.getDate(), p2.getDate());
        assertEquals("check11", p1.getInitialPolicies(),
                p2.getInitialPolicies());
        assertEquals("check12", p1.getTrustAnchors(), p2.getTrustAnchors());
    }

    /**
     * Test #2 for <code>clone()</code> method<br>
     * Assertion: Changes to the copy will not affect
     * the original and vice versa
     * @throws KeyStoreException
     * @throws InvalidAlgorithmParameterException
     * @throws NoSuchAlgorithmException
     */
    public final void testClone02() throws Exception {
        PKIXParameters[] p = new PKIXParameters[2];
        KeyStore ks = TestUtils.getKeyStore(true, TestUtils.TRUSTED);
        if (ks == null) {
            fail(getName() + ": not performed (could not create test KeyStore)");
        }
        // iteration 0 mutates the clone, iteration 1 mutates the original;
        // either way the untouched twin must keep its single-element lists
        for (int i = 0; i<p.length; i++) {
            p[i] = new PKIXParameters(ks);
            p[i].setCertStores(TestUtils.getCollectionCertStoresList());
            PKIXCertPathChecker cpc = TestUtils.getTestCertPathChecker();
            List<PKIXCertPathChecker> l = new ArrayList<PKIXCertPathChecker>();
            assertTrue("addedOk", l.add(cpc));
            p[i].setCertPathCheckers(l);
            p[i].setDate(new Date(555L));
            p[(i == 0 ? 1 : 0)] = (PKIXParameters)p[i].clone();
            // modify the first object (original or copy)
            p[1].addCertStore(CertStore.getInstance("Collection",
                    new CollectionCertStoreParameters()));
            p[1].addCertPathChecker(TestUtils.getTestCertPathChecker());
            // check that the second object has not been affected by
            // above modification
            assertTrue("certStores["+i+"]",
                    p[0].getCertStores().size() == 1);
            assertTrue("certPathCheckers["+i+"]",
                    p[0].getCertPathCheckers().size() == 1);
        }
    }

    /**
     * Test for <code>toString()</code> method<br>
     * Assertion: Returns a formatted string describing the parameters
     * @throws InvalidAlgorithmParameterException
     * @throws KeyStoreException
     */
    public final void testToString() throws Exception {
        KeyStore ks = TestUtils.getKeyStore(true, TestUtils.TRUSTED_AND_UNTRUSTED);
        if (ks == null) {
            fail(getName() + ": not performed (could not create test KeyStore)");
        }
        PKIXParameters p = new PKIXParameters(ks);
        assertNotNull(p.toString());
    }
}
/* * Copyright 2006-2011 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.kew.mail; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import mocks.MockEmailNotificationService; import mocks.MockEmailNotificationServiceImpl; import org.junit.Test; import org.kuali.rice.core.api.config.property.ConfigContext; import org.kuali.rice.kew.api.WorkflowDocument; import org.kuali.rice.kew.api.WorkflowDocumentFactory; import org.kuali.rice.kew.api.action.ActionRequestType; import org.kuali.rice.kew.preferences.Preferences; import org.kuali.rice.kew.service.KEWServiceLocator; import org.kuali.rice.kew.test.KEWTestCase; import org.kuali.rice.kew.util.KEWConstants; public class EmailReminderLifecycleTest extends KEWTestCase { private static final String DEFAULT_EMAIL_CRON_WEEKLY = "0 0 2 ? 
* 2"; private static final String DEFAULT_EMAIL_CRON_DAILY = "0 0 1 * * ?"; private EmailReminderLifecycle emailReminderLifecycle; /** * This method used to reset email sending to false for both daily and weekly reminders * * @see org.kuali.rice.test.RiceTestCase#tearDown() */ @Override public void tearDown() throws Exception { ConfigContext.getCurrentContextConfig().putProperty(KEWConstants.DAILY_EMAIL_ACTIVE, "false"); ConfigContext.getCurrentContextConfig().putProperty(KEWConstants.WEEKLY_EMAIL_ACTIVE, "false"); super.tearDown(); } @Test public void testDailyEmails() throws Exception { // fire daily every 2 seconds ConfigContext.getCurrentContextConfig().putProperty(KEWConstants.DAILY_EMAIL_CRON_EXPRESSION, "0/2 * * * * ?"); // turn daily on and weekly off ConfigContext.getCurrentContextConfig().putProperty(KEWConstants.DAILY_EMAIL_ACTIVE, "true"); ConfigContext.getCurrentContextConfig().putProperty(KEWConstants.WEEKLY_EMAIL_ACTIVE, "false"); String ewestfalPrincipalId = getPrincipalIdForName("ewestfal"); String rkirkendPrincipalId = getPrincipalIdForName("rkirkend"); // setup ewestfal to recieve daily emails Preferences prefs = KEWServiceLocator.getPreferencesService().getPreferences(ewestfalPrincipalId); prefs.setEmailNotification(KEWConstants.DAILY); KEWServiceLocator.getPreferencesService().savePreferences(ewestfalPrincipalId, prefs); WorkflowDocument document = WorkflowDocumentFactory.createDocument(rkirkendPrincipalId, "TestDocumentType"); document.adHocToPrincipal(ActionRequestType.APPROVE, "", ewestfalPrincipalId, "", Boolean.TRUE); document.route(""); document = WorkflowDocumentFactory.loadDocument(ewestfalPrincipalId, document.getDocumentId()); assertTrue(document.isApprovalRequested()); int emailsSent = getMockEmailService().immediateReminderEmailsSent("ewestfal", document.getDocumentId(), KEWConstants.ACTION_REQUEST_APPROVE_REQ); assertEquals("ewestfal should have no emails.", 0, emailsSent); 
MockEmailNotificationServiceImpl.SEND_DAILY_REMINDER_CALLED = false; MockEmailNotificationServiceImpl.SEND_WEEKLY_REMINDER_CALLED = false; // let's fire up the lifecycle emailReminderLifecycle = new EmailReminderLifecycle(); emailReminderLifecycle.start(); // sleep for 10 seconds Thread.sleep(10000); // send daily reminder should have now been called assertTrue("daily reminder should have been called.", MockEmailNotificationServiceImpl.SEND_DAILY_REMINDER_CALLED); assertFalse("weekly reminder should NOT have been called.", MockEmailNotificationServiceImpl.SEND_WEEKLY_REMINDER_CALLED); emailReminderLifecycle.stop(); // setting cron to empty so job will cease ConfigContext.getCurrentContextConfig().putProperty(KEWConstants.DAILY_EMAIL_CRON_EXPRESSION, DEFAULT_EMAIL_CRON_DAILY); // try restarting to verify rescheduling of tasks emailReminderLifecycle.start(); emailReminderLifecycle.stop(); } @Test public void testWeeklyEmails() throws Exception { // fire daily every 2 seconds ConfigContext.getCurrentContextConfig().putProperty(KEWConstants.WEEKLY_EMAIL_CRON_EXPRESSION, "0/2 * * * * ?"); // turn weekly on and daily off ConfigContext.getCurrentContextConfig().putProperty(KEWConstants.WEEKLY_EMAIL_ACTIVE, "true"); ConfigContext.getCurrentContextConfig().putProperty(KEWConstants.DAILY_EMAIL_ACTIVE, "false"); String ewestfalPrincipalId = getPrincipalIdForName("ewestfal"); String rkirkendPrincipalId = getPrincipalIdForName("rkirkend"); // setup ewestfal to recieve weekly emails Preferences prefs = KEWServiceLocator.getPreferencesService().getPreferences(ewestfalPrincipalId); prefs.setEmailNotification(KEWConstants.WEEKLY); KEWServiceLocator.getPreferencesService().savePreferences(ewestfalPrincipalId, prefs); WorkflowDocument document = WorkflowDocumentFactory.createDocument(rkirkendPrincipalId, "TestDocumentType"); document.adHocToPrincipal(ActionRequestType.APPROVE, "", ewestfalPrincipalId, "", Boolean.TRUE); document.route(""); document = 
WorkflowDocumentFactory.loadDocument(ewestfalPrincipalId, document.getDocumentId()); assertTrue(document.isApprovalRequested()); int emailsSent = getMockEmailService().immediateReminderEmailsSent("ewestfal", document.getDocumentId(), KEWConstants.ACTION_REQUEST_APPROVE_REQ); assertEquals("ewestfal should have no emails.", 0, emailsSent); MockEmailNotificationServiceImpl.SEND_DAILY_REMINDER_CALLED = false; MockEmailNotificationServiceImpl.SEND_WEEKLY_REMINDER_CALLED = false; // let's fire up the lifecycle emailReminderLifecycle = new EmailReminderLifecycle(); emailReminderLifecycle.start(); // sleep for 10 seconds Thread.sleep(10000); // send weekly reminder should have now been called assertTrue("weekly reminder should have been called.", MockEmailNotificationServiceImpl.SEND_WEEKLY_REMINDER_CALLED); assertFalse("daily reminder should NOT have been called.", MockEmailNotificationServiceImpl.SEND_DAILY_REMINDER_CALLED); emailReminderLifecycle.stop(); // setting cron to empty so job will cease ConfigContext.getCurrentContextConfig().putProperty(KEWConstants.WEEKLY_EMAIL_CRON_EXPRESSION, DEFAULT_EMAIL_CRON_WEEKLY); // try restarting to verify rescheduling of tasks emailReminderLifecycle.start(); emailReminderLifecycle.stop(); } // /** // * Verify that no more messages are put in the queue if there are already weekly and daily reminders in the // * queue // * @throws Exception // */ // @Test // public void testEmailMessagesInQueue() throws Exception { // // setUpConfigForEmail(); // // PersistedMessageBO dailyMessage = getMockDailyMessage(); // PersistedMessageBO weeklyMessage = getMockWeeklyMessage(); // KEWServiceLocator.getRouteQueueService().save(dailyMessage); // KEWServiceLocator.getRouteQueueService().save(weeklyMessage); // // Collection messages = KEWServiceLocator.getRouteQueueService().findAll(); // assertEquals("Should only be 2 items present in queue", 2, messages.size()); // // emailReminderLifecycle.start(); // // messages = 
KEWServiceLocator.getRouteQueueService().findAll(); // assertEquals("Should only be 2 items present in queue", 2, messages.size()); // // PersistedMessageBO fetchedDaily = null; // PersistedMessageBO fetchedWeekly = null; // // for (Iterator iter = messages.iterator(); iter.hasNext();) { // PersistedMessageBO fetchedMessage = (PersistedMessageBO) iter.next(); // if (fetchedMessage.getMethodName().equals("sendDailyReminder")) { // fetchedDaily = fetchedMessage; // } else { // fetchedWeekly = fetchedMessage; // } // } // assertEquals("Daily message was re-inserted or removed when it should have been allowed to stay in queue for later processing", dailyMessage.getRouteQueueId(), fetchedDaily.getRouteQueueId()); // assertEquals("Weekly message was re-inserted or removed when it should have been allowed to stay in queue for later processing", weeklyMessage.getRouteQueueId(), fetchedWeekly.getRouteQueueId()); // assertTrue("Lifecycle should report itself as started", emailReminderLifecycle.isStarted()); // } // // /** // * If only a daily is in the queue then the other reminder should be put in the queue // * // * @throws Exception // */ // @Test public void testOnlyDailyReminderInQueue() throws Exception { // // setUpConfigForEmail(); // // PersistedMessageBO dailyMessage = getMockDailyMessage(); // KEWServiceLocator.getRouteQueueService().save(dailyMessage); // // Collection messages = KEWServiceLocator.getRouteQueueService().findAll(); // assertEquals("Should only be 1 items present in queue", 1, messages.size()); // // emailReminderLifecycle.start(); // // messages = KEWServiceLocator.getRouteQueueService().findAll(); // assertEquals("Should only be 2 items present in queue", 2, messages.size()); // // PersistedMessageBO fetchedDaily = null; // PersistedMessageBO fetchedWeekly = null; // // for (Iterator iter = messages.iterator(); iter.hasNext();) { // PersistedMessageBO fetchedMessage = (PersistedMessageBO) iter.next(); // if 
(fetchedMessage.getMethodName().equals("sendDailyReminder")) { // fetchedDaily = fetchedMessage; // } else { // fetchedWeekly = fetchedMessage; // } // } // assertEquals("Daily message was re-inserted or removed when it should have been allowed to stay in queue for later processing", dailyMessage.getRouteQueueId(), fetchedDaily.getRouteQueueId()); // assertTrue(fetchedWeekly != null); // assertTrue("Lifecycle should report itself as started", emailReminderLifecycle.isStarted()); // } // // /** // * If only a weekly reminder is in the queue then the other reminder should be put in the queue // * // * @throws Exception // */ // @Test public void testOnlyWeeklyReminderInQueue() throws Exception { // // setUpConfigForEmail(); // // PersistedMessageBO weeklyMessage = getMockWeeklyMessage(); // KEWServiceLocator.getRouteQueueService().save(weeklyMessage); // // Collection messages = KEWServiceLocator.getRouteQueueService().findAll(); // assertEquals("Should only be 1 items present in queue", 1, messages.size()); // // emailReminderLifecycle.start(); // // messages = KEWServiceLocator.getRouteQueueService().findAll(); // assertEquals("Should only be 2 items present in queue", 2, messages.size()); // // PersistedMessageBO fetchedDaily = null; // PersistedMessageBO fetchedWeekly = null; // // for (Iterator iter = messages.iterator(); iter.hasNext();) { // PersistedMessageBO fetchedMessage = (PersistedMessageBO) iter.next(); // if (fetchedMessage.getMethodName().equals("sendDailyReminder")) { // fetchedDaily = fetchedMessage; // } else { // fetchedWeekly = fetchedMessage; // } // } // assertEquals("Weekly message was re-inserted or removed when it should have been allowed to stay in queue for later processing", weeklyMessage.getRouteQueueId(), fetchedWeekly.getRouteQueueId()); // assertTrue("Daily message not sent", fetchedDaily != null); // assertTrue("Lifecycle should report itself as started", emailReminderLifecycle.isStarted()); // // } // // /** // * Tests that email 
reminder calls are sent to the queue when none are present. New messages should // * be set for the proper delay. // * // * @throws Exception // */ // @Test public void testNoEmailRemindersInQueue() throws Exception { // // setUpConfigForEmail(); // // emailReminderLifecycle.start(); // Collection messages = KEWServiceLocator.getRouteQueueService().findAll(); // assertEquals("Should only be 2 items present in queue", 2, messages.size()); // PersistedMessageBO fetchedDaily = null; // PersistedMessageBO fetchedWeekly = null; // // for (Iterator iter = messages.iterator(); iter.hasNext();) { // PersistedMessageBO fetchedMessage = (PersistedMessageBO) iter.next(); // if (fetchedMessage.getMethodName().equals("sendDailyReminder")) { // fetchedDaily = fetchedMessage; // } else { // fetchedWeekly = fetchedMessage; // } // } // assertNotNull("No daily message sent", fetchedDaily); // assertNotNull("No weekly message sent", fetchedWeekly); // // assertTrue("Daily message not sent", fetchedDaily != null); // assertTrue("Weekly message not sent", fetchedWeekly != null); // assertTrue("Lifecycle should report itself as started", emailReminderLifecycle.isStarted()); // // // AsynchronousCall methodCall = (AsynchronousCall)KSBServiceLocator.getMessageHelper().deserializeObject(fetchedWeekly.getPayload()); // assertEquals("Weekly email not on a weekly delay", EmailReminderLifecycle.WEEKLY_DELAY, methodCall.getRepeatCallTimeIncrement().longValue()); // // methodCall = (AsynchronousCall)KSBServiceLocator.getMessageHelper().deserializeObject(fetchedDaily.getPayload()); // assertEquals("Weekly email not on a weekly delay", EmailReminderLifecycle.DAILY_DELAY, methodCall.getRepeatCallTimeIncrement().longValue()); // } // // /** // * the lifecycle should not blow up if this ip is the designated emailer but no email options are sent. it should // * do nothing and report started. 
// * // * @throws Exception // */ // @Test public void testNoEmailDatesInConfig() throws Exception { // KEWServiceLocator.getApplicationConstantsService().save(new ApplicationConstant(KEWConstants.APP_CONST_EMAIL_FIRST_SEND_IP_KEY, Utilities.getIpNumber())); // // Config config = ConfigContext.getCurrentContextConfig(); // config.getProperties().remove(Config.FIRST_DAILY_EMAIL_DELIVERY_DATE); // config.getProperties().remove(Config.FIRST_WEEKLY_EMAIL_DELIVERY_DATE); // emailReminderLifecycle.start(); // Collection messages = KEWServiceLocator.getRouteQueueService().findAll(); // assertEquals("Should not be items present in queue", 0, messages.size()); // // assertTrue("Lifecycle should report itself as started", emailReminderLifecycle.isStarted()); // emailReminderLifecycle.stop(); // assertFalse("Lifecycle should not report itself as started", emailReminderLifecycle.isStarted()); // } // // /** // * Keep the threadpool on and synchronous. Start the lifecycle and verify that // * the action list email service got called. 
// * @throws Exception // */ // @Test public void testActionListEmailServiceBeingCalled() throws Exception { // KEWServiceLocator.getApplicationConstantsService().save(new ApplicationConstant(KEWConstants.APP_CONST_EMAIL_FIRST_SEND_IP_KEY, Utilities.getIpNumber())); // Config config = ConfigContext.getCurrentContextConfig(); // config.overrideProperty(Config.FIRST_DAILY_EMAIL_DELIVERY_DATE, DAILY_REMINDER_DATE); // config.overrideProperty(Config.FIRST_WEEKLY_EMAIL_DELIVERY_DATE, WEEKLY_REMINDER_DATE); // emailReminderLifecycle.start(); // assertTrue("Send daily not called on email notification service", MockEmailNotificationServiceImpl.SEND_DAILY_REMINDER_CALLED); // assertTrue("Send weekly not called on email notification service", MockEmailNotificationServiceImpl.SEND_WEEKLY_REMINDER_CALLED); // } // // private void setUpConfigForEmail() throws Exception { // KEWServiceLocator.getApplicationConstantsService().save(new ApplicationConstant(KEWConstants.APP_CONST_EMAIL_FIRST_SEND_IP_KEY, Utilities.getIpNumber())); // // Config config = ConfigContext.getCurrentContextConfig(); // config.overrideProperty(Config.FIRST_DAILY_EMAIL_DELIVERY_DATE, DAILY_REMINDER_DATE); // config.overrideProperty(Config.FIRST_WEEKLY_EMAIL_DELIVERY_DATE, WEEKLY_REMINDER_DATE); // // } // // // private PersistedMessageBO getMockDailyMessage() throws Exception { // PersistedMessageBO dailyMessage = new PersistedMessageBO(); // dailyMessage.setServiceName(emailReminderLifecycle.getEmailServiceName().toString()); // dailyMessage.setMethodName("sendDailyReminder"); // dailyMessage.setQueueDate(new Timestamp(System.currentTimeMillis())); // dailyMessage.setQueuePriority(1); // dailyMessage.setQueueStatus("Q"); // dailyMessage.setRetryCount(1); // dailyMessage.setIpNumber(Utilities.getIpNumber()); // dailyMessage.setApplicationId("KEW"); // dailyMessage.setPayload(KSBServiceLocator.getMessageHelper().serializeObject("payload")); // return dailyMessage; // } // // private PersistedMessageBO 
getMockWeeklyMessage() throws Exception { // PersistedMessageBO weeklyMessage = new PersistedMessageBO(); // weeklyMessage.setServiceName(emailReminderLifecycle.getEmailServiceName().toString()); // weeklyMessage.setQueueDate(new Timestamp(System.currentTimeMillis())); // weeklyMessage.setMethodName("sendWeeklyReminder"); // weeklyMessage.setQueuePriority(1); // weeklyMessage.setQueueStatus("Q"); // weeklyMessage.setRetryCount(1); // weeklyMessage.setIpNumber(Utilities.getIpNumber()); // weeklyMessage.setApplicationId("KEW"); // weeklyMessage.setPayload(KSBServiceLocator.getMessageHelper().serializeObject("payload")); // return weeklyMessage; // } private MockEmailNotificationService getMockEmailService() { return (MockEmailNotificationService)KEWServiceLocator.getActionListEmailService(); } }
/* * Copyright (c) Facebook, Inc. and its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.buck.apple; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.startsWith; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assume.assumeTrue; import com.facebook.buck.apple.toolchain.ApplePlatform; import com.facebook.buck.core.config.BuckConfig; import com.facebook.buck.core.config.FakeBuckConfig; import com.facebook.buck.core.exceptions.HumanReadableException; import com.facebook.buck.util.FakeProcess; import com.facebook.buck.util.FakeProcessExecutor; import com.facebook.buck.util.ProcessExecutorParams; import com.facebook.buck.util.environment.Platform; import com.facebook.buck.util.zip.ZipCompressionLevel; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.nio.file.Path; import java.nio.file.Paths; import java.time.Duration; import java.util.Optional; import java.util.function.Supplier; import org.hamcrest.junit.ExpectedException; import org.junit.Before; import org.junit.Rule; import org.junit.Test; public class AppleConfigTest { @Rule public ExpectedException thrown = 
ExpectedException.none(); @Before public void setUp() { assumeTrue(Platform.detect() == Platform.MACOS || Platform.detect() == Platform.LINUX); } @Test public void getUnspecifiedAppleDeveloperDirectorySupplier() { BuckConfig buckConfig = FakeBuckConfig.builder().build(); AppleConfig config = buckConfig.getView(AppleConfig.class); assertNotNull(config.getAppleDeveloperDirectorySupplier(new FakeProcessExecutor())); } @Test public void getExtraAppleDeveloperDirectories() { BuckConfig buckConfig = FakeBuckConfig.builder() .setSections( ImmutableMap.of( "apple", ImmutableMap.of( "extra_toolchain_paths", "/path/to/somewhere/Toolchain", "extra_platform_paths", "/path/to/somewhere/Platform"))) .build(); AppleConfig config = buckConfig.getView(AppleConfig.class); ImmutableList<Path> extraToolchainPaths = config.getExtraToolchainPaths(); ImmutableList<Path> extraPlatformPaths = config.getExtraPlatformPaths(); assertEquals(ImmutableList.of(Paths.get("/path/to/somewhere/Toolchain")), extraToolchainPaths); assertEquals(ImmutableList.of(Paths.get("/path/to/somewhere/Platform")), extraPlatformPaths); } @Test public void getXcodeSelectDetectedAppleDeveloperDirectorySupplier() { BuckConfig buckConfig = FakeBuckConfig.builder().build(); AppleConfig config = buckConfig.getView(AppleConfig.class); ProcessExecutorParams xcodeSelectParams = ProcessExecutorParams.builder() .setCommand(ImmutableList.of("xcode-select", "--print-path")) .build(); FakeProcess fakeXcodeSelect = new FakeProcess(0, "/path/to/another/place", ""); FakeProcessExecutor processExecutor = new FakeProcessExecutor(ImmutableMap.of(xcodeSelectParams, fakeXcodeSelect)); Supplier<Optional<Path>> supplier = config.getAppleDeveloperDirectorySupplier(processExecutor); assertNotNull(supplier); assertEquals(Optional.of(Paths.get("/path/to/another/place")), supplier.get()); } @Test public void packageConfigCommandWithoutExtensionShouldThrow() { AppleConfig config = FakeBuckConfig.builder() .setSections("[apple]", 
"iphoneos_package_command = echo") .build() .getView(AppleConfig.class); thrown.expect(HumanReadableException.class); thrown.expectMessage(containsString("be both specified, or be both omitted")); config.getPackageConfigForPlatform(ApplePlatform.IPHONEOS); } @Test public void packageConfigExtensionWithoutCommandShouldThrow() { AppleConfig config = FakeBuckConfig.builder() .setSections("[apple]", "iphoneos_package_extension = api") .build() .getView(AppleConfig.class); thrown.expect(HumanReadableException.class); thrown.expectMessage(containsString("be both specified, or be both omitted")); config.getPackageConfigForPlatform(ApplePlatform.IPHONEOS); } @Test public void packageConfigTreatsEmptyStringAsOmitted() { AppleConfig config = FakeBuckConfig.builder() .setSections("[apple]", "iphoneos_package_extension = ", "iphoneos_package_command = ") .build() .getView(AppleConfig.class); assertThat( config.getPackageConfigForPlatform(ApplePlatform.IPHONEOS), equalTo(Optional.empty())); } @Test public void packageConfigExtractsValuesFromConfig() { AppleConfig config = FakeBuckConfig.builder() .setSections( "[apple]", "iphoneos_package_extension = api", "iphoneos_package_command = echo $OUT") .build() .getView(AppleConfig.class); Optional<AppleConfig.ApplePackageConfig> packageConfig = config.getPackageConfigForPlatform(ApplePlatform.IPHONEOS); assertThat(packageConfig.get().getCommand(), equalTo("echo $OUT")); assertThat(packageConfig.get().getExtension(), equalTo("api")); } @Test public void getOverridenCompressionLevel() { BuckConfig buckConfig = FakeBuckConfig.builder() .setSections(ImmutableMap.of("apple", ImmutableMap.of("ipa_compression_level", "Max"))) .build(); AppleConfig config = buckConfig.getView(AppleConfig.class); ZipCompressionLevel compressionLevel = config.getZipCompressionLevel(); assertEquals(ZipCompressionLevel.MAX, compressionLevel); } @Test public void getDefaultCompressionLevel() { BuckConfig buckConfig = FakeBuckConfig.builder() 
.setSections(ImmutableMap.of("apple", ImmutableMap.of("ipa_compression_level", ""))) .build(); AppleConfig config = buckConfig.getView(AppleConfig.class); ZipCompressionLevel compressionLevel = config.getZipCompressionLevel(); assertEquals(ZipCompressionLevel.DEFAULT, compressionLevel); } @Test public void testNegativeCodesignTimout() { /* negative values should throw */ AppleConfig config = FakeBuckConfig.builder() .setSections("[apple]", "codesign_timeout = -1") .build() .getView(AppleConfig.class); HumanReadableException exception = null; try { config.getCodesignTimeout(); } catch (HumanReadableException e) { exception = e; } assertThat(exception, notNullValue()); assertThat( "Should throw exceptions for negative timeouts.", exception.getHumanReadableErrorMessage(), startsWith("negative timeout")); } @Test public void testDefaultCodesignTimeout() { /* make sure that we have a sane default of 300s when the value is not specified */ AppleConfig config = FakeBuckConfig.builder().build().getView(AppleConfig.class); assertThat(config.getCodesignTimeout(), equalTo(Duration.ofSeconds(300))); } @Test public void testShouldWorkAroundDsymutilLTOStackOverflowBug() { AppleConfig configExplicitTrue = FakeBuckConfig.builder() .setSections("[apple]", "work_around_dsymutil_lto_stack_overflow_bug = true") .build() .getView(AppleConfig.class); assertTrue(configExplicitTrue.shouldWorkAroundDsymutilLTOStackOverflowBug()); AppleConfig configExplicitFalse = FakeBuckConfig.builder() .setSections("[apple]", "work_around_dsymutil_lto_stack_overflow_bug = false") .build() .getView(AppleConfig.class); assertFalse(configExplicitFalse.shouldWorkAroundDsymutilLTOStackOverflowBug()); AppleConfig configUnset = FakeBuckConfig.builder().build().getView(AppleConfig.class); assertFalse(configUnset.shouldWorkAroundDsymutilLTOStackOverflowBug()); } }
/*
 * Copyright 2008 Tim Jansen
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.actorsguildframework.internal;

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.locks.ReentrantLock;

import org.actorsguildframework.Agent;
import org.actorsguildframework.internal.util.FastQueue;
import org.actorsguildframework.internal.util.L;

/**
 * Main class. Controls threads as well as the list of actors that have work to do.
 */
public final class ControllerImplementation implements Controller {
	/**
	 * Logger for this class.
	 */
	private final static L log = new L(ControllerImplementation.class);

	/**
	 * Contains the Agent (interface) of the controller.
	 */
	private final Agent agent;

	/**
	 * An executor (for thread-pooling) to use.
	 */
	private final ExecutorService executor;

	/**
	 * Lock for accessing an ActorState managed by this Controller or the mActorsWithWork list.
	 */
	private final ReentrantLock actorLock = new ReentrantLock();

	/**
	 * Contains a list of all actors that have unprocessed messages. Note that some
	 * actors may be busy and are unable to execute a message. It is also not guaranteed
	 * that the list is always complete, for implementation reasons
	 * (in {@link ActorState#reacquireBusyLock(MessageInvocation)}) there is a small lag.
	 *
	 * Threads should pick up the first ActorState that can be executed. If they find a task,
	 * they should put it at the end. New entries should be inserted at the end as well.
	 *
	 * Locking policy: you must synchronize actorLock before accessing this queue.
	 */
	private final FastQueue<ActorState> actorsWithWork = new FastQueue<ActorState>();

	/**
	 * Returns the maximum number of physical worker threads to run.
	 */
	final int maxPhysicalWorker;

	/**
	 * Returns the maximum number of effective worker threads to run.
	 */
	final int maxEffectiveWorker;

	/**
	 * If enabled, the Agent will log all messages that Agents send
	 * (using Java's logging system as INFO messages).
	 */
	private final boolean logActions;

	/**
	 * Lock for accessing threadStatistics and activeThreads.
	 * Declared final: an object used as a monitor must never be reassigned.
	 * Anti-Deadlock: You may lock this after the actorLock, but never before!
	 */
	private final Object threadLock = new Object();

	/**
	 * An array that counts threads in the {@link WorkerState} states.
	 * The index corresponds to the WorkerState ordinal number.
	 * The reference is final; only the element values change.
	 *
	 * Locking policy: you must synchronize threadLock before accessing this queue.
	 */
	private final int[] threadStatistics = new int[WorkerState.values().length];

	/**
	 * Counts the number of threads that the Controller is currently managing.
	 *
	 * Locking policy: you must synchronize threadLock before accessing this field.
	 */
	private int workerThreads;

	/**
	 * Returns the number of additional parallel tasks that could be processed, if there were
	 * enough threads for this.
	 *
	 * Locking policy: you must synchronize threadLock before accessing this field.
	 */
	private int numberOfOpenParallelTasks;

	/**
	 * Returns the number of threads that should terminate themselves in order to get
	 * have an optimal number of threads running
	 *
	 * Locking policy: you must synchronize threadLock before accessing this field.
	 */
	private int numberOfThreadsToKill;

	/**
	 * Creates a new controller.
	 * @param agent the agent of the controller. Possible not initialized yet.
	 * @param threadFactory the ThreadFactory to use
	 * @param maxPhysicalWorker the maximum number of physical worker threads to run
	 * @param maxEffectiveWorker the maximum number of effective worker threads to run
	 * @param logActions if true, actions like messages will be logged
	 */
	public ControllerImplementation(Agent agent, ThreadFactory threadFactory,
			int maxPhysicalWorker, int maxEffectiveWorker, boolean logActions) {
		this.agent = agent;
		this.executor = Executors.newCachedThreadPool(threadFactory);
		this.maxPhysicalWorker = maxPhysicalWorker;
		this.maxEffectiveWorker = maxEffectiveWorker;
		this.logActions = logActions;
	}

	/* (non-Javadoc)
	 * @see org.actorsguildframework.internal.Controller#getNextFromQueueUnsynchronized()
	 */
	public ActorState getNextFromQueueUnsynchronized() throws InterruptedException {
		return actorsWithWork.rotate();
	}

	/**
	 * Returns the number of effective threads running in the system.
	 * This is the formula to estimate the number of threads that is really running
	 * (ignoring waiting threads, not counting I/O threads fully).
	 *
	 * You must be synchronized on threadLock before calling this!
	 * @return the number of effective threads
	 */
	private int getEffectiveThreadsUnsynchronized() {
		// I/O-bound threads count 1/8, externally-waiting threads 1/128
		return threadStatistics[WorkerState.Running.ordinal()] +
			threadStatistics[WorkerState.RunningIO.ordinal()] / 8 +
			threadStatistics[WorkerState.WaitingExternal.ordinal()] / 128;
	}

	/**
	 * Returns the number of threads by which the number of worker threads should change.
	 * If the number is positive, new threads should be created (or, rather, woken up
	 * from the pool). If it is negative, threads should be terminated (or returned to the pool).
	 *
	 * You must be synchronized on threadLock before calling this!
	 * @return the number of threads needed
	 */
	private int getThreadNumberCorrectionUnsynchronized() {
		final int n = workerThreads - numberOfThreadsToKill;
		if (n > maxPhysicalWorker)
			return maxPhysicalWorker - n;
		final int e = getEffectiveThreadsUnsynchronized();
		if (e > maxEffectiveWorker)
			return -Math.min(n, e - maxEffectiveWorker);
		return Math.min(Math.min(numberOfOpenParallelTasks, maxPhysicalWorker - n),
				maxEffectiveWorker - e);
	}

	/**
	 * Creates the given number of threads.
	 *
	 * You must be synchronized on threadLock before calling this!
	 * @param n the number of threads to create
	 */
	private void createThreadsUnsynchronized(int n) {
		int c = 0; // number of workers actually submitted; may stop short on rejection
		try {
			for (int i = 0; i < n; i++) {
				executor.execute(new Worker(this, createKeepRunningInterface()));
				c++;
			}
		} catch (RejectedExecutionException e) {
			log.info("Thread pool size increase has been rejected by ThreadPoolExecutor. Current pool size is %d. This can happen if too many threads need to be created by the scheduler.", workerThreads);
		}
		workerThreads += c;
		threadStatistics[WorkerState.Running.ordinal()] += c;
	}

	/**
	 * Adds or kills threads to have the right number running.
	 *
	 * You must be synchronized on threadLock before calling this!
	 */
	private void correctWorkerThreadsUnsynchronized() {
		int c = getThreadNumberCorrectionUnsynchronized();
		if (c > 0) {
			// prefer rescuing threads already marked for death over creating new ones
			if (numberOfThreadsToKill > 0) {
				int k = Math.min(c, numberOfThreadsToKill);
				numberOfThreadsToKill -= k;
				c -= k;
			}
			if (c > 0)
				createThreadsUnsynchronized(c);
		}
		else if (c < 0)
			numberOfThreadsToKill += -c;
	}

	/* (non-Javadoc)
	 * @see org.actorsguildframework.internal.Controller#updateActorStateQueueUnsynchronized(org.actorsguildframework.internal.ActorState, int, int)
	 */
	public void updateActorStateQueueUnsynchronized(ActorState actorState,
			int oldNumberOfOpenParallelTasks, int newNumberOfOpenParallelTasks) {
		boolean shouldBeInQueue = (newNumberOfOpenParallelTasks > 0);
		if (shouldBeInQueue != actorsWithWork.isInQueue(actorState)) {
			if (shouldBeInQueue)
				actorsWithWork.add(actorState);
			else
				actorsWithWork.remove(actorState);
		}

		synchronized (threadLock) {
			numberOfOpenParallelTasks += (newNumberOfOpenParallelTasks - oldNumberOfOpenParallelTasks);
			correctWorkerThreadsUnsynchronized();
		}
	}

	/* (non-Javadoc)
	 * @see org.actorsguildframework.internal.Controller#changeWorkerThreadState(org.actorsguildframework.internal.WorkerState, org.actorsguildframework.internal.WorkerState)
	 */
	public void changeWorkerThreadState(WorkerState oldState, WorkerState newState) {
		if (oldState == newState)
			return;
		synchronized (threadLock) {
			threadStatistics[oldState.ordinal()]--;
			threadStatistics[newState.ordinal()]++;
			correctWorkerThreadsUnsynchronized();
		}
	}

	/* (non-Javadoc)
	 * @see org.actorsguildframework.internal.Controller#removeWorkerThreadState(org.actorsguildframework.internal.WorkerState)
	 */
	public void removeWorkerThreadState(WorkerState oldState) {
		synchronized (threadLock) {
			threadStatistics[oldState.ordinal()]--;
			workerThreads--;
			correctWorkerThreadsUnsynchronized();
		}
	}

	/* (non-Javadoc)
	 * @see org.actorsguildframework.internal.Controller#getAgent()
	 */
	public Agent getAgent() {
		return agent;
	}

	/* (non-Javadoc)
	 * @see org.actorsguildframework.internal.Controller#createKeepRunningInterface()
	 */
	public KeepRunningInterface createKeepRunningInterface() {
		return new KeepRunningInterface() {
			// each interface instance can be killed at most once
			private boolean gotKilled = false;

			public boolean shouldContinue() {
				synchronized (threadLock) {
					if (gotKilled)
						return false;
					if (numberOfThreadsToKill == 0)
						return true;
					numberOfThreadsToKill--;
					gotKilled = true;
					return false;
				}
			}
		};
	}

	/* (non-Javadoc)
	 * @see org.actorsguildframework.internal.Controller#isLoggingActions()
	 */
	public boolean isLoggingActions() {
		return logActions;
	}

	/**
	 * Returns the actor lock for accessing an ActorState of this Controller or and the
	 * mActorsWithWork list.
	 * @return the actor lock
	 */
	public ReentrantLock getActorLock() {
		return actorLock;
	}

	/*
	 * (non-Javadoc)
	 * @see org.actorsguildframework.internal.Controller#shutdown()
	 */
	public void shutdown() {
		actorLock.lock();
		try {
			actorsWithWork.clear();
			executor.shutdownNow();
		} finally {
			actorLock.unlock();
		}
	}
}
package com.synopsys.integration.blackduck.configuration;

import static org.junit.jupiter.api.Assertions.*;

import java.net.URL;

import org.apache.commons.lang3.math.NumberUtils;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

import com.synopsys.integration.blackduck.TimingExtension;
import com.synopsys.integration.blackduck.rest.IntHttpClientTestHelper;
import com.synopsys.integration.blackduck.rest.TestingPropertyKey;
import com.synopsys.integration.log.SilentIntLogger;

/**
 * Integration tests for {@link BlackDuckServerConfigBuilder}: building a config with and
 * without a proxy, connecting with valid/invalid credentials and API tokens, and
 * validating URL/timeout/proxy handling. Requires a live Black Duck server and the
 * test properties supplied via {@link IntHttpClientTestHelper}.
 */
@Tag("integration")
@ExtendWith(TimingExtension.class)
public class BlackDuckServerConfigBuilderTestIT {
    private static final IntHttpClientTestHelper INT_HTTP_CLIENT_TEST_HELPER = new IntHttpClientTestHelper();

    private static final String URL = BlackDuckServerConfigBuilderTestIT.INT_HTTP_CLIENT_TEST_HELPER.getIntegrationBlackDuckServerUrl();
    private static final String USERNAME = BlackDuckServerConfigBuilderTestIT.INT_HTTP_CLIENT_TEST_HELPER.getTestUsername();
    private static final String PASSWORD = BlackDuckServerConfigBuilderTestIT.INT_HTTP_CLIENT_TEST_HELPER.getTestPassword();
    private static final String PROXY_PASSTHROUGH_HOST = BlackDuckServerConfigBuilderTestIT.INT_HTTP_CLIENT_TEST_HELPER.getProperty(TestingPropertyKey.TEST_PROXY_HOST_PASSTHROUGH);
    private static final int PROXY_PASSTHROUGH_PORT = NumberUtils.toInt(BlackDuckServerConfigBuilderTestIT.INT_HTTP_CLIENT_TEST_HELPER.getProperty(TestingPropertyKey.TEST_PROXY_PORT_PASSTHROUGH));
    private static final int TIMEOUT = 120;

    // Shared expected failure text; previously duplicated as two inline literals.
    private static final String UNKNOWN_REASON_FAILURE_MESSAGE =
        "The connection was not successful for an unknown reason. If an api token is being used, it could be incorrect.";

    @Test
    public void testValidConfigWithProxies() throws Exception {
        BlackDuckServerConfigBuilder builder = new BlackDuckServerConfigBuilder();
        setBuilderDefaults(builder);
        setBuilderProxyDefaults(builder);
        BlackDuckServerConfig config = builder.build();

        String blackDuckServer = BlackDuckServerConfigBuilderTestIT.URL;
        assertEquals(new URL(blackDuckServer).getHost(), config.getBlackDuckUrl().getHost());
        assertEquals("User", config.getCredentials().get().getUsername().get());
        assertEquals("Pass", config.getCredentials().get().getPassword().get());
        assertEquals(BlackDuckServerConfigBuilderTestIT.PROXY_PASSTHROUGH_HOST, config.getProxyInfo().getHost().get());
        assertEquals(BlackDuckServerConfigBuilderTestIT.PROXY_PASSTHROUGH_PORT, config.getProxyInfo().getPort());
        assertTrue(config.getProxyInfo().shouldUseProxy());
    }

    @Test
    public void testValidConfigWithProxiesNoProxy() throws Exception {
        BlackDuckServerConfigBuilder builder = new BlackDuckServerConfigBuilder();
        setBuilderDefaults(builder);
        // Explicitly clear every proxy field so the built config reports no proxy.
        builder.setProxyPort(0);
        builder.setProxyHost(null);
        builder.setProxyNtlmDomain(null);
        builder.setProxyNtlmWorkstation(null);
        builder.setProxyUsername(null);
        builder.setProxyPassword(null);
        BlackDuckServerConfig config = builder.build();

        assertEquals(new URL(BlackDuckServerConfigBuilderTestIT.URL).getHost(), config.getBlackDuckUrl().getHost());
        assertEquals("User", config.getCredentials().get().getUsername().get());
        assertEquals("Pass", config.getCredentials().get().getPassword().get());
        assertFalse(config.getProxyInfo().shouldUseProxy());
    }

    @Test
    public void testValidCanConnect() {
        BlackDuckServerConfigBuilder builder = new BlackDuckServerConfigBuilder();
        setValidDefaults(builder);
        BlackDuckServerConfig blackDuckServerConfig = builder.build();

        assertTrue(blackDuckServerConfig.canConnect());
        ConnectionResult connectionResult = blackDuckServerConfig.attemptConnection(new SilentIntLogger());
        assertTrue(connectionResult.isSuccess());
        assertFalse(connectionResult.getFailureMessage().isPresent());
    }

    @Test
    public void testInvalidUrlCanNotConnect() {
        BlackDuckServerConfigBuilder builder = new BlackDuckServerConfigBuilder();
        setValidDefaults(builder);
        // A reachable URL that is not a Black Duck server fails with the generic message.
        builder.setUrl("https://www.google.com");
        BlackDuckServerConfig blackDuckServerConfig = builder.build();

        assertFalse(blackDuckServerConfig.canConnect());
        ConnectionResult connectionResult = blackDuckServerConfig.attemptConnection(new SilentIntLogger());
        assertFalse(connectionResult.isSuccess());
        assertEquals(UNKNOWN_REASON_FAILURE_MESSAGE, connectionResult.getFailureMessage().get());
    }

    @Test
    public void testInvalidPasswordCanNotConnect() {
        BlackDuckServerConfigBuilder builder = new BlackDuckServerConfigBuilder();
        setValidDefaults(builder);
        builder.setPassword("not a real password");
        BlackDuckServerConfig blackDuckServerConfig = builder.build();

        assertFalse(blackDuckServerConfig.canConnect());
        ConnectionResult connectionResult = blackDuckServerConfig.attemptConnection(new SilentIntLogger());
        assertFalse(connectionResult.isSuccess());
        assertEquals("Invalid username or password", connectionResult.getFailureMessage().get());
    }

    @Test
    public void testInvalidApiTokenCanNotConnect() {
        BlackDuckServerConfigBuilder builder = new BlackDuckServerConfigBuilder();
        setValidDefaults(builder);
        // Token auth replaces username/password entirely.
        builder.setUsername(null);
        builder.setPassword(null);
        builder.setApiToken("not a real token");
        BlackDuckServerConfig blackDuckServerConfig = builder.build();

        assertFalse(blackDuckServerConfig.canConnect());
        ConnectionResult connectionResult = blackDuckServerConfig.attemptConnection(new SilentIntLogger());
        assertFalse(connectionResult.isSuccess());
        assertEquals(UNKNOWN_REASON_FAILURE_MESSAGE, connectionResult.getFailureMessage().get());
    }

    @Test
    public void testValidBuild() throws Exception {
        BlackDuckServerConfigBuilder builder = new BlackDuckServerConfigBuilder();
        setValidDefaults(builder);
        BlackDuckServerConfig config = builder.build();

        assertEquals(new URL(BlackDuckServerConfigBuilderTestIT.URL).getHost(), config.getBlackDuckUrl().getHost());
        assertEquals(BlackDuckServerConfigBuilderTestIT.TIMEOUT, config.getTimeout());
        assertEquals(BlackDuckServerConfigBuilderTestIT.USERNAME, config.getCredentials().get().getUsername().get());
        assertEquals(BlackDuckServerConfigBuilderTestIT.PASSWORD, config.getCredentials().get().getPassword().get());
    }

    @Test
    public void testValidBuildTimeoutString() throws Exception {
        BlackDuckServerConfigBuilder builder = new BlackDuckServerConfigBuilder();
        setValidDefaults(builder);
        // FIX: actually exercise the String overload of setTimeout; previously this
        // test never set a string timeout and only observed the default value.
        builder.setTimeout("120");
        BlackDuckServerConfig config = builder.build();

        assertEquals(new URL(BlackDuckServerConfigBuilderTestIT.URL).getHost(), config.getBlackDuckUrl().getHost());
        assertEquals(120, config.getTimeout());
        assertEquals(BlackDuckServerConfigBuilderTestIT.USERNAME, config.getCredentials().get().getUsername().get());
        assertEquals(BlackDuckServerConfigBuilderTestIT.PASSWORD, config.getCredentials().get().getPassword().get());
    }

    @Test
    public void testValidBuildWithProxy() throws Exception {
        BlackDuckServerConfigBuilder builder = new BlackDuckServerConfigBuilder();
        setValidDefaults(builder);
        builder.setProxyHost(BlackDuckServerConfigBuilderTestIT.PROXY_PASSTHROUGH_HOST);
        builder.setProxyPort(BlackDuckServerConfigBuilderTestIT.PROXY_PASSTHROUGH_PORT);
        BlackDuckServerConfig config = builder.build();

        assertEquals(new URL(BlackDuckServerConfigBuilderTestIT.URL).getHost(), config.getBlackDuckUrl().getHost());
        assertEquals(BlackDuckServerConfigBuilderTestIT.TIMEOUT, config.getTimeout());
        assertEquals(BlackDuckServerConfigBuilderTestIT.USERNAME, config.getCredentials().get().getUsername().get());
        assertEquals(BlackDuckServerConfigBuilderTestIT.PASSWORD, config.getCredentials().get().getPassword().get());
        assertEquals(BlackDuckServerConfigBuilderTestIT.PROXY_PASSTHROUGH_HOST, config.getProxyInfo().getHost().get());
        assertEquals(BlackDuckServerConfigBuilderTestIT.PROXY_PASSTHROUGH_PORT, config.getProxyInfo().getPort());
    }

    @Test
    public void testUrlwithTrailingSlash() throws Exception {
        BlackDuckServerConfigBuilder builder = new BlackDuckServerConfigBuilder();
        setValidDefaults(builder);
        BlackDuckServerConfig config = builder.build();

        // The builder normalizes the URL: no trailing slash, https, default port.
        assertFalse(config.getBlackDuckUrl().toString().endsWith("/"));
        assertEquals("https", config.getBlackDuckUrl().getProtocol());
        assertEquals(new URL(BlackDuckServerConfigBuilderTestIT.URL).getHost(), config.getBlackDuckUrl().getHost());
        assertEquals(-1, config.getBlackDuckUrl().getPort());
    }

    @Test
    public void testValidBuildWithProxyPortZero() {
        BlackDuckServerConfigBuilder builder = new BlackDuckServerConfigBuilder();
        setValidDefaults(builder);
        BlackDuckServerConfig config = builder.build();
        assertFalse(config.shouldUseProxyForBlackDuck());

        builder.setProxyPort(0);
        config = builder.build();
        assertFalse(config.shouldUseProxyForBlackDuck());

        builder.setProxyPort("0");
        config = builder.build();
        assertFalse(config.shouldUseProxyForBlackDuck());

        // A non-zero port with no host is an invalid proxy configuration.
        builder.setProxyPort(1);
        try {
            builder.build();
            // FIX: the catch below expects IllegalArgumentException; the failure
            // message previously (and incorrectly) claimed IllegalStateException.
            fail("Should have thrown an IllegalArgumentException with invalid proxy state");
        } catch (IllegalArgumentException e) {
            assertTrue(e.getMessage().contains("proxy"));
        }
    }

    // Baseline builder state shared by every test: real server URL, credentials, trust cert.
    private void setValidDefaults(BlackDuckServerConfigBuilder builder) {
        builder.setUrl(BlackDuckServerConfigBuilderTestIT.URL);
        builder.setUsername(BlackDuckServerConfigBuilderTestIT.USERNAME);
        builder.setPassword(BlackDuckServerConfigBuilderTestIT.PASSWORD);
        builder.setTrustCert(true);
    }

    // Variant used by the proxy tests: fixed dummy credentials and a string timeout.
    private void setBuilderDefaults(BlackDuckServerConfigBuilder builder) {
        setValidDefaults(builder);
        builder.setTimeout("100");
        builder.setUsername("User");
        builder.setPassword("Pass");
    }

    private void setBuilderProxyDefaults(BlackDuckServerConfigBuilder builder) {
        builder.setProxyHost(BlackDuckServerConfigBuilderTestIT.PROXY_PASSTHROUGH_HOST);
        builder.setProxyPort(BlackDuckServerConfigBuilderTestIT.PROXY_PASSTHROUGH_PORT);
    }
}
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.devtools.build.lib.bazel.rules.java;

import static com.google.common.base.Strings.isNullOrEmpty;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.RuleConfiguredTarget.Mode;
import com.google.devtools.build.lib.analysis.RuleConfiguredTargetBuilder;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.Runfiles;
import com.google.devtools.build.lib.analysis.RunfilesProvider;
import com.google.devtools.build.lib.analysis.TransitiveInfoCollection;
import com.google.devtools.build.lib.analysis.actions.CustomCommandLine;
import com.google.devtools.build.lib.analysis.actions.TemplateExpansionAction;
import com.google.devtools.build.lib.analysis.actions.TemplateExpansionAction.ComputedSubstitution;
import com.google.devtools.build.lib.analysis.actions.TemplateExpansionAction.Substitution;
import com.google.devtools.build.lib.analysis.actions.TemplateExpansionAction.Template;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.bazel.rules.BazelConfiguration;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.packages.BuildType;
import com.google.devtools.build.lib.rules.java.DeployArchiveBuilder;
import com.google.devtools.build.lib.rules.java.DeployArchiveBuilder.Compression;
import com.google.devtools.build.lib.rules.java.JavaCommon;
import com.google.devtools.build.lib.rules.java.JavaCompilationArgsProvider;
import com.google.devtools.build.lib.rules.java.JavaCompilationArtifacts;
import com.google.devtools.build.lib.rules.java.JavaCompilationHelper;
import com.google.devtools.build.lib.rules.java.JavaConfiguration;
import com.google.devtools.build.lib.rules.java.JavaHelper;
import com.google.devtools.build.lib.rules.java.JavaRuleOutputJarsProvider;
import com.google.devtools.build.lib.rules.java.JavaRunfilesProvider;
import com.google.devtools.build.lib.rules.java.JavaSemantics;
import com.google.devtools.build.lib.rules.java.JavaSourceJarsProvider;
import com.google.devtools.build.lib.rules.java.JavaTargetAttributes;
import com.google.devtools.build.lib.rules.java.JavaUtil;
import com.google.devtools.build.lib.rules.java.Jvm;
import com.google.devtools.build.lib.rules.java.proto.GeneratedExtensionRegistryProvider;
import com.google.devtools.build.lib.syntax.Type;
import com.google.devtools.build.lib.util.OS;
import com.google.devtools.build.lib.util.Preconditions;
import com.google.devtools.build.lib.util.ShellEscaper;
import com.google.devtools.build.lib.vfs.FileSystemUtils;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import javax.annotation.Nullable;

/**
 * Semantics for Bazel Java rules
 */
public class BazelJavaSemantics implements JavaSemantics {

  // Stateless singleton; the private constructor below prevents other instances.
  public static final BazelJavaSemantics INSTANCE = new BazelJavaSemantics();

  // Launcher-script templates expanded by createStubAction().
  private static final Template STUB_SCRIPT =
      Template.forResource(BazelJavaSemantics.class, "java_stub_template.txt");
  private static final Template STUB_SCRIPT_WINDOWS =
      Template.forResource(BazelJavaSemantics.class, "java_stub_template_windows.txt");

  private static final String JAVABUILDER_CLASS_NAME =
      "com.google.devtools.build.buildjar.BazelJavaBuilder";
  // Main class substituted in when coverage instrumentation is active.
  private static final String JACOCO_COVERAGE_RUNNER_MAIN_CLASS =
      "com.google.testing.coverage.JacocoCoverageRunner";

  private BazelJavaSemantics() {
  }

  // True for the two executable Java rule classes.
  private boolean isJavaBinaryOrJavaTest(RuleContext ruleContext) {
    String ruleClass = ruleContext.getRule().getRuleClass();
    return ruleClass.equals("java_binary") || ruleClass.equals("java_test");
  }

  @Override
  public void checkRule(RuleContext ruleContext, JavaCommon javaCommon) {
  }

  @Override
  public void checkForProtoLibraryAndJavaProtoLibraryOnSameProto(
      RuleContext ruleContext, JavaCommon javaCommon) {}

  private static final String JUNIT4_RUNNER = "org.junit.runner.JUnitCore";

  // Resolves the main class: explicit attribute, legacy java_test default,
  // BazelTestRunner for use_testrunner, or the primary class inferred from sources.
  // Returns null when no executable is requested.
  private String getMainClassInternal(RuleContext ruleContext, ImmutableList<Artifact> sources) {
    if (!ruleContext.attributes().get("create_executable", Type.BOOLEAN)) {
      return null;
    }
    String mainClass = ruleContext.attributes().get("main_class", Type.STRING);

    // Legacy behavior for java_test rules: main_class defaulted to JUnit4 runner.
    // TODO(dmarting): remove once we drop the legacy bazel java_test behavior.
    if (mainClass.isEmpty()
        && useLegacyJavaTest(ruleContext)
        && "java_test".equals(ruleContext.getRule().getRuleClass())) {
      mainClass = JUNIT4_RUNNER;
    }

    if (mainClass.isEmpty()) {
      if (ruleContext.attributes().get("use_testrunner", Type.BOOLEAN)
          && !useLegacyJavaTest(ruleContext)) {
        return "com.google.testing.junit.runner.BazelTestRunner";
      }
      mainClass = JavaCommon.determinePrimaryClass(ruleContext, sources);
    }
    return mainClass;
  }

  // Validates the main_class / create_executable combination, reporting rule errors
  // rather than throwing.
  private void checkMainClass(RuleContext ruleContext, ImmutableList<Artifact> sources) {
    boolean createExecutable = ruleContext.attributes().get("create_executable", Type.BOOLEAN);
    String mainClass = getMainClassInternal(ruleContext, sources);

    if (!createExecutable && !isNullOrEmpty(mainClass)) {
      ruleContext.ruleError("main class must not be specified when executable is not created");
    }

    if (createExecutable && isNullOrEmpty(mainClass)) {
      if (sources.isEmpty()) {
        ruleContext.ruleError("need at least one of 'main_class' or Java source files");
      }
      mainClass = JavaCommon.determinePrimaryClass(ruleContext, sources);
      if (mainClass == null) {
        ruleContext.ruleError("cannot determine main class for launching "
            + "(found neither a source file '" + ruleContext.getTarget().getName()
            + ".java', nor a main_class attribute, and package name "
            + "doesn't include 'java' or 'javatests')");
      }
    }
  }

  @Override
  public String getMainClass(RuleContext ruleContext, ImmutableList<Artifact> sources) {
    checkMainClass(ruleContext, sources);
    return getMainClassInternal(ruleContext, sources);
  }

  @Override
  public ImmutableList<Artifact> collectResources(RuleContext ruleContext) {
    if (!ruleContext.getRule().isAttrDefined("resources", BuildType.LABEL_LIST)) {
      return ImmutableList.of();
    }
    return ruleContext.getPrerequisiteArtifacts("resources", Mode.TARGET).list();
  }

  // Registers the action that expands the launcher stub script (and, on Windows,
  // an additional .cmd wrapper) and returns the executable artifact to use.
  @Override
  public Artifact createStubAction(
      RuleContext ruleContext,
      final JavaCommon javaCommon,
      List<String> jvmFlags,
      Artifact executable,
      String javaStartClass,
      String javaExecutable) {
    Preconditions.checkState(ruleContext.getConfiguration().hasFragment(Jvm.class));

    Preconditions.checkNotNull(jvmFlags);
    Preconditions.checkNotNull(executable);
    Preconditions.checkNotNull(javaStartClass);
    Preconditions.checkNotNull(javaExecutable);

    List<Substitution> arguments = new ArrayList<>();
    String workspaceName = ruleContext.getWorkspaceName();
    final String workspacePrefix = workspaceName + (workspaceName.isEmpty() ? "" : "/");
    final boolean isRunfilesEnabled = ruleContext.getConfiguration().runfilesEnabled();
    if (!isRunfilesEnabled) {
      arguments.add(Substitution.of("%runfiles_manifest_only%", "1"));
    }
    arguments.add(Substitution.of("%workspace_prefix%", workspacePrefix));
    arguments.add(Substitution.of("%javabin%", javaExecutable));
    // A relative java executable means the JVM ships in the runfiles tree.
    arguments.add(Substitution.of("%needs_runfiles%",
        ruleContext.getFragment(Jvm.class).getJavaExecutable().isAbsolute() ? "0" : "1"));
    // The classpath is computed lazily at template-expansion time from the
    // runtime classpath of the common.
    arguments.add(
        new ComputedSubstitution("%classpath%") {
          @Override
          public String getValue() {
            StringBuilder buffer = new StringBuilder();
            Iterable<Artifact> jars = javaCommon.getRuntimeClasspath();
            char delimiter = File.pathSeparatorChar;
            appendRunfilesRelativeEntries(
                buffer, jars, workspacePrefix, delimiter, isRunfilesEnabled);
            return buffer.toString();
          }
        });

    JavaCompilationArtifacts javaArtifacts = javaCommon.getJavaCompilationArtifacts();
    String path =
        javaArtifacts.getInstrumentedJar() != null
            ? "${JAVA_RUNFILES}/"
                + workspacePrefix
                + javaArtifacts.getInstrumentedJar().getRootRelativePath().getPathString()
            : "";
    arguments.add(
        Substitution.of(
            "%set_jacoco_metadata%",
            ruleContext.getConfiguration().isCodeCoverageEnabled()
                ? "export JACOCO_METADATA_JAR=" + path
                : ""));

    arguments.add(Substitution.of("%java_start_class%",
        ShellEscaper.escapeString(javaStartClass)));
    arguments.add(Substitution.ofSpaceSeparatedList("%jvm_flags%",
        ImmutableList.copyOf(jvmFlags)));

    ruleContext.registerAction(new TemplateExpansionAction(
        ruleContext.getActionOwner(), executable, STUB_SCRIPT, arguments, true));
    if (OS.getCurrent() == OS.WINDOWS) {
      // On Windows the real entry point is a .cmd wrapper generated from the
      // Windows stub template; it needs the bash and cygpath executables.
      Artifact newExecutable =
          ruleContext.getImplicitOutputArtifact(ruleContext.getTarget().getName() + ".cmd");
      ruleContext.registerAction(
          new TemplateExpansionAction(
              ruleContext.getActionOwner(),
              newExecutable,
              STUB_SCRIPT_WINDOWS,
              ImmutableList.of(
                  Substitution.of(
                      "%bash_exe_path%",
                      ruleContext
                          .getFragment(BazelConfiguration.class)
                          .getShellExecutable()
                          .getPathString()),
                  Substitution.of(
                      "%cygpath_exe_path%",
                      ruleContext
                          .getFragment(BazelConfiguration.class)
                          .getShellExecutable()
                          .replaceName("cygpath.exe")
                          .getPathString())),
              true));
      return newExecutable;
    } else {
      return executable;
    }
  }

  /**
   * Builds a class path by concatenating the root relative paths of the artifacts separated by the
   * delimiter. Each relative path entry is prepended with "${RUNPATH}" which will be expanded by
   * the stub script at runtime, to either "${JAVA_RUNFILES}/" or if we are lucky, the empty string.
   *
   * @param buffer the buffer to use for concatenating the entries
   * @param artifacts the entries to concatenate in the buffer
   * @param workspacePrefix the workspace name plus trailing "/" (empty for the main workspace)
   * @param delimiter the delimiter character to separate the entries
   * @param isRunfilesEnabled when false, entries are wrapped in "$(rlocation ...)" calls
   *     resolved through the runfiles manifest instead of "${RUNPATH}" prefixes
   */
  private static void appendRunfilesRelativeEntries(
      StringBuilder buffer,
      Iterable<Artifact> artifacts,
      String workspacePrefix,
      char delimiter,
      boolean isRunfilesEnabled) {
    buffer.append("\"");
    for (Artifact artifact : artifacts) {
      if (buffer.length() > 1) {
        buffer.append(delimiter);
      }
      if (!isRunfilesEnabled) {
        buffer.append("$(rlocation ");
        PathFragment runfilePath =
            new PathFragment(new PathFragment(workspacePrefix), artifact.getRunfilesPath());
        buffer.append(runfilePath.normalize().getPathString());
        buffer.append(")");
      } else {
        buffer.append("${RUNPATH}");
        buffer.append(artifact.getRunfilesPath().getPathString());
      }
    }
    buffer.append("\"");
  }

  // Returns the $testsupport prerequisite for non-legacy java_test-style executables,
  // or null when no test support is needed.
  private TransitiveInfoCollection getTestSupport(RuleContext ruleContext) {
    if (!isJavaBinaryOrJavaTest(ruleContext)) {
      return null;
    }
    if (useLegacyJavaTest(ruleContext)) {
      return null;
    }

    boolean createExecutable = ruleContext.attributes().get("create_executable", Type.BOOLEAN);
    if (createExecutable && ruleContext.attributes().get("use_testrunner", Type.BOOLEAN)) {
      return Iterables.getOnlyElement(ruleContext.getPrerequisites("$testsupport", Mode.TARGET));
    } else {
      return null;
    }
  }

  @Override
  public void addRunfilesForBinary(RuleContext ruleContext, Artifact launcher,
      Runfiles.Builder runfilesBuilder) {
    TransitiveInfoCollection testSupport = getTestSupport(ruleContext);
    if (testSupport != null) {
      runfilesBuilder.addTarget(testSupport, JavaRunfilesProvider.TO_RUNFILES);
      runfilesBuilder.addTarget(testSupport, RunfilesProvider.DEFAULT_RUNFILES);
    }
  }

  @Override
  public void addRunfilesForLibrary(RuleContext ruleContext, Runfiles.Builder runfilesBuilder) {
  }

  @Override
  public void collectTargetsTreatedAsDeps(
      RuleContext ruleContext, ImmutableList.Builder<TransitiveInfoCollection> builder) {
    TransitiveInfoCollection testSupport = getTestSupport(ruleContext);
    if (testSupport != null) {
      // TODO(bazel-team): The testsupport is used as the test framework
      // and really only needs to be on the runtime, not compile-time
      // classpath.
      builder.add(testSupport);
    }
  }

  @Override
  public Iterable<String> getExtraJavacOpts(RuleContext ruleContext) {
    return ImmutableList.<String>of();
  }

  @Override
  public void addProviders(RuleContext ruleContext,
      JavaCommon javaCommon,
      List<String> jvmFlags,
      Artifact classJar,
      Artifact srcJar,
      Artifact genJar,
      Artifact gensrcJar,
      ImmutableMap<Artifact, Artifact> compilationToRuntimeJarMap,
      NestedSetBuilder<Artifact> filesBuilder,
      RuleConfiguredTargetBuilder ruleBuilder) {
  }

  // TODO(dmarting): simplify that logic when we remove the legacy Bazel java_test behavior.
  private String getPrimaryClassLegacy(RuleContext ruleContext, ImmutableList<Artifact> sources) {
    boolean createExecutable = ruleContext.attributes().get("create_executable", Type.BOOLEAN);
    if (!createExecutable) {
      return null;
    }
    return getMainClassInternal(ruleContext, sources);
  }

  // Non-legacy primary class: for use_testrunner targets this is the test class
  // (explicit attribute or inferred), otherwise the regular main class.
  private String getPrimaryClassNew(RuleContext ruleContext, ImmutableList<Artifact> sources) {
    boolean createExecutable = ruleContext.attributes().get("create_executable", Type.BOOLEAN);

    if (!createExecutable) {
      return null;
    }

    boolean useTestrunner = ruleContext.attributes().get("use_testrunner", Type.BOOLEAN);

    String testClass = ruleContext.getRule().isAttrDefined("test_class", Type.STRING)
        ? ruleContext.attributes().get("test_class", Type.STRING) : "";

    if (useTestrunner) {
      if (testClass.isEmpty()) {
        testClass = JavaCommon.determinePrimaryClass(ruleContext, sources);
        if (testClass == null) {
          ruleContext.ruleError("cannot determine junit.framework.Test class "
                    + "(Found no source file '" + ruleContext.getTarget().getName()
                    + ".java' and package name doesn't include 'java' or 'javatests'. "
                    + "You might want to rename the rule or add a 'test_class' "
                    + "attribute.)");
        }
      }
      return testClass;
    } else {
      if (!testClass.isEmpty()) {
        ruleContext.attributeError("test_class", "this attribute is only meaningful to "
            + "BazelTestRunner, but you are not using it (use_testrunner = 0)");
      }
      return getMainClassInternal(ruleContext, sources);
    }
  }

  @Override
  public String getPrimaryClass(RuleContext ruleContext, ImmutableList<Artifact> sources) {
    return useLegacyJavaTest(ruleContext)
        ? getPrimaryClassLegacy(ruleContext, sources)
        : getPrimaryClassNew(ruleContext, sources);
  }

  @Override
  public Iterable<String> getJvmFlags(
      RuleContext ruleContext, ImmutableList<Artifact> sources, List<String> userJvmFlags) {
    ImmutableList.Builder<String> jvmFlags = ImmutableList.builder();
    jvmFlags.addAll(userJvmFlags);

    if (!useLegacyJavaTest(ruleContext)) {
      if (ruleContext.attributes().get("use_testrunner", Type.BOOLEAN)) {
        String testClass = ruleContext.getRule().isAttrDefined("test_class", Type.STRING)
            ? ruleContext.attributes().get("test_class", Type.STRING) : "";
        if (testClass.isEmpty()) {
          testClass = JavaCommon.determinePrimaryClass(ruleContext, sources);
        }

        if (testClass == null) {
          ruleContext.ruleError("cannot determine test class");
        } else {
          // Always run junit tests with -ea (enable assertion)
          jvmFlags.add("-ea");
          // "suite" is a misnomer.
          jvmFlags.add("-Dbazel.test_suite=" + ShellEscaper.escapeString(testClass));
        }
      }
    }

    return jvmFlags.build();
  }

  /**
   * Returns whether coverage has instrumented artifacts.
   */
  public static boolean hasInstrumentationMetadata(JavaTargetAttributes.Builder attributes) {
    return !attributes.getInstrumentationMetadata().isEmpty();
  }

  // TODO(yueg): refactor this (only mainClass different for now)
  // Wires up JaCoCo coverage: builds the instrumented jar, adds the coverage
  // runner dependency, and returns the (possibly replaced) main class.
  @Override
  public String addCoverageSupport(
      JavaCompilationHelper helper,
      JavaTargetAttributes.Builder attributes,
      Artifact executable,
      Artifact instrumentationMetadata,
      JavaCompilationArtifacts.Builder javaArtifactsBuilder,
      String mainClass)
      throws InterruptedException {
    // This method can be called only for *_binary/*_test targets.
    Preconditions.checkNotNull(executable);
    // Add our own metadata artifact (if any).
    if (instrumentationMetadata != null) {
      attributes.addInstrumentationMetadataEntries(ImmutableList.of(instrumentationMetadata));
    }

    if (!hasInstrumentationMetadata(attributes)) {
      return mainClass;
    }

    Artifact instrumentedJar =
        helper
            .getRuleContext()
            .getBinArtifact(helper.getRuleContext().getLabel().getName() + "_instrumented.jar");

    // Create an instrumented Jar. This will be referenced on the runtime classpath prior
    // to all other Jars.
    JavaCommon.createInstrumentedJarAction(
        helper.getRuleContext(),
        this,
        attributes.getInstrumentationMetadata(),
        instrumentedJar,
        mainClass);
    javaArtifactsBuilder.setInstrumentedJar(instrumentedJar);

    // Add the coverage runner to the list of dependencies when compiling in coverage mode.
    TransitiveInfoCollection runnerTarget =
        helper.getRuleContext().getPrerequisite("$jacocorunner", Mode.TARGET);
    if (runnerTarget.getProvider(JavaCompilationArgsProvider.class) != null) {
      helper.addLibrariesToAttributes(ImmutableList.of(runnerTarget));
    } else {
      helper
          .getRuleContext()
          .ruleError(
              "this rule depends on "
                  + helper.getRuleContext().attributes().get("$jacocorunner", BuildType.LABEL)
                  + " which is not a java_library rule, or contains errors");
    }

    // We do not add the instrumented jar to the runtime classpath, but provide it in the shell
    // script via an environment variable.
    return JACOCO_COVERAGE_RUNNER_MAIN_CLASS;
  }

  @Override
  public CustomCommandLine buildSingleJarCommandLine(BuildConfiguration configuration,
      Artifact output, String mainClass, ImmutableList<String> manifestLines,
      Iterable<Artifact> buildInfoFiles, ImmutableList<Artifact> resources,
      Iterable<Artifact> classpath, boolean includeBuildData,
      Compression compression, Artifact launcher) {
    return DeployArchiveBuilder.defaultSingleJarCommandLine(output, mainClass, manifestLines,
        buildInfoFiles, resources, classpath, includeBuildData, compression, launcher).build();
  }

  @Override
  public ImmutableList<Artifact> translate(RuleContext ruleContext, JavaConfiguration javaConfig,
      List<Artifact> messages) {
    return ImmutableList.<Artifact>of();
  }

  @Override
  public Artifact getLauncher(RuleContext ruleContext, JavaCommon common,
      DeployArchiveBuilder deployArchiveBuilder, Runfiles.Builder runfilesBuilder,
      List<String> jvmFlags, JavaTargetAttributes.Builder attributesBuilder,
      boolean shouldStrip) {
    return JavaHelper.launcherArtifactForTarget(this, ruleContext);
  }

  @Override
  public void addDependenciesForRunfiles(RuleContext ruleContext, Runfiles.Builder builder) {
  }

  @Override
  public void addArtifactToJavaTargetAttribute(JavaTargetAttributes.Builder builder,
      Artifact srcArtifact) {
  }

  @Override
  public void commonDependencyProcessing(RuleContext ruleContext,
      JavaTargetAttributes.Builder attributes,
      Collection<? extends TransitiveInfoCollection> deps) {
  }

  @Override
  public PathFragment getDefaultJavaResourcePath(PathFragment path) {
    // Look for src/.../resources to match Maven repository structure.
    for (int i = 0; i < path.segmentCount() - 2; ++i) {
      if (path.getSegment(i).equals("src") && path.getSegment(i + 2).equals("resources")) {
        return path.subFragment(i + 3, path.segmentCount());
      }
    }
    PathFragment javaPath = JavaUtil.getJavaPath(path);
    return javaPath == null ? path : javaPath;
  }

  // Legacy java_test with no explicit args: pass the fully qualified class name of
  // each source file as a program argument.
  @Override
  public List<String> getExtraArguments(RuleContext ruleContext, ImmutableList<Artifact> sources) {
    if (ruleContext.getRule().getRuleClass().equals("java_test")) {
      if (useLegacyJavaTest(ruleContext)) {
        if (ruleContext.getConfiguration().getTestArguments().isEmpty()
            && !ruleContext.attributes().isAttributeValueExplicitlySpecified("args")) {
          ImmutableList.Builder<String> builder = ImmutableList.builder();
          for (Artifact artifact : sources) {
            PathFragment path = artifact.getRootRelativePath();
            String className = JavaUtil.getJavaFullClassname(
                FileSystemUtils.removeExtension(path));
            if (className != null) {
              builder.add(className);
            }
          }
          return builder.build();
        }
      }
    }
    return ImmutableList.<String>of();
  }

  // Legacy mode applies only when no explicit test_class is set and the
  // configuration still enables the old behavior.
  private boolean useLegacyJavaTest(RuleContext ruleContext) {
    return !ruleContext.attributes().isAttributeValueExplicitlySpecified("test_class")
        && ruleContext.getFragment(JavaConfiguration.class).useLegacyBazelJavaTest();
  }

  @Override
  public String getJavaBuilderMainClass() {
    return JAVABUILDER_CLASS_NAME;
  }

  @Override
  public Artifact getProtoMapping(RuleContext ruleContext) throws InterruptedException {
    return null;
  }

  @Nullable
  @Override
  public GeneratedExtensionRegistryProvider createGeneratedExtensionRegistry(
      RuleContext ruleContext,
      JavaCommon common,
      NestedSetBuilder<Artifact> filesBuilder,
      JavaCompilationArtifacts.Builder javaCompilationArtifactsBuilder,
      JavaRuleOutputJarsProvider.Builder javaRuleOutputJarsProviderBuilder,
      JavaSourceJarsProvider.Builder javaSourceJarsProviderBuilder)
      throws InterruptedException {
    return null;
  }

  @Override
  public Artifact getObfuscatedConstantStringMap(RuleContext ruleContext)
      throws InterruptedException {
    return null;
  }
}
package com.delmar.sys.model;

import java.util.Date;
import java.util.List;

import com.delmar.core.dao.UserInterface;
import com.delmar.core.model.CoreModel;

/**
 * System user entity mapped to the {@code sys_user} database table.
 *
 * <p>Most column accessors were produced by MyBatis Generator (do not hand-edit
 * their behavior; regeneration may clobber changes). Hand-written additions
 * cover the navigation properties ({@code roles}, {@code accessOrgs},
 * {@code client}, {@code org}) and the denormalized {@code *Name} display
 * fields. The {@code id} property read/written by {@link #getId()} and
 * {@link #setId(Integer)} is inherited from {@link CoreModel}.
 */
public class User extends CoreModel implements UserInterface{
    // --- sys_user columns (names mirror the database columns) ---
    private String username;
    // NOTE(review): held as a plain String; presumably hashed before persistence — confirm upstream.
    private String password;
    private String name;
    private String email;
    private String telephone;
    // Raw bytes of the user's picture.
    private byte[] userPic;
    private Integer managerId;
    // Denormalized display name for managerId (not a mapped association).
    private String managerName;
    private Integer userTypeId;
    private String remark;
    private Integer orgId;
    private Integer clientId;
    private String init;
    // Active flag kept as a nullable Integer rather than boolean.
    private Integer isActive;
    // Date the password must next be changed.
    private Date passNextDate;
    private Date created;
    private Integer createdBy;
    // Denormalized display name for createdBy.
    private String createdByName;
    private Date updated;
    private Integer updatedBy;
    // Denormalized display name for updatedBy.
    private String updatedByName;

    // --- association / navigation properties (populated outside this class) ---
    private List<Role> roles;
    private List<Org> accessOrgs;
    private Client client;
    private Org org;

    /**
     * @return the org
     */
    public Org getOrg() {
        return org;
    }

    /**
     * @param org the org to set
     */
    public void setOrg(Org org) {
        this.org = org;
    }

    /**
     * @return the organizations this user may access
     */
    public List<Org> getAccessOrgs() {
        return accessOrgs;
    }

    /**
     * @param accessOrgs the accessOrgs to set
     */
    public void setAccessOrgs(List<Org> accessOrgs) {
        this.accessOrgs = accessOrgs;
    }

    /**
     * @return the roles granted to this user
     */
    public List<Role> getRoles() {
        return roles;
    }

    /**
     * @param roles the roles to set
     */
    public void setRoles(List<Role> roles) {
        this.roles = roles;
    }

    /**
     * @return the init value
     */
    public String getInit() {
        return init;
    }

    /**
     * @param init the init to set
     */
    public void setInit(String init) {
        this.init = init;
    }

    /**
     * @return the orgId
     */
    public Integer getOrgId() {
        return orgId;
    }

    /**
     * @param orgId the orgId to set
     */
    public void setOrgId(Integer orgId) {
        this.orgId = orgId;
    }

    /**
     * @return the clientId
     */
    public Integer getClientId() {
        return clientId;
    }

    /**
     * @param clientId the clientId to set
     */
    public void setClientId(Integer clientId) {
        this.clientId = clientId;
    }

    /**
     * Generated by MyBatis Generator.
     *
     * @return the value of sys_user.id (field inherited from CoreModel)
     */
    public Integer getId() {
        return id;
    }

    /**
     * Generated by MyBatis Generator.
     *
     * @param id the value for sys_user.id (field inherited from CoreModel)
     */
    public void setId(Integer id) {
        this.id = id;
    }

    /**
     * Generated by MyBatis Generator.
     *
     * @return the value of sys_user.username
     */
    public String getUsername() {
        return username;
    }

    /**
     * Generated by MyBatis Generator. Trims surrounding whitespace.
     *
     * @param username the value for sys_user.username
     */
    public void setUsername(String username) {
        this.username = username == null ? null : username.trim();
    }

    /**
     * Generated by MyBatis Generator.
     *
     * @return the value of sys_user.password
     */
    public String getPassword() {
        return password;
    }

    /**
     * Generated by MyBatis Generator. Trims surrounding whitespace.
     *
     * @param password the value for sys_user.password
     */
    public void setPassword(String password) {
        this.password = password == null ? null : password.trim();
    }

    /**
     * Generated by MyBatis Generator.
     *
     * @return the value of sys_user.name
     */
    public String getName() {
        return name;
    }

    /**
     * Generated by MyBatis Generator. Trims surrounding whitespace.
     *
     * @param name the value for sys_user.name
     */
    public void setName(String name) {
        this.name = name == null ? null : name.trim();
    }

    /**
     * Generated by MyBatis Generator.
     *
     * @return the value of sys_user.remark
     */
    public String getRemark() {
        return remark;
    }

    /**
     * Generated by MyBatis Generator. Trims surrounding whitespace.
     *
     * @param remark the value for sys_user.remark
     */
    public void setRemark(String remark) {
        this.remark = remark == null ? null : remark.trim();
    }

    /**
     * @return the client
     */
    public Client getClient() {
        return client;
    }

    /**
     * @param client the client to set
     */
    public void setClient(Client client) {
        this.client = client;
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    public String getTelephone() {
        return telephone;
    }

    public void setTelephone(String telephone) {
        this.telephone = telephone;
    }

    public byte[] getUserPic() {
        return userPic;
    }

    public void setUserPic(byte[] userPic) {
        this.userPic = userPic;
    }

    public Integer getManagerId() {
        return managerId;
    }

    public void setManagerId(Integer managerId) {
        this.managerId = managerId;
    }

    public String getManagerName() {
        return managerName;
    }

    public void setManagerName(String managerName) {
        this.managerName = managerName;
    }

    public Integer getUserTypeId() {
        return userTypeId;
    }

    public void setUserTypeId(Integer userTypeId) {
        this.userTypeId = userTypeId;
    }

    public Integer getIsActive() {
        return isActive;
    }

    public void setIsActive(Integer isActive) {
        this.isActive = isActive;
    }

    public Date getPassNextDate() {
        return passNextDate;
    }

    public void setPassNextDate(Date passNextDate) {
        this.passNextDate = passNextDate;
    }

    public Date getCreated() {
        return created;
    }

    public void setCreated(Date created) {
        this.created = created;
    }

    public Integer getCreatedBy() {
        return createdBy;
    }

    public void setCreatedBy(Integer createdBy) {
        this.createdBy = createdBy;
    }

    public String getCreatedByName() {
        return createdByName;
    }

    public void setCreatedByName(String createdByName) {
        this.createdByName = createdByName;
    }

    public Date getUpdated() {
        return updated;
    }

    public void setUpdated(Date updated) {
        this.updated = updated;
    }

    public Integer getUpdatedBy() {
        return updatedBy;
    }

    public void setUpdatedBy(Integer updatedBy) {
        this.updatedBy = updatedBy;
    }

    public String getUpdatedByName() {
        return updatedByName;
    }

    public void setUpdatedByName(String updatedByName) {
        this.updatedByName = updatedByName;
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.exec.vector; import java.io.IOException; import java.sql.Date; import java.sql.Timestamp; import java.util.Arrays; import java.util.Properties; import java.util.Random; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.OpenCSVSerde; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.HiveCharWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import 
org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.ByteStream.Output; import org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe; import org.apache.hadoop.hive.serde2.binarysortable.fast.BinarySortableDeserializeRead; import org.apache.hadoop.hive.serde2.binarysortable.fast.BinarySortableSerializeWrite; import org.apache.hadoop.hive.serde2.fast.DeserializeRead; import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters; import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe; import org.apache.hadoop.hive.serde2.lazy.fast.LazySimpleDeserializeRead; import org.apache.hadoop.hive.serde2.lazy.fast.LazySimpleSerializeWrite; import org.apache.hadoop.hive.serde2.lazybinary.fast.LazyBinaryDeserializeRead; import org.apache.hadoop.hive.serde2.lazybinary.fast.LazyBinarySerializeWrite; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo; import org.apache.hadoop.hive.serde2.fast.SerializeWrite; import org.apache.hadoop.io.BooleanWritable; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.FloatWritable; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import com.google.common.base.Charsets; import junit.framework.TestCase; /** * Unit test for the vectorized serialize and deserialize row. 
*/
public class TestVectorSerDeRow extends TestCase {

  /** Which serde pair a test run exercises. NONE is declared but unused here. */
  public static enum SerializationType {
    NONE, BINARY_SORTABLE, LAZY_BINARY, LAZY_SIMPLE
  }

  /**
   * Reads each field of a serialized row back through {@code deserializeRead} and
   * compares it, per primitive category, against the corresponding writable in
   * {@code expectedRow}. Fails the test on any mismatch or unexpected NULL, and
   * asserts that the whole input buffer was consumed at the end.
   */
  void deserializeAndVerify(Output output, DeserializeRead deserializeRead,
      VectorRandomRowSource source, Object[] expectedRow)
      throws HiveException, IOException {
    deserializeRead.set(output.getData(), 0, output.getLength());
    PrimitiveCategory[] primitiveCategories = source.primitiveCategories();
    for (int i = 0; i < primitiveCategories.length; i++) {
      Object expected = expectedRow[i];
      PrimitiveCategory primitiveCategory = primitiveCategories[i];
      PrimitiveTypeInfo primitiveTypeInfo = source.primitiveTypeInfos()[i];
      if (!deserializeRead.readNextField()) {
        throw new HiveException("Unexpected NULL when reading primitiveCategory " + primitiveCategory +
            " expected (" + expected.getClass().getName() + ", " + expected.toString() + ") " +
            " deserializeRead " + deserializeRead.getClass().getName());
      }
      switch (primitiveCategory) {
      case BOOLEAN:
        {
          Boolean value = deserializeRead.currentBoolean;
          BooleanWritable expectedWritable = (BooleanWritable) expected;
          if (!value.equals(expectedWritable.get())) {
            TestCase.fail("Boolean field mismatch (expected " + expected + " found " + value + ")");
          }
        }
        break;
      case BYTE:
        {
          Byte value = deserializeRead.currentByte;
          ByteWritable expectedWritable = (ByteWritable) expected;
          if (!value.equals(expectedWritable.get())) {
            // NOTE(review): (int) expected casts an Object that is actually a ByteWritable;
            // if this failure path is ever reached it would throw ClassCastException — confirm.
            TestCase.fail("Byte field mismatch (expected " + (int) expected + " found " + (int) value + ")");
          }
        }
        break;
      case SHORT:
        {
          Short value = deserializeRead.currentShort;
          ShortWritable expectedWritable = (ShortWritable) expected;
          if (!value.equals(expectedWritable.get())) {
            TestCase.fail("Short field mismatch (expected " + expected + " found " + value + ")");
          }
        }
        break;
      case INT:
        {
          Integer value = deserializeRead.currentInt;
          IntWritable expectedWritable = (IntWritable) expected;
          if (!value.equals(expectedWritable.get())) {
            TestCase.fail("Int field mismatch (expected " + expected + " found " + value + ")");
          }
        }
        break;
      case LONG:
        {
          Long value = deserializeRead.currentLong;
          LongWritable expectedWritable = (LongWritable) expected;
          if (!value.equals(expectedWritable.get())) {
            TestCase.fail("Long field mismatch (expected " + expected + " found " + value + ")");
          }
        }
        break;
      case DATE:
        {
          DateWritable value = deserializeRead.currentDateWritable;
          DateWritable expectedWritable = (DateWritable) expected;
          if (!value.equals(expectedWritable)) {
            TestCase.fail("Date field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
          }
        }
        break;
      case FLOAT:
        {
          Float value = deserializeRead.currentFloat;
          FloatWritable expectedWritable = (FloatWritable) expected;
          if (!value.equals(expectedWritable.get())) {
            TestCase.fail("Float field mismatch (expected " + expected + " found " + value + ")");
          }
        }
        break;
      case DOUBLE:
        {
          Double value = deserializeRead.currentDouble;
          DoubleWritable expectedWritable = (DoubleWritable) expected;
          if (!value.equals(expectedWritable.get())) {
            TestCase.fail("Double field mismatch (expected " + expected + " found " + value + ")");
          }
        }
        break;
      case STRING:
      case CHAR:
      case VARCHAR:
      case BINARY:
        {
          // All byte-backed categories share one extraction of the current field bytes,
          // then branch again on the precise category for the comparison.
          byte[] stringBytes = Arrays.copyOfRange(
              deserializeRead.currentBytes,
              deserializeRead.currentBytesStart,
              deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
          Text text = new Text(stringBytes);
          String string = text.toString();
          switch (primitiveCategory) {
          case STRING:
            {
              Text expectedWritable = (Text) expected;
              if (!string.equals(expectedWritable.toString())) {
                TestCase.fail("String field mismatch (expected '" + expectedWritable.toString() + "' found '" + string + "')");
              }
            }
            break;
          case CHAR:
            {
              HiveChar hiveChar = new HiveChar(string, ((CharTypeInfo) primitiveTypeInfo).getLength());
              HiveCharWritable expectedWritable = (HiveCharWritable) expected;
              if (!hiveChar.equals(expectedWritable.getHiveChar())) {
                TestCase.fail("Char field mismatch (expected '" + expectedWritable.getHiveChar() + "' found '" + hiveChar + "')");
              }
            }
            break;
          case VARCHAR:
            {
              HiveVarchar hiveVarchar = new HiveVarchar(string, ((VarcharTypeInfo) primitiveTypeInfo).getLength());
              HiveVarcharWritable expectedWritable = (HiveVarcharWritable) expected;
              if (!hiveVarchar.equals(expectedWritable.getHiveVarchar())) {
                TestCase.fail("Varchar field mismatch (expected '" + expectedWritable.getHiveVarchar() + "' found '" + hiveVarchar + "')");
              }
            }
            break;
          case BINARY:
            {
              BytesWritable expectedWritable = (BytesWritable) expected;
              if (stringBytes.length != expectedWritable.getLength()){
                TestCase.fail("Byte Array field mismatch (expected " + expected + " found " + stringBytes + ")");
              }
              byte[] expectedBytes = expectedWritable.getBytes();
              for (int b = 0; b < stringBytes.length; b++) {
                if (stringBytes[b] != expectedBytes[b]) {
                  TestCase.fail("Byte Array field mismatch (expected " + expected + " found " + stringBytes + ")");
                }
              }
            }
            break;
          default:
            throw new HiveException("Unexpected primitive category " + primitiveCategory);
          }
        }
        break;
      case DECIMAL:
        {
          HiveDecimal value = deserializeRead.currentHiveDecimalWritable.getHiveDecimal();
          if (value == null) {
            TestCase.fail("Decimal field evaluated to NULL");
          }
          HiveDecimalWritable expectedWritable = (HiveDecimalWritable) expected;
          if (!value.equals(expectedWritable.getHiveDecimal())) {
            DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
            int precision = decimalTypeInfo.getPrecision();
            int scale = decimalTypeInfo.getScale();
            TestCase.fail("Decimal field mismatch (expected " + expectedWritable.getHiveDecimal() + " found " + value.toString() + ") precision " + precision + ", scale " + scale);
          }
        }
        break;
      case TIMESTAMP:
        {
          Timestamp value = deserializeRead.currentTimestampWritable.getTimestamp();
          TimestampWritable expectedWritable = (TimestampWritable) expected;
          if (!value.equals(expectedWritable.getTimestamp())) {
            TestCase.fail("Timestamp field mismatch (expected " + expectedWritable.getTimestamp() + " found " + value.toString() + ")");
          }
        }
        break;
      case INTERVAL_YEAR_MONTH:
        {
          HiveIntervalYearMonth value = deserializeRead.currentHiveIntervalYearMonthWritable.getHiveIntervalYearMonth();
          HiveIntervalYearMonthWritable expectedWritable = (HiveIntervalYearMonthWritable) expected;
          HiveIntervalYearMonth expectedValue = expectedWritable.getHiveIntervalYearMonth();
          if (!value.equals(expectedValue)) {
            TestCase.fail("HiveIntervalYearMonth field mismatch (expected " + expectedValue + " found " + value.toString() + ")");
          }
        }
        break;
      case INTERVAL_DAY_TIME:
        {
          HiveIntervalDayTime value = deserializeRead.currentHiveIntervalDayTimeWritable.getHiveIntervalDayTime();
          HiveIntervalDayTimeWritable expectedWritable = (HiveIntervalDayTimeWritable) expected;
          HiveIntervalDayTime expectedValue = expectedWritable.getHiveIntervalDayTime();
          if (!value.equals(expectedValue)) {
            TestCase.fail("HiveIntervalDayTime field mismatch (expected " + expectedValue + " found " + value.toString() + ")");
          }
        }
        break;
      default:
        throw new HiveException("Unexpected primitive category " + primitiveCategory);
      }
    }
    // Every serialized byte must have been consumed by the reads above.
    TestCase.assertTrue(deserializeRead.isEndOfInputReached());
  }

  /**
   * Serializes each row currently in {@code batch} with {@code vectorSerializeRow}
   * and round-trips it through {@link #deserializeAndVerify} against the original
   * random rows starting at {@code firstRandomRowIndex}.
   */
  void serializeBatch(VectorizedRowBatch batch, VectorSerializeRow vectorSerializeRow,
      DeserializeRead deserializeRead, VectorRandomRowSource source, Object[][] randomRows,
      int firstRandomRowIndex) throws HiveException, IOException {
    Output output = new Output();
    for (int i = 0; i < batch.size; i++) {
      output.reset();
      vectorSerializeRow.setOutput(output);
      vectorSerializeRow.serializeWrite(batch, i);
      Object[] expectedRow = randomRows[firstRandomRowIndex + i];
      byte[] bytes = output.getData();
      int length = output.getLength();
      // NOTE(review): 'chars' is computed but never read below — apparently a
      // leftover debugging aid (human-readable view of the serialized bytes).
      char[] chars = new char[length];
      for (int c = 0; c < chars.length; c++) {
        chars[c] = (char) (bytes[c] & 0xFF);
      }
      deserializeAndVerify(output, deserializeRead, source, expectedRow);
    }
  }

  /**
   * Drives the serialize-side test: fills vectorized batches from 100000 random
   * rows, serializes them with the serde selected by {@code serializationType},
   * and verifies each serialized row deserializes back to the original values.
   */
  void testVectorSerializeRow(Random r, SerializationType serializationType)
      throws HiveException, IOException, SerDeException {
    String[] emptyScratchTypeNames = new String[0];
    VectorRandomRowSource source = new VectorRandomRowSource();
    source.init(r);
    VectorizedRowBatchCtx batchContext = new VectorizedRowBatchCtx();
    batchContext.init(source.rowStructObjectInspector(), emptyScratchTypeNames);
    VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
    VectorAssignRow vectorAssignRow = new VectorAssignRow();
    vectorAssignRow.init(source.typeNames());
    int fieldCount = source.typeNames().size();
    DeserializeRead deserializeRead;
    SerializeWrite serializeWrite;
    // Pick the matching serialize/deserialize pair for the requested format.
    switch (serializationType) {
    case BINARY_SORTABLE:
      deserializeRead = new BinarySortableDeserializeRead(source.primitiveTypeInfos(),
          /* useExternalBuffer */ false);
      serializeWrite = new BinarySortableSerializeWrite(fieldCount);
      break;
    case LAZY_BINARY:
      deserializeRead = new LazyBinaryDeserializeRead(source.primitiveTypeInfos(),
          /* useExternalBuffer */ false);
      serializeWrite = new LazyBinarySerializeWrite(fieldCount);
      break;
    case LAZY_SIMPLE:
      {
        StructObjectInspector rowObjectInspector = source.rowStructObjectInspector();
        LazySerDeParameters lazySerDeParams = getSerDeParams(rowObjectInspector);
        byte separator = (byte) '\t';
        deserializeRead = new LazySimpleDeserializeRead(source.primitiveTypeInfos(),
            /* useExternalBuffer */ false, separator, lazySerDeParams);
        serializeWrite = new LazySimpleSerializeWrite(fieldCount, separator, lazySerDeParams);
      }
      break;
    default:
      throw new Error("Unknown serialization type " + serializationType);
    }
    VectorSerializeRow vectorSerializeRow = new VectorSerializeRow(serializeWrite);
    vectorSerializeRow.init(source.typeNames());
    Object[][] randomRows = source.randomRows(100000);
    int firstRandomRowIndex = 0;
    for (int i = 0; i < randomRows.length; i++) {
      Object[] row = randomRows[i];
      vectorAssignRow.assignRow(batch, batch.size, row);
      batch.size++;
      if (batch.size == batch.DEFAULT_SIZE) {
        // Batch full: verify it, then start the next batch at row i + 1.
        serializeBatch(batch, vectorSerializeRow, deserializeRead, source, randomRows, firstRandomRowIndex);
        firstRandomRowIndex = i + 1;
        batch.reset();
      }
    }
    if (batch.size > 0) {
      // Flush the final partial batch.
      serializeBatch(batch, vectorSerializeRow, deserializeRead, source, randomRows, firstRandomRowIndex);
    }
  }

  /**
   * Extracts each row of {@code batch} with {@code vectorExtractRow} and compares
   * every column against the corresponding expected random row; fails on any NULL
   * or value mismatch.
   */
  void examineBatch(VectorizedRowBatch batch, VectorExtractRow vectorExtractRow,
      PrimitiveTypeInfo[] primitiveTypeInfos, Object[][] randomRows, int firstRandomRowIndex ) {
    int rowSize = vectorExtractRow.getCount();
    Object[] row = new Object[rowSize];
    for (int i = 0; i < batch.size; i++) {
      vectorExtractRow.extractRow(batch, i, row);
      Object[] expectedRow = randomRows[firstRandomRowIndex + i];
      for (int c = 0; c < rowSize; c++) {
        Object rowObj = row[c];
        Object expectedObj = expectedRow[c];
        if (rowObj == null) {
          fail("Unexpected NULL from extractRow. Expected class " +
              expectedObj.getClass().getName() + " value " + expectedObj.toString() +
              " batch index " + i + " firstRandomRowIndex " + firstRandomRowIndex);
        }
        if (!rowObj.equals(expectedObj)) {
          fail("Row " + (firstRandomRowIndex + i) + " and column " + c + " mismatch (" +
              primitiveTypeInfos[c].getPrimitiveCategory() + " actual value " + rowObj +
              " and expected value " + expectedObj + ")");
        }
      }
    }
  }

  /**
   * Serializes one row of writables field-by-field through {@code serializeWrite},
   * dispatching on each column's primitive category, and returns the filled Output.
   */
  private Output serializeRow(Object[] row, VectorRandomRowSource source,
      SerializeWrite serializeWrite) throws HiveException, IOException {
    Output output = new Output();
    serializeWrite.set(output);
    PrimitiveTypeInfo[] primitiveTypeInfos = source.primitiveTypeInfos();
    for (int i = 0; i < primitiveTypeInfos.length; i++) {
      Object object = row[i];
      PrimitiveCategory primitiveCategory = primitiveTypeInfos[i].getPrimitiveCategory();
      switch (primitiveCategory) {
      case BOOLEAN:
        {
          BooleanWritable expectedWritable = (BooleanWritable) object;
          boolean value = expectedWritable.get();
          serializeWrite.writeBoolean(value);
        }
        break;
      case BYTE:
        {
          ByteWritable expectedWritable = (ByteWritable) object;
          byte value = expectedWritable.get();
          serializeWrite.writeByte(value);
        }
        break;
      case SHORT:
        {
          ShortWritable expectedWritable = (ShortWritable) object;
          short value = expectedWritable.get();
          serializeWrite.writeShort(value);
        }
        break;
      case INT:
        {
          IntWritable expectedWritable = (IntWritable) object;
          int value = expectedWritable.get();
          serializeWrite.writeInt(value);
        }
        break;
      case LONG:
        {
          LongWritable expectedWritable = (LongWritable) object;
          long value = expectedWritable.get();
          serializeWrite.writeLong(value);
        }
        break;
      case DATE:
        {
          DateWritable expectedWritable = (DateWritable) object;
          Date value = expectedWritable.get();
          serializeWrite.writeDate(value);
        }
        break;
      case FLOAT:
        {
          FloatWritable expectedWritable = (FloatWritable) object;
          float value = expectedWritable.get();
          serializeWrite.writeFloat(value);
        }
        break;
      case DOUBLE:
        {
          DoubleWritable expectedWritable = (DoubleWritable) object;
          double value = expectedWritable.get();
          serializeWrite.writeDouble(value);
        }
        break;
      case STRING:
        {
          Text text = (Text) object;
          serializeWrite.writeString(text.getBytes(), 0, text.getLength());
        }
        break;
      case CHAR:
        {
          HiveCharWritable expectedWritable = (HiveCharWritable) object;
          HiveChar value = expectedWritable.getHiveChar();
          serializeWrite.writeHiveChar(value);
        }
        break;
      case VARCHAR:
        {
          HiveVarcharWritable expectedWritable = (HiveVarcharWritable) object;
          HiveVarchar value = expectedWritable.getHiveVarchar();
          serializeWrite.writeHiveVarchar(value);
        }
        break;
      case BINARY:
        {
          BytesWritable expectedWritable = (BytesWritable) object;
          byte[] bytes = expectedWritable.getBytes();
          int length = expectedWritable.getLength();
          serializeWrite.writeBinary(bytes, 0, length);
        }
        break;
      case TIMESTAMP:
        {
          TimestampWritable expectedWritable = (TimestampWritable) object;
          Timestamp value = expectedWritable.getTimestamp();
          serializeWrite.writeTimestamp(value);
        }
        break;
      case INTERVAL_YEAR_MONTH:
        {
          HiveIntervalYearMonthWritable expectedWritable = (HiveIntervalYearMonthWritable) object;
          HiveIntervalYearMonth value = expectedWritable.getHiveIntervalYearMonth();
          serializeWrite.writeHiveIntervalYearMonth(value);
        }
        break;
      case INTERVAL_DAY_TIME:
        {
          HiveIntervalDayTimeWritable expectedWritable = (HiveIntervalDayTimeWritable) object;
          HiveIntervalDayTime value = expectedWritable.getHiveIntervalDayTime();
          serializeWrite.writeHiveIntervalDayTime(value);
        }
        break;
      case DECIMAL:
        {
          HiveDecimalWritable expectedWritable = (HiveDecimalWritable) object;
          HiveDecimal value = expectedWritable.getHiveDecimal();
          serializeWrite.writeHiveDecimal(value, ((DecimalTypeInfo)primitiveTypeInfos[i]).scale());
        }
        break;
      default:
        throw new HiveException("Unexpected primitive category " + primitiveCategory);
      }
    }
    return output;
  }

  /** Populates the table Properties a LazySimple serde needs (columns, types, NULL format). */
  private void addToProperties(Properties tbl, String fieldNames, String fieldTypes) {
    // Set the configuration parameters
    tbl.setProperty(serdeConstants.SERIALIZATION_FORMAT, "9");
    tbl.setProperty("columns", fieldNames);
    tbl.setProperty("columns.types", fieldTypes);
    tbl.setProperty(serdeConstants.SERIALIZATION_NULL_FORMAT, "NULL");
  }

  /** Convenience overload: fresh Configuration and Properties. */
  private LazySerDeParameters getSerDeParams(StructObjectInspector rowObjectInspector) throws SerDeException {
    return getSerDeParams(new Configuration(), new Properties(), rowObjectInspector);
  }

  /** Builds LazySerDeParameters for the row shape described by {@code rowObjectInspector}. */
  private LazySerDeParameters getSerDeParams(Configuration conf, Properties tbl, StructObjectInspector rowObjectInspector) throws SerDeException {
    String fieldNames = ObjectInspectorUtils.getFieldNames(rowObjectInspector);
    String fieldTypes = ObjectInspectorUtils.getFieldTypes(rowObjectInspector);
    addToProperties(tbl, fieldNames, fieldTypes);
    return new LazySerDeParameters(conf, tbl, LazySimpleSerDe.class.getName());
  }

  /**
   * Drives the deserialize-side test: serializes 100000 random rows with the
   * selected serde, deserializes them into vectorized batches via
   * VectorDeserializeRow, and verifies the extracted values.
   *
   * @param alternate1 format-specific toggle: column-descending sort order for
   *     BINARY_SORTABLE; escape handling for LAZY_SIMPLE; unused for LAZY_BINARY
   * @param alternate2 BINARY_SORTABLE only: add alphabets containing chars that
   *     need escaping; unused for the other formats
   * @param useExternalBuffer passed through to the DeserializeRead implementations
   */
  void testVectorDeserializeRow(Random r, SerializationType serializationType,
      boolean alternate1, boolean alternate2,
      boolean useExternalBuffer)
      throws HiveException, IOException, SerDeException {
    String[] emptyScratchTypeNames = new String[0];
    VectorRandomRowSource source = new VectorRandomRowSource();
    source.init(r);
    VectorizedRowBatchCtx batchContext = new VectorizedRowBatchCtx();
    batchContext.init(source.rowStructObjectInspector(), emptyScratchTypeNames);
    VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
    // junk the destination for the 1st pass
    for (ColumnVector cv : batch.cols) {
      Arrays.fill(cv.isNull, true);
    }
    PrimitiveTypeInfo[] primitiveTypeInfos = source.primitiveTypeInfos();
    int fieldCount = source.typeNames().size();
    DeserializeRead deserializeRead;
    SerializeWrite serializeWrite;
    switch (serializationType) {
    case BINARY_SORTABLE:
      boolean useColumnSortOrderIsDesc = alternate1;
      if (!useColumnSortOrderIsDesc) {
        deserializeRead = new BinarySortableDeserializeRead(source.primitiveTypeInfos(),
            useExternalBuffer);
        serializeWrite = new BinarySortableSerializeWrite(fieldCount);
      } else {
        // Randomize per-column sort direction and build explicit null markers.
        boolean[] columnSortOrderIsDesc = new boolean[fieldCount];
        for (int i = 0; i < fieldCount; i++) {
          columnSortOrderIsDesc[i] = r.nextBoolean();
        }
        deserializeRead = new BinarySortableDeserializeRead(source.primitiveTypeInfos(),
            useExternalBuffer, columnSortOrderIsDesc);
        byte[] columnNullMarker = new byte[fieldCount];
        byte[] columnNotNullMarker = new byte[fieldCount];
        // NOTE(review): both branches below assign the identical ZERO/ONE markers,
        // so the asc/desc distinction currently affects only the comments — confirm
        // whether descending columns were meant to use inverted markers.
        for (int i = 0; i < fieldCount; i++) {
          if (columnSortOrderIsDesc[i]) {
            // Descending
            // Null last (default for descending order)
            columnNullMarker[i] = BinarySortableSerDe.ZERO;
            columnNotNullMarker[i] = BinarySortableSerDe.ONE;
          } else {
            // Ascending
            // Null first (default for ascending order)
            columnNullMarker[i] = BinarySortableSerDe.ZERO;
            columnNotNullMarker[i] = BinarySortableSerDe.ONE;
          }
        }
        serializeWrite = new BinarySortableSerializeWrite(columnSortOrderIsDesc,
            columnNullMarker, columnNotNullMarker);
      }
      boolean useBinarySortableCharsNeedingEscape = alternate2;
      if (useBinarySortableCharsNeedingEscape) {
        source.addBinarySortableAlphabets();
      }
      break;
    case LAZY_BINARY:
      deserializeRead = new LazyBinaryDeserializeRead(source.primitiveTypeInfos(),
          useExternalBuffer);
      serializeWrite = new LazyBinarySerializeWrite(fieldCount);
      break;
    case LAZY_SIMPLE:
      {
        StructObjectInspector rowObjectInspector = source.rowStructObjectInspector();
        Configuration conf = new Configuration();
        Properties tbl = new Properties();
        tbl.setProperty(serdeConstants.FIELD_DELIM, "\t");
        tbl.setProperty(serdeConstants.LINE_DELIM, "\n");
        byte separator = (byte) '\t';
        boolean useLazySimpleEscapes = alternate1;
        if (useLazySimpleEscapes) {
          tbl.setProperty(serdeConstants.QUOTE_CHAR, "'");
          String escapeString = "\\";
          tbl.setProperty(serdeConstants.ESCAPE_CHAR, escapeString);
        }
        LazySerDeParameters lazySerDeParams = getSerDeParams(conf, tbl, rowObjectInspector);
        if (useLazySimpleEscapes) {
          // LazySimple seems to throw away everything but \n and \r.
          boolean[] needsEscape = lazySerDeParams.getNeedsEscape();
          StringBuilder sb = new StringBuilder();
          if (needsEscape['\n']) {
            sb.append('\n');
          }
          if (needsEscape['\r']) {
            sb.append('\r');
          }
          // for (int i = 0; i < needsEscape.length; i++) {
          //  if (needsEscape[i]) {
          //    sb.append((char) i);
          //  }
          // }
          String needsEscapeStr = sb.toString();
          if (needsEscapeStr.length() > 0) {
            source.addEscapables(needsEscapeStr);
          }
        }
        deserializeRead = new LazySimpleDeserializeRead(source.primitiveTypeInfos(),
            useExternalBuffer, separator, lazySerDeParams);
        serializeWrite = new LazySimpleSerializeWrite(fieldCount, separator, lazySerDeParams);
      }
      break;
    default:
      throw new Error("Unknown serialization type " + serializationType);
    }
    VectorDeserializeRow vectorDeserializeRow = new VectorDeserializeRow(deserializeRead);
    vectorDeserializeRow.init();
    // junk the destination for the 1st pass
    for (ColumnVector cv : batch.cols) {
      Arrays.fill(cv.isNull, true);
      cv.noNulls = false;
    }
    VectorExtractRow vectorExtractRow = new VectorExtractRow();
    vectorExtractRow.init(source.typeNames());
    Object[][] randomRows = source.randomRows(100000);
    int firstRandomRowIndex = 0;
    for (int i = 0; i < randomRows.length; i++) {
      Object[] row = randomRows[i];
      Output output = serializeRow(row, source, serializeWrite);
      vectorDeserializeRow.setBytes(output.getData(), 0, output.getLength());
      try {
        vectorDeserializeRow.deserialize(batch, batch.size);
      } catch (Exception e) {
        // Attach the read-position diagnostics to make failures debuggable.
        throw new HiveException(
            "\nDeserializeRead details: " + vectorDeserializeRow.getDetailedReadPositionString(), e);
      }
      batch.size++;
      if (batch.size == batch.DEFAULT_SIZE) {
        examineBatch(batch, vectorExtractRow, primitiveTypeInfos, randomRows, firstRandomRowIndex);
        firstRandomRowIndex = i + 1;
        batch.reset();
      }
    }
    if (batch.size > 0) {
      // Verify the final partial batch.
      examineBatch(batch, vectorExtractRow, primitiveTypeInfos, randomRows, firstRandomRowIndex);
    }
  }

  // ---- JUnit 3 entry points; fixed seed 8732 keeps the random rows reproducible ----

  public void testVectorBinarySortableSerializeRow() throws Throwable {
    Random r = new Random(8732);
    testVectorSerializeRow(r, SerializationType.BINARY_SORTABLE);
  }

  public void testVectorLazyBinarySerializeRow() throws Throwable {
    Random r = new Random(8732);
    testVectorSerializeRow(r, SerializationType.LAZY_BINARY);
  }

  public void testVectorLazySimpleSerializeRow() throws Throwable {
    Random r = new Random(8732);
    testVectorSerializeRow(r, SerializationType.LAZY_SIMPLE);
  }

  /** Exercises all eight combinations of desc-order / escape-chars / external-buffer. */
  public void testVectorBinarySortableDeserializeRow() throws Throwable {
    Random r = new Random(8732);
    testVectorDeserializeRow(r, SerializationType.BINARY_SORTABLE,
        /* alternate1 = useColumnSortOrderIsDesc */ false,
        /* alternate2 = useBinarySortableCharsNeedingEscape */ false,
        /* useExternalBuffer */ false);
    testVectorDeserializeRow(r, SerializationType.BINARY_SORTABLE,
        /* alternate1 = useColumnSortOrderIsDesc */ true,
        /* alternate2 = useBinarySortableCharsNeedingEscape */ false,
        /* useExternalBuffer */ false);
    testVectorDeserializeRow(r, SerializationType.BINARY_SORTABLE,
        /* alternate1 = useColumnSortOrderIsDesc */ false,
        /* alternate2 = useBinarySortableCharsNeedingEscape */ false,
        /* useExternalBuffer */ true);
    testVectorDeserializeRow(r, SerializationType.BINARY_SORTABLE,
        /* alternate1 = useColumnSortOrderIsDesc */ true,
        /* alternate2 = useBinarySortableCharsNeedingEscape */ false,
        /* useExternalBuffer */ true);
    testVectorDeserializeRow(r, SerializationType.BINARY_SORTABLE,
        /* alternate1 = useColumnSortOrderIsDesc */ false,
        /* alternate2 = useBinarySortableCharsNeedingEscape */ true,
        /* useExternalBuffer */ false);
    testVectorDeserializeRow(r, SerializationType.BINARY_SORTABLE,
        /* alternate1 = useColumnSortOrderIsDesc */ true,
        /* alternate2 = useBinarySortableCharsNeedingEscape */ true,
        /* useExternalBuffer */ false);
    testVectorDeserializeRow(r, SerializationType.BINARY_SORTABLE,
        /* alternate1 = useColumnSortOrderIsDesc */ false,
        /* alternate2 = useBinarySortableCharsNeedingEscape */ true,
        /* useExternalBuffer */ true);
    testVectorDeserializeRow(r, SerializationType.BINARY_SORTABLE,
        /* alternate1 = useColumnSortOrderIsDesc */ true,
        /* alternate2 = useBinarySortableCharsNeedingEscape */ true,
        /* useExternalBuffer */ true);
  }

  public void testVectorLazyBinaryDeserializeRow() throws Throwable {
    Random r = new Random(8732);
    testVectorDeserializeRow(r, SerializationType.LAZY_BINARY,
        /* alternate1 = unused */ false,
        /* alternate2 = unused */ false,
        /* useExternalBuffer */ false);
    testVectorDeserializeRow(r, SerializationType.LAZY_BINARY,
        /* alternate1 = unused */ false,
        /* alternate2 = unused */ false,
        /* useExternalBuffer */ true);
  }

  public void testVectorLazySimpleDeserializeRow() throws Throwable {
    Random r = new Random(8732);
    testVectorDeserializeRow(r, SerializationType.LAZY_SIMPLE,
        /* alternate1 = useLazySimpleEscapes */ false,
        /* alternate2 = unused */ false,
        /* useExternalBuffer */ false);
    testVectorDeserializeRow(r, SerializationType.LAZY_SIMPLE,
        /* alternate1 = useLazySimpleEscapes */ false,
        /* alternate2 = unused */ false,
        /* useExternalBuffer */ true);
    testVectorDeserializeRow(r, SerializationType.LAZY_SIMPLE,
        /* alternate1 = useLazySimpleEscapes */ true,
        /* alternate2 = unused */ false,
        /* useExternalBuffer */ false);
    testVectorDeserializeRow(r, SerializationType.LAZY_SIMPLE,
        /* alternate1 = useLazySimpleEscapes */ true,
        /* alternate2 = unused */ false,
        /* useExternalBuffer */ true);
  }
}
package tamaized.voidcraft.client.model;

import net.minecraft.client.model.ModelBase;
import net.minecraft.client.model.ModelBiped;
import net.minecraft.client.model.ModelRenderer;
import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.util.EnumHandSide;
import net.minecraft.util.math.MathHelper;
import tamaized.voidcraft.common.capabilities.CapabilityList;
import tamaized.voidcraft.common.capabilities.vadeMecum.IVadeMecumCapability;
import tamaized.voidcraft.common.capabilities.voidicInfusion.IVoidicInfusionCapability;

/**
 * Biped-shaped overlay model that renders "void spike" boxes on an entity's
 * body and arms. The overlay is scaled by the entity's voidic infusion level
 * (see {@link #render}), so the spikes grow in as infusion increases. The
 * arm/body pose logic in {@link #setRotationAngles} mirrors vanilla
 * ModelBiped so the overlay tracks the player model's animations.
 */
public class ModelVoidSpikes extends ModelBase {

	public ModelRenderer armRight;
	public ModelRenderer body;
	public ModelRenderer armLeft;
	public ModelRenderer rightArmTop;
	public ModelRenderer rightArmBottom;
	public ModelRenderer rightSideTop;
	public ModelRenderer rightSideBottom;
	public ModelRenderer bodyLeft;
	public ModelRenderer bodyRight;
	public ModelRenderer leftArmTop;
	public ModelRenderer leftArmBottom;
	public ModelRenderer leftSideTop;
	public ModelRenderer leftSideBottom;
	// Head box is never rendered directly; its angles are read so spike arms
	// can follow head pitch/yaw in the bow-and-arrow poses below.
	public ModelRenderer bipedHead;
	public ModelBiped.ArmPose leftArmPose;
	public ModelBiped.ArmPose rightArmPose;
	public boolean isSneak;

	/**
	 * Builds the box hierarchy. The spike boxes (1x1x10) are parented to the
	 * arm/body parts so they inherit the biped animation.
	 *
	 * @param parent the biped model this overlay shadows
	 *               (currently unused here; poses are copied later via
	 *               {@link #setModelAttributes} — NOTE(review): confirm the
	 *               parameter is intentionally ignored)
	 */
	public ModelVoidSpikes(ModelBiped parent) {
		textureWidth = 46;
		textureHeight = 16;
		bipedHead = new ModelRenderer(this, 0, 0);
		bipedHead.addBox(-4.0F, -8.0F, -4.0F, 8, 8, 8, 0.0f);
		bipedHead.setRotationPoint(0.0F, 0.0F, 0.0F);
		rightSideTop = new ModelRenderer(this, 0, 0);
		rightSideTop.setRotationPoint(-1.5F, -1.5F, -0.5F);
		rightSideTop.addBox(0.0F, 0.0F, 0.0F, 1, 1, 10, 0.0F);
		// -0.8726646... rad = -50 degrees; +/-1.5707963... rad = +/-90 degrees.
		setRotateAngle(rightSideTop, -0.8726646259971648F, -1.5707963267948966F, 0.0F);
		armLeft = new ModelRenderer(this, 22, 0);
		armLeft.setRotationPoint(5.0F, 2.0F, -0.0F);
		armLeft.addBox(-1.0F, -2.0F, -2.0F, 4, 12, 4, 0.0F);
		setRotateAngle(armLeft, 0.0F, 0.0F, -0.10000736613927509F);
		rightArmBottom = new ModelRenderer(this, 0, 0);
		rightArmBottom.setRotationPoint(-1.5F, 2.5F, 1.0F);
		rightArmBottom.addBox(0.0F, 0.0F, 0.0F, 1, 1, 10, 0.0F);
		setRotateAngle(rightArmBottom, -0.8726646259971648F, 0.0F, 0.0F);
		rightSideBottom = new ModelRenderer(this, 0, 0);
		rightSideBottom.setRotationPoint(-1.5F, 2.5F, -0.5F);
		rightSideBottom.addBox(0.0F, 0.0F, 0.0F, 1, 1, 10, 0.0F);
		setRotateAngle(rightSideBottom, -0.8726646259971648F, -1.5707963267948966F, 0.0F);
		rightArmTop = new ModelRenderer(this, 0, 0);
		rightArmTop.setRotationPoint(-1.5F, -1.5F, 1.0F);
		rightArmTop.addBox(0.0F, 0.0F, 0.0F, 1, 1, 10, 0.0F);
		setRotateAngle(rightArmTop, -0.8726646259971648F, 0.0F, 0.0F);
		leftSideTop = new ModelRenderer(this, 0, 0);
		leftSideTop.setRotationPoint(1.5F, -1.5F, 0.5F);
		leftSideTop.addBox(0.0F, 0.0F, 0.0F, 1, 1, 10, 0.0F);
		setRotateAngle(leftSideTop, -0.8726646259971648F, 1.5707963267948966F, 0.0F);
		leftArmBottom = new ModelRenderer(this, 0, 0);
		leftArmBottom.setRotationPoint(0.5F, 2.5F, 1.0F);
		leftArmBottom.addBox(0.0F, 0.0F, 0.0F, 1, 1, 10, 0.0F);
		setRotateAngle(leftArmBottom, -0.8726646259971648F, 0.0F, 0.0F);
		leftArmTop = new ModelRenderer(this, 0, 0);
		leftArmTop.setRotationPoint(0.5F, -1.5F, 1.0F);
		leftArmTop.addBox(0.0F, 0.0F, 0.0F, 1, 1, 10, 0.0F);
		setRotateAngle(leftArmTop, -0.8726646259971648F, 0.0F, 0.0F);
		bodyRight = new ModelRenderer(this, 0, 0);
		bodyRight.setRotationPoint(-2.5F, 2.5F, 1.0F);
		bodyRight.addBox(0.0F, 0.0F, 0.0F, 1, 1, 10, 0.0F);
		setRotateAngle(bodyRight, -0.8726646259971648F, 0.0F, 0.0F);
		leftSideBottom = new ModelRenderer(this, 0, 0);
		leftSideBottom.setRotationPoint(1.5F, 2.5F, 0.5F);
		leftSideBottom.addBox(0.0F, 0.0F, 0.0F, 1, 1, 10, 0.0F);
		setRotateAngle(leftSideBottom, -0.8726646259971648F, 1.5707963267948966F, 0.0F);
		body = new ModelRenderer(this, 22, 0);
		body.setRotationPoint(0.0F, 0.0F, 0.0F);
		body.addBox(-4.0F, 0.0F, -2.0F, 8, 12, 4, 0.0F);
		bodyLeft = new ModelRenderer(this, 0, 0);
		bodyLeft.setRotationPoint(1.5F, 2.5F, 1.0F);
		bodyLeft.addBox(0.0F, 0.0F, 0.0F, 1, 1, 10, 0.0F);
		setRotateAngle(bodyLeft, -0.8726646259971648F, 0.0F, 0.0F);
		armRight = new ModelRenderer(this, 22, 0);
		armRight.setRotationPoint(-5.0F, 2.0F, 0.0F);
		armRight.addBox(-3.0F, -2.0F, -2.0F, 4, 12, 4, 0.0F);
		setRotateAngle(armRight, 0.0F, 0.0F, 0.10000736613927509F);
		// Parent spikes to the limbs so they follow the biped animation.
		armRight.addChild(rightSideTop);
		armRight.addChild(rightArmBottom);
		armRight.addChild(rightSideBottom);
		armRight.addChild(rightArmTop);
		armLeft.addChild(leftSideTop);
		armLeft.addChild(leftArmBottom);
		armLeft.addChild(leftArmTop);
		body.addChild(bodyRight);
		armLeft.addChild(leftSideBottom);
		body.addChild(bodyLeft);
	}

	/**
	 * Renders the overlay scaled by the entity's voidic infusion percentage
	 * (plus a 0.5 bonus when the Vade Mecum "Flight" passive is active,
	 * capped at 1.0). Skips rendering entirely when the entity carries no
	 * infusion capability. Arms are nudged inward while the model is small
	 * so the shrunken spikes stay attached to the body.
	 */
	@Override
	public void render(Entity entity, float f, float f1, float f2, float f3, float f4, float f5) {
		IVoidicInfusionCapability infusionCap = entity.getCapability(CapabilityList.VOIDICINFUSION, null);
		if (infusionCap == null)
			return;
		IVadeMecumCapability vadeMecumCap = entity.getCapability(CapabilityList.VADEMECUM, null);
		float perc = Math.min(1F, (infusionCap.getInfusionPerc() + (vadeMecumCap != null && vadeMecumCap.hasPassive(IVadeMecumCapability.Passive.Flight) ? 0.5F : 0.0F)));
		GlStateManager.pushMatrix();
		{
			GlStateManager.translate(0, 0.05F, 0);
			GlStateManager.scale(perc, perc, perc);
			body.render(f5);
			GlStateManager.pushMatrix();
			// Shift the left arm toward the body while perc < 1; the offset eases
			// back to 0 over the last quarter (perc in (0.75, 1]).
			GlStateManager.translate((1 - perc) + (perc > 0.75F ? ((1F - ((perc - 0.75F) / 0.25F)) * -0.15F) : -0.15F), 0, 0);
			armLeft.render(f5);
			GlStateManager.popMatrix();
			// Mirrored shift for the right arm.
			GlStateManager.translate((perc - 1) + (perc > 0.75F ? ((1F - ((perc - 0.75F) / 0.25F)) * 0.15F) : 0.15F), 0, 0);
			armRight.render(f5);
		}
		GlStateManager.popMatrix();
	}

	/**
	 * This is a helper function from Tabula to set the rotation of model parts
	 */
	public void setRotateAngle(ModelRenderer modelRenderer, float x, float y, float z) {
		modelRenderer.rotateAngleX = x;
		modelRenderer.rotateAngleY = y;
		modelRenderer.rotateAngleZ = z;
	}

	/**
	 * Applies biped-style limb animation (walk swing, riding, arm poses,
	 * attack swing, sneak, idle bob, bow draw) to the overlay's body and
	 * arms. The math intentionally mirrors vanilla ModelBiped so this model
	 * stays in sync with the real player/entity model; do not "simplify"
	 * individual steps — ordering matters (later steps accumulate onto
	 * earlier ones with +=).
	 */
	@Override
	public void setRotationAngles(float limbSwing, float limbSwingAmount, float ageInTicks, float netHeadYaw, float headPitch, float scaleFactor, Entity entityIn) {
		// Reset to the rest pose before layering animation on top.
		this.body.rotateAngleY = 0.0F;
		this.armRight.rotationPointZ = 0.0F;
		this.armRight.rotationPointX = -5.0F;
		this.armLeft.rotationPointZ = 0.0F;
		this.armLeft.rotationPointX = 5.0F;
		float f = 1.0F;
		// Opposite-phase arm swing driven by limb movement.
		this.armRight.rotateAngleX = MathHelper.cos(limbSwing * 0.6662F + (float) Math.PI) * 2.0F * limbSwingAmount * 0.5F / f;
		this.armLeft.rotateAngleX = MathHelper.cos(limbSwing * 0.6662F) * 2.0F * limbSwingAmount * 0.5F / f;
		this.armRight.rotateAngleZ = 0.0F;
		this.armLeft.rotateAngleZ = 0.0F;
		if (this.isRiding) {
			this.armRight.rotateAngleX += -((float) Math.PI / 5F);
			this.armLeft.rotateAngleX += -((float) Math.PI / 5F);
		}
		this.armRight.rotateAngleY = 0.0F;
		this.armRight.rotateAngleZ = 0.0F;
		switch (this.leftArmPose) {
			case EMPTY:
				this.armLeft.rotateAngleY = 0.0F;
				break;
			case BLOCK:
				this.armLeft.rotateAngleX = this.armLeft.rotateAngleX * 0.5F - 0.9424779F;
				this.armLeft.rotateAngleY = 0.5235988F;
				break;
			case ITEM:
				this.armLeft.rotateAngleX = this.armLeft.rotateAngleX * 0.5F - ((float) Math.PI / 10F);
				this.armLeft.rotateAngleY = 0.0F;
				// No break: deliberately falls through to default (which is empty),
				// matching vanilla ModelBiped.
			default:
				break;
		}
		switch (this.rightArmPose) {
			case EMPTY:
				this.armRight.rotateAngleY = 0.0F;
				break;
			case BLOCK:
				this.armRight.rotateAngleX = this.armRight.rotateAngleX * 0.5F - 0.9424779F;
				this.armRight.rotateAngleY = -0.5235988F;
				break;
			case ITEM:
				this.armRight.rotateAngleX = this.armRight.rotateAngleX * 0.5F - ((float) Math.PI / 10F);
				this.armRight.rotateAngleY = 0.0F;
				// Fall-through to empty default, as above.
			default:
				break;
		}
		if (this.swingProgress > 0.0F) {
			EnumHandSide enumhandside = this.getMainHand(entityIn);
			ModelRenderer modelrenderer = this.getArmForSide(enumhandside);
			// NOTE(review): result of this call is discarded — dead statement kept
			// from the vanilla template; the off-hand arm is never used here.
			this.getArmForSide(enumhandside.opposite());
			float f1 = this.swingProgress;
			// Body twist during the attack swing; mirrored for left-handed.
			this.body.rotateAngleY = MathHelper.sin(MathHelper.sqrt(f1) * ((float) Math.PI * 2F)) * 0.2F;
			if (enumhandside == EnumHandSide.LEFT) {
				this.body.rotateAngleY *= -1.0F;
			}
			// Re-seat the shoulders on the rotated torso.
			this.armRight.rotationPointZ = MathHelper.sin(this.body.rotateAngleY) * 5.0F;
			this.armRight.rotationPointX = -MathHelper.cos(this.body.rotateAngleY) * 5.0F;
			this.armLeft.rotationPointZ = -MathHelper.sin(this.body.rotateAngleY) * 5.0F;
			this.armLeft.rotationPointX = MathHelper.cos(this.body.rotateAngleY) * 5.0F;
			this.armRight.rotateAngleY += this.body.rotateAngleY;
			this.armLeft.rotateAngleY += this.body.rotateAngleY;
			// Vanilla quirk preserved: left arm X gets the body Y twist added.
			this.armLeft.rotateAngleX += this.body.rotateAngleY;
			// Ease-out curve for the swing (1 - (1-t)^4).
			f1 = 1.0F - this.swingProgress;
			f1 = f1 * f1;
			f1 = f1 * f1;
			f1 = 1.0F - f1;
			float f2 = MathHelper.sin(f1 * (float) Math.PI);
			float f3 = MathHelper.sin(this.swingProgress * (float) Math.PI) * -(this.bipedHead.rotateAngleX - 0.7F) * 0.75F;
			modelrenderer.rotateAngleX = (float) ((double) modelrenderer.rotateAngleX - ((double) f2 * 1.2D + (double) f3));
			modelrenderer.rotateAngleY += this.body.rotateAngleY * 2.0F;
			modelrenderer.rotateAngleZ += MathHelper.sin(this.swingProgress * (float) Math.PI) * -0.4F;
		}
		if (this.isSneak) {
			this.body.rotateAngleX = 0.5F;
			this.armRight.rotateAngleX += 0.4F;
			this.armLeft.rotateAngleX += 0.4F;
			this.bipedHead.rotationPointY = 1.0F;
		} else {
			this.body.rotateAngleX = 0.0F;
			this.bipedHead.rotationPointY = 0.0F;
		}
		// Idle arm bob/sway.
		this.armRight.rotateAngleZ += MathHelper.cos(ageInTicks * 0.09F) * 0.05F + 0.05F;
		this.armLeft.rotateAngleZ -= MathHelper.cos(ageInTicks * 0.09F) * 0.05F + 0.05F;
		this.armRight.rotateAngleX += MathHelper.sin(ageInTicks * 0.067F) * 0.05F;
		this.armLeft.rotateAngleX -= MathHelper.sin(ageInTicks * 0.067F) * 0.05F;
		// Bow-draw poses: both arms raised toward the head's facing.
		if (this.rightArmPose == ModelBiped.ArmPose.BOW_AND_ARROW) {
			this.armRight.rotateAngleY = -0.1F + this.bipedHead.rotateAngleY;
			this.armLeft.rotateAngleY = 0.1F + this.bipedHead.rotateAngleY + 0.4F;
			this.armRight.rotateAngleX = -((float) Math.PI / 2F) + this.bipedHead.rotateAngleX;
			this.armLeft.rotateAngleX = -((float) Math.PI / 2F) + this.bipedHead.rotateAngleX;
		} else if (this.leftArmPose == ModelBiped.ArmPose.BOW_AND_ARROW) {
			this.armRight.rotateAngleY = -0.1F + this.bipedHead.rotateAngleY - 0.4F;
			this.armLeft.rotateAngleY = 0.1F + this.bipedHead.rotateAngleY;
			this.armRight.rotateAngleX = -((float) Math.PI / 2F) + this.bipedHead.rotateAngleX;
			this.armLeft.rotateAngleX = -((float) Math.PI / 2F) + this.bipedHead.rotateAngleX;
		}
	}

	/**
	 * Copies pose/sneak state from the real biped model so this overlay
	 * animates identically in the same frame.
	 */
	@Override
	public void setModelAttributes(ModelBase model) {
		super.setModelAttributes(model);
		if (model instanceof ModelBiped) {
			ModelBiped modelbiped = (ModelBiped) model;
			this.leftArmPose = modelbiped.leftArmPose;
			this.rightArmPose = modelbiped.rightArmPose;
			this.isSneak = modelbiped.isSneak;
		}
	}

	/** Applies the given arm's transform to the GL stack (for held-item rendering). */
	public void postRenderArm(float scale, EnumHandSide side) {
		this.getArmForSide(side).postRender(scale);
	}

	/** @return the arm renderer for the given hand side */
	protected ModelRenderer getArmForSide(EnumHandSide side) {
		return side == EnumHandSide.LEFT ? this.armLeft : this.armRight;
	}

	/** @return the entity's primary hand, defaulting to RIGHT for non-living entities */
	protected EnumHandSide getMainHand(Entity entityIn) {
		return entityIn instanceof EntityLivingBase ? ((EntityLivingBase) entityIn).getPrimaryHand() : EnumHandSide.RIGHT;
	}

	// Pure pass-through; present to keep the override point explicit.
	@Override
	public void setLivingAnimations(EntityLivingBase entitylivingbaseIn, float limbSwing, float limbSwingAmount, float partialTickTime) {
		super.setLivingAnimations(entitylivingbaseIn, limbSwing, limbSwingAmount, partialTickTime);
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.uima.caseditor.ide; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import org.apache.uima.cas.CAS; import org.apache.uima.cas.TypeSystem; import org.apache.uima.caseditor.CasEditorPlugin; import org.apache.uima.caseditor.core.model.DefaultColors; import org.apache.uima.caseditor.core.model.dotcorpus.DotCorpus; import org.apache.uima.caseditor.core.model.dotcorpus.DotCorpusSerializer; import org.apache.uima.caseditor.editor.AnnotationStyle; import org.apache.uima.caseditor.editor.CasDocumentProvider; import org.apache.uima.caseditor.editor.DocumentUimaImpl; import org.apache.uima.caseditor.editor.ICasDocument; import org.apache.uima.caseditor.editor.ICasEditor; import org.apache.uima.caseditor.ide.searchstrategy.ITypeSystemSearchStrategy; import org.apache.uima.caseditor.ide.searchstrategy.TypeSystemSearchStrategyFactory; import org.eclipse.core.resources.IFile; import org.eclipse.core.resources.IResource; import 
org.eclipse.core.resources.IResourceChangeEvent; import org.eclipse.core.resources.IResourceChangeListener; import org.eclipse.core.resources.IResourceDelta; import org.eclipse.core.resources.IResourceDeltaVisitor; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.core.runtime.CoreException; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.core.runtime.IStatus; import org.eclipse.core.runtime.NullProgressMonitor; import org.eclipse.core.runtime.Path; import org.eclipse.core.runtime.QualifiedName; import org.eclipse.core.runtime.Status; import org.eclipse.jface.preference.IPreferenceStore; import org.eclipse.jface.preference.PreferenceStore; import org.eclipse.jface.util.IPropertyChangeListener; import org.eclipse.jface.util.PropertyChangeEvent; import org.eclipse.swt.SWT; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.events.SelectionListener; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Label; import org.eclipse.ui.part.FileEditorInput; public class DefaultCasDocumentProvider extends org.apache.uima.caseditor.editor.CasDocumentProvider { /** * Listens for resource events: If the input file for the editor is removed the editor will be * closed and if the input file is modified, then the CAS needs to be updated and all views needs * to be notified. 
   */
  private class ModifyElementListener implements IResourceChangeListener {

    // Editor input whose backing file this listener watches.
    private FileEditorInput fileInput;

    public ModifyElementListener(FileEditorInput fileInput) {
      this.fileInput = fileInput;
    }

    /**
     * Walks the resource delta; fires element-deleted when the input file is
     * removed, element-changed when it is modified (unless change tracking is
     * suppressed during our own save, see doSaveDocument).
     */
    @Override
    public void resourceChanged(IResourceChangeEvent event) {
      IResourceDelta delta = event.getDelta();
      try {
        IResourceDeltaVisitor visitor = new IResourceDeltaVisitor() {
          @Override
          public boolean visit(IResourceDelta delta) throws CoreException {
            // Marker-only deltas are ignored; only real file content/lifecycle
            // changes on the editor's own input file are interesting.
            if (delta.getFlags() != IResourceDelta.MARKERS
                    && delta.getResource().getType() == IResource.FILE) {
              IResource resource = delta.getResource();
              if (resource.equals(fileInput.getFile())) {
                if (delta.getKind() == IResourceDelta.REMOVED) {
                  handleElementDeleted(fileInput);
                } else if (delta.getKind() == IResourceDelta.CHANGED) {
                  if (isFileChangeTrackingEnabled)
                    handleElementChanged(fileInput);
                }
              }
            }
            return true;
          }
        };

        delta.accept(visitor);
      } catch (CoreException e) {
        CasEditorPlugin.log(e);
      }
    }
  }

  /** Element info that additionally carries the resource-change listener to unregister on dispose. */
  private static class FileElementInfo extends ElementInfo {

    private ModifyElementListener deleteListener;

    FileElementInfo(ElementInfo info) {
      super(info.element);
    }
  }

  /**
   * Persists the session preference store into a workspace persistent property
   * on the type-system file whenever a session preference changes.
   */
  private class SaveSessionPreferencesTrigger implements IPropertyChangeListener {

    private Object element;

    SaveSessionPreferencesTrigger(Object element) {
      this.element = element;
    }

    @Override
    public void propertyChange(PropertyChangeEvent event) {
      IResource tsFile = ResourcesPlugin.getWorkspace().getRoot()
              .findMember((getTypesystemId(element)));

      PreferenceStore prefStore = (PreferenceStore) getSessionPreferenceStore(element);

      // Serialize the store to memory first; save to a byte stream cannot
      // realistically fail.
      ByteArrayOutputStream prefBytes = new ByteArrayOutputStream();
      try {
        prefStore.save(prefBytes, "");
      } catch (IOException e) {
        CasEditorIdePlugin.log(e);
      }

      try {
        tsFile.setPersistentProperty(new QualifiedName("", CAS_EDITOR_SESSION_PROPERTIES),
                new String(prefBytes.toByteArray(), StandardCharsets.UTF_8));
      } catch (CoreException e) {
        CasEditorIdePlugin.log(e);
      }
    }
  }

  private static final String CAS_EDITOR_SESSION_PROPERTIES = "CAS_EDITOR_SESSION_PROPERTIES";

  /**
   * This map resolved an opened document to its associated style object id.
   *
   * The tracking is done in the provider because the document element itself does not has any link
   * to the style object.
   */
  private Map<String, String> documentToTypeSystemMap = new HashMap<>();

  /**
   * This map stores temporarily the type system that should be used to open the next document. This
   * functionality is separated from documentToTypeSystemMap since the preference for using the
   * previously selected type system can be deactivated. The inlined file choose, for example, uses
   * this field to remember the chosen type system.
   */
  private Map<String, String> typeSystemForNextDocumentOnly = new HashMap<>();

  // Per-type-system session stores (shown types etc.), keyed by type-system id.
  private Map<String, IPreferenceStore> sessionPreferenceStores = new HashMap<>();

  /**
   * This map resolves a type system to a style. It is used to cache type system preference instance
   * while the editor is open.
   */
  private Map<String, PreferenceStore> typeSystemPreferences = new HashMap<>();

  // Set to false while doSaveDocument writes the file, so our own save does
  // not trigger a reload via ModifyElementListener.
  private boolean isFileChangeTrackingEnabled = true;

  // UIMA-2245 Remove this method together with the migration code below one day
  private String getStyleFileForTypeSystem(String typeSystemFile) {
    int lastSlashIndex = typeSystemFile.lastIndexOf("/");
    String styleId = typeSystemFile.substring(0, lastSlashIndex + 1);
    styleId = styleId + ".style-" + typeSystemFile.substring(lastSlashIndex + 1);

    return styleId;
  }

  /** Maps a type-system path to its sibling ".pref-" preference file path. */
  private String getPreferenceFileForTypeSystem(String typeSystemFile) {
    int lastSlashIndex = typeSystemFile.lastIndexOf("/");
    String styleId = typeSystemFile.substring(0, lastSlashIndex + 1);
    styleId = styleId + ".pref-" + typeSystemFile.substring(lastSlashIndex + 1);

    return styleId;
  }

  private Collection<AnnotationStyle> getConfiguredAnnotationStyles(IPreferenceStore store,
          TypeSystem types) {
    // TODO: for each annotation type, try to retrieve annotation styles
    // Currently always empty; DefaultColors.assignColors fills in defaults.
    return new HashSet<>();
  }

  /**
   * Creates the CAS document for the given editor input. Resolution order for
   * the type system: (1) one-shot override set via
   * setTypeSystemForNextDocumentOnly, (2) the remembered type system (if the
   * "remember" preference is on), (3) registered search strategies,
   * (4) the project-level type-system location. Loads (or migrates/creates)
   * the per-type-system style preferences and the per-type-system session
   * store. On failure an error status is recorded for the element and null is
   * returned.
   */
  @Override
  protected ICasDocument createDocument(Object element) throws CoreException {
    if (element instanceof FileEditorInput) {
      FileEditorInput fileInput = (FileEditorInput) element;

      IFile casFile = fileInput.getFile();

      // Try to find a type system for the CAS file
      // TODO: Change to only use full path
      IFile typeSystemFile = null;

      // First check if a type system is already known or was
      // set by the editor for this specific CAS.
      // apply that type system only if the setting is active in the preferences
      String typeSystemFileString = null;

      String document = casFile.getFullPath().toPortableString();
      if (typeSystemForNextDocumentOnly.get(document) != null) {
        // the type system was already set internally. Use this one and forget the information.
        typeSystemFileString = typeSystemForNextDocumentOnly.get(document);
        typeSystemForNextDocumentOnly.put(document, null);
      }

      IPreferenceStore prefStore = CasEditorIdePlugin.getDefault().getPreferenceStore();
      boolean useLastTypesystem = prefStore
              .getBoolean(CasEditorIdePreferenceConstants.CAS_EDITOR_REMEMBER_TYPESYSTEM);
      if (typeSystemFileString == null && useLastTypesystem) {
        typeSystemFileString = documentToTypeSystemMap.get(document);
      }
      if (typeSystemFileString != null)
        typeSystemFile = ResourcesPlugin.getWorkspace().getRoot()
                .getFile(new Path(typeSystemFileString));

      // use search strategies for finding the type system
      if (typeSystemFile == null || !typeSystemFile.exists()) {
        Map<Integer, ITypeSystemSearchStrategy> searchStrategies = TypeSystemSearchStrategyFactory
                .instance().getSearchStrategies();
        // TODO sort again for user preference settings
        Collection<ITypeSystemSearchStrategy> values = searchStrategies.values();
        for (ITypeSystemSearchStrategy eachStrategy : values) {
          IFile findTypeSystem = eachStrategy.findTypeSystem(casFile);
          if (findTypeSystem != null && findTypeSystem.exists()) {
            typeSystemFile = findTypeSystem;
            break;
          }
        }
      }

      // If non was found get it from project
      if (typeSystemFile == null)
        typeSystemFile = TypeSystemLocationPropertyPage.getTypeSystemLocation(casFile.getProject());

      if (typeSystemFile != null && typeSystemFile.exists()) {

        if (!typeSystemFile.isSynchronized(IResource.DEPTH_ZERO)) {
          typeSystemFile.refreshLocal(IResource.DEPTH_ZERO, new NullProgressMonitor());
        }

        // TODO: Update this comment!
        // Try to load a style file for the type system
        // Should be named: ts file name, prefixed with .style-
        // If it does not exist, create it when it is changed
        // Creating it after the default is changed means that
        // colors could change completely when the a type is
        // added or removed to the type system

        IFile prefFile = ResourcesPlugin.getWorkspace().getRoot().getFile(new Path(
                getPreferenceFileForTypeSystem(typeSystemFile.getFullPath().toPortableString())));

        PreferenceStore tsPrefStore = typeSystemPreferences
                .get(prefFile.getFullPath().toPortableString());

        // If lookup for store failed ...
        if (tsPrefStore == null) {
          if (prefFile.exists()) {
            tsPrefStore = new PreferenceStore(prefFile.getName());
            try {
              tsPrefStore.load(prefFile.getContents()); // TODO: Close stream!
            } catch (IOException e) {
              e.printStackTrace(); // TODO: Handle this correctly!
            }
          } else {
            // UIMA-2245
            // DotCorpus to Eclipse PreferenceStore migration code.
            // If there is DotCorpus style file and not yet a preference store file
            // the settings from the DotCorpus style file should be written into a preference store
            // file.
            IFile styleFile = ResourcesPlugin.getWorkspace().getRoot().getFile(new Path(
                    getStyleFileForTypeSystem(typeSystemFile.getFullPath().toPortableString())));

            if (styleFile.exists()) {
              InputStream styleFileIn = null;
              DotCorpus dotCorpus;
              try {
                styleFileIn = styleFile.getContents();
                dotCorpus = DotCorpusSerializer.parseDotCorpus(styleFileIn);
              } finally {
                if (styleFileIn != null)
                  try {
                    styleFileIn.close();
                  } catch (IOException e) {
                    CasEditorPlugin.log(e);
                  }
              }

              if (dotCorpus != null) {
                tsPrefStore = new PreferenceStore(prefFile.getName());
                for (AnnotationStyle style : dotCorpus.getAnnotationStyles()) {
                  AnnotationStyle.putAnnotatationStyleToStore(tsPrefStore, style);
                }

                for (String shownType : dotCorpus.getShownTypes()) {
                  tsPrefStore.putValue(shownType + ".isShown", "true");
                }

                ByteArrayOutputStream prefOut = new ByteArrayOutputStream();
                try {
                  tsPrefStore.save(prefOut, "");
                } catch (IOException e) {
                  // Should never happen!
                  CasEditorPlugin.log(e);
                }

                // TODO: Do we need to handle exceptions here?
                prefFile.create(new ByteArrayInputStream(prefOut.toByteArray()), IFile.FORCE, null);
              }
            }
          }

          // No preference defined, lets use defaults
          if (tsPrefStore == null) {
            tsPrefStore = new PreferenceStore(prefFile.getName());

            CAS cas = DocumentUimaImpl.getVirginCAS(typeSystemFile);
            TypeSystem ts = cas.getTypeSystem();

            Collection<AnnotationStyle> defaultStyles = getConfiguredAnnotationStyles(tsPrefStore,
                    ts);

            Collection<AnnotationStyle> newStyles = DefaultColors.assignColors(ts, defaultStyles);

            // TODO: Settings defaults must be moved to the AnnotationEditor
            for (AnnotationStyle style : newStyles) {
              AnnotationStyle.putAnnotatationStyleToStore(tsPrefStore, style);
            }
          }

          typeSystemPreferences.put(prefFile.getFullPath().toPortableString(), tsPrefStore);
        }

        documentToTypeSystemMap.put(document, typeSystemFile.getFullPath().toPortableString());

        IPreferenceStore store = sessionPreferenceStores.get(getTypesystemId(element));

        if (store == null) {
          PreferenceStore newStore = new PreferenceStore();
          sessionPreferenceStores.put(getTypesystemId(element), newStore);
          newStore.addPropertyChangeListener(new SaveSessionPreferencesTrigger(element));

          String sessionPreferenceString = typeSystemFile
                  .getPersistentProperty(new QualifiedName("", CAS_EDITOR_SESSION_PROPERTIES));

          if (sessionPreferenceString != null) {
            try {
              newStore.load(new ByteArrayInputStream(
                      sessionPreferenceString.getBytes(StandardCharsets.UTF_8)));
            } catch (IOException e) {
              CasEditorPlugin.log(e);
            }
          }
        }

        // TODO:
        // Preferences are bound to the type system
        // Changed in one place, then it should change in all places

        CAS cas = DocumentUimaImpl.getVirginCAS(typeSystemFile);

        ICasDocument doc = new DocumentUimaImpl(cas, casFile,
                typeSystemFile.getFullPath().makeRelative().toString());

        elementErrorStatus.remove(element);

        return doc;
      } else {
        String message;
        if (typeSystemFile != null) {
          message = "Cannot find type system!\nPlease place a valid type system in this path:\n"
                  + typeSystemFile.getFullPath().toString();
        } else
          message = "Type system is not set, please choose a type system to open the CAS.";

        IStatus status = new Status(IStatus.ERROR, "org.apache.uima.dev",
                CasDocumentProvider.TYPE_SYSTEM_NOT_AVAILABLE_STATUS_CODE, message, null);

        elementErrorStatus.put(element, status);
      }
    }

    return null;
  }

  /**
   * Serializes the CAS back into its workspace file. File-change tracking is
   * disabled during setContents so our own write does not look like an
   * external modification.
   */
  @Override
  protected void doSaveDocument(IProgressMonitor monitor, Object element, ICasDocument document,
          boolean overwrite) throws CoreException {
    if (element instanceof FileEditorInput) {
      FileEditorInput fileInput = (FileEditorInput) element;

      IFile file = fileInput.getFile();

      if (document instanceof DocumentUimaImpl) {
        DocumentUimaImpl documentImpl = (DocumentUimaImpl) document;

        ByteArrayOutputStream outStream = new ByteArrayOutputStream(40000);
        documentImpl.serialize(outStream);

        InputStream stream = new ByteArrayInputStream(outStream.toByteArray());

        isFileChangeTrackingEnabled = false;

        try {
          file.setContents(stream, true, false, null);
        } finally {
          isFileChangeTrackingEnabled = true;
        }
      }
    }

    // tell everyone that the element changed and is not dirty any longer
    fireElementDirtyStateChanged(element, false);
  }

  /** @return the type-system id tracked for this editor input, or null if unknown */
  private String getTypesystemId(Object element) {
    if (element instanceof FileEditorInput) {
      FileEditorInput editorInput = (FileEditorInput) element;
      return documentToTypeSystemMap.get(editorInput.getFile().getFullPath().toPortableString());
    }

    return null;
  }

  /** Writes the cached type-system preference store back to its workspace ".pref-" file. */
  @Override
  public void saveTypeSystemPreferenceStore(Object element) {
    String prefereceFileId = getPreferenceFileForTypeSystem(getTypesystemId(element));

    PreferenceStore preferences = typeSystemPreferences.get(prefereceFileId);

    // serialize ...
    IFile preferenceFile = ResourcesPlugin.getWorkspace().getRoot()
            .getFile(Path.fromPortableString(prefereceFileId));

    ByteArrayOutputStream preferenceBytes = new ByteArrayOutputStream();

    try {
      preferences.save(preferenceBytes, "");
    } catch (IOException e) {
      // will not fail, writing to memory
      CasEditorPlugin.log(e);
    }

    try {
      if (preferenceFile.exists()) {
        preferenceFile.setContents(new ByteArrayInputStream(preferenceBytes.toByteArray()), true,
                false, null);
      } else {
        preferenceFile.create(new ByteArrayInputStream(preferenceBytes.toByteArray()), true, null);
      }
    } catch (CoreException e) {
      // might fail if writing is not possible
      // for some reason
      CasEditorPlugin.log(e);
    }
  }

  @Override
  public IPreferenceStore getTypeSystemPreferenceStore(Object element) {
    String tsId = getTypesystemId(element);

    if (tsId != null)
      return typeSystemPreferences.get(getPreferenceFileForTypeSystem(tsId));
    else
      return null;
  }

  @Override
  public IPreferenceStore getSessionPreferenceStore(Object element) {
    return sessionPreferenceStores.get(getTypesystemId(element));
  }

  /** Remembers the type system for a document (used by the "remember" preference). */
  void setTypeSystem(String document, String typeSystem) {
    documentToTypeSystemMap.put(document, typeSystem);
  }

  /** One-shot type-system override consumed by the next createDocument call. */
  void setTypeSystemForNextDocumentOnly(String document, String typeSystem) {
    typeSystemForNextDocumentOnly.put(document, typeSystem);
  }

  /**
   * Builds the "type system missing" fallback UI: a message label plus a
   * button that lets the user pick a type-system file and reopens the editor
   * with it.
   */
  @Override
  public Composite createTypeSystemSelectorForm(final ICasEditor editor, Composite parent,
          IStatus status) {

    // Note:
    // If the editor is not active and the user clicks on the button
    // the editor gets activated and an exception is logged
    // on the second click the button is selected
    // How to fix the exception ?!
    // Only tested on OS X Snow Leopard

    Composite provideTypeSystemForm = new Composite(parent, SWT.NONE);
    provideTypeSystemForm.setLayout(new GridLayout(1, false));
    Label infoLabel = new Label(provideTypeSystemForm, SWT.NONE);
    infoLabel.setText(status.getMessage());
    Button retryButton = new Button(provideTypeSystemForm, SWT.NONE);
    retryButton.setText("Choose Type System ...");
    retryButton.addSelectionListener(new SelectionListener() {
      @Override
      public void widgetSelected(SelectionEvent e) {

        // Open a dialog to let the user choose a type system
        IResource resource = WorkspaceResourceDialog.getWorkspaceResourceElement(
                Display.getCurrent().getActiveShell(), ResourcesPlugin.getWorkspace().getRoot(),
                "Select a Type System", "Please select a Type System:");

        if (resource != null) {
          FileEditorInput editorInput = (FileEditorInput) editor.getEditorInput();
          setTypeSystemForNextDocumentOnly(editorInput.getFile().getFullPath().toPortableString(),
                  resource.getFullPath().toString());

          // Now set the input again to open the editor with the
          // specified type system
          editor.reopenEditorWithNewTypeSystem();
        }
      }

      @Override
      public void widgetDefaultSelected(SelectionEvent e) {
        throw new IllegalStateException("Never be called!");
      }
    });

    return provideTypeSystemForm;
  }

  @Override
  protected ElementInfo createElementInfo(Object element) {
    FileElementInfo info = new FileElementInfo(super.createElementInfo(element));

    // Register listener to listens for deletion events,
    // if the file opened in this editor is deleted, the editor should be closed!
    info.deleteListener = new ModifyElementListener((FileEditorInput) element);

    ResourcesPlugin.getWorkspace().addResourceChangeListener(info.deleteListener,
            IResourceChangeEvent.POST_CHANGE);

    return info;
  }

  @Override
  protected void disposeElementInfo(Object element, ElementInfo info) {
    FileElementInfo fileInfo = (FileElementInfo) info;
    // Unregister the change listener so the disposed editor stops receiving events.
    ResourcesPlugin.getWorkspace().removeResourceChangeListener(fileInfo.deleteListener);

    super.disposeElementInfo(element, info);
  }

  private void handleElementDeleted(Object element) {
    fireElementDeleted(element);
  }

  private void handleElementChanged(Object element) {
    fireElementChanged(element);
  }
}
/* * Copyright 1997-2017 Optimatika (www.optimatika.se) * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/
package org.algo.matrix.decomposition;

import static org.algo.constant.PrimitiveMath.*;

import java.math.BigDecimal;

import org.algo.RecoverableCondition;
import org.algo.access.Access2D;
import org.algo.access.Structure2D;
import org.algo.access.Access2D.Collectable;
import org.algo.array.BasicArray;
import org.algo.constant.PrimitiveMath;
import org.algo.function.aggregator.AggregatorFunction;
import org.algo.matrix.store.BigDenseStore;
import org.algo.matrix.store.ComplexDenseStore;
import org.algo.matrix.store.ElementsSupplier;
import org.algo.matrix.store.MatrixStore;
import org.algo.matrix.store.PhysicalStore;
import org.algo.matrix.store.PrimitiveDenseStore;
import org.algo.scalar.ComplexNumber;

/**
 * In-place LU decomposition with partial (row) pivoting: [A] = [P][L][U].
 * The factorisation overwrites a single working store ({@code getInPlace()}):
 * the strict lower triangle holds the multipliers of [L] (unit diagonal
 * implied) and the upper triangle holds [U]. Row exchanges are tracked in
 * {@link #myPivot} rather than applied to a separate permutation matrix.
 *
 * <p>Element-type specialisations are provided by the nested factories
 * {@code Big}, {@code Complex} and {@code Primitive}.</p>
 */
abstract class LUDecomposition<N extends Number> extends InPlaceDecomposition<N> implements LU<N> {

    /** BigDecimal (arbitrary precision) variant. */
    static final class Big extends LUDecomposition<BigDecimal> {

        Big() {
            super(BigDenseStore.FACTORY);
        }

    }

    /** Complex-number variant. */
    static final class Complex extends LUDecomposition<ComplexNumber> {

        Complex() {
            super(ComplexDenseStore.FACTORY);
        }

    }

    /** Primitive double variant. */
    static final class Primitive extends LUDecomposition<Double> {

        Primitive() {
            super(PrimitiveDenseStore.FACTORY);
        }

    }

    // Row permutation accumulated while pivoting; null until decompose()/reset().
    private Pivot myPivot;

    protected LUDecomposition(final DecompositionStore.Factory<N, ? extends DecompositionStore<N>> aFactory) {
        super(aFactory);
    }

    /**
     * Decomposes the matrix and returns its determinant in one call.
     */
    public N calculateDeterminant(final Access2D<?> matrix) {
        this.decompose(this.wrap(matrix));
        return this.getDeterminant();
    }

    /**
     * Decomposes without row exchanges. Only valid when the matrix is known
     * not to require pivoting (e.g. diagonally dominant); otherwise the
     * factorisation may be inaccurate or break down on a zero pivot.
     */
    public boolean computeWithoutPivoting(final ElementsSupplier<N> matrix) {
        return this.compute(matrix, true);
    }

    public boolean decompose(final Access2D.Collectable<N, ? super PhysicalStore<N>> aStore) {
        return this.compute(aStore, false);
    }

    /**
     * det(A) = sign(P) * product of the diagonal of [U]. The pivot's signum
     * supplies the sign flip introduced by an odd number of row exchanges.
     */
    public N getDeterminant() {

        final AggregatorFunction<N> tmpAggrFunc = this.aggregator().product();

        this.getInPlace().visitDiagonal(0, 0, tmpAggrFunc);

        if (myPivot.signum() == -1) {
            return tmpAggrFunc.toScalar().negate().getNumber();
        } else {
            return tmpAggrFunc.getNumber();
        }
    }

    /**
     * Computes the inverse into {@code preallocated} by solving
     * [L][U][X] = [P][I]: the right-hand side is seeded with the (possibly
     * permuted) identity, then forward- and back-substituted.
     */
    @Override
    public MatrixStore<N> getInverse(final PhysicalStore<N> preallocated) {

        if (myPivot.isModified()) {
            // Seed with the permuted identity, one 1 per row.
            preallocated.fillAll(this.scalar().zero().getNumber());
            final int[] tmpPivotOrder = myPivot.getOrder();
            final int tmpRowDim = this.getRowDim();
            for (int i = 0; i < tmpRowDim; i++) {
                preallocated.set(i, tmpPivotOrder[i], PrimitiveMath.ONE);
            }
        }

        final DecompositionStore<N> tmpBody = this.getInPlace();

        // Unit-diagonal forward pass on [L]; when no pivoting happened the RHS
        // is still the plain identity, which substituteForwards can exploit.
        preallocated.substituteForwards(tmpBody, true, false, !myPivot.isModified());

        preallocated.substituteBackwards(tmpBody, false, false, false);

        return preallocated;
    }

    /** The unit-lower-triangular factor [L] (view of the in-place store). */
    public MatrixStore<N> getL() {
        return this.getInPlace().logical().triangular(false, true).get();
    }

    public int[] getPivotOrder() {
        return myPivot.getOrder();
    }

    /**
     * Numerical rank: counts diagonal elements of [U] that are not small
     * relative to the largest diagonal magnitude.
     */
    public int getRank() {

        int retVal = 0;

        final DecompositionStore<N> tmpInPlace = this.getInPlace();

        final AggregatorFunction<N> tmpLargest = this.aggregator().largest();
        tmpInPlace.visitDiagonal(0L, 0L, tmpLargest);
        final double tmpLargestValue = tmpLargest.doubleValue();

        final int tmpMinDim = this.getMinDim();

        for (int ij = 0; ij < tmpMinDim; ij++) {
            if (!tmpInPlace.isSmall(ij, ij, tmpLargestValue)) {
                retVal++;
            }
        }

        return retVal;
    }

    public final MatrixStore<N> getSolution(final Collectable<N, ? super PhysicalStore<N>> rhs) {
        return this.getSolution(rhs, this.preallocate(this.getInPlace(), rhs));
    }

    /**
     * Solves [this][X] = [rhs] by first solving
     *
     * <pre>
     * [L][Y] = [rhs]
     * </pre>
     *
     * and then
     *
     * <pre>
     * [U][X] = [Y]
     * </pre>
     *
     * @param rhs The right hand side
     * @return [X] The solution will be written to "preallocated" and then returned.
     */
    @Override
    public MatrixStore<N> getSolution(final Collectable<N, ? super PhysicalStore<N>> rhs, final PhysicalStore<N> preallocated) {

        // Apply the row permutation [P] to the RHS while copying it in.
        preallocated.fillMatching(this.collect(rhs).logical().row(myPivot.getOrder()).get());

        final DecompositionStore<N> tmpBody = this.getInPlace();

        preallocated.substituteForwards(tmpBody, true, false, false);

        preallocated.substituteBackwards(tmpBody, false, false, false);

        return preallocated;
    }

    /** The upper-triangular factor [U] (view of the in-place store). */
    public MatrixStore<N> getU() {
        return this.getInPlace().logical().triangular(true, false).get();
    }

    public final MatrixStore<N> invert(final Access2D<?> original) throws RecoverableCondition {

        this.decompose(this.wrap(original));

        if (this.isSolvable()) {
            return this.getInverse();
        } else {
            throw RecoverableCondition.newMatrixNotInvertible();
        }
    }

    public final MatrixStore<N> invert(final Access2D<?> original, final PhysicalStore<N> preallocated) throws RecoverableCondition {

        this.decompose(this.wrap(original));

        if (this.isSolvable()) {
            return this.getInverse(preallocated);
        } else {
            throw RecoverableCondition.newMatrixNotInvertible();
        }
    }

    public boolean isFullRank() {
        return this.isSolvable();
    }

    /** Square scratch store sized for inversion (rows x rows). */
    public PhysicalStore<N> preallocate(final Structure2D template) {
        final long tmpCountRows = template.countRows();
        return this.allocate(tmpCountRows, tmpCountRows);
    }

    /** Scratch store shaped like the RHS (the body template is not needed). */
    public PhysicalStore<N> preallocate(final Structure2D templateBody, final Structure2D templateRHS) {
        return this.allocate(templateRHS.countRows(), templateRHS.countColumns());
    }

    @Override
    public void reset() {

        super.reset();

        myPivot = null;
    }

    public MatrixStore<N> solve(final Access2D<?> body, final Access2D<?> rhs) throws RecoverableCondition {

        this.decompose(this.wrap(body));

        if (this.isSolvable()) {
            return this.getSolution(this.wrap(rhs));
        } else {
            throw RecoverableCondition.newEquationSystemNotSolvable();
        }
    }

    public MatrixStore<N> solve(final Access2D<?> body, final Access2D<?> rhs, final PhysicalStore<N> preallocated) throws RecoverableCondition {

        this.decompose(this.wrap(body));

        if (this.isSolvable()) {
            return this.getSolution(this.wrap(rhs), preallocated);
        } else {
            throw RecoverableCondition.newEquationSystemNotSolvable();
        }
    }

    /**
     * Shared worker for {@link #decompose} and {@link #computeWithoutPivoting}.
     * Classic outer-product Gaussian elimination, column by column along the
     * diagonal, with optional partial pivoting.
     */
    private final boolean compute(final Access2D.Collectable<N, ? super PhysicalStore<N>> aStore, final boolean assumeNoPivotingRequired) {

        this.reset();

        final DecompositionStore<N> tmpInPlace = this.setInPlace(aStore);

        final int tmpRowDim = this.getRowDim();
        this.getColDim();
        final int tmpMinDim = this.getMinDim();

        myPivot = new Pivot(tmpRowDim);

        final BasicArray<N> tmpMultipliers = this.makeArray(tmpRowDim);

        // Main loop - along the diagonal
        for (int ij = 0; ij < tmpMinDim; ij++) {

            if (!assumeNoPivotingRequired) {
                // Find next pivot row: largest magnitude in column ij, at or below the diagonal.
                final int tmpPivotRow = (int) tmpInPlace.indexOfLargestInColumn(ij, ij);

                // Pivot?
                if (tmpPivotRow != ij) {
                    tmpInPlace.exchangeRows(tmpPivotRow, ij);
                    myPivot.change(tmpPivotRow, ij);
                }
            }

            // Double.compare distinguishes -0.0 from 0.0; a nonzero (even negative-zero
            // bit pattern) pivot takes the elimination branch.
            if (Double.compare(tmpInPlace.doubleValue(ij, ij), PrimitiveMath.ZERO) != 0) {

                // Calculate multipliers and copy to local column
                // Current column, below the diagonal
                tmpInPlace.divideAndCopyColumn(ij, ij, tmpMultipliers);

                // Apply transformations to everything below and to the right of the pivot element
                tmpInPlace.applyLU(ij, tmpMultipliers);

            } else {
                // Normalise a zero pivot (e.g. -0.0) to exact zero.
                tmpInPlace.set(ij, ij, ZERO);
            }

        }

        return this.computed(true);
    }

    /** Solvable == square with a fully nonzero diagonal in [U]. */
    @Override
    protected boolean checkSolvability() {

        boolean retVal = this.getRowDim() == this.getColDim();

        final DecompositionStore<N> tmpStore = this.getInPlace();
        final int tmpMinDim = (int) Math.min(tmpStore.countRows(), tmpStore.countColumns());

        for (int ij = 0; retVal && (ij < tmpMinDim); ij++) {
            retVal &= Double.compare(tmpStore.doubleValue(ij, ij), PrimitiveMath.ZERO) != 0;
        }

        return retVal;
    }

    /**
     * Pivot order restricted to the rows whose diagonal element is
     * significant (relative to ONE) - sized by {@link #getRank()}.
     */
    int[] getReducedPivots() {

        final int[] retVal = new int[this.getRank()];
        final int[] tmpFullPivots = this.getPivotOrder();

        final DecompositionStore<N> tmpInPlace = this.getInPlace();

        int tmpRedInd = 0;
        for (int ij = 0; ij < tmpFullPivots.length; ij++) {
            if (!tmpInPlace.isSmall(ij, ij, PrimitiveMath.ONE)) {
                retVal[tmpRedInd++] = tmpFullPivots[ij];
            }
        }

        return retVal;
    }

}
/*_##########################################################################
  _##
  _##  Copyright (C) 2013  Kaito Yamada
  _##
  _##########################################################################
*/

package com.github.kaitoy.sneo.giane.action;

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;

import org.apache.struts2.convention.annotation.Action;
import org.apache.struts2.convention.annotation.InterceptorRef;
import org.apache.struts2.convention.annotation.ParentPackage;
import org.apache.struts2.convention.annotation.Result;

import com.github.kaitoy.sneo.giane.model.FixedIpV6Route;
import com.github.kaitoy.sneo.giane.model.dao.FixedIpV6RouteDao;
import com.github.kaitoy.sneo.giane.model.dto.IpV6RouteDto;
import com.opensymphony.xwork2.ActionContext;
import com.opensymphony.xwork2.ActionSupport;

/**
 * jqGrid backing action that lists the fixed IPv6 routes of one node.
 * Builds a JPA criteria query from the grid's search parameters, sorts
 * the result with per-column comparators, applies paging, and returns
 * the page as JSON.
 */
@ParentPackage("giane-default")
@InterceptorRef("gianeDefaultStack")
public class FixedIpV6RouteGridAction extends ActionSupport {

  /**
   *
   */
  private static final long serialVersionUID = -639915623498952756L;

  // Per-column comparators. Each sorts DESCENDING by natural order (o2 vs o1);
  // execute() reverses the list afterwards when "desc" is requested, which
  // effectively yields ascending output for sord=desc — preserved as-is since
  // the grid pages depend on this ordering. Address columns compare by raw
  // address bytes so that e.g. "::10" sorts numerically, not lexically.
  private static final Map<String, Comparator<IpV6RouteDto>> comparators
    = new HashMap<String, Comparator<IpV6RouteDto>>();

  static {
    comparators.put(
      "id",
      new Comparator<IpV6RouteDto>() {
        public int compare(IpV6RouteDto o1, IpV6RouteDto o2) {
          return o2.getId().compareTo(o1.getId());
        }
      }
    );
    comparators.put(
      "networkDestination",
      new Comparator<IpV6RouteDto>() {
        public int compare(IpV6RouteDto o1, IpV6RouteDto o2) {
          byte[] o1Addr;
          byte[] o2Addr;
          try {
            o1Addr
              = InetAddress.getByName(o1.getNetworkDestination()).getAddress();
            o2Addr
              = InetAddress.getByName(o2.getNetworkDestination()).getAddress();
          } catch (UnknownHostException e) {
            // Persisted values are literal addresses; keep the cause just in case.
            throw new AssertionError(e);
          }

          if (o1Addr.length != o2Addr.length) {
            return o2Addr.length - o1Addr.length;
          }
          for (int i = 0; i < o1Addr.length; i++) {
            if (o1Addr[i] != o2Addr[i]) {
              // Mask to compare bytes as unsigned values.
              return (0xFF & o2Addr[i]) - (0xFF & o1Addr[i]);
            }
          }
          return 0;
        }
      }
    );
    comparators.put(
      "prefixLength",
      new Comparator<IpV6RouteDto>() {
        public int compare(IpV6RouteDto o1, IpV6RouteDto o2) {
          return o2.getPrefixLength().compareTo(o1.getPrefixLength());
        }
      }
    );
    comparators.put(
      "gateway",
      new Comparator<IpV6RouteDto>() {
        public int compare(IpV6RouteDto o1, IpV6RouteDto o2) {
          byte[] o1Addr;
          byte[] o2Addr;
          try {
            o1Addr = InetAddress.getByName(o1.getGateway()).getAddress();
            o2Addr = InetAddress.getByName(o2.getGateway()).getAddress();
          } catch (UnknownHostException e) {
            throw new AssertionError(e);
          }

          if (o1Addr.length != o2Addr.length) {
            return o2Addr.length - o1Addr.length;
          }
          for (int i = 0; i < o1Addr.length; i++) {
            if (o1Addr[i] != o2Addr[i]) {
              return (0xFF & o2Addr[i]) - (0xFF & o1Addr[i]);
            }
          }
          return 0;
        }
      }
    );
    comparators.put(
      "metric",
      new Comparator<IpV6RouteDto>() {
        public int compare(IpV6RouteDto o1, IpV6RouteDto o2) {
          return o2.getMetric().compareTo(o1.getMetric());
        }
      }
    );
  }

  private FixedIpV6RouteDao fixedIpV6RouteDao;

  // result List
  private List<IpV6RouteDto> gridModel;

  // get how many rows we want to have into the grid - rowNum attribute in the grid
  private Integer rows = 0;

  // Get the requested page. By default grid sets this to 1.
  private Integer page = 0;

  // sorting order - asc or desc
  private String sord;

  // get index row - i.e. user click to sort.
  private String sidx;

  // Search Field
  private String searchField;

  // The Search String
  private String searchString;

  // The Search Operation
  // ['eq','ne','lt','le','gt','ge','bw','bn','in','ni','ew','en','cn','nc']
  private String searchOper;

  private boolean loadonce = false;

  // Total Pages
  private Integer total = 0;

  // All Record
  private Integer records = 0;

  // for DI
  public void setFixedIpV6RouteDao(FixedIpV6RouteDao fixedIpV6RouteDao) {
    this.fixedIpV6RouteDao = fixedIpV6RouteDao;
  }

  /**
   * Queries the routes of the node given by the request parameter
   * {@code node_id}, applies the optional single-field search, sorts,
   * pages, and exposes the page via {@link #getGridModel()}.
   *
   * @return "success" (rendered as JSON)
   */
  @Override
  @Action(
    results = { @Result(name = "success", type = "json") }
  )
  public String execute() {
    CriteriaBuilder cb = fixedIpV6RouteDao.getCriteriaBuilder();
    CriteriaQuery<FixedIpV6Route> cq = cb.createQuery(FixedIpV6Route.class);
    Root<FixedIpV6Route> r = cq.from(FixedIpV6Route.class);
    cq.select(r);

    Map<String, Object> params = ActionContext.getContext().getParameters();
    Integer node_id = Integer.valueOf(((String[])params.get("node_id"))[0]);

    List<Predicate> predicates = new ArrayList<Predicate>();
    predicates.add(cb.equal(r.get("node"), node_id));
    if (searchField != null) {
      if (
           searchField.equals("id")
        || searchField.equals("metric")
      ) {
        // Numeric columns: only eq/ne/lt/gt are offered by the grid config.
        Integer searchValue = Integer.valueOf(searchString);
        if (searchOper.equals("eq")) {
          predicates.add(cb.equal(r.get(searchField), searchValue));
        }
        else if (searchOper.equals("ne")) {
          predicates.add(cb.notEqual(r.get(searchField), searchValue));
        }
        else if (searchOper.equals("lt")) {
          predicates.add(cb.lt(r.get(searchField).as(Integer.class), searchValue));
        }
        else if (searchOper.equals("gt")) {
          predicates.add(cb.gt(r.get(searchField).as(Integer.class), searchValue));
        }
      }
      else if (
           searchField.equals("networkDestination")
        || searchField.equals("netmask")
        || searchField.equals("gateway")
      ) {
        // String columns: equality plus begins/ends/contains (LIKE patterns).
        if (searchOper.equals("eq")) {
          predicates.add(cb.equal(r.get(searchField), searchString));
        }
        else if (searchOper.equals("ne")) {
          predicates.add(cb.notEqual(r.get(searchField), searchString));
        }
        else if (searchOper.equals("bw")) {
          predicates.add(cb.like(r.get(searchField).as(String.class), searchString + "%"));
        }
        else if (searchOper.equals("ew")) {
          predicates.add(cb.like(r.get(searchField).as(String.class), "%" + searchString));
        }
        else if (searchOper.equals("cn")) {
          predicates.add(cb.like(r.get(searchField).as(String.class), "%" + searchString + "%"));
        }
      }
    }
    cq.where(cb.and(predicates.toArray(new Predicate[0])));

    List<FixedIpV6Route> models = fixedIpV6RouteDao.findByCriteria(cq);
    gridModel = new ArrayList<IpV6RouteDto>();
    for (FixedIpV6Route entry: models) {
      gridModel.add(new IpV6RouteDto(entry));
    }
    records = gridModel.size();

    if (sord != null && sord.length() != 0 && sidx != null && sidx.length() != 0) {
      Comparator<IpV6RouteDto> comparator = comparators.get(sidx);
      // FIX: an unknown sort column used to NPE inside Collections.sort;
      // now an unrecognized sidx simply leaves the list unsorted.
      if (comparator != null) {
        Collections.sort(gridModel, comparator);
        if (sord.equalsIgnoreCase("desc")) {
          Collections.reverse(gridModel);
        }
      }
    }

    if (!loadonce) {
      // FIX: guard the paging window. With the default rows=0/page=0 (or a
      // stale page past the end) the old math could pass a negative "from"
      // or from > to into subList, throwing IndexOutOfBoundsException.
      int to = rows * page;
      int from = Math.max(0, to - rows);
      if (to > records) {
        to = records;
      }
      if (from > to) {
        from = to;
      }
      gridModel = gridModel.subList(from, to);
    }

    // calculate the total pages for the query
    // FIX: rows=0 used to divide by zero (Infinity -> Integer.MAX_VALUE).
    total = (rows > 0) ? (int) Math.ceil((double) records / (double) rows) : 0;

    return "success";
  }

  public String getJSON() {
    return execute();
  }

  public List<IpV6RouteDto> getGridModel() { return gridModel; }

  public void setGridModel(List<IpV6RouteDto> gridModel) { this.gridModel = gridModel; }

  public Integer getRows() { return rows; }

  public void setRows(Integer rows) { this.rows = rows; }

  public Integer getPage() { return page; }

  public void setPage(Integer page) { this.page = page; }

  public String getSord() { return sord; }

  public void setSord(String sord) { this.sord = sord; }

  public String getSidx() { return sidx; }

  public void setSidx(String sidx) { this.sidx = sidx; }

  public String getSearchField() { return searchField; }

  public void setSearchField(String searchField) { this.searchField = searchField; }

  public String getSearchString() { return searchString; }

  public void setSearchString(String searchString) { this.searchString = searchString; }

  public String getSearchOper() { return searchOper; }

  public void setSearchOper(String searchOper) { this.searchOper = searchOper; }

  public boolean isLoadonce() { return loadonce; }

  public void setLoadonce(boolean loadonce) { this.loadonce = loadonce; }

  public Integer getTotal() { return total; }

  public void setTotal(Integer total) { this.total = total; }

  public Integer getRecords() { return records; }

  public void setRecords(Integer records) { this.records = records; }

}
/*
 * ClosedBox.java
 *
 * Created on January 27, 2003, 11:05 AM
 */

package xal.tools.math.r3;

import xal.tools.math.ClosedInterval;
import xal.tools.math.MathException;

/**
 * Represents a Cartesian box in <b>R</b><sup>3</sup>, i.e. the product of
 * three closed intervals, one per coordinate axis.
 *
 * @author  Christopher K. Allen
 * @since   Jan 27, 2003
 */
public class ClosedBox implements java.io.Serializable {

    /*
     * Global Constants
     */

    /** serialization version identifier */
    private static final long serialVersionUID = 1L;

    /*
     *  Local Attributes
     */

    /** first dimension extent of domain */
    public ClosedInterval      I1;

    /** second dimension extent of domain */
    public ClosedInterval      I2;

    /** third dimension extent of domain */
    public ClosedInterval      I3;

    /**
     *  Default constructor - creates an empty DomainR3 object to be initialized by the
     *  user
     */
    public ClosedBox() {
    }

    /**
     *  Initializing constructor - creates a new instance of DomainR3 according to the
     *  given parameters.
     *
     *  @param  I1      interval of definition in x dimension
     *  @param  I2      interval of definition in y dimension
     *  @param  I3      interval of definition in z dimension
     */
    public ClosedBox(ClosedInterval I1, ClosedInterval I2, ClosedInterval I3) {
        this.I1 = I1;
        this.I2 = I2;
        this.I3 = I3;
    }

    /**
     *  Initializing constructor - creates a new instance of DomainR3 according to the
     *  given parameters.
     *
     *  Note that the box is defined by the intervals
     *      [xmin,xmax]  interval of definition in x dimension
     *      [ymin,ymax]  interval of definition in y dimension
     *      [zmin,zmax]  interval of definition in z dimension
     *
     *  @param  xmin    x dimension minimum value
     *  @param  xmax    x dimension maximum value
     *  @param  ymin    y dimension minimum value
     *  @param  ymax    y dimension maximum value
     *  @param  zmin    z dimension minimum value
     *  @param  zmax    z dimension maximum value
     *
     *  @throws MathException   one or more axis intervals are malformed
     *                          (i.e., &alpha;<sub><i>max</i></sub> &lt; &alpha;<sub><i>min</i></sub>,
     *                          where &alpha; &isin; {<i>x,y,z</i>})
     */
    public ClosedBox(double xmin, double xmax, double ymin, double ymax, double zmin, double zmax)
        throws MathException
    {
        I1 = new ClosedInterval(xmin, xmax);
        I2 = new ClosedInterval(ymin, ymax);
        I3 = new ClosedInterval(zmin, zmax);
    }

    /*
     *  Grid Properties
     */

    /** Get first dimension extent */
    public ClosedInterval   get1()  { return I1; };

    /** Get second dimension extent */
    public ClosedInterval   get2()  { return I2; };

    /** Get third dimension extent */
    public ClosedInterval   get3()  { return I3; };

    /** Get the x dimension (alias for {@link #get1()}) */
    public ClosedInterval   getXDimension() { return I1; };

    /** Get the y dimension (alias for {@link #get2()}) */
    public ClosedInterval   getYDimension() { return I2; };

    /** Get the z dimension (alias for {@link #get3()}) */
    public ClosedInterval   getZDimension() { return I3; };

    /** Get the minimum vertex (all three interval minima) */
    public R3   getVertexMin()  {
        return new R3(I1.getMin(), I2.getMin(), I3.getMin());
    }

    /** Get the maximum vertex (all three interval maxima) */
    public R3   getVertexMax()  {
        return new R3(I1.getMax(), I2.getMax(), I3.getMax());
    }

    /**
     *  Determine whether point pt is an element of the domain.
     *  Membership requires the point to lie in all three axis intervals.
     *
     *  @return     true if pt is in domain
     */
    public boolean membership(R3 pt)    {
        if (!I1.membership(pt.getx()))  return false;
        if (!I2.membership(pt.gety()))  return false;
        if (!I3.membership(pt.getz()))  return false;

        return true;
    }

    /**
     *  Determine whether or not point pt is member of the boundary of this set.
     *  True as soon as any one coordinate sits on its interval boundary.
     *  Note this does NOT first check interior membership — a point on the
     *  extended boundary plane of one axis reports true regardless of the
     *  other coordinates (existing behavior, preserved).
     *
     *  @return     true if pt is a boundary element
     */
    public boolean boundary(R3 pt)  {
        if (I1.isBoundary(pt.getx()))   return true;
        if (I2.isBoundary(pt.gety()))   return true;
        if (I3.isBoundary(pt.getz()))   return true;

        return false;
    };

    /** Compute the centroid of the domain (midpoint of each interval) */
    public R3   centroid()  {
        return new R3(I1.midpoint(), I2.midpoint(), I3.midpoint());
    }

    /** Computes the diameter of the domain (Euclidean norm of the edge lengths). */
    public double   diameter()  {
        return this.dimensions().norm2();
    }

    /** Compute the volume of the domain (product of interval measures). */
    public double  volume()     {
        return I1.measure()*I2.measure()*I3.measure();
    }

    /**
     *  Compute the dimensions of the domain
     *
     *  @return     (lx,ly,lz)
     */
    public R3   dimensions()    {
        return new R3(I1.measure(), I2.measure(), I3.measure());
    }

    /*
     *  Testing and Debugging
     */

    /**
     *  Print out contents on an output stream
     *  (intervals separated by "x", e.g. [0,1]x[0,2]x[0,3])
     *
     *  @param  os      output stream receiving content dump
     */
    public void print(java.io.PrintWriter os)   {
        I1.print(os);
        os.print("x");
        I2.print(os);
        os.print("x");
        I3.print(os);
    }

    /**
     *  Print out contents on an output stream, terminate in newline character
     *
     *  @param  os      output stream receiving content dump
     */
    public void println(java.io.PrintWriter os)   {
        I1.print(os);
        os.print("x");
        I2.print(os);
        os.print("x");
        I3.println(os);
    }
}
import java.io.*;
import java.text.*;
import java.util.Arrays;
import java.util.Scanner;
// FIX: removed unused "import org.omg.CORBA.INTERNAL;" — it was never
// referenced and the CORBA modules were removed from the JDK (Java 11+),
// so the dead import broke compilation on modern runtimes.

/*
 * The file is printed like this:
 * 0 1 2 4 => 0*x^0 + 1*x^1 + 2*x^2 + 4*x^3
 * Coefficients are stored in an array so that array[0] is the coefficient for
 * term x^0, array[3] is the coefficient for term x^3
 */

/**
 * File I/O for the polynomial calculator: parses polynomials from
 * "input.txt" (plain coefficient lists or math form) and writes the
 * results of the requested operations to "output.txt".
 */
class InputOutput {

    Functions Functions = new Functions();

    /**
     * Parses a space-separated coefficient line. NOTE: the file lists
     * coefficients highest-degree-last, so they are stored reversed
     * (coefficients[0] == constant term).
     */
    private Polynomials createPolynomialFromFile(String line) {
        String[] coeff = line.split(" ");
        int[] coeffOfPolynomial = new int[coeff.length];
        int copyOfLength = coeff.length;
        for (int i = 0; i < coeff.length; i++) {
            coeffOfPolynomial[copyOfLength - 1] = Integer.parseInt(coeff[i]);
            copyOfLength--;
        }
        return new Polynomials(coeffOfPolynomial);
    }

    /*
     * This method assumes that the exponent of the first term is the biggest
     * Could have performed a getMax from the resulted array to get the biggest
     * exponent instead
     */
    private Polynomials createPolFromFileMathForm(String line) {
        // Strip the math-form decorations, leaving "<coeff> <exp>" pairs.
        String copyOfLine = line;
        copyOfLine = copyOfLine.replace("x", "");
        copyOfLine = copyOfLine.replace("^", "");
        copyOfLine = copyOfLine.replace("+", "");
        copyOfLine = copyOfLine.replace("*", "");

        String[] coeff = copyOfLine.split(" ");
        // coeff[1] is the first (assumed largest) exponent — sizes the array.
        int[] coeffOfPolynomial = new int[Integer.parseInt(coeff[1]) + 1];
        for (int i = 1; i <= coeff.length; i += 2) {
            coeffOfPolynomial[Integer.parseInt(coeff[i])] = Integer.parseInt(coeff[i - 1]);
        }
        return new Polynomials(coeffOfPolynomial);
    }

    /** Writes a polynomial as a space-separated coefficient list. */
    private void printFile(PrintWriter output, Polynomials result) {
        for (int i : result.getAllCoefficients()) {
            output.print(i + " ");
        }
        output.printf("%n");
    }

    /** Writes a single integer on its own line. */
    private void printFile(PrintWriter output, int number) {
        output.print(number);
        output.printf("%n");
    }

    /** Writes a single double on its own line. */
    private void printFile(PrintWriter output, double number) {
        output.print(number);
        output.printf("%n");
    }

    /**
     * An utility that prints an array in math form like: 3*x^2 + 4*x;
     */
    private void printInMathForm(PrintWriter output, Polynomials pol1) {
        for (int i = 0; i < pol1.getLength(); i++) {
            String sign = new String();
            if (Math.signum(pol1.getCoefficients(i)) != -1)
                sign = "+";
            else
                sign = "";
            output.printf("%s%dx^%d ", sign, pol1.getCoefficients(i), i);
        }
        output.printf("%n");
    }

    /**
     * Re-parses "input.txt" in math form and returns {pol1, pol2}.
     * (Helper for the MathForm command — reassigning parameters inside a
     * method is invisible to the caller in Java, so a return value is used.)
     */
    private Polynomials[] parseMathFormFile() {
        Polynomials[] parsed = new Polynomials[2];
        File inputFile = new File("input.txt");
        int linesRead = 0;
        try {
            Scanner input = new Scanner(inputFile);
            while (input.hasNextLine()) {
                String line = input.nextLine();
                linesRead++;
                if (linesRead == 1) {
                    parsed[0] = createPolFromFileMathForm(line);
                } else if (linesRead == 2) {
                    parsed[1] = createPolFromFileMathForm(line);
                }
            }
            input.close();
        } catch (FileNotFoundException ex) {
            System.out.printf("Error: %s\n", ex);
        }
        return parsed;
    }

    /**
     * Read and Write on the file: the first two lines are the operand
     * polynomials; every following line names an operation to perform.
     */
    void fileProcessing(Polynomials pol1, Polynomials pol2) {
        File inputFile = new File("input.txt");
        File outputFile = new File("output.txt");
        int linesRead = 0;
        try {
            Scanner input = new Scanner(inputFile);
            PrintWriter output = new PrintWriter(outputFile);
            while (input.hasNextLine()) {
                String line = input.nextLine();
                linesRead++;
                // Read the coefficients in the first 2 lines
                if (linesRead == 1) {
                    pol1 = createPolynomialFromFile(line);
                } else if (linesRead == 2) {
                    pol2 = createPolynomialFromFile(line);
                } else {
                    int number;
                    Polynomials result = new Polynomials(Math.max(pol1.getLength(), pol2.getLength()));
                    int evaluation;
                    if (line.contains("ADD")) {
                        result = Functions.addition(pol1, pol2);
                        printFile(output, result);
                    }
                    if (line.contains("SUBTRACT")) {
                        result = Functions.subtraction(pol1, pol2);
                        printFile(output, result);
                    }
                    if (line.contains("MULTIPLY")) {
                        result = Functions.multiplication(pol1, pol2);
                        printFile(output, result);
                    }
                    if (line.contains("MUL_SCAL")) {
                        // Scalar is whatever digits follow the command word.
                        number = Integer.parseInt(line.replaceAll("[\\D]", ""));
                        result = Functions.scalarMultiplication(pol1, number);
                        printFile(output, result);
                    }
                    if (line.contains("EVAL")) {
                        number = Integer.parseInt(line.replaceAll("[\\D]", ""));
                        evaluation = (int) Functions.evaluate(pol1, number);
                        printFile(output, evaluation);
                    }
                    if (line.contains("DIVISION")) {
                        Polynomials[] division = Functions.division(pol1, pol2);
                        output.println("Division: ");
                        output.printf("Quotient:%n");
                        printFile(output, division[0]);
                        output.printf("%nRemainder: %n");
                        printFile(output, division[1]);
                        output.printf("%n%n");
                    }
                    if (line.contains("ROOT")) {
                        DecimalFormat rounding = new DecimalFormat("###.##");
                        double root;
                        root = Functions.rootApprox(pol2);
                        // Rounding the result root
                        root = Double.parseDouble(rounding.format(root));
                        printFile(output, root);
                    }
                    if (line.contains("MathForm")) {
                        // FIX: readMathForm(pol1, pol2) reassigned its own
                        // parameters, which never reached this caller
                        // (pass-by-value), so stale data was printed. Parse
                        // into a returned pair instead.
                        Polynomials[] parsed = parseMathFormFile();
                        if (parsed[0] != null) {
                            pol1 = parsed[0];
                        }
                        if (parsed[1] != null) {
                            pol2 = parsed[1];
                        }
                        printFile(output, pol2);
                    }
                }
            }
            input.close();
            output.close();
        } catch (FileNotFoundException ex) {
            System.out.printf("Error: %s\n", ex);
        }
    }

    /**
     * Reads the polynomial from math form.
     *
     * @deprecated the parameter reassignments are lost at the caller
     *             (Java pass-by-value); kept for compatibility only.
     */
    void readMathForm(Polynomials pol1, Polynomials pol2) {
        Polynomials[] parsed = parseMathFormFile();
        if (parsed[0] != null) {
            pol1 = parsed[0];
        }
        if (parsed[1] != null) {
            pol2 = parsed[1];
        }
    }
}

/**
 * Arithmetic on coefficient-array polynomials: add, subtract, multiply,
 * scalar-multiply, evaluate, long division and root bisection.
 */
class Functions {

    /** Returns a copy of the array with every sign flipped. */
    private int[] coeffReversedSign(int[] coefficients) {
        int[] newCoeff = new int[coefficients.length];
        for (int i = 0; i < coefficients.length; i++) {
            newCoeff[i] = -coefficients[i];
        }
        return newCoeff;
    }

    /**
     * Adds two arrays that may differ in length; the shorter one is treated
     * as zero-padded. Result length == max of the two lengths.
     *
     * FIX: the original array1.length < array2.length branch copied the tail
     * with "for (i = array1.length; i < array1.length; i++)" — a loop that
     * never runs — so the high-order coefficients of the longer second
     * operand were silently dropped.
     */
    private int[] addDiffSizedArray(int[] array1, int[] array2) {
        int[] longer = (array1.length >= array2.length) ? array1 : array2;
        int[] shorter = (array1.length >= array2.length) ? array2 : array1;
        int[] result = Arrays.copyOf(longer, longer.length);
        for (int i = 0; i < shorter.length; i++) {
            result[i] += shorter[i];
        }
        return result;
    }

    Polynomials addition(Polynomials p1, Polynomials p2) {
        return new Polynomials(addDiffSizedArray(p1.getAllCoefficients(), p2.getAllCoefficients()));
    }

    Polynomials subtraction(Polynomials p1, Polynomials p2) {
        return new Polynomials(
            addDiffSizedArray(p1.getAllCoefficients(), coeffReversedSign(p2.getAllCoefficients())));
    }

    /** Schoolbook convolution, accumulating one partial product at a time. */
    Polynomials multiplication(Polynomials p1, Polynomials p2) {
        Polynomials result = new Polynomials(p1.getLength() + p2.getLength() - 1);
        Polynomials aux = new Polynomials(p1.getLength() + p2.getLength() - 1);
        for (int i = 0; i < p1.getLength(); i++) {
            for (int j = 0; j < p2.getLength(); j++) {
                aux.setCoefficients(i + j, p1.getCoefficients(i) * p2.getCoefficients(j));
            }
            result = addition(result, aux);
            aux.setCoefficients(i, 0);
        }
        return result;
    }

    Polynomials scalarMultiplication(Polynomials p1, int scalar) {
        Polynomials result = new Polynomials(p1.getLength());
        for (int i = 0; i < p1.getLength(); i++) {
            result.setCoefficients(i, scalar * p1.getCoefficients(i));
        }
        return result;
    }

    /** Evaluates p1 at value (direct power summation). */
    double evaluate(Polynomials p1, double value) {
        double result = 0;
        if (value == 0)
            return p1.getCoefficients(0);
        for (int i = 0; i < p1.getLength(); i++) {
            result += p1.getCoefficients(i) * Math.pow(value, i);
        }
        return result;
    }

    /**
     * Division of polynomials using long division.
     * Returns an array of polynomials: [0] quotient, [1] remainder.
     * NOTE(review): integer coefficient division — assumes the leading
     * coefficients divide exactly at each step.
     */
    Polynomials[] division(Polynomials p1, Polynomials p2) {
        Polynomials[] division = new Polynomials[2];
        division[0] = new Polynomials(p1.getLength() - p2.getLength() + 1); // Quotient
        division[1] = new Polynomials(p2.getLength() - 1); // Remainder
        Polynomials divider = new Polynomials();
        divider.setToPol(p2);
        Polynomials copyOfDivident = new Polynomials();
        copyOfDivident.setToPol(p1);
        Polynomials auxPol = new Polynomials(new int[] { 0, 1 }); // the monomial x
        int copyOfLength1 = p1.getLength();
        int copyOfLength2 = p2.getLength();
        if (p1.getLength() > 0) {
            while (copyOfDivident.getLength() >= p2.getLength()) {
                // Shift the divisor up to align with the dividend's leading term.
                for (int i = 0; i < copyOfLength1 - copyOfLength2; i++) {
                    divider = multiplication(divider, auxPol);
                }
                division[0].setCoefficients(copyOfDivident.getLength() - p2.getLength(),
                    copyOfDivident.getCoefficients(copyOfDivident.getLength() - 1)
                        / divider.getCoefficients(divider.getLength() - 1));
                divider = scalarMultiplication(divider,
                    division[0].getCoefficients(copyOfDivident.getLength() - p2.getLength()));
                copyOfDivident = subtraction(copyOfDivident, divider);
                copyOfDivident.setLength(copyOfLength1 - 1);
                divider.setToPol(p2);
                copyOfLength1--;
            }
        }
        division[1] = subtraction(p1, multiplication(division[0], divider));
        return division;
    }

    /**
     * Root Approximation using bisection on [Integer.MIN_VALUE, Integer.MAX_VALUE].
     * 42 iterations suffice for ~0.001 tolerance on that interval.
     */
    double rootApprox(Polynomials p1) {
        double root = 0;
        // The initial interval
        double a = Integer.MIN_VALUE;
        double b = Integer.MAX_VALUE;
        // Nudge the endpoints toward a sign change (bisection precondition).
        if (evaluate(p1, a) > 0) {
            a++;
        }
        if (evaluate(p1, b) < 0) {
            b--;
        }
        double midpoint = 0;
        int i = 0;
        while (i <= 42) {
            i++;
            midpoint = (a + b) / 2;
            if (evaluate(p1, midpoint) == 0) {
                return midpoint;
            } else if (Math.signum(evaluate(p1, midpoint)) == Math.signum(evaluate(p1, a))) {
                a = midpoint;
            } else {
                b = midpoint;
            }
            if (i == 42) {
                root = midpoint;
            }
        }
        return root;
    }
}

/**
 * A polynomial as an int coefficient array: coefficients[i] multiplies x^i.
 * "length" is tracked separately so division can logically shrink a
 * polynomial without reallocating.
 */
class Polynomials {

    private int[] coefficients;
    private int length;

    Polynomials(int[] data) {
        this.coefficients = data;
        this.length = data.length;
    }

    Polynomials(int length) {
        coefficients = new int[length];
        this.length = length;
    }

    Polynomials() {
    }

    int getLength() {
        return this.length;
    }

    void setLength(int length) {
        this.length = length;
    }

    int[] getAllCoefficients() {
        return this.coefficients;
    }

    void setAllCoefficients(int[] coefficients) {
        this.coefficients = coefficients;
    }

    /** Coefficient of x^index; indices at or beyond length read as 0. */
    int getCoefficients(int index) {
        if (index < this.length) {
            return this.coefficients[index];
        } else {
            return 0;
        }
    }

    void setCoefficients(int index, int data) {
        this.coefficients[index] = data;
    }

    /** Aliases this polynomial to pol2's coefficient array (shared, not copied). */
    void setToPol(Polynomials pol2) {
        this.coefficients = pol2.getAllCoefficients();
        this.length = pol2.getLength();
    }
}

/** Entry point: runs the file-driven polynomial calculator. */
public class PolynomialsOperations {

    public static void main(String[] args) {
        InputOutput start = new InputOutput();
        Polynomials pol1 = new Polynomials();
        Polynomials pol2 = new Polynomials();
        start.fileProcessing(pol1, pol2);
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.action.index;

import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.RoutingMissingException;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.AutoCreateIndex;
import org.elasticsearch.action.support.replication.TransportReplicationAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.indices.IndexAlreadyExistsException;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

/**
 * Performs the index operation.
 *
 * Allows for the following settings:
 * <ul>
 * <li><b>autoCreateIndex</b>: When set to <tt>true</tt>, will automatically create an index if one does not exists.
 * Defaults to <tt>true</tt>.
 * <li><b>allowIdGeneration</b>: If the id is set not, should it be generated. Defaults to <tt>true</tt>.
 * </ul>
 */
public class TransportIndexAction extends TransportReplicationAction<IndexRequest, IndexRequest, IndexResponse> {

    private final AutoCreateIndex autoCreateIndex;
    // Whether a missing document id may be auto-generated (read from "action.allow_id_generation").
    private final boolean allowIdGeneration;
    // Used to auto-create the target index before indexing when it does not exist yet.
    private final TransportCreateIndexAction createIndexAction;
    private final ClusterService clusterService;

    @Inject
    public TransportIndexAction(Settings settings, TransportService transportService, ClusterService clusterService,
                                IndicesService indicesService, ThreadPool threadPool, ShardStateAction shardStateAction,
                                TransportCreateIndexAction createIndexAction, MappingUpdatedAction mappingUpdatedAction,
                                ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                                AutoCreateIndex autoCreateIndex) {
        super(settings, IndexAction.NAME, transportService, clusterService, indicesService, threadPool, shardStateAction,
                mappingUpdatedAction, actionFilters, indexNameExpressionResolver, IndexRequest::new, IndexRequest::new,
                ThreadPool.Names.INDEX);
        this.createIndexAction = createIndexAction;
        this.autoCreateIndex = autoCreateIndex;
        this.allowIdGeneration = settings.getAsBoolean("action.allow_id_generation", true);
        this.clusterService = clusterService;
    }

    /**
     * Entry point: optionally auto-creates the target index, then delegates to the
     * replication machinery via {@link #innerExecute}.
     */
    @Override
    protected void doExecute(final IndexRequest request, final ActionListener<IndexResponse> listener) {
        // if we don't have a master, we don't have metadata, that's fine, let it find a master using create index API
        ClusterState state = clusterService.state();
        if (autoCreateIndex.shouldAutoCreate(request.index(), state)) {
            CreateIndexRequest createIndexRequest = new CreateIndexRequest(request);
            createIndexRequest.index(request.index());
            createIndexRequest.mapping(request.type());
            createIndexRequest.cause("auto(index api)");
            createIndexRequest.masterNodeTimeout(request.timeout());
            createIndexAction.execute(createIndexRequest, new ActionListener<CreateIndexResponse>() {
                @Override
                public void onResponse(CreateIndexResponse result) {
                    innerExecute(request, listener);
                }

                @Override
                public void onFailure(Throwable e) {
                    // A concurrent request may have created the index first; that is not an error here.
                    if (ExceptionsHelper.unwrapCause(e) instanceof IndexAlreadyExistsException) {
                        // we have the index, do it
                        try {
                            innerExecute(request, listener);
                        } catch (Throwable e1) {
                            listener.onFailure(e1);
                        }
                    } else {
                        listener.onFailure(e);
                    }
                }
            });
        } else {
            innerExecute(request, listener);
        }
    }

    /**
     * Fills in derived request fields (id/routing/timestamp via {@code request.process})
     * and resolves the concrete shard this request is routed to.
     */
    @Override
    protected void resolveRequest(MetaData metaData, String concreteIndex, IndexRequest request) {
        MappingMetaData mappingMd = null;
        if (metaData.hasIndex(concreteIndex)) {
            mappingMd = metaData.index(concreteIndex).mappingOrDefault(request.type());
        }
        request.process(metaData, mappingMd, allowIdGeneration, concreteIndex);
        ShardId shardId = clusterService.operationRouting().shardId(clusterService.state(), concreteIndex, request.id(), request.routing());
        request.setShardId(shardId);
    }

    // Hands the (possibly auto-create-preceded) request to the replication base class.
    private void innerExecute(final IndexRequest request, final ActionListener<IndexResponse> listener) {
        super.doExecute(request, listener);
    }

    @Override
    protected IndexResponse newResponseInstance() {
        return new IndexResponse();
    }

    @Override
    protected Tuple<IndexResponse, IndexRequest> shardOperationOnPrimary(MetaData metaData, IndexRequest request) throws Throwable {

        // validate, if routing is required, that we got routing
        IndexMetaData indexMetaData = metaData.index(request.shardId().getIndex());
        MappingMetaData mappingMd = indexMetaData.mappingOrDefault(request.type());
        if (mappingMd != null && mappingMd.routing().required()) {
            if (request.routing() == null) {
                throw new RoutingMissingException(request.shardId().getIndex(), request.type(), request.id());
            }
        }

        IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex());
        IndexShard indexShard = indexService.getShard(request.shardId().id());

        final WriteResult<IndexResponse> result = executeIndexRequestOnPrimary(request, indexShard, mappingUpdatedAction);

        final IndexResponse response = result.response;
        final Translog.Location location = result.location;
        processAfterWrite(request.refresh(), indexShard, location);
        return new Tuple<>(response, request);
    }

    @Override
    protected void shardOperationOnReplica(IndexRequest request) {
        final ShardId shardId = request.shardId();
        IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex());
        IndexShard indexShard = indexService.getShard(shardId.id());
        final Engine.Index operation = executeIndexRequestOnReplica(request, indexShard);
        processAfterWrite(request.refresh(), indexShard, operation.getTranslogLocation());
    }

    /**
     * Execute the given {@link IndexRequest} on a replica shard, throwing a
     * {@link RetryOnReplicaException} if the operation needs to be re-tried.
     */
    public static Engine.Index executeIndexRequestOnReplica(IndexRequest request, IndexShard indexShard) {
        final ShardId shardId = indexShard.shardId();
        SourceToParse sourceToParse = SourceToParse.source(SourceToParse.Origin.REPLICA, request.source()).index(shardId.getIndex()).type(request.type()).id(request.id())
                .routing(request.routing()).parent(request.parent()).timestamp(request.timestamp()).ttl(request.ttl());

        final Engine.Index operation = indexShard.prepareIndexOnReplica(sourceToParse, request.version(), request.versionType());
        Mapping update = operation.parsedDoc().dynamicMappingsUpdate();
        if (update != null) {
            // Replicas never apply dynamic mapping updates themselves; retry until the
            // master-pushed mapping has arrived on this node.
            throw new RetryOnReplicaException(shardId, "Mappings are not available on the replica yet, triggered update: " + update);
        }
        indexShard.index(operation);
        return operation;
    }

    /** Utility method to prepare an index operation on primary shards */
    public static Engine.Index prepareIndexOperationOnPrimary(IndexRequest request, IndexShard indexShard) {
        SourceToParse sourceToParse = SourceToParse.source(SourceToParse.Origin.PRIMARY, request.source()).index(request.index()).type(request.type()).id(request.id())
                .routing(request.routing()).parent(request.parent()).timestamp(request.timestamp()).ttl(request.ttl());
        return indexShard.prepareIndexOnPrimary(sourceToParse, request.version(), request.versionType());
    }

    /**
     * Execute the given {@link IndexRequest} on a primary shard, throwing a
     * {@link RetryOnPrimaryException} if the operation needs to be re-tried.
     */
    public static WriteResult<IndexResponse> executeIndexRequestOnPrimary(IndexRequest request, IndexShard indexShard,
            MappingUpdatedAction mappingUpdatedAction) throws Throwable {
        Engine.Index operation = prepareIndexOperationOnPrimary(request, indexShard);
        Mapping update = operation.parsedDoc().dynamicMappingsUpdate();
        final ShardId shardId = indexShard.shardId();
        if (update != null) {
            // Push the dynamic mapping update to the master synchronously, then re-parse the
            // document; if it still produces an update, the mapping hasn't propagated here yet.
            final String indexName = shardId.getIndex();
            mappingUpdatedAction.updateMappingOnMasterSynchronously(indexName, request.type(), update);
            operation = prepareIndexOperationOnPrimary(request, indexShard);
            update = operation.parsedDoc().dynamicMappingsUpdate();
            if (update != null) {
                throw new RetryOnPrimaryException(shardId,
                        "Dynamic mappings are not available on the node that holds the primary yet");
            }
        }
        final boolean created = indexShard.index(operation);

        // update the version on request so it will happen on the replicas
        final long version = operation.version();
        request.version(version);
        request.versionType(request.versionType().versionTypeForReplicationAndRecovery());

        assert request.versionType().validateVersionForWrites(request.version());

        return new WriteResult<>(new IndexResponse(shardId, request.type(), request.id(), request.version(), created), operation.getTranslogLocation());
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.dfs;

import junit.framework.TestCase;
import junit.framework.AssertionFailedError;

import org.apache.commons.logging.*;

import org.apache.hadoop.fs.FSInputStream;
import org.apache.hadoop.fs.FSOutputStream;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.io.UTF8;
import org.apache.hadoop.conf.Configuration;

import java.io.File;
import java.io.FilenameFilter;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.ListIterator;
import java.util.Random;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;

/**
 * Test DFS.
 * ClusterTestDFS is a JUnit test for DFS using "pseudo multiprocessing" (or
 * more strictly, pseudo distributed) meaning all daemons run in one process
 * and sockets are used to communicate between daemons.  The test permutes
 * various block sizes, number of files, file sizes, and number of
 * datanodes.  After creating 1 or more files and filling them with random
 * data, one datanode is shutdown, and then the files are verified.
 * Next, all the random test files are deleted and we test for leakage
 * (non-deletion) by directly checking the real directories corresponding
 * to the datanodes still running.
 * <p>
 * Usage notes: TEST_PERMUTATION_MAX can be adjusted to perform more or
 * less testing of permutations.  The ceiling of useful permutation is
 * TEST_PERMUTATION_MAX_CEILING.
 * <p>
 * DFSClient emits many messages that can be ignored like:
 * "Failed to connect to *:7000:java.net.ConnectException: Connection refused: connect"
 * because a datanode is forced to close during testing.
 * <p>
 * Warnings about "Zero targets found" can be ignored (these are naggingly
 * emitted even though it is not possible to achieve the desired replication
 * level with the number of active datanodes.)
 * <p>
 * Possible Extensions:
 * <p>Bring a datanode down and restart it to verify reconnection to namenode.
 * <p>Simulate running out of disk space on one datanode only.
 * <p>Bring the namenode down and restart it to verify that datanodes reconnect.
 * <p>
 * <p>For a another approach to filesystem testing, see the high level
 * (HadoopFS level) test {@link org.apache.hadoop.fs.TestFileSystem}.
 * @author Paul Baclace
 */
public class ClusterTestDFS extends TestCase implements FSConstants {
  private static final Log LOG =
      LogFactory.getLog("org.apache.hadoop.dfs.ClusterTestDFS");

  private static Configuration conf = new Configuration();
  private static int BUFFER_SIZE =
      conf.getInt("io.file.buffer.size", 4096);

  // Monotonically increasing across all cycles; also used to derive unique namenode ports.
  private static int testCycleNumber = 0;

  /**
   * all DFS test files go under this base directory
   */
  private static String baseDirSpecified;

  /**
   * base dir as File
   */
  private static File baseDir;

  /** DFS block sizes to permute over in multiple test cycles
   * (array length should be prime).
   */
  private static final int[] BLOCK_SIZES = {100000, 4096};

  /** DFS file sizes to permute over in multiple test cycles
   * (array length should be prime).
   */
  private static final int[] FILE_SIZES =
      {100000, 100001, 4095, 4096, 4097, 1000000, 1000001};

  /** DFS file counts to permute over in multiple test cycles
   * (array length should be prime).
   */
  private static final int[] FILE_COUNTS = {1, 10, 100};

  /** Number of useful permutations or test cycles.
   * (The 2 factor represents the alternating 2 or 3 number of datanodes
   * started.)
   */
  private static final int TEST_PERMUTATION_MAX_CEILING =
      BLOCK_SIZES.length * FILE_SIZES.length * FILE_COUNTS.length * 2;

  /** Number of permutations of DFS test parameters to perform.
   * If this is greater than ceiling TEST_PERMUTATION_MAX_CEILING, then the
   * ceiling value is used.
   */
  private static final int TEST_PERMUTATION_MAX = 3;

  // Cached constructor of the configured PRNG class; lazily initialized in makeRandomDataGenerator().
  private Constructor randomDataGeneratorCtor = null;

  static {
    baseDirSpecified = System.getProperty("test.dfs.data", "/tmp/dfs_test");
    baseDir = new File(baseDirSpecified);
  }

  protected void setUp() throws Exception {
    super.setUp();
    // Allow multiple datanodes on the same host, since all daemons share one process here.
    conf.setBoolean("test.dfs.same.host.targets.allowed", true);
  }

  /**
   * Remove old files from temp area used by this test case and be sure
   * base temp directory can be created.
   */
  protected void prepareTempFileSpace() {
    if (baseDir.exists()) {
      try { // start from a blank slate
        FileUtil.fullyDelete(baseDir);
      } catch (Exception ignored) {
        // best-effort cleanup; mkdirs/isDirectory below will catch a real problem
      }
    }
    baseDir.mkdirs();
    if (!baseDir.isDirectory()) {
      throw new RuntimeException("Value of root directory property test.dfs.data for dfs test is not a directory: "
          + baseDirSpecified);
    }
  }

  /**
   * Pseudo Distributed FS Test.
   * Test DFS by running all the necessary daemons in one process.
   * Test various block sizes, number of files, disk space consumption,
   * and leakage.
   *
   * @throws Exception
   */
  public void testFsPseudoDistributed() throws Exception {
    // Each cycle picks its parameters by cycling through the (prime-length) arrays,
    // alternating between 2 and 3 datanodes.  testCycleNumber is advanced inside
    // the 4-arg overload (when the namenode port is chosen).
    while (testCycleNumber < TEST_PERMUTATION_MAX &&
        testCycleNumber < TEST_PERMUTATION_MAX_CEILING) {
      int blockSize = BLOCK_SIZES[testCycleNumber % BLOCK_SIZES.length];
      int numFiles = FILE_COUNTS[testCycleNumber % FILE_COUNTS.length];
      int fileSize = FILE_SIZES[testCycleNumber % FILE_SIZES.length];
      prepareTempFileSpace();
      testFsPseudoDistributed(fileSize, numFiles, blockSize,
          (testCycleNumber % 2) + 2);
    }
  }

  /**
   * Pseudo Distributed FS Testing.
   * Do one test cycle with given parameters.
   *
   * @param nBytes number of bytes to write to each file.
   * @param numFiles number of files to create.
   * @param blockSize block size to use for this test cycle.
   * @param initialDNcount number of datanodes to create
   * @throws Exception
   */
  public void testFsPseudoDistributed(long nBytes, int numFiles,
                                      int blockSize, int initialDNcount)
      throws Exception {
    long startTime = System.currentTimeMillis();
    int bufferSize = Math.min(BUFFER_SIZE, blockSize);
    boolean checkDataDirsEmpty = false;
    int iDatanodeClosed = 0;
    Random randomDataGenerator = makeRandomDataGenerator();
    final int currentTestCycleNumber = testCycleNumber;
    msg("using randomDataGenerator=" +
        randomDataGenerator.getClass().getName());

    //
    //     modify config for test
    //

    // set given config param to override other config settings
    conf.setInt("test.dfs.block_size", blockSize);
    // verify that config changed
    assertTrue(blockSize == conf.getInt("test.dfs.block_size", 2)); // 2 is an intentional obviously-wrong block size
    // downsize for testing (just to save resources)
    conf.setInt("dfs.namenode.handler.count", 3);
    if (false) { //  use MersenneTwister, if present
      conf.set("hadoop.random.class",
          "org.apache.hadoop.util.MersenneTwister");
    }
    conf.setLong("dfs.blockreport.intervalMsec", 50*1000L);
    conf.setLong("dfs.datanode.startupMsec", 15*1000L);

    String nameFSDir = baseDirSpecified + "/name";
    msg("----Start Test Cycle=" + currentTestCycleNumber +
        " test.dfs.block_size=" + blockSize +
        " nBytes=" + nBytes +
        " numFiles=" + numFiles +
        " initialDNcount=" + initialDNcount);

    //
    //          start a NameNode

    int nameNodePort = 9000 + testCycleNumber++; // ToDo: settable base port
    String nameNodeSocketAddr = "localhost:" + nameNodePort;
    NameNode nameNodeDaemon = new NameNode(new File[] { new File(nameFSDir) }, "localhost", nameNodePort, conf);
    DFSClient dfsClient = null;
    try {
      //
      //        start some DataNodes
      //
      ArrayList listOfDataNodeDaemons = new ArrayList();
      conf.set("fs.default.name", nameNodeSocketAddr);
      for (int i = 0; i < initialDNcount; i++) {
        // uniquely config real fs path for data storage for this datanode
        String dataDirs[] = new String[1];
        dataDirs[0] = baseDirSpecified + "/datanode" + i;
        conf.set("dfs.data.dir", dataDirs[0]);
        DataNode dn = DataNode.makeInstance(dataDirs, conf);
        if (dn != null) {
          listOfDataNodeDaemons.add(dn);
          (new Thread(dn, "DataNode" + i + ": " + dataDirs[0])).start();
        }
      }
      try {
        assertTrue("insufficient datanodes for test to continue",
            (listOfDataNodeDaemons.size() >= 2));

        //
        //          wait for datanodes to report in
        awaitQuiescence();

        //  act as if namenode is a remote process
        dfsClient = new DFSClient(new InetSocketAddress("localhost", nameNodePort), conf);

        //
        //           write nBytes of data using randomDataGenerator to numFiles
        //
        ArrayList testfilesList = new ArrayList();
        byte[] buffer = new byte[bufferSize];
        UTF8 testFileName = null;
        for (int iFileNumber = 0; iFileNumber < numFiles; iFileNumber++) {
          testFileName = new UTF8("/f" + iFileNumber);
          testfilesList.add(testFileName);
          FSOutputStream nos = dfsClient.create(testFileName, false);
          try {
            for (long nBytesWritten = 0L;
                 nBytesWritten < nBytes;
                 nBytesWritten += buffer.length) {
              if ((nBytesWritten + buffer.length) > nBytes) {
                // calculate byte count needed to exactly hit nBytes in length
                //  to keep randomDataGenerator in sync during the verify step
                int pb = (int) (nBytes - nBytesWritten);
                byte[] bufferPartial = new byte[pb];
                randomDataGenerator.nextBytes(bufferPartial);
                nos.write(bufferPartial);
              } else {
                randomDataGenerator.nextBytes(buffer);
                nos.write(buffer);
              }
            }
          } finally {
            nos.flush();
            nos.close();
          }
        }

        //
        // No need to wait for blocks to be replicated because replication
        //  is supposed to be complete when the file is closed.
        //

        //
        //                    take one datanode down
        iDatanodeClosed =
            currentTestCycleNumber % listOfDataNodeDaemons.size();
        DataNode dn = (DataNode) listOfDataNodeDaemons.get(iDatanodeClosed);
        msg("shutdown datanode daemon " + iDatanodeClosed +
            " dn=" + dn.data);
        try {
          dn.shutdown();
        } catch (Exception e) {
          msg("ignoring datanode shutdown exception=" + e);
        }

        //
        //          verify data against a "rewound" randomDataGenerator
        //               that all of the data is intact
        // Capture the PRNG's final value so we can later confirm the verify
        // pass consumed exactly as many random bytes as the write pass did.
        long lastLong = randomDataGenerator.nextLong();
        randomDataGenerator = makeRandomDataGenerator(); // restart (make new) PRNG
        ListIterator li = testfilesList.listIterator();
        while (li.hasNext()) {
          testFileName = (UTF8) li.next();
          FSInputStream nis = dfsClient.open(testFileName);
          byte[] bufferGolden = new byte[bufferSize];
          int m = 42;
          try {
            while (m != -1) {
              m = nis.read(buffer);
              if (m == buffer.length) {
                randomDataGenerator.nextBytes(bufferGolden);
                assertBytesEqual(buffer, bufferGolden, buffer.length);
              } else if (m > 0) {
                byte[] bufferGoldenPartial = new byte[m];
                randomDataGenerator.nextBytes(bufferGoldenPartial);
                assertBytesEqual(buffer, bufferGoldenPartial,
                    bufferGoldenPartial.length);
              }
            }
          } finally {
            nis.close();
          }
        }
        // verify last randomDataGenerator rand val to ensure last file length was checked
        long lastLongAgain = randomDataGenerator.nextLong();
        assertEquals(lastLong, lastLongAgain);
        msg("Finished validating all file contents");

        //
        //                    now delete all the created files
        msg("Delete all random test files under DFS via remaining datanodes");
        li = testfilesList.listIterator();
        while (li.hasNext()) {
          testFileName = (UTF8) li.next();
          assertTrue(dfsClient.delete(testFileName));
        }

        //
        //                    wait for delete to be propagated
        //                    (unlike writing files, delete is lazy)
        msg("Test thread sleeping while datanodes propagate delete...");
        awaitQuiescence();
        msg("Test thread awakens to verify file contents");

        //
        //             check that the datanode's block directory is empty
        //                (except for datanode that had forced shutdown)
        checkDataDirsEmpty = true; // do it during finally clause

      } catch (AssertionFailedError afe) {
        throw afe;
      } catch (Throwable t) {
        msg("Unexpected exception_b: " + t);
        t.printStackTrace();
      } finally {
        //
        // shut down datanode daemons (this takes advantage of being same-process)
        msg("begin shutdown of all datanode daemons for test cycle " +
            currentTestCycleNumber);

        for (int i = 0; i < listOfDataNodeDaemons.size(); i++) {
          DataNode dataNode = (DataNode) listOfDataNodeDaemons.get(i);
          if (i != iDatanodeClosed) {
            try {
              if (checkDataDirsEmpty) {
                assertNoBlocks(dataNode);
              }
              dataNode.shutdown();
            } catch (Exception e) {
              msg("ignoring exception during (all) datanode shutdown, e=" + e);
            }
          }
        }
      }
      msg("finished shutdown of all datanode daemons for test cycle " +
          currentTestCycleNumber);
      if (dfsClient != null) {
        try {
          msg("close down subthreads of DFSClient");
          dfsClient.close();
        } catch (Exception ignored) {
          // best-effort close; client threads are daemons of this process anyway
        }
        msg("finished close down of DFSClient");
      }
    } catch (AssertionFailedError afe) {
      throw afe;
    } catch (Throwable t) {
      msg("Unexpected exception_a: " + t);
      t.printStackTrace();
    } finally {
      // shut down namenode daemon (this takes advantage of being same-process)
      msg("begin shutdown of namenode daemon for test cycle " +
          currentTestCycleNumber);
      try {
        nameNodeDaemon.stop();
      } catch (Exception e) {
        msg("ignoring namenode shutdown exception=" + e);
      }
      msg("finished shutdown of namenode daemon for test cycle " +
          currentTestCycleNumber);
    }
    msg("test cycle " + currentTestCycleNumber + " elapsed time=" +
        (System.currentTimeMillis() - startTime) / 1000. + "sec");
    msg("threads still running (look for stragglers): ");
    msg(summarizeThreadGroup());
  }

  /**
   * Assert that the given datanode reports no blocks, i.e. the earlier
   * delete fully propagated to its on-disk storage.
   */
  private void assertNoBlocks(DataNode dn) {
    Block[] blocks = dn.data.getBlockReport();
    // if this fails, the delete did not propagate because either
    //   awaitQuiescence() returned before the disk images were removed
    //   or a real failure was detected.
    assertTrue(" data dir not empty: " + dn.data.volumes,
        blocks.length==0);
  }

  /**
   * Make a data generator.
   * Allows optional use of high quality PRNG by setting property
   * hadoop.random.class to the full class path of a subclass of
   * java.util.Random such as "...util.MersenneTwister".
   * The property test.dfs.random.seed can supply a seed for reproducible
   * testing (a default is set here if property is not set.)
   */
  private Random makeRandomDataGenerator() {
    long seed = conf.getLong("test.dfs.random.seed", 0xB437EF);
    try {
      if (randomDataGeneratorCtor == null) {
        // lazy init
        String rndDataGenClassname =
            conf.get("hadoop.random.class", "java.util.Random");
        Class clazz = Class.forName(rndDataGenClassname);
        randomDataGeneratorCtor = clazz.getConstructor(new Class[]{Long.TYPE});
      }

      if (randomDataGeneratorCtor != null) {
        Object arg[] = {new Long(seed)};
        return (Random) randomDataGeneratorCtor.newInstance(arg);
      }
    } catch (ClassNotFoundException absorb) {
      // deliberately absorbed: fall back to java.util.Random below
    } catch (NoSuchMethodException absorb) {
    } catch (SecurityException absorb) {
    } catch (InstantiationException absorb) {
    } catch (IllegalAccessException absorb) {
    } catch (IllegalArgumentException absorb) {
    } catch (InvocationTargetException absorb) {
    }
    // last resort
    return new java.util.Random(seed);
  }

  /** Wait for the DFS datanodes to become quiescent.
   * The initial implementation is to sleep for some fixed amount of time,
   * but a better implementation would be to really detect when distributed
   * operations are completed.
   * @throws InterruptedException
   */
  private void awaitQuiescence() throws InterruptedException {
    // ToDo: Need observer pattern, not static sleep
    // Doug suggested that the block report interval could be made shorter
    //   and then observing that would be a good way to know when an operation
    //   was complete (quiescence detect).
    sleepAtLeast(60000);
  }

  // Compares the first len bytes of the two arrays element-by-element.
  private void assertBytesEqual(byte[] buffer, byte[] bufferGolden, int len) {
    for (int i = 0; i < len; i++) {
      assertEquals(buffer[i], bufferGolden[i]);
    }
  }

  private void msg(String s) {
    //System.out.println(s);
    LOG.info(s);
  }

  /**
   * Sleep for at least tmsec milliseconds, re-sleeping after any
   * InterruptedException until the full duration has elapsed.
   */
  public static void sleepAtLeast(int tmsec) {
    long t0 = System.currentTimeMillis();
    long t1 = t0;
    long tslept = t1 - t0;
    while (tmsec > tslept) {
      try {
        long tsleep = tmsec - tslept;
        Thread.sleep(tsleep);
        t1 = System.currentTimeMillis();
      }  catch (InterruptedException ie) {
        // keep sleeping until the requested duration has fully elapsed
        t1 = System.currentTimeMillis();
      }
      tslept = t1 - t0;
    }
  }

  /**
   * Return a newline-separated summary of all live threads visible from
   * this thread's group (used to spot straggler daemon threads).
   */
  public static String summarizeThreadGroup() {
    int n = 10;
    int k = 0;
    Thread[] tarray = null;
    StringBuffer sb = new StringBuffer(500);
    do {
      n = n * 10;
      tarray = new Thread[n];
      k = Thread.enumerate(tarray);
    } while (k == n); // while array is too small...
    for (int i = 0; i < k; i++) {
      Thread thread = tarray[i];
      sb.append(thread.toString());
      sb.append("\n");
    }
    return sb.toString();
  }

  public static void main(String[] args) throws Exception {
    String usage = "Usage: ClusterTestDFS (no args)";
    if (args.length != 0) {
      System.err.println(usage);
      System.exit(-1);
    }
    String[] testargs = {"org.apache.hadoop.dfs.ClusterTestDFS"};
    junit.textui.TestRunner.main(testargs);
  }

}
package io.dropwizard.logging;

import ch.qos.logback.classic.AsyncAppender;
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.core.Appender;
import ch.qos.logback.core.AsyncAppenderBase;
import ch.qos.logback.core.Context;
import ch.qos.logback.core.pattern.PatternLayoutBase;
import ch.qos.logback.core.spi.DeferredProcessingAware;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import io.dropwizard.logging.async.AsyncAppenderFactory;
import io.dropwizard.logging.filter.FilterFactory;
import io.dropwizard.logging.layout.LayoutFactory;

import javax.validation.constraints.Max;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotNull;
import java.util.List;
import java.util.TimeZone;

import static com.google.common.base.Strings.nullToEmpty;

/**
 * Common scaffolding for {@link AppenderFactory} implementations: threshold
 * filtering, custom log formats, time-zone handling, async wrapping, and
 * filter-factory configuration.
 * <p/>
 * <b>Configuration Parameters:</b>
 * <table>
 *     <tr>
 *         <td>Name</td>
 *         <td>Default</td>
 *         <td>Description</td>
 *     </tr>
 *     <tr>
 *         <td>{@code threshold}</td>
 *         <td>ALL</td>
 *         <td>The minimum event level the appender will handle.</td>
 *     </tr>
 *     <tr>
 *         <td>{@code logFormat}</td>
 *         <td>(none)</td>
 *         <td>An appender-specific log format.</td>
 *     </tr>
 *     <tr>
 *         <td>{@code timeZone}</td>
 *         <td>{@code UTC}</td>
 *         <td>
 *             The time zone to which event timestamps will be converted.
 *             Ignored if logFormat is supplied.
 *         </td>
 *     </tr>
 *     <tr>
 *         <td>{@code queueSize}</td>
 *         <td>{@link AsyncAppenderBase}</td>
 *         <td>The maximum capacity of the blocking queue.</td>
 *     </tr>
 *     <tr>
 *         <td>{@code includeCallerData}</td>
 *         <td>{@link AsyncAppenderBase}</td>
 *         <td>
 *             Whether to include caller data, required for line numbers.
 *             Beware, is considered expensive.
 *         </td>
 *     </tr>
 *     <tr>
 *         <td>{@code discardingThreshold}</td>
 *         <td>{@link AsyncAppenderBase}</td>
 *         <td>
 *             By default, when the blocking queue has 20% capacity remaining,
 *             it will drop events of level TRACE, DEBUG and INFO, keeping only
 *             events of level WARN and ERROR. To keep all events, set discardingThreshold to 0.
 *         </td>
 *     </tr>
 *     <tr>
 *         <td>{@code filterFactories}</td>
 *         <td>(none)</td>
 *         <td>
 *             A list of {@link FilterFactory filters} to apply to the appender, in order,
 *             after the {@code threshold}.
 *         </td>
 *     </tr>
 * </table>
 */
public abstract class AbstractAppenderFactory<E extends DeferredProcessingAware> implements AppenderFactory<E> {

    @NotNull
    protected Level threshold = Level.ALL;

    protected String logFormat;

    @NotNull
    protected TimeZone timeZone = TimeZone.getTimeZone("UTC");

    @Min(1)
    @Max(Integer.MAX_VALUE)
    private int queueSize = AsyncAppenderBase.DEFAULT_QUEUE_SIZE;

    private int discardingThreshold = -1;

    private boolean includeCallerData = false;

    private ImmutableList<FilterFactory<E>> filterFactories = ImmutableList.of();

    private boolean neverBlock = false;

    /** @return capacity of the async appender's blocking queue */
    @JsonProperty
    public int getQueueSize() {
        return queueSize;
    }

    @JsonProperty
    public void setQueueSize(int queueSize) {
        this.queueSize = queueSize;
    }

    /** @return remaining-capacity point below which low-level events are dropped */
    @JsonProperty
    public int getDiscardingThreshold() {
        return discardingThreshold;
    }

    @JsonProperty
    public void setDiscardingThreshold(int discardingThreshold) {
        this.discardingThreshold = discardingThreshold;
    }

    /** @return the minimum level this appender handles, as a string */
    @JsonProperty
    public String getThreshold() {
        return threshold.toString();
    }

    @JsonProperty
    public void setThreshold(String threshold) {
        this.threshold = DefaultLoggingFactory.toLevel(threshold);
    }

    /** @return the appender-specific log format, or {@code null} if unset */
    @JsonProperty
    public String getLogFormat() {
        return logFormat;
    }

    @JsonProperty
    public void setLogFormat(String logFormat) {
        this.logFormat = logFormat;
    }

    /** @return the time zone used for event timestamps */
    @JsonProperty
    public TimeZone getTimeZone() {
        return timeZone;
    }

    /**
     * Sets the time zone by id; the special (case-insensitive) value
     * {@code "system"} selects the JVM's default time zone.
     */
    @JsonProperty
    public void setTimeZone(String zoneId) {
        if ("system".equalsIgnoreCase(zoneId)) {
            this.timeZone = TimeZone.getDefault();
        } else {
            this.timeZone = TimeZone.getTimeZone(zoneId);
        }
    }

    @JsonProperty
    public void setTimeZone(TimeZone timeZone) {
        this.timeZone = timeZone;
    }

    /** @return whether caller data (needed for line numbers) is captured */
    @JsonProperty
    public boolean isIncludeCallerData() {
        return includeCallerData;
    }

    @JsonProperty
    public void setIncludeCallerData(boolean includeCallerData) {
        this.includeCallerData = includeCallerData;
    }

    /** @return the configured filter factories, applied in order after the threshold */
    @JsonProperty
    public ImmutableList<FilterFactory<E>> getFilterFactories() {
        return filterFactories;
    }

    @JsonProperty
    public void setFilterFactories(List<FilterFactory<E>> appenders) {
        this.filterFactories = ImmutableList.copyOf(appenders);
    }

    @JsonProperty
    public void setNeverBlock(boolean neverBlock) {
        this.neverBlock = neverBlock;
    }

    /**
     * Wraps the given appender in an async appender, inheriting the wrapped
     * appender's logback {@link Context}.
     */
    protected Appender<E> wrapAsync(Appender<E> appender, AsyncAppenderFactory<E> asyncAppenderFactory) {
        return wrapAsync(appender, asyncAppenderFactory, appender.getContext());
    }

    /**
     * Wraps the given appender in an async appender configured from this
     * factory's queue/discard/caller-data settings, and starts it.
     */
    protected Appender<E> wrapAsync(Appender<E> appender, AsyncAppenderFactory<E> asyncAppenderFactory, Context context) {
        final AsyncAppenderBase<E> wrapper = asyncAppenderFactory.build();
        // Caller data is only meaningful (and settable) on the classic AsyncAppender.
        if (wrapper instanceof AsyncAppender) {
            ((AsyncAppender) wrapper).setIncludeCallerData(includeCallerData);
        }
        wrapper.setQueueSize(queueSize);
        wrapper.setDiscardingThreshold(discardingThreshold);
        wrapper.setContext(context);
        wrapper.setName("async-" + appender.getName());
        wrapper.addAppender(appender);
        wrapper.setNeverBlock(neverBlock);
        wrapper.start();
        return wrapper;
    }

    /**
     * Builds and starts a pattern layout for the given context, overriding the
     * factory's default pattern with {@code logFormat} when one is configured.
     */
    protected PatternLayoutBase<E> buildLayout(LoggerContext context, LayoutFactory<E> layoutFactory) {
        final PatternLayoutBase<E> layout = layoutFactory.build(context, timeZone);
        if (logFormat != null && !logFormat.isEmpty()) {
            layout.setPattern(logFormat);
        }
        layout.start();
        return layout;
    }
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.todo;

import com.intellij.find.FindModel;
import com.intellij.find.impl.FindInProjectUtil;
import com.intellij.icons.AllIcons;
import com.intellij.ide.*;
import com.intellij.ide.actions.NextOccurenceToolbarAction;
import com.intellij.ide.actions.PreviousOccurenceToolbarAction;
import com.intellij.ide.todo.nodes.TodoFileNode;
import com.intellij.ide.todo.nodes.TodoItemNode;
import com.intellij.ide.todo.nodes.TodoTreeHelper;
import com.intellij.ide.util.PsiNavigationSupport;
import com.intellij.ide.util.treeView.NodeDescriptor;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.RangeMarker;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.SimpleToolWindowPanel;
import com.intellij.openapi.ui.Splitter;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.NlsContexts;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.impl.VisibilityWatcher;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.ui.*;
import com.intellij.ui.content.Content;
import com.intellij.ui.tree.AsyncTreeModel;
import com.intellij.ui.tree.StructureTreeModel;
import com.intellij.ui.treeStructure.Tree;
import com.intellij.usageView.UsageInfo;
import com.intellij.usages.impl.UsagePreviewPanel;
import com.intellij.util.Alarm;
import com.intellij.util.EditSourceOnDoubleClickHandler;
import com.intellij.util.EditSourceOnEnterKeyHandler;
import com.intellij.util.PlatformIcons;
import com.intellij.util.ui.tree.TreeModelAdapter;
import com.intellij.util.ui.tree.TreeUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.event.TreeModelEvent;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreeNode;
import javax.swing.tree.TreePath;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;

/**
 * Base UI panel of the TODO tool window: shows a tree of TODO items, an optional
 * usage-preview pane, and a toolbar with navigation / grouping / filter actions.
 * Subclasses supply the concrete tree builder via {@link #createTreeBuilder}.
 */
public abstract class TodoPanel extends SimpleToolWindowPanel implements OccurenceNavigator, DataProvider, Disposable {
  protected static final Logger LOG = Logger.getInstance(TodoPanel.class);

  // Nulled in dispose(); guard against use after disposal.
  protected Project myProject;
  private final TodoPanelSettings mySettings;
  private final boolean myCurrentFileMode;
  private final Content myContent;

  private final Tree myTree;
  private final TreeExpander myTreeExpander;
  private final MyOccurenceNavigator myOccurenceNavigator;
  protected final TodoTreeBuilder myTodoTreeBuilder;
  private MyVisibilityWatcher myVisibilityWatcher;
  private UsagePreviewPanel myUsagePreviewPanel;
  private MyAutoScrollToSourceHandler myAutoScrollToSourceHandler;

  // Lets toolbar actions locate the panel instance through the DataContext.
  public static final DataKey<TodoPanel> TODO_PANEL_DATA_KEY = DataKey.create("TodoPanel");

  /**
   * @param currentFileMode if {@code true} then view doesn't have "Group By Packages" and "Flatten Packages"
   *                        actions.
   */
  TodoPanel(Project project, TodoPanelSettings settings, boolean currentFileMode, Content content) {
    super(false, true);
    myProject = project;
    mySettings = settings;
    myCurrentFileMode = currentFileMode;
    myContent = content;

    DefaultTreeModel model = new DefaultTreeModel(new DefaultMutableTreeNode());
    myTree = new Tree(model);
    myTreeExpander = new DefaultTreeExpander(myTree);
    myOccurenceNavigator = new MyOccurenceNavigator();
    initUI();
    myTodoTreeBuilder = setupTreeStructure();
    updateTodoFilter();
    myTodoTreeBuilder.setShowPackages(mySettings.arePackagesShown);
    myTodoTreeBuilder.setShowModules(mySettings.areModulesShown);
    myTodoTreeBuilder.setFlattenPackages(mySettings.areFlattenPackages);

    myVisibilityWatcher = new MyVisibilityWatcher();
    myVisibilityWatcher.install(this);
  }

  /**
   * Creates the builder, wires the structure/async tree models into {@link #myTree},
   * and selects the first selectable element if any.
   */
  private TodoTreeBuilder setupTreeStructure() {
    TodoTreeBuilder todoTreeBuilder = createTreeBuilder(myTree, myProject);
    Disposer.register(this, todoTreeBuilder);
    TodoTreeStructure structure = todoTreeBuilder.getTodoTreeStructure();
    StructureTreeModel<TodoTreeStructure> structureTreeModel = new StructureTreeModel<>(structure, TodoTreeBuilder.NODE_DESCRIPTOR_COMPARATOR, myProject);
    AsyncTreeModel asyncTreeModel = new AsyncTreeModel(structureTreeModel, myProject);
    myTree.setModel(asyncTreeModel);
    asyncTreeModel.addTreeModelListener(new MyExpandListener(todoTreeBuilder));
    todoTreeBuilder.setModel(structureTreeModel);
    Object selectableElement = structure.getFirstSelectableElement();
    if (selectableElement != null) {
      todoTreeBuilder.select(selectableElement);
    }
    return todoTreeBuilder;
  }

  /** Action group shown as a speed-searchable popup under its toolbar button. */
  public static class GroupByActionGroup extends DefaultActionGroup {
    @Override
    public void actionPerformed(@NotNull AnActionEvent e) {
      JBPopupFactory.getInstance()
        .createActionGroupPopup(null, this, e.getDataContext(), JBPopupFactory.ActionSelectionAid.SPEEDSEARCH, true)
        .showUnderneathOf(e.getInputEvent().getComponent());
    }
  }

  protected Tree getTree() {
    return myTree;
  }

  /**
   * Auto-expands freshly inserted top-level nodes when the builder marks them
   * as auto-expandable; the expansion is deferred to the EDT via invokeLater.
   */
  private class MyExpandListener extends TreeModelAdapter {

    private final TodoTreeBuilder myBuilder;

    MyExpandListener(TodoTreeBuilder builder) {
      myBuilder = builder;
    }

    @Override
    public void treeNodesInserted(TreeModelEvent e) {
      TreePath parentPath = e.getTreePath();
      // Only react to insertions near the root (path depth <= 2).
      if (parentPath == null || parentPath.getPathCount() > 2) return;
      Object[] children = e.getChildren();
      for (Object o : children) {
        NodeDescriptor descriptor = TreeUtil.getUserObject(NodeDescriptor.class, o);
        if (descriptor != null && myBuilder.isAutoExpandNode(descriptor)) {
          ApplicationManager.getApplication().invokeLater(() -> {
            if (myTree.isVisible(parentPath) && myTree.isExpanded(parentPath)) {
              myTree.expandPath(parentPath.pathByAddingChild(o));
            }
          }, myBuilder.myProject.getDisposed());
        }
      }
    }
  }

  /** Factory method: subclasses provide the concrete TODO tree builder. */
  protected abstract TodoTreeBuilder createTreeBuilder(JTree tree, Project project);

  /** Builds tree renderers/handlers, the popup menu, preview panel and toolbar. */
  private void initUI() {
    myTree.setShowsRootHandles(true);
    myTree.setRootVisible(false);
    myTree.setRowHeight(0); // enable variable-height rows
    myTree.setCellRenderer(new TodoCompositeRenderer());
    EditSourceOnDoubleClickHandler.install(myTree);
    EditSourceOnEnterKeyHandler.install(myTree);
    new TreeSpeedSearch(myTree);

    DefaultActionGroup group = new DefaultActionGroup();
    group.add(ActionManager.getInstance().getAction(IdeActions.ACTION_EDIT_SOURCE));
    group.addSeparator();
    group.add(CommonActionsManager.getInstance().createExpandAllAction(myTreeExpander, this));
    group.add(CommonActionsManager.getInstance().createCollapseAllAction(myTreeExpander, this));
    group.addSeparator();
    group.add(ActionManager.getInstance().getAction(IdeActions.GROUP_VERSION_CONTROLS));
    PopupHandler.installPopupHandler(myTree, group, ActionPlaces.TODO_VIEW_POPUP, ActionManager.getInstance());

    myUsagePreviewPanel = new UsagePreviewPanel(myProject, FindInProjectUtil.setupViewPresentation(false, new FindModel()));
    Disposer.register(this, myUsagePreviewPanel);
    myUsagePreviewPanel.setVisible(mySettings.showPreview);

    setContent(createCenterComponent());

    myTree.getSelectionModel().addTreeSelectionListener(new TreeSelectionListener() {
      @Override
      public void valueChanged(final TreeSelectionEvent e) {
        // Preview refresh must run on the EDT outside modal dialogs.
        ApplicationManager.getApplication().invokeLater(() -> {
          if (myUsagePreviewPanel.isVisible()) {
            updatePreviewPanel();
          }
        }, ModalityState.NON_MODAL, myProject.getDisposed());
      }
    });

    myAutoScrollToSourceHandler = new MyAutoScrollToSourceHandler();
    myAutoScrollToSourceHandler.install(myTree);

    // Create tool bars and register custom shortcuts
    DefaultActionGroup toolbarGroup = new DefaultActionGroup();
    toolbarGroup.add(new PreviousOccurenceToolbarAction(myOccurenceNavigator));
    toolbarGroup.add(new NextOccurenceToolbarAction(myOccurenceNavigator));
    toolbarGroup.add(new SetTodoFilterAction(myProject, mySettings, todoFilter -> setTodoFilter(todoFilter)));
    toolbarGroup.add(createAutoScrollToSourceAction());
    toolbarGroup.add(CommonActionsManager.getInstance().createExpandAllAction(myTreeExpander, this));
    toolbarGroup.add(CommonActionsManager.getInstance().createCollapseAllAction(myTreeExpander, this));

    if (!myCurrentFileMode) {
      // "Group by" actions make no sense in current-file mode.
      DefaultActionGroup groupBy = createGroupByActionGroup();
      toolbarGroup.add(groupBy);
    }

    toolbarGroup.add(new MyPreviewAction());

    setToolbar(ActionManager.getInstance().createActionToolbar(ActionPlaces.TODO_VIEW_TOOLBAR, toolbarGroup, false).getComponent());
  }

  @NotNull
  protected DefaultActionGroup createGroupByActionGroup() {
    ActionManager actionManager = ActionManager.getInstance();
    return (DefaultActionGroup) actionManager.getAction("TodoViewGroupByGroup");
  }

  protected AnAction createAutoScrollToSourceAction() {
    return myAutoScrollToSourceHandler.createToggleAction();
  }

  /** Tree on the left, usage preview on the right, separated by a one-pixel splitter. */
  protected JComponent createCenterComponent() {
    Splitter splitter = new OnePixelSplitter(false);
    splitter.setSecondComponent(myUsagePreviewPanel);
    splitter.setFirstComponent(ScrollPaneFactory.createScrollPane(myTree));
    return splitter;
  }

  /**
   * Rebuilds the usage-preview pane from the current tree selection: collects the
   * selected TODO item's range (and any additional ranges) as UsageInfos.
   */
  private void updatePreviewPanel() {
    if (myProject == null || myProject.isDisposed()) return;
    List<UsageInfo> infos = new ArrayList<>();
    final TreePath path = myTree.getSelectionPath();
    if (path != null) {
      DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
      Object userObject = node.getUserObject();
      if (userObject instanceof NodeDescriptor) {
        Object element = ((NodeDescriptor)userObject).getElement();
        TodoItemNode pointer = myTodoTreeBuilder.getFirstPointerForElement(element);
        if (pointer != null) {
          final SmartTodoItemPointer value = pointer.getValue();
          final Document document = value.getDocument();
          final PsiFile psiFile = PsiDocumentManager.getInstance(myProject).getPsiFile(document);
          final RangeMarker rangeMarker = value.getRangeMarker();
          if (psiFile != null) {
            infos.add(new UsageInfo(psiFile, rangeMarker.getStartOffset(), rangeMarker.getEndOffset()));
            for (RangeMarker additionalMarker: value.getAdditionalRangeMarkers()) {
              if (additionalMarker.isValid()) {
                infos.add(new UsageInfo(psiFile, additionalMarker.getStartOffset(), additionalMarker.getEndOffset()));
              }
            }
          }
        }
      }
    }
    myUsagePreviewPanel.updateLayout(infos.isEmpty() ? null : infos);
  }

  @Override
  public void dispose() {
    if (myVisibilityWatcher != null) {
      myVisibilityWatcher.deinstall(this);
      myVisibilityWatcher = null;
    }
    myProject = null;
  }

  void rebuildCache() {
    myTodoTreeBuilder.rebuildCache();
  }

  void rebuildCache(@NotNull Set<? extends VirtualFile> files) {
    myTodoTreeBuilder.rebuildCache(files);
  }

  /**
   * Immediately updates tree.
   */
  void updateTree() {
    myTodoTreeBuilder.updateTree();
  }

  /**
   * Updates current filter. If previously set filter was removed then empty filter is set.
   *
   * @see TodoTreeBuilder#setTodoFilter
   */
  void updateTodoFilter() {
    TodoFilter filter = TodoConfiguration.getInstance().getTodoFilter(mySettings.todoFilterName);
    setTodoFilter(filter);
  }

  /**
   * Sets specified {@code TodoFilter}. The method also updates window's title.
   *
   * @see TodoTreeBuilder#setTodoFilter
   */
  private void setTodoFilter(TodoFilter filter) {
    // Clear name of current filter if it was removed from configuration.
    String filterName = filter != null ? filter.getName() : null;
    mySettings.todoFilterName = filterName;
    // Update filter
    myTodoTreeBuilder.setTodoFilter(filter);
    // Update content's title
    myContent.setDescription(filterName);
  }

  /**
   * @return the file backing the currently selected node, or {@code null} when
   * nothing suitable is selected.
   */
  @Nullable
  protected PsiFile getSelectedFile() {
    TreePath path = myTree.getSelectionPath();
    if (path == null) {
      return null;
    }
    DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
    LOG.assertTrue(node != null);
    if(node.getUserObject() == null){
      return null;
    }
    return TodoTreeBuilder.getFileForNode(node);
  }

  protected void setDisplayName(@NlsContexts.TabTitle String tabName) {
    myContent.setDisplayName(tabName);
  }

  /** Resolves the selected PSI element, falling back to the selected file. */
  @Nullable
  private PsiElement getSelectedElement() {
    if (myTree == null) return null;
    TreePath path = myTree.getSelectionPath();
    if (path == null) {
      return null;
    }
    DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
    Object userObject = node.getUserObject();
    final PsiElement selectedElement = TodoTreeHelper.getInstance(myProject).getSelectedElement(userObject);
    if (selectedElement != null) return selectedElement;
    return getSelectedFile();
  }

  @Override
  public Object getData(@NotNull String dataId) {
    if (CommonDataKeys.NAVIGATABLE.is(dataId)) {
      TreePath path = myTree.getSelectionPath();
      if (path == null) {
        return null;
      }
      DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
      Object userObject = node.getUserObject();
      if (!(userObject instanceof NodeDescriptor)) {
        return null;
      }
      Object element = ((NodeDescriptor)userObject).getElement();
      if (!(element instanceof TodoFileNode || element instanceof TodoItemNode)) { // allow user to use F4 only on files and TODO items
        return null;
      }
      TodoItemNode pointer = myTodoTreeBuilder.getFirstPointerForElement(element);
      if (pointer != null) {
        return PsiNavigationSupport.getInstance().createNavigatable(myProject,
                                                                    pointer.getValue().getTodoItem().getFile().getVirtualFile(),
                                                                    pointer.getValue().getRangeMarker().getStartOffset());
      }
      else {
        return null;
      }
    }
    else if (CommonDataKeys.VIRTUAL_FILE.is(dataId)) {
      final PsiFile file = getSelectedFile();
      return file != null ? file.getVirtualFile() : null;
    }
    else if (CommonDataKeys.PSI_ELEMENT.is(dataId)) {
      return getSelectedElement();
    }
    else if (CommonDataKeys.VIRTUAL_FILE_ARRAY.is(dataId)) {
      PsiFile file = getSelectedFile();
      if (file != null) {
        return new VirtualFile[]{file.getVirtualFile()};
      }
      else {
        return VirtualFile.EMPTY_ARRAY;
      }
    }
    else if (PlatformDataKeys.HELP_ID.is(dataId)) {
      return "find.todoList";
    }
    else if (TODO_PANEL_DATA_KEY.is(dataId)) {
      return this;
    }
    return super.getData(dataId);
  }

  @Override
  @Nullable
  public OccurenceInfo goPreviousOccurence() {
    return myOccurenceNavigator.goPreviousOccurence();
  }

  @NotNull
  @Override
  public String getNextOccurenceActionName() {
    return myOccurenceNavigator.getNextOccurenceActionName();
  }

  @Override
  @Nullable
  public OccurenceInfo goNextOccurence() {
    return myOccurenceNavigator.goNextOccurence();
  }

  @Override
  public boolean hasNextOccurence() {
    return myOccurenceNavigator.hasNextOccurence();
  }

  @NotNull
  @Override
  public String getPreviousOccurenceActionName() {
    return myOccurenceNavigator.getPreviousOccurenceActionName();
  }

  @Override
  public boolean hasPreviousOccurence() {
    return myOccurenceNavigator.hasPreviousOccurence();
  }

  // Debounces cache rebuilds: cancels pending requests and schedules a new one.
  protected void rebuildWithAlarm(final Alarm alarm) {
    alarm.cancelAllRequests();
    alarm.addRequest(() -> {
      myTodoTreeBuilder.rebuildCache();
    }, 300);
  }

  /**
   * Provides support for "auto scroll to source" functionality
   */
  private final class MyAutoScrollToSourceHandler extends AutoScrollToSourceHandler {
    MyAutoScrollToSourceHandler() {
    }

    @Override
    protected boolean isAutoScrollMode() {
      return mySettings.isAutoScrollToSource;
    }

    @Override
    protected void setAutoScrollMode(boolean state) {
      mySettings.isAutoScrollToSource = state;
    }
  }

  /**
   * Provides support for "Ctrl+Alt+Up/Down" navigation.
   */
  private final class MyOccurenceNavigator implements OccurenceNavigator {
    @Override
    public boolean hasNextOccurence() {
      TreePath path = myTree.getSelectionPath();
      if (path == null) {
        return false;
      }
      DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
      Object userObject = node.getUserObject();
      if (userObject == null) {
        return false;
      }
      if (userObject instanceof NodeDescriptor && ((NodeDescriptor)userObject).getElement() instanceof TodoItemNode) {
        // A TODO item has a next occurrence unless it is the last visible row.
        return myTree.getRowCount() != myTree.getRowForPath(path) + 1;
      }
      else {
        return node.getChildCount() > 0;
      }
    }

    @Override
    public boolean hasPreviousOccurence() {
      TreePath path = myTree.getSelectionPath();
      if (path == null) {
        return false;
      }
      DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
      Object userObject = node.getUserObject();
      return userObject instanceof NodeDescriptor && !isFirst(node);
    }

    // True when the node is the first child along its whole ancestor chain.
    private boolean isFirst(final TreeNode node) {
      final TreeNode parent = node.getParent();
      return parent == null || parent.getIndex(node) == 0 && isFirst(parent);
    }

    @Override
    @Nullable
    public OccurenceInfo goNextOccurence() {
      return goToPointer(getNextPointer());
    }

    @Override
    @Nullable
    public OccurenceInfo goPreviousOccurence() {
      return goToPointer(getPreviousPointer());
    }

    @NotNull
    @Override
    public String getNextOccurenceActionName() {
      return IdeBundle.message("action.next.todo");
    }

    @NotNull
    @Override
    public String getPreviousOccurenceActionName() {
      return IdeBundle.message("action.previous.todo");
    }

    /** Selects the pointer in the tree and wraps it as a navigatable occurrence. */
    @Nullable
    private OccurenceInfo goToPointer(TodoItemNode pointer) {
      if (pointer == null) return null;
      myTodoTreeBuilder.select(pointer);
      return new OccurenceInfo(
        PsiNavigationSupport.getInstance()
                            .createNavigatable(myProject, pointer.getValue().getTodoItem().getFile().getVirtualFile(),
                                               pointer.getValue().getRangeMarker().getStartOffset()),
        -1, -1);
    }

    @Nullable
    private TodoItemNode getNextPointer() {
      TreePath path = myTree.getSelectionPath();
      if (path == null) {
        return null;
      }
      DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
      Object userObject = node.getUserObject();
      if (!(userObject instanceof NodeDescriptor)) {
        return null;
      }
      Object element = ((NodeDescriptor)userObject).getElement();
      TodoItemNode pointer;
      if (element instanceof TodoItemNode) {
        pointer = myTodoTreeBuilder.getNextPointer((TodoItemNode)element);
      }
      else {
        pointer = myTodoTreeBuilder.getFirstPointerForElement(element);
      }
      return pointer;
    }

    @Nullable
    private TodoItemNode getPreviousPointer() {
      TreePath path = myTree.getSelectionPath();
      if (path == null) {
        return null;
      }
      DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
      Object userObject = node.getUserObject();
      if (!(userObject instanceof NodeDescriptor)) {
        return null;
      }
      Object element = ((NodeDescriptor)userObject).getElement();
      TodoItemNode pointer;
      if (element instanceof TodoItemNode) {
        pointer = myTodoTreeBuilder.getPreviousPointer((TodoItemNode)element);
      }
      else {
        Object sibling = myTodoTreeBuilder.getPreviousSibling(element);
        if (sibling == null) {
          return null;
        }
        pointer = myTodoTreeBuilder.getLastPointerForElement(sibling);
      }
      return pointer;
    }
  }

  /** Toggle: group TODO items by package. Enabled only when a TodoPanel is in context. */
  public static final class MyShowPackagesAction extends ToggleAction {
    public MyShowPackagesAction() {
      super(IdeBundle.messagePointer("action.group.by.packages"), PlatformIcons.GROUP_BY_PACKAGES);
    }

    @Override
    public void update(@NotNull AnActionEvent e) {
      e.getPresentation().setEnabled(e.getData(TODO_PANEL_DATA_KEY) != null);
      super.update(e);
    }

    @Override
    public boolean isSelected(@NotNull AnActionEvent e) {
      TodoPanel todoPanel = e.getData(TODO_PANEL_DATA_KEY);
      return todoPanel != null && todoPanel.mySettings.arePackagesShown;
    }

    @Override
    public void setSelected(@NotNull AnActionEvent e, boolean state) {
      TodoPanel todoPanel = e.getData(TODO_PANEL_DATA_KEY);
      if (todoPanel != null) {
        todoPanel.mySettings.arePackagesShown = state;
        todoPanel.myTodoTreeBuilder.setShowPackages(state);
      }
    }
  }

  /** Toggle: group TODO items by module. */
  public static final class MyShowModulesAction extends ToggleAction {
    public MyShowModulesAction() {
      super(IdeBundle.messagePointer("action.group.by.modules"), AllIcons.Actions.GroupByModule);
    }

    @Override
    public void update(@NotNull AnActionEvent e) {
      e.getPresentation().setEnabled(e.getData(TODO_PANEL_DATA_KEY) != null);
      super.update(e);
    }

    @Override
    public boolean isSelected(@NotNull AnActionEvent e) {
      TodoPanel todoPanel = e.getData(TODO_PANEL_DATA_KEY);
      return todoPanel != null && todoPanel.mySettings.areModulesShown;
    }

    @Override
    public void setSelected(@NotNull AnActionEvent e, boolean state) {
      TodoPanel todoPanel = e.getData(TODO_PANEL_DATA_KEY);
      if (todoPanel != null) {
        todoPanel.mySettings.areModulesShown = state;
        todoPanel.myTodoTreeBuilder.setShowModules(state);
      }
    }
  }

  /** Toggle: flatten the package tree; only enabled while packages are shown. */
  public static final class MyFlattenPackagesAction extends ToggleAction {
    public MyFlattenPackagesAction() {
      super(IdeBundle.messagePointer("action.flatten.view"), PlatformIcons.FLATTEN_PACKAGES_ICON);
    }

    @Override
    public void update(@NotNull AnActionEvent e) {
      super.update(e);
      e.getPresentation().setText(" " + getTemplateText());
      TodoPanel todoPanel = e.getData(TODO_PANEL_DATA_KEY);
      e.getPresentation().setEnabled(todoPanel != null && todoPanel.mySettings.arePackagesShown);
    }

    @Override
    public boolean isSelected(@NotNull AnActionEvent e) {
      TodoPanel todoPanel = e.getData(TODO_PANEL_DATA_KEY);
      return todoPanel != null && todoPanel.mySettings.areFlattenPackages;
    }

    @Override
    public void setSelected(@NotNull AnActionEvent e, boolean state) {
      TodoPanel todoPanel = e.getData(TODO_PANEL_DATA_KEY);
      if (todoPanel != null) {
        todoPanel.mySettings.areFlattenPackages = state;
        todoPanel.myTodoTreeBuilder.setFlattenPackages(state);
      }
    }
  }

  /** Pauses/resumes tree updates depending on whether the panel is showing. */
  private final class MyVisibilityWatcher extends VisibilityWatcher {
    @Override
    public void visibilityChanged() {
      if (myProject.isOpen()) {
        PsiDocumentManager.getInstance(myProject).performWhenAllCommitted(
          () -> myTodoTreeBuilder.setUpdatable(isShowing()));
      }
    }
  }

  /** Toggle: show/hide the usage-preview pane. */
  private final class MyPreviewAction extends ToggleAction {

    MyPreviewAction() {
      super(IdeBundle.messagePointer("todo.panel.preview.source.action.text"), Presentation.NULL_STRING, AllIcons.Actions.PreviewDetails);
    }

    @Override
    public boolean isSelected(@NotNull AnActionEvent e) {
      return mySettings.showPreview;
    }

    @Override
    public void setSelected(@NotNull AnActionEvent e, boolean state) {
      mySettings.showPreview = state;
      myUsagePreviewPanel.setVisible(state);
      if (state) {
        updatePreviewPanel();
      }
    }
  }
}
/******************************************************************************* * Copyright 2012 The Regents of the University of California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package org.ohmage.domain; import java.util.ArrayList; import java.util.List; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.ohmage.domain.campaign.SurveyResponse; import org.ohmage.exception.DomainException; import org.ohmage.util.StringUtils; /** * This class represents a server's configuration including its name, version, * and state values. * * @author John Jenkins */ public class ServerConfig { /** * The key to use when creating/reading JSON for the application's name. */ public static final String JSON_KEY_APPLICATION_NAME = "application_name"; /** * The key to use when creating/reading JSON for the application's version. */ public static final String JSON_KEY_APPLICATION_VERSION = "application_version"; /** * The key to use when creating/reading JSON for the application's build. */ public static final String JSON_KEY_APPLICATION_BUILD = "application_build"; /** * The key to use when creating/reading JSON for the default survey * response privacy state for newly uploaded survey responses. 
*/ public static final String JSON_KEY_DEFAULT_SURVEY_RESPONSE_PRIVACY_STATE = "default_survey_response_sharing_state"; /** * The key to use when creating/reading JSON for the list of all survey * response privacy states. */ public static final String JSON_KEY_SURVEY_RESPONSE_PRIVACY_STATES = "survey_response_privacy_states"; /** * The key to use when creating/reading JSON for the server's default * campaign creation privilege. */ public static final String JSON_KEY_DEFAULT_CAMPAIGN_CREATION_PRIVILEGE = "default_campaign_creation_privilege"; /** * Whether or not Mobility is enabled on this server. */ public static final String JSON_KEY_MOBILITY_ENABLED = "mobility_enabled"; /** * The length of time for which an authentication token lives. */ public static final String JSON_KEY_AUTH_TOKEN_LIFETIME = "auth_token_lifetime"; /** * The maximum size of the request. */ public static final String JSON_KEY_MAXIMUM_REQUEST_SIZE = "maximum_request_size"; /** * The maximum size of a single parameter. */ public static final String JSON_KEY_MAXIMUM_PARAMETER_SIZE = "maximum_parameter_size"; /** * Whether or not users are allowed to self-register. */ public static final String JSON_KEY_SELF_REGISTRATION_ALLOWED = "self_registration_allowed"; private final String appName; private final String appVersion; private final String appBuild; private final boolean defaultCampaignCreationPrivilege; private final SurveyResponse.PrivacyState defaultSurveyResponsePrivacyState; private final List<SurveyResponse.PrivacyState> surveyResponsePrivacyStates; private final boolean mobilityEnabled; private final long authTokenLifetime; private final long maxRequestSize; private final long maxParamSize; private final boolean selfRegistrationAllowed; /** * Creates a new server configuration. * * @param appName The application's name. * * @param appVersion The application's version. * * @param appBuild The applications build. 
* * @param defaultSurveyResponsePrivacyState The default survey response * privacy state for newly * uploaded survey responses. * * @param surveyResponsePrivacyStates A list of all of the survey response * privacy states. * * @throws DomainException Thrown if any of the values are invalid or null. */ public ServerConfig( final String appName, final String appVersion, final String appBuild, final SurveyResponse.PrivacyState defaultSurveyResponsePrivacyState, final List<SurveyResponse.PrivacyState> surveyResponsePrivacyStates, final boolean defaultCampaignCreationPrivilege, final boolean mobilityEnabled, final long authTokenLifetime, final long maximumRequestSize, final long maximumParameterSize, final boolean selfRegistrationAllowed) throws DomainException { if(StringUtils.isEmptyOrWhitespaceOnly(appName)) { throw new DomainException( "The application name is null or whitespace only."); } else if(StringUtils.isEmptyOrWhitespaceOnly(appVersion)) { throw new DomainException( "The application version is null or whitespace only."); } else if(StringUtils.isEmptyOrWhitespaceOnly(appBuild)) { throw new DomainException( "The application build is null or whitespace only."); } else if(defaultSurveyResponsePrivacyState == null) { throw new DomainException( "The default survey response privacy state is null."); } else if(surveyResponsePrivacyStates == null) { throw new DomainException( "The list of default survey response privacy states is null."); } this.appName = appName; this.appVersion = appVersion; this.appBuild = appBuild; this.defaultCampaignCreationPrivilege = defaultCampaignCreationPrivilege; this.defaultSurveyResponsePrivacyState = defaultSurveyResponsePrivacyState; this.surveyResponsePrivacyStates = new ArrayList<SurveyResponse.PrivacyState>(surveyResponsePrivacyStates); this.mobilityEnabled = mobilityEnabled; this.authTokenLifetime = authTokenLifetime; maxRequestSize = maximumRequestSize; maxParamSize = maximumParameterSize; this.selfRegistrationAllowed = 
selfRegistrationAllowed; } /** * Creates a new server configuration from a JSONObject. * * @param serverConfigAsJson The information about the server as a * JSONObject. * * @throws DomainException Thrown if the JSONObject is null or if it is * missing any of the required keys. */ public ServerConfig( final JSONObject serverConfigAsJson) throws DomainException { if(serverConfigAsJson == null) { throw new DomainException( "The server configuration JSON is null."); } try { appName = serverConfigAsJson.getString(JSON_KEY_APPLICATION_NAME); } catch(JSONException e) { throw new DomainException( "The application name was missing from the JSON.", e); } try { appVersion = serverConfigAsJson.getString(JSON_KEY_APPLICATION_VERSION); } catch(JSONException e) { throw new DomainException( "The application version was missing from the JSON.", e); } try { appBuild = serverConfigAsJson.getString(JSON_KEY_APPLICATION_BUILD); } catch(JSONException e) { throw new DomainException( "The application name was missing from the JSON.", e); } try { defaultCampaignCreationPrivilege = Boolean.valueOf(serverConfigAsJson.getString(JSON_KEY_DEFAULT_CAMPAIGN_CREATION_PRIVILEGE)); } catch(JSONException e) { throw new DomainException( "The default campaign creation privilege was missing from the JSON.", e); } catch(IllegalArgumentException e) { throw new DomainException( "The default campaign creation privilege is not a valid boolean value.", e); } try { defaultSurveyResponsePrivacyState = SurveyResponse.PrivacyState.getValue( serverConfigAsJson.getString(JSON_KEY_DEFAULT_SURVEY_RESPONSE_PRIVACY_STATE) ); } catch(JSONException e) { throw new DomainException( "The application name was missing from the JSON.", e); } catch(IllegalArgumentException e) { throw new DomainException( "The default survey response privacy state is not a known survey response privacy state.", e); } try { JSONArray surveyResponsePrivacyStatesJson = serverConfigAsJson.getJSONArray(JSON_KEY_SURVEY_RESPONSE_PRIVACY_STATES); int 
numPrivacyStates = surveyResponsePrivacyStatesJson.length(); surveyResponsePrivacyStates = new ArrayList<SurveyResponse.PrivacyState>(numPrivacyStates); for(int i = 0; i < numPrivacyStates; i++) { surveyResponsePrivacyStates.add( SurveyResponse.PrivacyState.getValue( surveyResponsePrivacyStatesJson.getString(i) ) ); } } catch(JSONException e) { throw new DomainException("The application name was missing from the JSON.", e); } try { mobilityEnabled = serverConfigAsJson.getBoolean(JSON_KEY_MOBILITY_ENABLED); } catch(JSONException e) { throw new DomainException( "Whether or not Mobility is enabled is missing.", e); } try { authTokenLifetime = serverConfigAsJson.getLong(JSON_KEY_AUTH_TOKEN_LIFETIME); } catch(JSONException e) { throw new DomainException( "The authentication token's lifetime is missing.", e); } try { maxRequestSize = serverConfigAsJson.getLong(JSON_KEY_MAXIMUM_REQUEST_SIZE); } catch(JSONException e) { throw new DomainException( "The maximum request size is missing.", e); } try { maxParamSize = serverConfigAsJson.getLong(JSON_KEY_MAXIMUM_PARAMETER_SIZE); } catch(JSONException e) { throw new DomainException( "The maximum parameter size is missing.", e); } try { selfRegistrationAllowed = serverConfigAsJson.getBoolean(JSON_KEY_SELF_REGISTRATION_ALLOWED); } catch(JSONException e) { throw new DomainException( "The self registration flag is missing.", e); } } /** * Returns the application's name. * * @return The application's name. */ public final String getAppName() { return appName; } /** * Returns the application's version. * * @return The application's version. */ public final String getAppVersion() { return appVersion; } /** * Returns the application's build. * * @return The application's build. */ public final String getAppBuild() { return appBuild; } /** * Returns the default campaign creation privilege. * * @return The default campaign creation privilege. 
*/ public final boolean getDefaultCampaignCreationPrivilege() { return defaultCampaignCreationPrivilege; } /** * Returns the default survey response privacy state for newly uploaded * survey responses. * * @return The default survey response privacy state. */ public final SurveyResponse.PrivacyState getDefaultSurveyResponsePrivacyState() { return defaultSurveyResponsePrivacyState; } /** * Returns an array of all of the survey response privacy states. * * @return An array of all of the survey response privacy states. */ public final List<SurveyResponse.PrivacyState> getSurveyResponsePrivacyStates() { return new ArrayList<SurveyResponse.PrivacyState>(surveyResponsePrivacyStates); } /** * Returns whether or not Mobility is enabled. * * @return Whether or not mobility is enabled. */ public final boolean getMobilityEnabled() { return mobilityEnabled; } /** * Returns the maximum lifetime of a token in milliseconds unless * refreshed. * * @return The maximum lifetime of a token in milliseconds. */ public final long getAuthTokenLifetime() { return authTokenLifetime; } /** * Returns the maximum allowed size of a request in bytes. * * @return The maximum allowed size of a request in bytes. */ public final long getMaximumRequestSize() { return maxRequestSize; } /** * Returns the maximum allowed size of a single parameter in bytes. * * @return The maximum allowed size of a single parameter in bytes. */ public final long getMaximumParameterSize() { return maxParamSize; } /** * Returns whether or not self registration is allowed. * * @return Whether or not self registration is allowed. */ public final boolean getSelfRegistrationAllowed() { return selfRegistrationAllowed; } /** * Returns this server configuration as a JSONObject. * * @return This server configuration as a JSONObject. * * @throws JSONException Thrown if there is an error building the * JSONObject. 
*/ public JSONObject toJson() throws JSONException { JSONObject result = new JSONObject(); result.put(JSON_KEY_APPLICATION_NAME, appName); result.put(JSON_KEY_APPLICATION_VERSION, appVersion); result.put(JSON_KEY_APPLICATION_BUILD, appBuild); result.put(JSON_KEY_DEFAULT_CAMPAIGN_CREATION_PRIVILEGE, defaultCampaignCreationPrivilege); result.put(JSON_KEY_DEFAULT_SURVEY_RESPONSE_PRIVACY_STATE, defaultSurveyResponsePrivacyState); result.put(JSON_KEY_SURVEY_RESPONSE_PRIVACY_STATES, new JSONArray(surveyResponsePrivacyStates)); result.put(JSON_KEY_MOBILITY_ENABLED, mobilityEnabled); result.put(JSON_KEY_AUTH_TOKEN_LIFETIME, authTokenLifetime); result.put(JSON_KEY_MAXIMUM_REQUEST_SIZE, maxRequestSize); result.put(JSON_KEY_MAXIMUM_PARAMETER_SIZE, maxParamSize); result.put(JSON_KEY_SELF_REGISTRATION_ALLOWED, selfRegistrationAllowed); return result; } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapred; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.util.LinkedList; import java.util.List; import org.apache.hadoop.mapreduce.JobID; import org.apache.hadoop.mapreduce.test.system.JobInfo; /** * Concrete implementation of the JobInfo interface which is exposed to the * clients. * Look at {@link JobInfo} for further details. 
*/ class JobInfoImpl implements JobInfo { private List<String> blackListedTracker; private String historyUrl; private JobID id; private boolean setupLaunched; private boolean setupFinished; private boolean cleanupLaunched; private JobStatus status; private int runningMaps; private int runningReduces; private int waitingMaps; private int waitingReduces; private int finishedMaps; private int finishedReduces; private int numMaps; private int numReduces; private long finishTime; private long launchTime; private int numOfSlotsPerMap; private int numOfSlotsPerReduce; public JobInfoImpl() { id = new JobID(); status = new JobStatus(); blackListedTracker = new LinkedList<String>(); historyUrl = ""; } public JobInfoImpl( JobID id, boolean setupLaunched, boolean setupFinished, boolean cleanupLaunched, int runningMaps, int runningReduces, int waitingMaps, int waitingReduces, int finishedMaps, int finishedReduces, JobStatus status, String historyUrl, List<String> blackListedTracker, boolean isComplete, int numMaps, int numReduces) { super(); this.blackListedTracker = blackListedTracker; this.historyUrl = historyUrl; this.id = id; this.setupLaunched = setupLaunched; this.setupFinished = setupFinished; this.cleanupLaunched = cleanupLaunched; this.status = status; this.runningMaps = runningMaps; this.runningReduces = runningReduces; this.waitingMaps = waitingMaps; this.waitingReduces = waitingReduces; this.finishedMaps = finishedMaps; this.finishedReduces = finishedReduces; this.numMaps = numMaps; this.numReduces = numReduces; } @Override public List<String> getBlackListedTrackers() { return blackListedTracker; } @Override public String getHistoryUrl() { return historyUrl; } @Override public JobID getID() { return id; } @Override public JobStatus getStatus() { return status; } @Override public boolean isCleanupLaunched() { return cleanupLaunched; } @Override public boolean isSetupLaunched() { return setupLaunched; } @Override public boolean isSetupFinished() { return 
setupFinished; } @Override public int runningMaps() { return runningMaps; } @Override public int runningReduces() { return runningReduces; } @Override public int waitingMaps() { return waitingMaps; } @Override public int waitingReduces() { return waitingReduces; } @Override public int finishedMaps() { return finishedMaps; } @Override public int finishedReduces() { return finishedReduces; } @Override public int numMaps() { return numMaps; } @Override public int numReduces() { return numReduces; } public void setFinishTime(long finishTime) { this.finishTime = finishTime; } public void setLaunchTime(long launchTime) { this.launchTime = launchTime; } @Override public long getFinishTime() { return finishTime; } @Override public long getLaunchTime() { return launchTime; } public void setNumSlotsPerMap(int numOfSlotsPerMap) { this.numOfSlotsPerMap = numOfSlotsPerMap; } public void setNumSlotsPerReduce(int numOfSlotsPerReduce) { this.numOfSlotsPerReduce = numOfSlotsPerReduce; } @Override public int getNumSlotsPerMap() { return numOfSlotsPerMap; } @Override public int getNumSlotsPerReduce() { return numOfSlotsPerReduce; } @Override public void readFields(DataInput in) throws IOException { id.readFields(in); setupLaunched = in.readBoolean(); setupFinished = in.readBoolean(); cleanupLaunched = in.readBoolean(); status.readFields(in); runningMaps = in.readInt(); runningReduces = in.readInt(); waitingMaps = in.readInt(); waitingReduces = in.readInt(); historyUrl = in.readUTF(); int size = in.readInt(); for (int i = 0; i < size; i++) { blackListedTracker.add(in.readUTF()); } finishedMaps = in.readInt(); finishedReduces = in.readInt(); numMaps = in.readInt(); numReduces = in.readInt(); finishTime = in.readLong(); launchTime = in.readLong(); numOfSlotsPerMap = in.readInt(); numOfSlotsPerReduce = in.readInt(); } @Override public void write(DataOutput out) throws IOException { id.write(out); out.writeBoolean(setupLaunched); out.writeBoolean(setupFinished); 
out.writeBoolean(cleanupLaunched); status.write(out); out.writeInt(runningMaps); out.writeInt(runningReduces); out.writeInt(waitingMaps); out.writeInt(waitingReduces); out.writeUTF(historyUrl); out.writeInt(blackListedTracker.size()); for (String str : blackListedTracker) { out.writeUTF(str); } out.writeInt(finishedMaps); out.writeInt(finishedReduces); out.writeInt(numMaps); out.writeInt(numReduces); out.writeLong(finishTime); out.writeLong(launchTime); out.writeInt(numOfSlotsPerMap); out.writeInt(numOfSlotsPerReduce); } }
/*
 * Copyright 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.exoplayer2.transformer;

import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull;

import androidx.annotation.Nullable;
import com.google.android.exoplayer2.BaseRenderer;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.PlaybackException;
import com.google.android.exoplayer2.RendererCapabilities;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.source.SampleStream.ReadDataResult;
import com.google.android.exoplayer2.util.MediaClock;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.errorprone.annotations.ForOverride;
import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;

/**
 * Base renderer used during transformation: it pulls samples from the source,
 * pushes them through a {@link SamplePipeline}, and hands the pipeline's
 * output to a {@link MuxerWrapper}.
 */
/* package */ abstract class TransformerBaseRenderer extends BaseRenderer {

  protected final MuxerWrapper muxerWrapper;
  protected final TransformerMediaClock mediaClock;
  protected final TransformationRequest transformationRequest;
  protected final FallbackListener fallbackListener;

  // Set in onStarted()/onStopped(); render() is a no-op while false.
  protected boolean isRendererStarted;
  // Whether this renderer's output track format has been registered with the muxer.
  protected boolean muxerWrapperTrackAdded;
  // Whether the muxer has been told this track ended; drives isEnded().
  protected boolean muxerWrapperTrackEnded;
  // Renderer offset captured in onStreamChanged(); subtracted from sample
  // timestamps before they are queued to the pipeline.
  protected long streamOffsetUs;
  // Created lazily by subclasses via ensureConfigured(); never reset to null.
  protected @MonotonicNonNull SamplePipeline samplePipeline;

  public TransformerBaseRenderer(
      int trackType,
      MuxerWrapper muxerWrapper,
      TransformerMediaClock mediaClock,
      TransformationRequest transformationRequest,
      FallbackListener fallbackListener) {
    super(trackType);
    this.muxerWrapper = muxerWrapper;
    this.mediaClock = mediaClock;
    this.transformationRequest = transformationRequest;
    this.fallbackListener = fallbackListener;
  }

  /**
   * Returns whether the renderer supports the track type of the given format.
   *
   * @param format The format.
   * @return The {@link Capabilities} for this format.
   */
  @Override
  public final @Capabilities int supportsFormat(Format format) {
    // Only the track type is checked; codec-level support is determined later
    // when the sample pipeline is configured.
    return RendererCapabilities.create(
        MimeTypes.getTrackType(format.sampleMimeType) == getTrackType()
            ? C.FORMAT_HANDLED
            : C.FORMAT_UNSUPPORTED_TYPE);
  }

  @Override
  public final MediaClock getMediaClock() {
    return mediaClock;
  }

  @Override
  public final boolean isReady() {
    return isSourceReady();
  }

  @Override
  public final boolean isEnded() {
    return muxerWrapperTrackEnded;
  }

  @Override
  public final void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
    try {
      if (!isRendererStarted || isEnded() || !ensureConfigured()) {
        return;
      }
      // Work loop: drain pipeline output into the muxer, let the pipeline
      // process, then feed it new input. Stops once no branch makes progress.
      while (feedMuxerFromPipeline() || samplePipeline.processData() || feedPipelineFromInput()) {}
    } catch (TransformationException e) {
      throw wrapTransformationException(e);
    } catch (Muxer.MuxerException e) {
      throw wrapTransformationException(
          TransformationException.createForMuxer(
              e, TransformationException.ERROR_CODE_MUXING_FAILED));
    }
  }

  @Override
  protected final void onStreamChanged(Format[] formats, long startPositionUs, long offsetUs) {
    // Remember the renderer offset so input timestamps can be re-based in
    // feedPipelineFromInput().
    this.streamOffsetUs = offsetUs;
  }

  @Override
  protected final void onEnabled(boolean joining, boolean mayRenderStartOfStream) {
    muxerWrapper.registerTrack();
    fallbackListener.registerTrack();
    // Start this track's clock from zero.
    mediaClock.updateTimeForTrackType(getTrackType(), 0L);
  }

  @Override
  protected final void onStarted() {
    isRendererStarted = true;
  }

  @Override
  protected final void onStopped() {
    isRendererStarted = false;
  }

  @Override
  protected final void onReset() {
    if (samplePipeline != null) {
      samplePipeline.release();
    }
    muxerWrapperTrackAdded = false;
    muxerWrapperTrackEnded = false;
  }

  /**
   * Configures the sample pipeline if needed.
   *
   * @return Whether the pipeline is configured and rendering may proceed.
   */
  @ForOverride
  @EnsuresNonNullIf(expression = "samplePipeline", result = true)
  protected abstract boolean ensureConfigured() throws TransformationException;

  @RequiresNonNull({"samplePipeline", "#1.data"})
  protected void maybeQueueSampleToPipeline(DecoderInputBuffer inputBuffer)
      throws TransformationException {
    // NOTE(review): queueInputBuffer() takes no argument — presumably the
    // pipeline queues the buffer it most recently dequeued, and inputBuffer
    // is consumed by subclass overrides. Confirm against SamplePipeline.
    samplePipeline.queueInputBuffer();
  }

  /**
   * Attempts to write sample pipeline output data to the muxer.
   *
   * @return Whether it may be possible to write more data immediately by calling this method again.
   * @throws Muxer.MuxerException If a muxing problem occurs.
   * @throws TransformationException If a {@link SamplePipeline} problem occurs.
   */
  @RequiresNonNull("samplePipeline")
  private boolean feedMuxerFromPipeline() throws Muxer.MuxerException, TransformationException {
    if (!muxerWrapperTrackAdded) {
      // The muxer needs the output format before any sample can be written.
      @Nullable Format samplePipelineOutputFormat = samplePipeline.getOutputFormat();
      if (samplePipelineOutputFormat == null) {
        return false;
      }
      muxerWrapperTrackAdded = true;
      muxerWrapper.addTrackFormat(samplePipelineOutputFormat);
    }

    if (samplePipeline.isEnded()) {
      muxerWrapper.endTrack(getTrackType());
      muxerWrapperTrackEnded = true;
      return false;
    }

    @Nullable DecoderInputBuffer samplePipelineOutputBuffer = samplePipeline.getOutputBuffer();
    if (samplePipelineOutputBuffer == null) {
      return false;
    }

    // writeSample may refuse the sample (e.g. the muxer is not ready for this
    // track); in that case the output buffer is NOT released, so it is retried
    // on the next call.
    if (!muxerWrapper.writeSample(
        getTrackType(),
        checkStateNotNull(samplePipelineOutputBuffer.data),
        samplePipelineOutputBuffer.isKeyFrame(),
        samplePipelineOutputBuffer.timeUs)) {
      return false;
    }
    samplePipeline.releaseOutputBuffer();
    return true;
  }

  /**
   * Attempts to read input data and pass the input data to the sample pipeline.
   *
   * @return Whether it may be possible to read more data immediately by calling this method again.
   * @throws TransformationException If a {@link SamplePipeline} problem occurs.
   */
  @RequiresNonNull("samplePipeline")
  private boolean feedPipelineFromInput() throws TransformationException {
    @Nullable DecoderInputBuffer samplePipelineInputBuffer = samplePipeline.dequeueInputBuffer();
    if (samplePipelineInputBuffer == null) {
      return false;
    }

    @ReadDataResult
    int result = readSource(getFormatHolder(), samplePipelineInputBuffer, /* readFlags= */ 0);
    switch (result) {
      case C.RESULT_BUFFER_READ:
        samplePipelineInputBuffer.flip();
        if (samplePipelineInputBuffer.isEndOfStream()) {
          // Queue the end-of-stream buffer and stop feeding input.
          samplePipeline.queueInputBuffer();
          return false;
        }
        mediaClock.updateTimeForTrackType(getTrackType(), samplePipelineInputBuffer.timeUs);
        // Re-base the timestamp from renderer time to stream time.
        samplePipelineInputBuffer.timeUs -= streamOffsetUs;
        checkStateNotNull(samplePipelineInputBuffer.data);
        maybeQueueSampleToPipeline(samplePipelineInputBuffer);
        return true;
      case C.RESULT_FORMAT_READ:
        // The first format is delivered via the initial readSource call made
        // elsewhere; a change mid-stream is unsupported.
        throw new IllegalStateException("Format changes are not supported.");
      case C.RESULT_NOTHING_READ:
      default:
        return false;
    }
  }

  /**
   * Returns an {@link ExoPlaybackException} wrapping the {@link TransformationException}.
   *
   * <p>This temporary wrapping is needed due to the dependence on ExoPlayer's BaseRenderer. {@link
   * Transformer} extracts the {@link TransformationException} from this {@link
   * ExoPlaybackException} again.
   */
  private ExoPlaybackException wrapTransformationException(
      TransformationException transformationException) {
    return ExoPlaybackException.createForRenderer(
        transformationException,
        "Transformer",
        getIndex(),
        /* rendererFormat= */ null,
        C.FORMAT_HANDLED,
        /* isRecoverable= */ false,
        PlaybackException.ERROR_CODE_UNSPECIFIED);
  }
}
package gui.modal;

import java.util.ArrayList;
import java.util.List;

import data.CameraTimeline;
import data.Instrument;
import gui.headerarea.DoubleTextField;
import gui.misc.TweakingHelper;
import gui.root.RootPane;
import gui.styling.StyledCheckbox;
import javafx.geometry.Insets;
import javafx.geometry.Pos;
import javafx.scene.control.Label;
import javafx.scene.layout.HBox;
import javafx.scene.layout.VBox;
import javafx.scene.paint.Color;
import javafx.scene.text.Text;
import lombok.Getter;

/**
 * Modal view shown when the user creates a director shot. It collects a
 * name/description, shot counts, front/end padding, and the cameras and
 * instruments involved.
 */
public class DirectorShotCreationModalView extends ShotCreationModalView {

    /*
     * Tweakable styling variables.
     */
    // Default modal dimensions used by the convenience constructor.
    private static final int DEFAULT_WIDTH = 760;
    private static final int DEFAULT_HEIGHT = 550;

    /*
     * Other variables
     */
    private List<CameraTimeline> cameraTimelines;
    private ArrayList<Instrument> instruments;

    // Padding text fields, exposed to callers via Lombok getters.
    @Getter
    private DoubleTextField frontPaddingField;
    @Getter
    private DoubleTextField endPaddingField;

    /**
     * Constructor with default modal size.
     * @param rootPane Pane to display modal on top of
     * @param cameraTimelines Cameras in timeline
     * @param instruments the instruments that can be used
     */
    public DirectorShotCreationModalView(RootPane rootPane,
                                         List<CameraTimeline> cameraTimelines,
                                         ArrayList<Instrument> instruments) {
        this(rootPane, cameraTimelines, instruments, DEFAULT_WIDTH, DEFAULT_HEIGHT);
    }

    /**
     * Constructor.
     * @param rootPane Pane to display modal on top of
     * @param cameraTimelines Cameras in timeline
     * @param instruments the instruments that can be used
     * @param modalWidth Modal display width
     * @param modalHeight Modal display height
     */
    public DirectorShotCreationModalView(RootPane rootPane,
                                         List<CameraTimeline> cameraTimelines,
                                         ArrayList<Instrument> instruments,
                                         int modalWidth,
                                         int modalHeight) {
        super(rootPane, modalWidth, modalHeight);
        this.cameraTimelines = cameraTimelines;
        this.instruments = instruments;
        initializeCreationView();
    }

    /**
     * Builds the full modal layout and displays it.
     */
    private void initializeCreationView() {
        // Enforce a minimum size. NOTE(review): arguments are passed as
        // (height, width) and always use the defaults, even when a custom
        // modal size was requested — confirm against forceBounds' signature.
        forceBounds(DEFAULT_HEIGHT, DEFAULT_WIDTH);

        // Vertical root layout for the whole modal.
        this.rootPane = new VBox();

        initTitleLabel("Add a director shot...");

        // Horizontal area holding the text fields (left) and checkboxes (right).
        this.centerPane = new HBox(40.0);
        this.centerPane.setAlignment(Pos.CENTER);
        this.centerPane.setPadding(new Insets(0, TweakingHelper.GENERAL_PADDING, 0, 0));
        this.centerPane.setPrefHeight(TweakingHelper.GENERAL_SIZE);
        this.rootPane.getChildren().add(centerPane);

        initTextfields();

        // Right-hand column: camera and instrument checkbox groups.
        this.centerRightPane = new VBox();
        this.centerRightPane.setAlignment(Pos.CENTER);
        this.centerRightPane.setPrefHeight(TweakingHelper.GENERAL_SIZE);
        this.centerRightPane.setSpacing(40.0);
        this.centerPane.getChildren().add(centerRightPane);

        initCamCheckBoxes();
        initInstrumentCheckBoxes();

        initButtons();

        super.setModalView(this.rootPane);
        super.displayModal();
    }

    /**
     * Creates the text-field column: name/description, counts, and padding.
     */
    private void initTextfields() {
        VBox fieldColumn = getTextfieldBox();
        initNameDescriptionFields(fieldColumn);
        initCountTextfields(fieldColumn);
        initPaddingTextfields(fieldColumn);
        this.centerPane.getChildren().add(fieldColumn);
    }

    /**
     * Creates the before/after padding rows and adds them to the given pane.
     * @param content the pane in which they are located.
     */
    private void initPaddingTextfields(VBox content) {
        frontPaddingField = new DoubleTextField();
        endPaddingField = new DoubleTextField();
        content.getChildren().addAll(
                buildPaddingRow("Padding before shot:", frontPaddingField),
                buildPaddingRow("Padding after shot:", endPaddingField));
    }

    /**
     * Builds one labeled padding row initialized to "0.0".
     * @param labelText text for the row's label.
     * @param field the padding field to place in the row.
     * @return the assembled row.
     */
    private HBox buildPaddingRow(String labelText, DoubleTextField field) {
        Label rowLabel = new Label(labelText);
        field.setText("0.0");
        HBox row = new HBox(TweakingHelper.GENERAL_SPACING);
        row.getChildren().addAll(rowLabel, field);
        row.setAlignment(Pos.CENTER_RIGHT);
        return row;
    }

    /**
     * Creates one labeled checkbox per camera timeline, in a flowpane.
     */
    private void initCamCheckBoxes() {
        styleCamCheckBoxes();

        cameraCheckboxes = new ArrayList<>();
        for (CameraTimeline timeline : this.cameraTimelines) {
            cameraCheckboxes.add(new StyledCheckbox(timeline.getCamera().getName()));
        }

        Label label = new Label("Select cameras");
        this.checkboxPane.getChildren().addAll(cameraCheckboxes);
        // Only show the group when there is at least one camera.
        if (!cameraTimelines.isEmpty()) {
            this.centerRightPane.getChildren().addAll(label, this.checkboxPane);
        }
    }

    /**
     * Creates one labeled checkbox per instrument, in a flowpane.
     */
    private void initInstrumentCheckBoxes() {
        styleInstrumentCheckBoxes();

        instrumentCheckboxes = new ArrayList<>();
        for (Instrument instrument : this.instruments) {
            instrumentCheckboxes.add(new StyledCheckbox(instrument.getName()));
        }

        Label label = new Label("Select instruments");
        this.instrumentPane.getChildren().addAll(instrumentCheckboxes);
        // Only show the group when there is at least one instrument.
        if (!instruments.isEmpty()) {
            this.centerRightPane.getChildren().addAll(label, this.instrumentPane);
        }
    }

    /**
     * Displays an error message in the view.
     * @param errorString Error to be displayed.
     */
    public void displayError(String errorString) {
        Text message = new Text(errorString);
        message.setFill(Color.RED);
        // Insert just above the button row (the last child).
        int insertionIndex = this.rootPane.getChildren().size() - 1;
        this.rootPane.getChildren().add(insertionIndex, message);
    }
}
/**
 * Opensec OVAL - https://nakamura5akihito.github.io/
 * Copyright (C) 2015 Akihito Nakamura
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.opensec.oval.model.unix;

import io.opensec.oval.model.ComponentType;
import io.opensec.oval.model.ElementRef;
import io.opensec.oval.model.Family;
import io.opensec.oval.model.definitions.EntityObjectStringType;
import io.opensec.oval.model.definitions.Filter;
import io.opensec.oval.model.definitions.Set;
import io.opensec.oval.model.definitions.SystemObjectType;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;

/**
 * The runlevel object is used by a runlevel test
 * to define the specific service(s)/runlevel combination to be evaluated.
 *
 * @author Akihito Nakamura, AIST
 * @see <a href="http://oval.mitre.org/language/">OVAL Language</a>
 */
public class RunlevelObject
    extends SystemObjectType
{

    //TODO: XSD model.

    // Cardinalities follow the OVAL schema, noted as {min..max}.
    private Set set;
    //{1..1}

    private EntityObjectStringType service_name;
    //{1..1}

    private EntityObjectStringType runlevel;
    //{1..1}

    private final Collection<Filter> filter = new ArrayList<Filter>();
    //{0..*}

    /**
     * Constructor. Creates an object with no ID and version 0.
     */
    public RunlevelObject()
    {
        this( null, 0 );
    }

    /**
     * Constructor.
     *
     * @param id the OVAL object ID.
     * @param version the OVAL object version.
     */
    public RunlevelObject( final String id, final int version )
    {
        super( id, version );

        // Identify this object as the UNIX-family runlevel component.
        _oval_family = Family.UNIX;
        _oval_component = ComponentType.RUNLEVEL;
    }

    /**
     * Sets the set element, which combines other objects. {0..1} in practice;
     * when present, the entity elements are unused.
     *
     * @param set the set element.
     */
    public void setSet( final Set set )
    {
        this.set = set;
    }

    /** Returns the set element, or null if none was assigned. */
    public Set getSet()
    {
        return set;
    }

    /**
     * Sets the service_name entity: the name of the service to check.
     *
     * @param service_name the service name entity.
     */
    public void setServiceName( final EntityObjectStringType service_name )
    {
        this.service_name = service_name;
    }

    /** Returns the service_name entity, or null if none was assigned. */
    public EntityObjectStringType getServiceName()
    {
        return service_name;
    }

    /**
     * Sets the runlevel entity: the runlevel to check the service against.
     *
     * @param runlevel the runlevel entity.
     */
    public void setRunlevel( final EntityObjectStringType runlevel )
    {
        this.runlevel = runlevel;
    }

    /** Returns the runlevel entity, or null if none was assigned. */
    public EntityObjectStringType getRunlevel()
    {
        return runlevel;
    }

    /**
     * Replaces the filters with the given collection.
     * A null or empty argument clears the filters.
     *
     * @param filters the new filters.
     */
    public void setFilter( final Collection<? extends Filter> filters )
    {
        // Guard against self-assignment: clearing first would empty the source.
        if (filter != filters) {
            filter.clear();
            if (filters != null && !filters.isEmpty()) {
                filter.addAll( filters );
            }
        }
    }

    /**
     * Appends a filter; null is ignored.
     *
     * @param filter the filter to add.
     * @return true if the filter was added.
     */
    public boolean addFilter( final Filter filter )
    {
        if (filter == null) {
            return false;
        }

        return this.filter.add( filter );
    }

    /** Returns the live (modifiable) collection of filters. */
    public Collection<Filter> getFilter()
    {
        return filter;
    }

    /** Returns an iterator over the filters. */
    public Iterator<Filter> iterateFilter()
    {
        return filter.iterator();
    }

    //*********************************************************************
    //  DefinitionsElement
    //*********************************************************************

    @Override
    public Collection<ElementRef> ovalGetElementRef()
    {
        Collection<ElementRef> ref_list = new ArrayList<ElementRef>();

        // NOTE: the set element is intentionally not included here,
        // matching the original behavior.
        ref_list.add( getServiceName() );
        ref_list.add( getRunlevel() );
        ref_list.addAll( getFilter() );

        return ref_list;
    }

    //**************************************************************
    //  java.lang.Object
    //**************************************************************

    // equals/hashCode delegate to SystemObjectType; equality is therefore
    // based on the superclass identity, not on the entity fields.
    @Override
    public int hashCode()
    {
        return super.hashCode();
    }

    @Override
    public boolean equals( final Object obj )
    {
        if (!(obj instanceof RunlevelObject)) {
            return false;
        }

        return super.equals( obj );
    }

    @Override
    public String toString()
    {
        return "runlevel_object[" + super.toString()
            + ", set=" + getSet()
            + ", service_name=" + getServiceName()
            + ", runlevel=" + getRunlevel()
            + ", filter=" + getFilter()
            + "]";
    }

}
//RunlevelObject
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client;

import org.apache.http.HttpHost;

import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.TreeSet;

/**
 * Metadata about an {@link HttpHost} running Elasticsearch.
 */
public class Node {
    /** Primary contact address that this node advertises. */
    private final HttpHost host;
    /**
     * Every address the node listens on; useful for locating a node by any
     * of its bound addresses.
     */
    private final Set<HttpHost> boundHosts;
    /** The node's configured {@code node.name}, or {@code null} if unknown. */
    private final String name;
    /** Elasticsearch version the node runs, or {@code null} if unknown. */
    private final String version;
    /** The node's roles, or {@code null} if unknown. */
    private final Roles roles;
    /** Attributes declared on the node. */
    private final Map<String, List<String>> attributes;

    /**
     * Create a {@linkplain Node} with metadata. All parameters except
     * {@code host} are nullable and implementations of {@link NodeSelector}
     * need to decide what to do in their absence.
     */
    public Node(HttpHost host, Set<HttpHost> boundHosts, String name, String version,
            Roles roles, Map<String, List<String>> attributes) {
        if (host == null) {
            throw new IllegalArgumentException("host cannot be null");
        }
        this.host = host;
        this.boundHosts = boundHosts;
        this.name = name;
        this.version = version;
        this.roles = roles;
        this.attributes = attributes;
    }

    /**
     * Create a {@linkplain Node} without any metadata.
     */
    public Node(HttpHost host) {
        this(host, null, null, null, null, null);
    }

    /** Contact information for the host. */
    public HttpHost getHost() {
        return host;
    }

    /**
     * Every address the node listens on; useful for locating a node by any
     * of its bound addresses.
     */
    public Set<HttpHost> getBoundHosts() {
        return boundHosts;
    }

    /** The {@code node.name} of the node. */
    public String getName() {
        return name;
    }

    /**
     * Elasticsearch version the node runs, or {@code null} if unknown.
     */
    public String getVersion() {
        return version;
    }

    /**
     * Roles held by the Elasticsearch process, or {@code null} if unknown.
     */
    public Roles getRoles() {
        return roles;
    }

    /** Attributes declared on the node. */
    public Map<String, List<String>> getAttributes() {
        return attributes;
    }

    @Override
    public String toString() {
        StringBuilder description = new StringBuilder("[host=").append(host);
        if (boundHosts != null) {
            description.append(", bound=").append(boundHosts);
        }
        if (name != null) {
            description.append(", name=").append(name);
        }
        if (version != null) {
            description.append(", version=").append(version);
        }
        if (roles != null) {
            description.append(", roles=").append(roles);
        }
        if (attributes != null) {
            description.append(", attributes=").append(attributes);
        }
        return description.append(']').toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        Node that = (Node) obj;
        return host.equals(that.host)
            && Objects.equals(boundHosts, that.boundHosts)
            && Objects.equals(name, that.name)
            && Objects.equals(version, that.version)
            && Objects.equals(roles, that.roles)
            && Objects.equals(attributes, that.attributes);
    }

    @Override
    public int hashCode() {
        return Objects.hash(host, boundHosts, name, version, roles, attributes);
    }

    /**
     * Role information about an Elasticsearch process.
     */
    public static final class Roles {

        private final Set<String> roles;

        public Roles(final Set<String> roles) {
            // Sorted copy so toString output is stable.
            this.roles = new TreeSet<>(roles);
        }

        /**
         * Returns whether or not the node <strong>could</strong> be elected master.
         */
        public boolean isMasterEligible() {
            return roles.contains("master");
        }

        /**
         * Returns whether or not the node stores data.
         * @deprecated use {@link #hasDataRole()} or {@link #canContainData()}
         */
        @Deprecated
        public boolean isData() {
            return roles.contains("data");
        }

        /** @return true if node has the "data" role */
        public boolean hasDataRole() {
            return roles.contains("data");
        }

        /** @return true if node has the "data_content" role */
        public boolean hasDataContentRole() {
            return roles.contains("data_content");
        }

        /** @return true if node has the "data_hot" role */
        public boolean hasDataHotRole() {
            return roles.contains("data_hot");
        }

        /** @return true if node has the "data_warm" role */
        public boolean hasDataWarmRole() {
            return roles.contains("data_warm");
        }

        /** @return true if node has the "data_cold" role */
        public boolean hasDataColdRole() {
            return roles.contains("data_cold");
        }

        /** @return true if node has the "data_frozen" role */
        public boolean hasDataFrozenRole() {
            return roles.contains("data_frozen");
        }

        /** @return true if node stores any type of data */
        public boolean canContainData() {
            // The plain "data" role, or any tiered role such as "data_hot".
            return hasDataRole() || roles.stream().anyMatch(r -> r.startsWith("data_"));
        }

        /**
         * Returns whether or not the node runs ingest pipelines.
         */
        public boolean isIngest() {
            return roles.contains("ingest");
        }

        @Override
        public String toString() {
            return String.join(",", roles);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null || getClass() != obj.getClass()) {
                return false;
            }
            Roles that = (Roles) obj;
            return roles.equals(that.roles);
        }

        @Override
        public int hashCode() {
            return roles.hashCode();
        }
    }
}
/*
// Licensed to DynamoBI Corporation (DynamoBI) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  DynamoBI licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at

//   http://www.apache.org/licenses/LICENSE-2.0

// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.
*/
package net.sf.farrago.type;

import java.sql.*;

import java.util.List;

import org.eigenbase.jdbc4.*;
import org.eigenbase.reltype.*;
import org.eigenbase.sql.type.*;
import org.eigenbase.util.Util;


/**
 * Helper base class for implementing Jdbc metadata interfaces.
 *
 * <p>Field ordinals throughout this class are 1-based, following JDBC
 * convention.
 *
 * @author John V. Sichi
 * @version $Id$
 */
public class FarragoJdbcMetaDataImpl
    extends Unwrappable
{
    //~ Instance fields --------------------------------------------------------

    // Row type describing the columns this metadata object reports on.
    protected final RelDataType rowType;
    // One origin (catalog/schema/table/column path) per field of rowType.
    private final List<List<String>> fieldOrigins;

    //~ Constructors -----------------------------------------------------------

    /**
     * Creates a FarragoJdbcMetaDataImpl.
     *
     * @param rowType Type info to return
     * @param fieldOrigins Origin of each field in column of catalog object
     */
    protected FarragoJdbcMetaDataImpl(
        RelDataType rowType,
        List<List<String>> fieldOrigins)
    {
        this.rowType = rowType;
        this.fieldOrigins = fieldOrigins;
        // Sanity checks (assertions only; not enforced in production).
        assert rowType != null;
        assert fieldOrigins != null;
        assert fieldOrigins.size() == rowType.getFieldCount()
            : "field origins " + fieldOrigins
            + " have different count than row type " + rowType;
    }

    //~ Methods ----------------------------------------------------------------

    /**
     * Returns the declared (possibly DISTINCT) type of a field.
     *
     * @param fieldOrdinal 1-based field ordinal
     */
    public RelDataType getFieldNamedType(int fieldOrdinal)
    {
        return rowType.getFields()[fieldOrdinal - 1].getType();
    }

    /**
     * Returns the effective type of a field, unwrapping DISTINCT types to
     * their underlying predefined type.
     *
     * @param fieldOrdinal 1-based field ordinal
     */
    public RelDataType getFieldType(int fieldOrdinal)
    {
        RelDataType namedType = getFieldNamedType(fieldOrdinal);
        if (namedType.getSqlTypeName() == SqlTypeName.DISTINCT) {
            // for most metadata calls, report information about the
            // predefined type on which the distinct type is based
            return namedType.getFields()[0].getType();
        } else {
            return namedType;
        }
    }

    /**
     * Returns the name of a field.
     *
     * @param fieldOrdinal 1-based field ordinal
     */
    public String getFieldName(int fieldOrdinal)
    {
        return rowType.getFields()[fieldOrdinal - 1].getName();
    }

    /** Returns the number of fields in the row type. */
    public int getFieldCount()
    {
        return rowType.getFieldCount();
    }

    /**
     * Returns the Java class name corresponding to a field's JDBC type,
     * per the JDBC type-mapping tables; "" when no mapping applies.
     *
     * @param fieldOrdinal 1-based field ordinal
     */
    public String getFieldClassName(int fieldOrdinal)
    {
        int type = getFieldJdbcType(fieldOrdinal);
        switch (type) {
        case Types.ARRAY:
            return "java.sql.Array";
        case Types.BIGINT:
            return "java.lang.Long";
        case Types.BINARY:
            // JVM binary name for byte[]
            return "[B";
        case Types.BIT:
            return "java.lang.Boolean";
        case Types.BLOB:
            return "java.sql.Blob";
        case Types.BOOLEAN:
            return "java.lang.Boolean";
        case Types.CHAR:
            return "java.lang.String";
        case Types.CLOB:
            return "java.sql.Clob";
        case Types.DATALINK:
            return "";
        case Types.DATE:
            return "java.sql.Date";
        case Types.DECIMAL:
            return "java.math.BigDecimal";
        case Types.DISTINCT:
            // REVIEW:  is this correct
            return "java.lang.Object";
        case Types.DOUBLE:
            return "java.lang.Double";
        case Types.FLOAT:
            return "java.lang.Double";
        case Types.INTEGER:
            return "java.lang.Integer";
        case Types.JAVA_OBJECT:
            return
"java.lang.Object"; case Types.LONGVARBINARY: return "[B"; case Types.LONGVARCHAR: return "java.lang.String"; case Types.NULL: // REVIEW: is this correct return "java.lang.Object"; case Types.NUMERIC: return "java.math.BigDecimal"; case Types.OTHER: return "java.lang.Object"; case Types.REAL: return "java.lang.Float"; case Types.REF: return "java.sql.Ref"; case Types.SMALLINT: return "java.lang.Short"; case Types.STRUCT: return "java.sql.Struct"; case Types.TIME: return "java.sql.Time"; case Types.TIMESTAMP: return "java.sql.Timestamp"; case Types.TINYINT: return "java.lang.Byte"; case Types.VARBINARY: return "[B"; case Types.VARCHAR: return "java.lang.String"; } return ""; } public int getFieldJdbcType(int fieldOrdinal) { RelDataType type = getFieldNamedType(fieldOrdinal); SqlTypeName typeName = type.getSqlTypeName(); if (typeName == null) { return Types.OTHER; } return typeName.getJdbcOrdinal(); } public String getFieldTypeName(int fieldOrdinal) { RelDataType type = getFieldNamedType(fieldOrdinal); SqlTypeName typeName = type.getSqlTypeName(); if (typeName == null) { return type.toString(); } switch (typeName) { case STRUCTURED: case DISTINCT: return type.getSqlIdentifier().toString(); case INTERVAL_DAY_TIME: case INTERVAL_YEAR_MONTH: return type.toString(); } return typeName.name(); } public int getFieldPrecision(int fieldOrdinal) { RelDataType type = getFieldType(fieldOrdinal); return type.getPrecision(); } public int getFieldScale(int fieldOrdinal) { RelDataType type = getFieldType(fieldOrdinal); SqlTypeName typeName = type.getSqlTypeName(); if (typeName == null) { return 0; } if (typeName.allowsPrecScale(true, true)) { return type.getScale(); } else { return 0; } } public int getFieldDisplaySize(int column) { int precision = getFieldPrecision(column); int type = getFieldJdbcType(column); switch (type) { case Types.BOOLEAN: // 5 for max(strlen("true"),strlen("false")) return 5; case Types.DATE: // 10 for strlen("yyyy-mm-dd") return 10; case Types.TIME: if 
(precision == 0) { // 8 for strlen("hh:mm:ss") return 8; } else { // 1 extra for decimal point return 9 + precision; } case Types.TIMESTAMP: if (precision == 0) { // 19 for strlen("yyyy-mm-dd hh:mm:ss") return 19; } else { // 1 extra for decimal point return 20 + precision; } case Types.REAL: case Types.FLOAT: return 13; case Types.DOUBLE: return 22; default: // TODO: adjust for numeric formatting, etc. // REVIEW: scientific notation? what types of numeric formatting? return precision; } } public String getFieldCatalogName(int fieldOrdinal) { return getFieldOrigin(fieldOrdinal, 0); } public String getFieldSchemaName(int fieldOrdinal) { return getFieldOrigin(fieldOrdinal, 1); } public String getFieldTableName(int fieldOrdinal) { return getFieldOrigin(fieldOrdinal, 2); } public String getFieldColumnName(int fieldOrdinal) { return getFieldOrigin(fieldOrdinal, 3); } private String getFieldOrigin(int fieldOrdinal, int index) { final List<String> list = fieldOrigins.get(fieldOrdinal - 1); if (list == null) { return ""; // per JDBC spec: 'Return "" if not applicable' } if (list.size() < 4) { index -= (4 - list.size()); } if (index < 0) { return ""; } final String name = list.get(index); if (name == null) { return ""; } return name; } public int isFieldNullable(int fieldOrdinal) { RelDataType type = getFieldType(fieldOrdinal); return type.isNullable() ? 
ResultSetMetaData.columnNullable : ResultSetMetaData.columnNoNulls; } public boolean isFieldAutoIncrement(int fieldOrdinal) { return false; } public boolean isFieldCaseSensitive(int fieldOrdinal) { RelDataType type = getFieldType(fieldOrdinal); return SqlTypeUtil.inCharFamily(type); } public boolean isFieldSearchable(int fieldOrdinal) { RelDataType type = getFieldType(fieldOrdinal); return RelDataTypeComparability.None != type.getComparability(); } public boolean isFieldSigned(int fieldOrdinal) { RelDataType type = getFieldType(fieldOrdinal); if (SqlTypeUtil.isNumeric(type)) { return true; } else { return false; } } public boolean isFieldCurrency(int fieldOrdinal) { return false; } public boolean isFieldReadOnly(int fieldOrdinal) { return true; } public boolean isFieldWritable(int fieldOrdinal) { return false; } public boolean isFieldDefinitelyWritable(int fieldOrdinal) { return false; } } // End FarragoJdbcMetaDataImpl.java
/*
 * Copyright 2015 Realm Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.realm;

import android.content.Context;

import java.io.File;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;

import javax.annotation.Nullable;

import io.realm.annotations.RealmModule;
import io.realm.exceptions.RealmException;
import io.realm.exceptions.RealmFileException;
import io.realm.internal.OsRealmConfig;
import io.realm.internal.RealmCore;
import io.realm.internal.RealmProxyMediator;
import io.realm.internal.Util;
import io.realm.internal.modules.CompositeMediator;
import io.realm.internal.modules.FilterableMediator;
import io.realm.rx.RealmObservableFactory;
import io.realm.rx.RxObservableFactory;

/**
 * A RealmConfiguration is used to setup a specific Realm instance.
 * <p>
 * Instances of a RealmConfiguration can only created by using the {@link io.realm.RealmConfiguration.Builder} and calling
 * its {@link io.realm.RealmConfiguration.Builder#build()} method.
 * <p>
 * A commonly used RealmConfiguration can easily be accessed by first saving it as
 * {@link Realm#setDefaultConfiguration(RealmConfiguration)} and then using {@link io.realm.Realm#getDefaultInstance()}.
 * <p>
 * A minimal configuration can be created using:
 * <p>
 * {@code RealmConfiguration config = new RealmConfiguration.Builder().build()}
 * <p>
 * This will create a RealmConfiguration with the following properties.
 * <ul>
 * <li>Realm file is called "default.realm"</li>
 * <li>It is saved in Context.getFilesDir()</li>
 * <li>It has its schema version set to 0.</li>
 * </ul>
 */
public class RealmConfiguration {

    public static final String DEFAULT_REALM_NAME = "default.realm";
    // Encryption keys must be exactly this many bytes (AES-256 key + metadata).
    public static final int KEY_LENGTH = 64;

    private static final Object DEFAULT_MODULE;
    protected static final RealmProxyMediator DEFAULT_MODULE_MEDIATOR;
    // Lazily resolved by isRxJavaAvailable(); null means "not yet checked".
    private static Boolean rxJavaAvailable;

    static {
        DEFAULT_MODULE = Realm.getDefaultModule();
        if (DEFAULT_MODULE != null) {
            final RealmProxyMediator mediator = getModuleMediator(DEFAULT_MODULE.getClass().getCanonicalName());
            if (!mediator.transformerApplied()) {
                // Fail fast at class-load time if the bytecode transformer did not run.
                throw new ExceptionInInitializerError("RealmTransformer doesn't seem to be applied." +
                        " Please update the project configuration to use the Realm Gradle plugin." +
                        " See https://realm.io/news/android-installation-change/");
            }
            DEFAULT_MODULE_MEDIATOR = mediator;
        } else {
            DEFAULT_MODULE_MEDIATOR = null;
        }
    }

    private final File realmDirectory;
    private final String realmFileName;
    private final String canonicalPath;
    private final String assetFilePath;
    private final byte[] key;
    private final long schemaVersion;
    private final RealmMigration migration;
    private final boolean deleteRealmIfMigrationNeeded;
    private final OsRealmConfig.Durability durability;
    private final RealmProxyMediator schemaMediator;
    private final RxObservableFactory rxObservableFactory;
    private final Realm.Transaction initialDataTransaction;
    private final boolean readOnly;
    private final CompactOnLaunchCallback compactOnLaunch;

    /**
     * Whether this RealmConfiguration is intended to open a
     * recovery Realm produced after an offline/online client reset.
     */
    private final boolean isRecoveryConfiguration;

    // We need to enumerate all parameters since SyncConfiguration and RealmConfiguration supports different
    // subsets of them.
    protected RealmConfiguration(@Nullable File realmDirectory,
            @Nullable String realmFileName,
            String canonicalPath,
            @Nullable String assetFilePath,
            @Nullable byte[] key,
            long schemaVersion,
            @Nullable RealmMigration migration,
            boolean deleteRealmIfMigrationNeeded,
            OsRealmConfig.Durability durability,
            RealmProxyMediator schemaMediator,
            @Nullable RxObservableFactory rxObservableFactory,
            @Nullable Realm.Transaction initialDataTransaction,
            boolean readOnly,
            @Nullable CompactOnLaunchCallback compactOnLaunch,
            boolean isRecoveryConfiguration) {
        this.realmDirectory = realmDirectory;
        this.realmFileName = realmFileName;
        this.canonicalPath = canonicalPath;
        this.assetFilePath = assetFilePath;
        this.key = key;
        this.schemaVersion = schemaVersion;
        this.migration = migration;
        this.deleteRealmIfMigrationNeeded = deleteRealmIfMigrationNeeded;
        this.durability = durability;
        this.schemaMediator = schemaMediator;
        this.rxObservableFactory = rxObservableFactory;
        this.initialDataTransaction = initialDataTransaction;
        this.readOnly = readOnly;
        this.compactOnLaunch = compactOnLaunch;
        this.isRecoveryConfiguration = isRecoveryConfiguration;
    }

    /**
     * @return the directory the Realm file is stored in, or {@code null} if not set.
     */
    public File getRealmDirectory() {
        return realmDirectory;
    }

    /**
     * @return the filename of the Realm file (without directory).
     */
    public String getRealmFileName() {
        return realmFileName;
    }

    /**
     * @return a defensive copy of the encryption key, or {@code null} if the Realm is unencrypted.
     */
    public byte[] getEncryptionKey() {
        return key == null ? null : Arrays.copyOf(key, key.length);
    }

    /**
     * @return the schema version of this configuration.
     */
    public long getSchemaVersion() {
        return schemaVersion;
    }

    /**
     * @return the migration to run when a schema mismatch is detected, or {@code null}.
     */
    public RealmMigration getMigration() {
        return migration;
    }

    /**
     * @return {@code true} if the Realm file should be deleted instead of migrated.
     */
    public boolean shouldDeleteRealmIfMigrationNeeded() {
        return deleteRealmIfMigrationNeeded;
    }

    /**
     * @return the durability (on-disk vs. in-memory) of this configuration.
     */
    public OsRealmConfig.Durability getDurability() {
        return durability;
    }

    /**
     * Returns the mediator instance of schema which is defined by this configuration.
     *
     * @return the mediator of the schema.
     */
    // Protected for testing with mockito.
    protected RealmProxyMediator getSchemaMediator() {
        return schemaMediator;
    }

    /**
     * Returns the transaction instance with initial data.
     *
     * @return the initial data transaction.
     */
    Realm.Transaction getInitialDataTransaction() {
        return initialDataTransaction;
    }

    /**
     * Indicates if there is available asset file for copy action.
     *
     * @return {@code true} if there is asset file, {@code false} otherwise.
     */
    boolean hasAssetFile() {
        return !Util.isEmptyString(assetFilePath);
    }

    /**
     * Returns the path to the Realm asset file.
     *
     * @return path to the asset file relative to the asset directory.
     */
    String getAssetFilePath() {
        return assetFilePath;
    }

    /**
     * Returns a callback to determine if the Realm file should be compacted before being returned to the user.
     *
     * @return a callback called when opening a Realm for the first time during the life of a process to determine if
     * it should be compacted before being returned to the user. It is passed the total file size (data + free space)
     * and the total bytes used by data in the file.
     */
    public CompactOnLaunchCallback getCompactOnLaunchCallback() {
        return compactOnLaunch;
    }

    /**
     * Returns the unmodifiable {@link Set} of model classes that make up the schema for this Realm.
     *
     * @return unmodifiable {@link Set} of model classes.
     */
    public Set<Class<? extends RealmModel>> getRealmObjectClasses() {
        return schemaMediator.getModelClasses();
    }

    /**
     * Returns the absolute path to where the Realm file will be saved.
     *
     * @return the absolute path to the Realm file defined by this configuration.
     */
    public String getPath() {
        return canonicalPath;
    }

    /**
     * Checks if the Realm file defined by this configuration already exists.
     * <p>
     * WARNING: This method is just a point-in-time check. Unless protected by external synchronization another
     * thread or process might have created or deleted the Realm file right after this method has returned.
     *
     * @return {@code true} if the Realm file exists, {@code false} otherwise.
     */
    boolean realmExists() {
        return new File(canonicalPath).exists();
    }

    /**
     * Returns the {@link RxObservableFactory} that is used to create Rx Observables from Realm objects.
     *
     * @return the factory instance used to create Rx Observables.
     * @throws UnsupportedOperationException if the required RxJava framework is not on the classpath.
     */
    public RxObservableFactory getRxFactory() {
        // Since RxJava doesn't exist, rxObservableFactory is not initialized.
        if (rxObservableFactory == null) {
            throw new UnsupportedOperationException("RxJava seems to be missing from the classpath. " +
                    "Remember to add it as a compile dependency." +
                    " See https://realm.io/docs/java/latest/#rxjava for more details.");
        }
        return rxObservableFactory;
    }

    /**
     * Returns whether this Realm is read-only or not. Read-only Realms cannot be modified and will throw an
     * {@link IllegalStateException} if {@link Realm#beginTransaction()} is called on it.
     *
     * @return {@code true} if this Realm is read only, {@code false} if not.
     */
    public boolean isReadOnly() {
        return readOnly;
    }

    /**
     * @return {@code true} if this configuration is intended to open a backup Realm (as a result of a client reset).
     * @see <a href="https://realm.io/docs/java/latest/api/io/realm/ClientResetRequiredError.html">ClientResetRequiredError</a>
     */
    public boolean isRecoveryConfiguration() {
        return isRecoveryConfiguration;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }

        RealmConfiguration that = (RealmConfiguration) obj;

        // Compare cheap primitive fields first, then object fields.
        if (schemaVersion != that.schemaVersion) {
            return false;
        }
        if (deleteRealmIfMigrationNeeded != that.deleteRealmIfMigrationNeeded) {
            return false;
        }
        if (readOnly != that.readOnly) {
            return false;
        }
        if (isRecoveryConfiguration != that.isRecoveryConfiguration) {
            return false;
        }
        if (realmDirectory != null ? !realmDirectory.equals(that.realmDirectory) : that.realmDirectory != null) {
            return false;
        }
        if (realmFileName != null ? !realmFileName.equals(that.realmFileName) : that.realmFileName != null) {
            return false;
        }
        if (!canonicalPath.equals(that.canonicalPath)) {
            return false;
        }
        if (assetFilePath != null ? !assetFilePath.equals(that.assetFilePath) : that.assetFilePath != null) {
            return false;
        }
        if (!Arrays.equals(key, that.key)) {
            return false;
        }
        if (migration != null ? !migration.equals(that.migration) : that.migration != null) {
            return false;
        }
        if (durability != that.durability) {
            return false;
        }
        if (!schemaMediator.equals(that.schemaMediator)) {
            return false;
        }
        if (rxObservableFactory != null
                ? !rxObservableFactory.equals(that.rxObservableFactory)
                : that.rxObservableFactory != null) {
            return false;
        }
        if (initialDataTransaction != null
                ? !initialDataTransaction.equals(that.initialDataTransaction)
                : that.initialDataTransaction != null) {
            return false;
        }
        return compactOnLaunch != null ? compactOnLaunch.equals(that.compactOnLaunch) : that.compactOnLaunch == null;
    }

    @Override
    public int hashCode() {
        // Must stay consistent with equals(): every field compared there is folded in here.
        int result = realmDirectory != null ? realmDirectory.hashCode() : 0;
        result = 31 * result + (realmFileName != null ? realmFileName.hashCode() : 0);
        result = 31 * result + canonicalPath.hashCode();
        result = 31 * result + (assetFilePath != null ? assetFilePath.hashCode() : 0);
        result = 31 * result + Arrays.hashCode(key);
        result = 31 * result + (int) (schemaVersion ^ (schemaVersion >>> 32));
        result = 31 * result + (migration != null ? migration.hashCode() : 0);
        result = 31 * result + (deleteRealmIfMigrationNeeded ? 1 : 0);
        result = 31 * result + durability.hashCode();
        result = 31 * result + schemaMediator.hashCode();
        result = 31 * result + (rxObservableFactory != null ? rxObservableFactory.hashCode() : 0);
        result = 31 * result + (initialDataTransaction != null ? initialDataTransaction.hashCode() : 0);
        result = 31 * result + (readOnly ? 1 : 0);
        result = 31 * result + (compactOnLaunch != null ? compactOnLaunch.hashCode() : 0);
        result = 31 * result + (isRecoveryConfiguration ? 1 : 0);
        return result;
    }

    // Creates the mediator that defines the current schema.
    protected static RealmProxyMediator createSchemaMediator(Set<Object> modules,
            Set<Class<? extends RealmModel>> debugSchema) {

        // If using debug schema, uses special mediator.
        if (debugSchema.size() > 0) {
            return new FilterableMediator(DEFAULT_MODULE_MEDIATOR, debugSchema);
        }

        // If only one module, uses that mediator directly.
        if (modules.size() == 1) {
            return getModuleMediator(modules.iterator().next().getClass().getCanonicalName());
        }

        // Otherwise combines all mediators.
        RealmProxyMediator[] mediators = new RealmProxyMediator[modules.size()];
        int i = 0;
        for (Object module : modules) {
            mediators[i] = getModuleMediator(module.getClass().getCanonicalName());
            i++;
        }
        return new CompositeMediator(mediators);
    }

    // Finds the mediator associated with a given module.
    // Relies on the annotation processor generating "io.realm.<ModuleName>Mediator"
    // classes next to each @RealmModule.
    private static RealmProxyMediator getModuleMediator(String fullyQualifiedModuleClassName) {
        String[] moduleNameParts = fullyQualifiedModuleClassName.split("\\.");
        String moduleSimpleName = moduleNameParts[moduleNameParts.length - 1];

        String mediatorName = String.format(Locale.US, "io.realm.%s%s", moduleSimpleName, "Mediator");
        Class<?> clazz;
        //noinspection TryWithIdenticalCatches
        try {
            clazz = Class.forName(mediatorName);
            Constructor<?> constructor = clazz.getDeclaredConstructors()[0];
            constructor.setAccessible(true);
            return (RealmProxyMediator) constructor.newInstance();
        } catch (ClassNotFoundException e) {
            throw new RealmException("Could not find " + mediatorName, e);
        } catch (InvocationTargetException e) {
            throw new RealmException("Could not create an instance of " + mediatorName, e);
        } catch (InstantiationException e) {
            throw new RealmException("Could not create an instance of " + mediatorName, e);
        } catch (IllegalAccessException e) {
            throw new RealmException("Could not create an instance of " + mediatorName, e);
        }
    }

    @Override
    public String toString() {
        // NOTE: the key itself is never printed, only its length.
        //noinspection StringBufferReplaceableByString
        StringBuilder stringBuilder = new StringBuilder();
        stringBuilder.append("realmDirectory: ").append(realmDirectory != null ? realmDirectory.toString() : "");
        stringBuilder.append("\n");
        stringBuilder.append("realmFileName : ").append(realmFileName);
        stringBuilder.append("\n");
        stringBuilder.append("canonicalPath: ").append(canonicalPath);
        stringBuilder.append("\n");
        stringBuilder.append("key: ").append("[length: ").append(key == null ? 0 : KEY_LENGTH).append("]");
        stringBuilder.append("\n");
        stringBuilder.append("schemaVersion: ").append(Long.toString(schemaVersion));
        stringBuilder.append("\n");
        stringBuilder.append("migration: ").append(migration);
        stringBuilder.append("\n");
        stringBuilder.append("deleteRealmIfMigrationNeeded: ").append(deleteRealmIfMigrationNeeded);
        stringBuilder.append("\n");
        stringBuilder.append("durability: ").append(durability);
        stringBuilder.append("\n");
        stringBuilder.append("schemaMediator: ").append(schemaMediator);
        stringBuilder.append("\n");
        stringBuilder.append("readOnly: ").append(readOnly);
        stringBuilder.append("\n");
        stringBuilder.append("compactOnLaunch: ").append(compactOnLaunch);
        return stringBuilder.toString();
    }

    /**
     * Checks if RxJava is can be loaded.
     *
     * @return {@code true} if RxJava dependency exist, {@code false} otherwise.
     */
    @SuppressWarnings("LiteralClassName")
    static synchronized boolean isRxJavaAvailable() {
        // Result is cached in a static after the first reflective probe.
        if (rxJavaAvailable == null) {
            try {
                Class.forName("io.reactivex.Flowable");
                rxJavaAvailable = true;
            } catch (ClassNotFoundException ignore) {
                rxJavaAvailable = false;
            }
        }
        return rxJavaAvailable;
    }

    // Gets the canonical path for a given file.
    protected static String getCanonicalPath(File realmFile) {
        try {
            return realmFile.getCanonicalPath();
        } catch (IOException e) {
            throw new RealmFileException(RealmFileException.Kind.ACCESS_ERROR,
                    "Could not resolve the canonical path to the Realm file: " + realmFile.getAbsolutePath(), e);
        }
    }

    // Checks if this configuration is a SyncConfiguration instance.
    boolean isSyncConfiguration() {
        return false;
    }

    /**
     * RealmConfiguration.Builder used to construct instances of a RealmConfiguration in a fluent manner.
     */
    public static class Builder {

        // IMPORTANT: When adding any new methods to this class also add them to SyncConfiguration.
        private File directory;
        private String fileName;
        private String assetFilePath;
        private byte[] key;
        private long schemaVersion;
        private RealmMigration migration;
        private boolean deleteRealmIfMigrationNeeded;
        private OsRealmConfig.Durability durability;
        private HashSet<Object> modules = new HashSet<Object>();
        private HashSet<Class<? extends RealmModel>> debugSchema = new HashSet<Class<? extends RealmModel>>();
        private RxObservableFactory rxFactory;
        private Realm.Transaction initialDataTransaction;
        private boolean readOnly;
        private CompactOnLaunchCallback compactOnLaunch;

        /**
         * Creates an instance of the Builder for the RealmConfiguration.
         * <p>
         * This will use the app's own internal directory for storing the Realm file. This does not require any
         * additional permissions. The default location is {@code /data/data/<packagename>/files}, but can
         * change depending on vendor implementations of Android.
         */
        public Builder() {
            this(BaseRealm.applicationContext);
        }

        Builder(Context context) {
            //noinspection ConstantConditions
            if (context == null) {
                throw new IllegalStateException("Call `Realm.init(Context)` before creating a RealmConfiguration");
            }
            RealmCore.loadLibrary(context);
            initializeBuilder(context);
        }

        // Setups builder in its initial state.
        private void initializeBuilder(Context context) {
            this.directory = context.getFilesDir();
            this.fileName = Realm.DEFAULT_REALM_NAME;
            this.key = null;
            this.schemaVersion = 0;
            this.migration = null;
            this.deleteRealmIfMigrationNeeded = false;
            this.durability = OsRealmConfig.Durability.FULL;
            this.readOnly = false;
            this.compactOnLaunch = null;
            if (DEFAULT_MODULE != null) {
                this.modules.add(DEFAULT_MODULE);
            }
        }

        /**
         * Sets the filename for the Realm file.
         *
         * @param filename the name of the Realm file. Must be non-null and non-empty.
         * @throws IllegalArgumentException if {@code filename} is null or empty.
         */
        public Builder name(String filename) {
            //noinspection ConstantConditions
            if (filename == null || filename.isEmpty()) {
                throw new IllegalArgumentException("A non-empty filename must be provided");
            }

            this.fileName = filename;
            return this;
        }

        /**
         * Specifies the directory where the Realm file will be saved. The default value is {@code context.getFilesDir()}.
         * If the directory does not exist, it will be created.
         *
         * @param directory the directory to save the Realm file in. Directory must be writable.
         * @throws IllegalArgumentException if {@code directory} is null, not writable or a file.
         */
        public Builder directory(File directory) {
            //noinspection ConstantConditions
            if (directory == null) {
                throw new IllegalArgumentException("Non-null 'dir' required.");
            }
            if (directory.isFile()) {
                throw new IllegalArgumentException("'dir' is a file, not a directory: " + directory.getAbsolutePath() + ".");
            }
            if (!directory.exists() && !directory.mkdirs()) {
                throw new IllegalArgumentException("Could not create the specified directory: " + directory.getAbsolutePath() + ".");
            }
            if (!directory.canWrite()) {
                throw new IllegalArgumentException("Realm directory is not writable: " + directory.getAbsolutePath() + ".");
            }
            this.directory = directory;
            return this;
        }

        /**
         * Sets the {@value io.realm.RealmConfiguration#KEY_LENGTH} bytes key used to encrypt and decrypt the Realm file.
         *
         * @param key the encryption key; a defensive copy is stored.
         * @throws IllegalArgumentException if {@code key} is null or not exactly
         * {@value io.realm.RealmConfiguration#KEY_LENGTH} bytes long.
         */
        public Builder encryptionKey(byte[] key) {
            //noinspection ConstantConditions
            if (key == null) {
                throw new IllegalArgumentException("A non-null key must be provided");
            }
            if (key.length != KEY_LENGTH) {
                throw new IllegalArgumentException(String.format(Locale.US,
                        "The provided key must be %s bytes. Yours was: %s", KEY_LENGTH, key.length));
            }
            this.key = Arrays.copyOf(key, key.length);
            return this;
        }

        /**
         * Sets the schema version of the Realm. This must be equal to or higher than the schema version of the existing
         * Realm file, if any. If the schema version is higher than the already existing Realm, a migration is needed.
         * <p>
         * If no migration code is provided, Realm will throw a
         * {@link io.realm.exceptions.RealmMigrationNeededException}.
         *
         * @see #migration(RealmMigration)
         */
        public Builder schemaVersion(long schemaVersion) {
            if (schemaVersion < 0) {
                throw new IllegalArgumentException("Realm schema version numbers must be 0 (zero) or higher. Yours was: " + schemaVersion);
            }
            this.schemaVersion = schemaVersion;
            return this;
        }

        /**
         * Sets the {@link io.realm.RealmMigration} to be run if a migration is needed. If this migration fails to
         * upgrade the on-disc schema to the runtime schema, a {@link io.realm.exceptions.RealmMigrationNeededException}
         * will be thrown.
         */
        public Builder migration(RealmMigration migration) {
            //noinspection ConstantConditions
            if (migration == null) {
                throw new IllegalArgumentException("A non-null migration must be provided");
            }
            this.migration = migration;
            return this;
        }

        /**
         * Setting this will change the behavior of how migration exceptions are handled. Instead of throwing a
         * {@link io.realm.exceptions.RealmMigrationNeededException} the on-disc Realm will be cleared and recreated
         * with the new Realm schema.
         * <p>
         * <p>This cannot be configured to have an asset file at the same time by calling
         * {@link #assetFile(String)} as the provided asset file will be deleted in migrations.
         * <p>
         * <p><b>WARNING!</b> This will result in loss of data.
         *
         * @throws IllegalStateException if configured to use an asset file by calling {@link #assetFile(String)} previously.
         */
        public Builder deleteRealmIfMigrationNeeded() {
            if (this.assetFilePath != null && this.assetFilePath.length() != 0) {
                throw new IllegalStateException("Realm cannot clear its schema when previously configured to use an asset file by calling assetFile().");
            }

            this.deleteRealmIfMigrationNeeded = true;
            return this;
        }

        /**
         * Setting this will create an in-memory Realm instead of saving it to disk. In-memory Realms might still use
         * disk space if memory is running low, but all files created by an in-memory Realm will be deleted when the
         * Realm is closed.
         * <p>
         * Note that because in-memory Realms are not persisted, you must be sure to hold on to at least one non-closed
         * reference to the in-memory Realm object with the specific name as long as you want the data to last.
         */
        public Builder inMemory() {
            if (!Util.isEmptyString(assetFilePath)) {
                throw new RealmException("Realm can not use in-memory configuration if asset file is present.");
            }

            this.durability = OsRealmConfig.Durability.MEM_ONLY;

            return this;
        }

        /**
         * Replaces the existing module(s) with one or more {@link RealmModule}s. Using this method will replace the
         * current schema for this Realm with the schema defined by the provided modules.
         * <p>
         * A reference to the default Realm module containing all Realm classes in the project (but not dependencies),
         * can be found using {@link Realm#getDefaultModule()}. Combining the schema from the app project and a library
         * dependency is thus done using the following code:
         * <p>
         * {@code builder.modules(Realm.getDefaultMode(), new MyLibraryModule()); }
         * <p>
         *
         * @param baseModule the first Realm module (required).
         * @param additionalModules the additional Realm modules
         * @throws IllegalArgumentException if any of the modules doesn't have the {@link RealmModule} annotation.
         * @see Realm#getDefaultModule()
         */
        public Builder modules(Object baseModule, Object... additionalModules) {
            modules.clear();
            addModule(baseModule);
            //noinspection ConstantConditions
            if (additionalModules != null) {
                for (int i = 0; i < additionalModules.length; i++) {
                    Object module = additionalModules[i];
                    addModule(module);
                }
            }
            return this;
        }

        /**
         * Sets the {@link RxObservableFactory} used to create Rx Observables from Realm objects.
         * The default factory is {@link RealmObservableFactory}.
         *
         * @param factory factory to use.
         */
        public Builder rxFactory(RxObservableFactory factory) {
            rxFactory = factory;
            return this;
        }

        /**
         * Sets the initial data in {@link io.realm.Realm}. This transaction will be executed only for the first time
         * when database file is created or while migrating the data when {@link Builder#deleteRealmIfMigrationNeeded()} is set.
         *
         * @param transaction transaction to execute.
         */
        public Builder initialData(Realm.Transaction transaction) {
            initialDataTransaction = transaction;
            return this;
        }

        /**
         * Copies the Realm file from the given asset file path.
         * <p>
         * When opening the Realm for the first time, instead of creating an empty file,
         * the Realm file will be copied from the provided asset file and used instead.
         * <p>
         * This cannot be combined with {@link #deleteRealmIfMigrationNeeded()} as doing so would just result in the
         * copied file being deleted.
         * <p>
         * WARNING: This could potentially be a lengthy operation and should ideally be done on a background thread.
         *
         * @param assetFile path to the asset database file.
         * @throws IllegalStateException if this is configured to clear its schema by calling {@link #deleteRealmIfMigrationNeeded()}.
         */
        public Builder assetFile(String assetFile) {
            if (Util.isEmptyString(assetFile)) {
                throw new IllegalArgumentException("A non-empty asset file path must be provided");
            }
            if (durability == OsRealmConfig.Durability.MEM_ONLY) {
                throw new RealmException("Realm can not use in-memory configuration if asset file is present.");
            }
            if (this.deleteRealmIfMigrationNeeded) {
                throw new IllegalStateException("Realm cannot use an asset file when previously configured to clear its schema in migration by calling deleteRealmIfMigrationNeeded().");
            }

            this.assetFilePath = assetFile;

            return this;
        }

        /**
         * Setting this will cause the Realm to become read only and all write transactions made against this Realm will
         * fail with an {@link IllegalStateException}.
         * <p>
         * This in particular mean that {@link #initialData(Realm.Transaction)} will not work in combination with a
         * read only Realm and setting this will result in a {@link IllegalStateException} being thrown.
         * </p>
         * Marking a Realm as read only only applies to the Realm in this process. Other processes can still
         * write to the Realm.
         */
        public Builder readOnly() {
            this.readOnly = true;
            return this;
        }

        /**
         * Setting this will cause Realm to compact the Realm file if the Realm file has grown too large and a
         * significant amount of space can be recovered. See {@link DefaultCompactOnLaunchCallback} for details.
         */
        public Builder compactOnLaunch() {
            return compactOnLaunch(new DefaultCompactOnLaunchCallback());
        }

        /**
         * Sets this to determine if the Realm file should be compacted before returned to the user. It is passed the
         * total file size (data + free space) and the bytes used by data in the file.
         *
         * @param compactOnLaunch a callback called when opening a Realm for the first time during the life of a process
         *                        to determine if it should be compacted before being returned to the user. It is passed
         *                        the total file size (data + free space) and the bytes used by data in the file.
         */
        public Builder compactOnLaunch(CompactOnLaunchCallback compactOnLaunch) {
            //noinspection ConstantConditions
            if (compactOnLaunch == null) {
                throw new IllegalArgumentException("A non-null compactOnLaunch must be provided");
            }
            this.compactOnLaunch = compactOnLaunch;
            return this;
        }

        // Adds a module to the set after validating its @RealmModule annotation; nulls are ignored.
        private void addModule(Object module) {
            //noinspection ConstantConditions
            if (module != null) {
                checkModule(module);
                modules.add(module);
            }
        }

        /**
         * DEBUG method. This restricts the Realm schema to only consist of the provided classes without having to
         * create a module. These classes must be available in the default module. Calling this will remove any
         * previously configured modules.
         */
        final Builder schema(Class<? extends RealmModel> firstClass, Class<? extends RealmModel>... additionalClasses) {
            //noinspection ConstantConditions
            if (firstClass == null) {
                throw new IllegalArgumentException("A non-null class must be provided");
            }
            modules.clear();
            // NOTE(review): this adds the mediator itself, not a module. Looks intentional for
            // the debug-schema path (createSchemaMediator short-circuits on a non-empty
            // debugSchema before modules are inspected) — confirm before changing.
            modules.add(DEFAULT_MODULE_MEDIATOR);
            debugSchema.add(firstClass);
            //noinspection ConstantConditions
            if (additionalClasses != null) {
                Collections.addAll(debugSchema, additionalClasses);
            }

            return this;
        }

        /**
         * Creates the RealmConfiguration based on the builder parameters.
         *
         * @return the created {@link RealmConfiguration}.
         */
        public RealmConfiguration build() {
            // Check that readOnly() was applied to legal configuration. Right now it should only be allowed if
            // an assetFile is configured
            if (readOnly) {
                if (initialDataTransaction != null) {
                    throw new IllegalStateException("This Realm is marked as read-only. " +
                            "Read-only Realms cannot use initialData(Realm.Transaction).");
                }
                if (assetFilePath == null) {
                    throw new IllegalStateException("Only Realms provided using 'assetFile(path)' can be marked read-only. " +
                            "No such Realm was provided.");
                }
                if (deleteRealmIfMigrationNeeded) {
                    throw new IllegalStateException("'deleteRealmIfMigrationNeeded()' and read-only Realms cannot be combined");
                }
                if (compactOnLaunch != null) {
                    throw new IllegalStateException("'compactOnLaunch()' and read-only Realms cannot be combined");
                }
            }

            if (rxFactory == null && isRxJavaAvailable()) {
                rxFactory = new RealmObservableFactory();
            }

            return new RealmConfiguration(directory,
                    fileName,
                    getCanonicalPath(new File(directory, fileName)),
                    assetFilePath,
                    key,
                    schemaVersion,
                    migration,
                    deleteRealmIfMigrationNeeded,
                    durability,
                    createSchemaMediator(modules, debugSchema),
                    rxFactory,
                    initialDataTransaction,
                    readOnly,
                    compactOnLaunch,
                    false
            );
        }

        // Validates that a supplied module object carries the @RealmModule annotation.
        private void checkModule(Object module) {
            if (!module.getClass().isAnnotationPresent(RealmModule.class)) {
                throw new IllegalArgumentException(module.getClass().getCanonicalName() + " is not a RealmModule. " +
                        "Add @RealmModule to the class definition.");
            }
        }
    }
}
/* * Copyright 2015-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ // Copyright 2014 Google Inc. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.facebook.buck.query; import com.facebook.buck.event.BuckEventBus; import com.facebook.buck.rules.param.ParamName; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.function.Consumer; import java.util.function.Predicate; import javax.annotation.Nullable; /** * The environment of a Buck query that can evaluate queries to produce a result. * * <p>The query language is documented at docs/command/query.soy * * @param <NODE_TYPE> The primary type of "node" in the graph over which a {@code buck query} is run * in this environment. 
Although all objects returned by {@link QueryEnvironment} implement * {@link ConfiguredQueryTarget}, which is a marker interface for all possible "nodes" in the * graph being queried, <em>most</em> methods return objects that correspond to build rules. As * such, {@code NODE_TYPE} specifies the type used to represent build rules in this environment. * Methods that return objects of type {@code NODE_TYPE} therefore provide stronger guarantees * than those that only guarantee {@link ConfiguredQueryTarget} as the return type. */ public interface QueryEnvironment<NODE_TYPE> { /** Type of an argument of a user-defined query function. */ enum ArgumentType { EXPRESSION, WORD, INTEGER, } /** * Value of an argument of a user-defined query function. * * @param <ENV_NODE_TYPE> If this argument represents an {@link ArgumentType#EXPRESSION}, * determines the type of the {@link QueryEnvironment} in which the expression is expected to * be evaluated. */ class Argument<ENV_NODE_TYPE> { private final ArgumentType type; @Nullable private final QueryExpression<ENV_NODE_TYPE> expression; @Nullable private final String word; private final int integer; private Argument( ArgumentType type, @Nullable QueryExpression<ENV_NODE_TYPE> expression, @Nullable String word, int integer) { this.type = type; this.expression = expression; this.word = word; this.integer = integer; } public static <T> Argument<T> of(QueryExpression<T> expression) { return new Argument<T>(ArgumentType.EXPRESSION, expression, null, 0); } public static Argument<?> of(String word) { return new Argument<>(ArgumentType.WORD, null, word, 0); } public static Argument<?> of(int integer) { return new Argument<>(ArgumentType.INTEGER, null, null, integer); } public ArgumentType getType() { return type; } public QueryExpression<ENV_NODE_TYPE> getExpression() { return Objects.requireNonNull(expression); } public String getWord() { return Objects.requireNonNull(word); } public int getInteger() { return integer; } @Override public 
String toString() { switch (type) { case WORD: return "'" + word + "'"; case EXPRESSION: return Objects.requireNonNull(expression).toString(); case INTEGER: return Integer.toString(integer); default: throw new IllegalStateException(); } } @Override public boolean equals(Object other) { return (other instanceof Argument) && equalTo((Argument<?>) other); } public boolean equalTo(Argument<?> other) { return type.equals(other.type) && integer == other.integer && Objects.equals(expression, other.expression) && Objects.equals(word, other.word); } @Override public int hashCode() { int h = 31; h = h * 17 + type.hashCode(); h = h * 17 + integer; if (expression != null) { h = h * 17 + expression.hashCode(); } if (word != null) { h = h * 17 + word.hashCode(); } return h; } } /** A user-defined query function, which operates on the nodes of the QueryEnvironment */ interface QueryFunction<ENV_NODE_TYPE> { /** Name of the function as it appears in the query language. */ String getName(); /** * The number of arguments that are required. The rest is optional. * * <p>This should be greater than or equal to zero and at smaller than or equal to the length of * the list returned by {@link #getArgumentTypes}. */ int getMandatoryArguments(); /** The types of the arguments of the function. */ ImmutableList<ArgumentType> getArgumentTypes(); /** * Called when a user-defined function is to be evaluated. * * @param evaluator the evaluator for evaluating argument expressions. * @param env the query environment this function is evaluated in. * @param args the input arguments. These are type-checked against the specification returned by * {@link #getArgumentTypes} and {@link #getMandatoryArguments} * @return results of evaluating the query expression. The result type is mutable {@link Set} to * enable actual implementation to avoid making unnecessary copies, but resulting set is not * supposed to be mutated afterwards so implementation is ok to return {@link ImmutableSet}. 
*/ Set<ENV_NODE_TYPE> eval( QueryEvaluator<ENV_NODE_TYPE> evaluator, QueryEnvironment<ENV_NODE_TYPE> env, ImmutableList<Argument<ENV_NODE_TYPE>> args) throws QueryException; } /** * A procedure for evaluating a target literal to {@link ConfiguredQueryTarget}. This evaluation * can either happen immediately at parse time or be delayed until evalution of the entire query. */ interface TargetEvaluator<ENV_NODE_TYPE> { /** Returns the set of target nodes for the specified target pattern, in 'buck build' syntax. */ Set<ENV_NODE_TYPE> evaluateTarget(String target) throws QueryException; Type getType(); enum Type { IMMEDIATE, LAZY } } /** Returns an evaluator for target patterns. */ TargetEvaluator<NODE_TYPE> getTargetEvaluator(); /** Query parser environment. */ default QueryParserEnv<NODE_TYPE> getQueryParserEnv() { return QueryParserEnv.of(getFunctions(), getTargetEvaluator()); } /** Returns the direct forward dependencies of the specified targets. */ Set<NODE_TYPE> getFwdDeps(Iterable<NODE_TYPE> targets) throws QueryException; /** * Applies {@code action} to each forward dependencies of the specified targets. * * <p>Might apply more than once to the same target, so {@code action} should be idempotent. */ default void forEachFwdDep(Iterable<NODE_TYPE> targets, Consumer<NODE_TYPE> action) throws QueryException { getFwdDeps(targets).forEach(action); } /** Returns the direct reverse dependencies of the specified targets. */ Set<NODE_TYPE> getReverseDeps(Iterable<NODE_TYPE> targets) throws QueryException; Set<NODE_TYPE> getInputs(NODE_TYPE target) throws QueryException; /** * Returns the forward transitive closure of all of the targets in "targets". Callers must ensure * that {@link #buildTransitiveClosure} has been called for the relevant subgraph. */ Set<NODE_TYPE> getTransitiveClosure(Set<NODE_TYPE> targets) throws QueryException; /** * Construct the dependency graph for a depth-bounded forward transitive closure of all nodes in * "targetNodes". 
The identity of the calling expression is required to produce error messages. * * <p>If a larger transitive closure was already built, returns it to improve incrementality, * since all depth-constrained methods filter it after it is built anyway. */ void buildTransitiveClosure(Set<NODE_TYPE> targetNodes) throws QueryException; String getTargetKind(NODE_TYPE target) throws QueryException; /** Returns the tests associated with the given target. */ Set<NODE_TYPE> getTestsForTarget(NODE_TYPE target) throws QueryException; /** Returns the build files that define the given targets. */ Set<NODE_TYPE> getBuildFiles(Set<NODE_TYPE> targets) throws QueryException; /** Returns the targets that own one or more of the given files. */ Set<NODE_TYPE> getFileOwners(ImmutableList<String> files) throws QueryException; /** * Returns a set of targets equal to the input targets except configured for `configuration` * instead (or the default target platform if no configuration is provided) */ Set<NODE_TYPE> getConfiguredTargets(Set<NODE_TYPE> targets, Optional<String> configuration) throws QueryException; /** * Returns the existing targets in the value of `attribute` of the given `target`. * * <p>Note that unlike most methods in this interface, this method can return a heterogeneous * collection of objects (both paths and build targets). */ Set<NODE_TYPE> getTargetsInAttribute(NODE_TYPE target, ParamName attribute) throws QueryException; /** Returns the objects in the `attribute` of the given `target` that satisfy `predicate` */ Set<Object> filterAttributeContents( NODE_TYPE target, ParamName attribute, Predicate<Object> predicate) throws QueryException; /** Returns the set of query functions implemented by this query environment. */ Iterable<QueryFunction<NODE_TYPE>> getFunctions(); /** @return the {@link NODE_TYPE}s expanded from the given variable {@code name}. 
*/ default Set<NODE_TYPE> resolveTargetVariable(String name) { throw new IllegalArgumentException(String.format("unexpected target variable \"%s\"", name)); } Optional<BuckEventBus> getEventBus(); }
// Copyright 2015 The Project Buendia Authors // // Licensed under the Apache License, Version 2.0 (the "License"); you may not // use this file except in compliance with the License. You may obtain a copy // of the License at: http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software distrib- // uted under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES // OR CONDITIONS OF ANY KIND, either express or implied. See the License for // specific language governing permissions and limitations under the License. package org.projectbuendia.client.ui; import android.app.Activity; import android.content.res.Resources; import android.support.annotation.Nullable; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentActivity; import android.support.test.runner.lifecycle.ActivityLifecycleMonitorRegistry; import android.support.test.runner.lifecycle.Stage; import android.support.test.espresso.Espresso; import android.support.test.espresso.IdlingPolicies; import android.support.test.espresso.NoActivityResumedException; import com.google.common.base.Optional; import com.google.common.collect.Iterables; import com.squareup.spoon.Spoon; import org.joda.time.DateTime; import org.joda.time.LocalDate; import org.joda.time.Period; import org.projectbuendia.client.R; import org.projectbuendia.client.data.app.AppPatient; import org.projectbuendia.client.data.app.AppPatientDelta; import org.projectbuendia.client.events.data.ItemCreatedEvent; import org.projectbuendia.client.events.sync.SyncSucceededEvent; import org.projectbuendia.client.events.user.KnownUsersLoadedEvent; import org.projectbuendia.client.net.model.Patient; import org.projectbuendia.client.ui.login.LoginActivity; import org.projectbuendia.client.ui.matchers.TestCaseWithMatcherMethods; import org.projectbuendia.client.ui.sync.EventBusIdlingResource; import org.projectbuendia.client.utils.EventBusRegistrationInterface; 
import org.projectbuendia.client.utils.EventBusWrapper; import org.projectbuendia.client.utils.Logger; import java.util.UUID; import java.util.concurrent.TimeUnit; import de.greenrobot.event.EventBus; import static android.support.test.espresso.Espresso.pressBack; import static android.support.test.espresso.matcher.RootMatchers.isDialog; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.isA; import static org.projectbuendia.client.ui.matchers.AppPatientMatchers.isPatientWithId; /** * Base class for functional tests that sets timeouts to be permissive, optionally logs in as a * user before continuing, and provides some utility functions for convenience. */ public class FunctionalTestCase extends TestCaseWithMatcherMethods<LoginActivity> { private static final Logger LOG = Logger.create(); private boolean mWaitForUserSync = true; protected EventBusRegistrationInterface mEventBus; // For now, we create a new demo patient for tests using the real patient // creation UI on each test run (see {@link #inUserLoginInitDemoPatient()}). // TODO/robustness: Use externally preloaded demo data instead. protected static String sDemoPatientId = null; public FunctionalTestCase() { super(LoginActivity.class); } @Override public void setUp() throws Exception { // Give additional leeway for idling resources, as sync may be slow, especially on Edisons. // Increased to 5 minutes as certain operations (like initial sync) may take an exceedingly // long time. IdlingPolicies.setIdlingResourceTimeout(300, TimeUnit.SECONDS); IdlingPolicies.setMasterPolicyTimeout(300, TimeUnit.SECONDS); mEventBus = new EventBusWrapper(EventBus.getDefault()); // Wait for users to sync. 
if (mWaitForUserSync) { EventBusIdlingResource<KnownUsersLoadedEvent> resource = new EventBusIdlingResource<>("USERS", mEventBus); Espresso.registerIdlingResources(resource); } super.setUp(); getActivity(); } public void setWaitForUserSync(boolean waitForUserSync) { mWaitForUserSync = waitForUserSync; } @Override public void tearDown() { // Remove activities from the stack until the app is closed. If we don't do this, the test // runner sometimes has trouble launching the activity to start the next test. try { closeAllActivities(); } catch (Exception e) { LOG.e("Error tearing down test case; test isolation may be broken", e); } } /** * Determines the currently loaded activity, rather than {@link #getActivity()}, which will * always return {@link LoginActivity}. */ protected Activity getCurrentActivity() throws Throwable { getInstrumentation().waitForIdleSync(); final Activity[] activity = new Activity[1]; runTestOnUiThread(new Runnable() { @Override public void run() { java.util.Collection<Activity> activities = ActivityLifecycleMonitorRegistry.getInstance() .getActivitiesInStage(Stage.RESUMED); activity[0] = Iterables.getOnlyElement(activities); } }); return activity[0]; } protected void screenshot(String tag) { try { Spoon.screenshot(getCurrentActivity(), tag.replace(" ", "")); } catch (Throwable throwable) { LOG.w("Could not create screenshot with tag %s", tag); } } /** * Instructs espresso to wait for a {@link ProgressFragment} to finish loading. Espresso will * also wait every subsequent time the {@link ProgressFragment} returns to the busy state, and * will period check whether or not the fragment is currently idle. */ protected void waitForProgressFragment(ProgressFragment progressFragment) { // Use the ProgressFragment hashCode as the identifier so that multiple ProgressFragments // can be tracked, but only one resource will be registered to each fragment. 
ProgressFragmentIdlingResource idlingResource = new ProgressFragmentIdlingResource( Integer.toString(progressFragment.hashCode()), progressFragment); Espresso.registerIdlingResources(idlingResource); } /** * Instructs espresso to wait for the {@link ProgressFragment} contained in the current * activity to finish loading, if such a fragment is present. Espresso will also wait every * subsequent time the {@link ProgressFragment} returns to the busy state, and * will period check whether or not the fragment is currently idle. * * <p>If the current activity does not contain a progress fragment, then this function will * throw an {@link IllegalArgumentException}. * * <p>Warning: This function will not work properly in setUp() as the current activity won't * be available. If you need to call this function during setUp(), use * {@link #waitForProgressFragment(ProgressFragment)}. * TODO/robustness: Investigate why the current activity isn't available during setUp(). */ protected void waitForProgressFragment() { Activity activity; try { activity = getCurrentActivity(); } catch (Throwable throwable) { throw new IllegalStateException("Error retrieving current activity", throwable); } if (!(activity instanceof FragmentActivity)) { throw new IllegalStateException("Activity is not a FragmentActivity"); } FragmentActivity fragmentActivity = (FragmentActivity)activity; try { for (Fragment fragment : fragmentActivity.getSupportFragmentManager().getFragments()) { if (fragment instanceof ProgressFragment) { waitForProgressFragment((ProgressFragment) fragment); return; } } } catch (NullPointerException e) { LOG.w("Unable to wait for ProgressFragment to initialize."); return; } throw new IllegalStateException("Could not find a progress fragment to wait on."); } /** Idles until sync has completed. */ protected void waitForInitialSync() { // Use a UUID as a tag so that we can wait for an arbitrary number of events, since // EventBusIdlingResource<> only works for a single event. 
LOG.i("Registering resource to wait for initial sync."); EventBusIdlingResource<SyncSucceededEvent> syncSucceededResource = new EventBusIdlingResource<>(UUID.randomUUID().toString(), mEventBus); Espresso.registerIdlingResources(syncSucceededResource); } /** * Adds a new patient using the new patient form. Assumes that the UI is * in the location selection activity, and leaves the UI in the same * activity. Note: this function will not work during {@link #setUp()} * as it relies on {@link #waitForProgressFragment()}. * @param delta an AppPatientDelta containing the data for the new patient; * use Optional.absent() to leave fields unset * @param locationName the name of a location to assign to the new patient, * or null to leave unset (assumes this name is unique among locations) */ protected void inLocationSelectionAddNewPatient(AppPatientDelta delta, String locationName) { LOG.i("Adding patient: %s (location %s)", delta.toContentValues().toString(), locationName); click(viewWithId(R.id.action_add)); expectVisible(viewWithText("New patient")); if (delta.id.isPresent()) { type(delta.id.get(), viewWithId(R.id.patient_creation_text_patient_id)); } if (delta.givenName.isPresent()) { type(delta.givenName.get(), viewWithId(R.id.patient_creation_text_patient_given_name)); } if (delta.familyName.isPresent()) { type(delta.familyName.get(), viewWithId(R.id.patient_creation_text_patient_family_name)); } if (delta.birthdate.isPresent()) { Period age = new Period(delta.birthdate.get().toLocalDate(), LocalDate.now()); if (age.getYears() < 1) { type(age.getMonths(), viewWithId(R.id.patient_creation_text_age)); click(viewWithId(R.id.patient_creation_radiogroup_age_units_months)); } else { type(age.getYears(), viewWithId(R.id.patient_creation_text_age)); click(viewWithId(R.id.patient_creation_radiogroup_age_units_years)); } } if (delta.gender.isPresent()) { if (delta.gender.get() == AppPatient.GENDER_MALE) { click(viewWithId(R.id.patient_creation_radiogroup_age_sex_male)); } else if 
(delta.gender.get() == AppPatient.GENDER_FEMALE) { click(viewWithId(R.id.patient_creation_radiogroup_age_sex_female)); } } if (delta.admissionDate.isPresent()) { // TODO/completeness: Support admission date in addNewPatient(). // The following code is broken -- hopefully fixed by Espresso 2.0. // click(viewWithId(R.id.patient_creation_admission_date)); // selectDateFromDatePickerDialog(mDemoPatient.admissionDate.get()); } if (delta.firstSymptomDate.isPresent()) { // TODO/completeness: Support first symptoms date in addNewPatient(). // The following code is broken -- hopefully fixed by Espresso 2.0. // click(viewWithId(R.id.patient_creation_symptoms_onset_date)); // selectDateFromDatePickerDialog(mDemoPatient.firstSymptomDate.get()); } if (delta.assignedLocationUuid.isPresent()) { // TODO/completeness: Support assigned location in addNewPatient(). // A little tricky as we need to select by UUID. // click(viewWithId(R.id.patient_creation_button_change_location)); } if (locationName != null) { click(viewWithId(R.id.patient_creation_button_change_location)); click(viewWithText(locationName)); } EventBusIdlingResource<ItemCreatedEvent<AppPatient>> resource = new EventBusIdlingResource<>(UUID.randomUUID().toString(), mEventBus); click(viewWithId(R.id.patient_creation_button_create)); Espresso.registerIdlingResources(resource); // wait for patient to be created } // Broken, but hopefully fixed in Espresso 2.0. 
private void selectDateFromDatePickerDialog(DateTime dateTime) { selectDateFromDatePicker(dateTime); click(viewWithText("Set").inRoot(isDialog())); } protected void selectDateFromDatePicker( @Nullable String year, @Nullable String monthOfYear, @Nullable String dayOfMonth) { LOG.e("Year: %s, Month: %s, Day: %s", year, monthOfYear, dayOfMonth); if (year != null) { setDateSpinner("year", year); } if (monthOfYear != null) { setDateSpinner("month", monthOfYear); } if (dayOfMonth != null) { setDateSpinner("day", dayOfMonth); } } protected void selectDateFromDatePicker(DateTime dateTime) { String year = dateTime.toString("yyyy"); String monthOfYear = dateTime.toString("MMM"); String dayOfMonth = dateTime.toString("dd"); selectDateFromDatePicker(year, monthOfYear, dayOfMonth); } // Broken, but hopefully fixed in Espresso 2.0. protected void setDateSpinner(String spinnerName, String value) { int numberPickerId = Resources.getSystem().getIdentifier("numberpicker_input", "id", "android"); int spinnerId = Resources.getSystem().getIdentifier(spinnerName, "id", "android"); LOG.i("%s: %s", spinnerName, value); LOG.i("numberPickerId: %d", numberPickerId); LOG.i("spinnerId: %d", spinnerId); type(value, viewThat(hasId(numberPickerId), whoseParent(hasId(spinnerId)))); } /** * Ensures that a demo patient exists, creating one if necessary. Assumes * that the UI is in the user login activity, and leaves the UI back in * the user login activity. Note: this function will not work during * {@link #setUp()} as it relies on {@link #waitForProgressFragment()}. 
*/ protected void inUserLoginInitDemoPatient() { if (sDemoPatientId != null) { // demo patient exists and is reusable return; } AppPatientDelta delta = new AppPatientDelta(); String id = "" + (System.currentTimeMillis() % 100000); delta.id = Optional.of(id); delta.givenName = Optional.of("Given" + id); delta.familyName = Optional.of("Family" + id); delta.firstSymptomDate = Optional.of(LocalDate.now().minusMonths(7)); delta.gender = Optional.of(Patient.GENDER_FEMALE); delta.birthdate = Optional.of(DateTime.now().minusYears(12).minusMonths(3)); // Setting location within the AppPatientDelta is not yet supported. // delta.assignedLocationUuid = Optional.of(Zone.TRIAGE_ZONE_UUID); inUserLoginGoToLocationSelection(); inLocationSelectionAddNewPatient(delta, "S1"); // add the patient sDemoPatientId = id; // record ID so future tests can reuse the patient pressBack(); // return to user login activity } /** * Prevents the current demo patient from being reused for the next test. * The default behaviour is to reuse the same demo patient for each test; * if a test modifies patient data, it should call this method so that the * next test will use a fresh demo patient. */ protected void invalidateDemoPatient() { sDemoPatientId = null; } /** * Navigates to the location selection activity from the user login * activity. Note: this function will not work during {@link #setUp()} * as it uses {@link #waitForProgressFragment()}. */ protected void inUserLoginGoToLocationSelection() { click(viewWithText("Guest User")); waitForProgressFragment(); // wait for locations to load } /** * Navigates to the location selection activity with a list of all the * patients opened (from tapping the search button). Assumes that the UI is * in the user login activity. Note: this function will not work during * {@link #setUp()} as it uses {@link #waitForProgressFragment()}. 
*/ protected void inUserLoginGoToPatientList() { inUserLoginGoToLocationSelection(); // There may be a small delay before the search button becomes visible; // the button is not displayed while locations are loading. expectVisibleWithin(3000, viewThat(hasId(R.id.action_search))); // Tap the search button to open the list of all patients. click(viewWithId(R.id.action_search)); } /** * Navigates to the patient chart for the shared demo patient, creating the * demo patient if it doesn't exist yet. Assumes that the UI is in the * user login activity. Note: this function will not work during * {@link #setUp()} as it uses {@link #waitForProgressFragment()}. */ protected void inUserLoginGoToDemoPatientChart() { inUserLoginInitDemoPatient(); inUserLoginGoToPatientList(); inPatientListClickPatientWithId(sDemoPatientId); } /** * Navigates to the patient creation activity. Assumes that the UI is * in the user login activity. Note: this function will not work during * {@link #setUp()} as it uses {@link #waitForProgressFragment()}. */ protected void inUserLoginGoToPatientCreation() { inUserLoginGoToLocationSelection(); click(viewWithId(R.id.action_add)); expectVisible(viewWithText("New patient")); } /** Checks that the expected zones and tents are shown. */ protected void inLocationSelectionCheckZonesAndTentsDisplayed() { // Should be at location selection screen expectVisibleSoon(viewWithText("ALL PRESENT PATIENTS")); // Zones and tents should be visible expectVisible(viewWithText("Triage")); expectVisible(viewWithText("S1")); expectVisible(viewWithText("S2")); expectVisible(viewWithText("P1")); expectVisible(viewWithText("P2")); expectVisible(viewWithText("C1")); expectVisible(viewWithText("C2")); expectVisible(viewWithText("Discharged")); } /** In the location selection activity, click a location tile. */ protected void inLocationSelectionClickLocation(String name) { click(viewThat(hasText(name))); waitForProgressFragment(); // Wait for search fragment to load. 
} /** In a patient list, click the first patient. */ protected void inPatientListClickFirstPatient() { click(dataThat(is(AppPatient.class)) .inAdapterView(hasId(R.id.fragment_patient_list)) .atPosition(0)); } /** In a patient list, click the patient with a specified ID. */ protected void inPatientListClickPatientWithId(String id) { click(dataThat(isPatientWithId(id)) .inAdapterView(hasId(R.id.fragment_patient_list)) .atPosition(0)); } /** Closes all activities on the stack. */ protected void closeAllActivities() throws Exception { try { for (int i = 0; i < 20; i++) { pressBack(); Thread.sleep(100); } } catch (NoActivityResumedException | InterruptedException e) { // nothing left to close } } }
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.flowable.spring.security;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.assertj.core.api.Assertions.tuple;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;

import org.flowable.idm.api.Group;
import org.flowable.idm.api.Privilege;
import org.flowable.idm.api.User;
import org.flowable.idm.engine.test.PluggableFlowableIdmTestCase;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.security.core.CredentialsContainer;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.core.userdetails.UsernameNotFoundException;

/**
 * Integration tests for {@code FlowableUserDetailsService}, verifying that Flowable IDM
 * users, their group memberships, and their user/group privileges are exposed correctly
 * through the Spring Security {@link UserDetailsService} contract.
 *
 * <p>Fixture (built in {@link #setUp()}):
 * <ul>
 *   <li>kermit — member of admins, sales, engineering; sees all three privileges</li>
 *   <li>fozzie — member of sales only; sees only the "start processes" privilege</li>
 * </ul>
 *
 * @author Filip Hrisafov
 */
public class FlowableUserDetailsServiceTest extends PluggableFlowableIdmTestCase {

    private UserDetailsService userDetailsService;

    /**
     * Creates the groups, users, memberships and privileges the tests assert against,
     * then builds the service under test on top of the same {@code idmIdentityService}.
     */
    @BeforeEach
    protected void setUp() throws Exception {
        createGroup("admins", "Admins", "user");
        createGroup("sales", "Sales", "user");
        createGroup("engineering", "Engineering", "tech");

        createUser("kermit", "Kermit", "the Frog", "Kermit the Frog", "kermit@muppetshow.com");
        createUser("fozzie", "Fozzie", "Bear", "Fozzie Bear", "fozzie@muppetshow.com");

        idmIdentityService.createMembership("kermit", "admins");
        idmIdentityService.createMembership("kermit", "sales");
        idmIdentityService.createMembership("kermit", "engineering");
        idmIdentityService.createMembership("fozzie", "sales");

        // "access admin application" is granted only via the admins group.
        String adminPrivilegename = "access admin application";
        Privilege adminPrivilege = idmIdentityService.createPrivilege(adminPrivilegename);
        idmIdentityService.addGroupPrivilegeMapping(adminPrivilege.getId(), "admins");

        // "access modeler application" is mapped redundantly (two groups + kermit
        // directly) to check that duplicates collapse into a single authority.
        String modelerPrivilegeName = "access modeler application";
        Privilege modelerPrivilege = idmIdentityService.createPrivilege(modelerPrivilegeName);
        idmIdentityService.addGroupPrivilegeMapping(modelerPrivilege.getId(), "admins");
        idmIdentityService.addGroupPrivilegeMapping(modelerPrivilege.getId(), "engineering");
        idmIdentityService.addUserPrivilegeMapping(modelerPrivilege.getId(), "kermit");

        // "start processes" comes via the sales group, so both users receive it.
        String startProcessesPrivilegename = "start processes";
        Privilege startProcessesPrivilege = idmIdentityService.createPrivilege(startProcessesPrivilegename);
        idmIdentityService.addGroupPrivilegeMapping(startProcessesPrivilege.getId(), "sales");

        userDetailsService = new FlowableUserDetailsService(idmIdentityService);
    }

    /**
     * Creates and saves an IDM user; the password is deliberately set to the user id
     * so the tests can assert on it without extra bookkeeping.
     */
    private User createUser(String id, String firstName, String lastName, String displayName, String email) {
        User user = idmIdentityService.newUser(id);
        user.setFirstName(firstName);
        user.setLastName(lastName);
        user.setDisplayName(displayName);
        user.setEmail(email);
        user.setPassword(id);
        idmIdentityService.saveUser(user);
        return user;
    }

    @AfterEach
    protected void tearDown() throws Exception {
        clearAllUsersAndGroups();
    }

    /** An unknown username must surface as Spring Security's {@link UsernameNotFoundException}. */
    @Test
    public void testLoadingByUnknownUserShouldThrowException() {
        assertThatThrownBy(() -> userDetailsService.loadUserByUsername("unknown"))
            .isInstanceOf(UsernameNotFoundException.class)
            .hasMessage("user (unknown) could not be found");
    }

    /**
     * A null username must not leak Flowable's own exception type; it is translated
     * to {@link UsernameNotFoundException} like any other miss.
     */
    @Test
    public void testLoadingByNullUserShouldIgnoreFlowableException() {
        assertThatThrownBy(() -> userDetailsService.loadUserByUsername(null))
            .isInstanceOf(UsernameNotFoundException.class)
            .hasMessage("user (null) could not be found");
    }

    /**
     * Full happy-path check for kermit: account flags, credentials, the complete
     * authority set, immutability of the exposed user/group snapshots, and
     * credential erasure via {@link CredentialsContainer}.
     */
    @Test
    public void testLoadingKnownUserWithAllPrivileges() {
        UserDetails kermit = userDetailsService.loadUserByUsername("kermit");
        assertThat(kermit).isNotNull();
        assertThat(kermit.isCredentialsNonExpired()).as("credentialsNonExpired").isTrue();
        assertThat(kermit.isAccountNonLocked()).as("accountNonLocked").isTrue();
        assertThat(kermit.isAccountNonExpired()).as("accountNonExpired").isTrue();
        assertThat(kermit.isEnabled()).as("enabled").isTrue();
        assertThat(kermit.getUsername()).as("username").isEqualTo("kermit");
        assertThat(kermit.getPassword()).as("password").isEqualTo("kermit");
        // Three distinct authorities even though the modeler privilege is mapped
        // to kermit through several paths.
        assertThat(kermit.getAuthorities())
            .extracting(GrantedAuthority::getAuthority)
            .as("granted authorities")
            .containsExactly(
                "access admin application",
                "access modeler application",
                "start processes"
            );
        assertThat(kermit).isInstanceOf(FlowableUserDetails.class);
        FlowableUserDetails kermitFlowable = (FlowableUserDetails) kermit;
        User user = kermitFlowable.getUser();
        assertThat(user.getId()).isEqualTo("kermit");
        assertThat(user.getFirstName()).isEqualTo("Kermit");
        assertThat(user.getLastName()).isEqualTo("the Frog");
        assertThat(user.getDisplayName()).isEqualTo("Kermit the Frog");
        assertThat(user.getEmail()).isEqualTo("kermit@muppetshow.com");
        assertThat(user.getPassword()).isEqualTo("kermit");

        // Mutating the returned User must not change what the details report —
        // the service is expected to hand out an immutable snapshot.
        user.setId("test");
        user.setFirstName("test");
        user.setLastName("test");
        user.setDisplayName("test");
        user.setEmail("test");
        assertThat(user.getId()).isEqualTo("kermit");
        assertThat(user.getFirstName()).isEqualTo("Kermit");
        assertThat(user.getLastName()).isEqualTo("the Frog");
        assertThat(user.getDisplayName()).isEqualTo("Kermit the Frog");
        assertThat(user.getEmail()).isEqualTo("kermit@muppetshow.com");

        assertThat(kermitFlowable.getGroups())
            .extracting(Group::getId, Group::getName, Group::getType)
            .as("Groups")
            .containsExactlyInAnyOrder(
                tuple("admins", "Admins", "user"),
                tuple("sales", "Sales", "user"),
                tuple("engineering", "Engineering", "tech")
            );

        // Same immutability guarantee for the exposed groups.
        kermitFlowable.getGroups().forEach(group -> {
            group.setId("test");
            group.setType("test");
            group.setName("test");
        });
        assertThat(kermitFlowable.getGroups())
            .extracting(Group::getId, Group::getName, Group::getType)
            .as("Groups")
            .containsExactlyInAnyOrder(
                tuple("admins", "Admins", "user"),
                tuple("sales", "Sales", "user"),
                tuple("engineering", "Engineering", "tech")
            );

        // eraseCredentials() must clear the password both on the details object
        // and on the wrapped User.
        assertThat(kermit).isInstanceOf(CredentialsContainer.class);
        CredentialsContainer container = (CredentialsContainer) kermit;
        container.eraseCredentials();
        assertThat(kermit.getPassword()).as("Password after erase").isNull();
        assertThat(kermitFlowable.getUser().getPassword()).as("User password after erase").isNull();
    }

    /** Username lookup is case sensitive: a differently-cased id is treated as unknown. */
    @Test
    public void testLoadingUserShouldBeCaseSensitive() {
        assertThatThrownBy(() -> userDetailsService.loadUserByUsername("kErMiT"))
            .isInstanceOf(UsernameNotFoundException.class)
            .hasMessage("user (kErMiT) could not be found");
    }

    /** fozzie belongs only to sales, so he gets exactly one authority and one group. */
    @Test
    public void testLoadingKnownUserWithSomePrivileges() {
        UserDetails fozzie = userDetailsService.loadUserByUsername("fozzie");
        assertThat(fozzie).isNotNull();
        assertThat(fozzie.isCredentialsNonExpired()).as("credentialsNonExpired").isTrue();
        assertThat(fozzie.isAccountNonLocked()).as("accountNonLocked").isTrue();
        assertThat(fozzie.isAccountNonExpired()).as("accountNonExpired").isTrue();
        assertThat(fozzie.isEnabled()).as("enabled").isTrue();
        assertThat(fozzie.getUsername()).as("username").isEqualTo("fozzie");
        assertThat(fozzie.getPassword()).as("password").isEqualTo("fozzie");
        assertThat(fozzie.getAuthorities())
            .extracting(GrantedAuthority::getAuthority)
            .as("granted authorities")
            .containsExactly(
                "start processes"
            );
        assertThat(fozzie).isInstanceOf(FlowableUserDetails.class);
        FlowableUserDetails fozzieFlowable = (FlowableUserDetails) fozzie;
        User user = fozzieFlowable.getUser();
        assertThat(user.getId()).isEqualTo("fozzie");
        assertThat(user.getFirstName()).isEqualTo("Fozzie");
        assertThat(user.getLastName()).isEqualTo("Bear");
        assertThat(user.getDisplayName()).isEqualTo("Fozzie Bear");
        assertThat(user.getEmail()).isEqualTo("fozzie@muppetshow.com");
        assertThat(user.getPassword()).isEqualTo("fozzie");

        // Snapshot immutability, as in the kermit test.
        user.setId("test");
        user.setFirstName("test");
        user.setLastName("test");
        user.setDisplayName("test");
        user.setEmail("test");
        assertThat(user.getId()).isEqualTo("fozzie");
        assertThat(user.getFirstName()).isEqualTo("Fozzie");
        assertThat(user.getLastName()).isEqualTo("Bear");
        assertThat(user.getDisplayName()).isEqualTo("Fozzie Bear");
        assertThat(user.getEmail()).isEqualTo("fozzie@muppetshow.com");

        assertThat(fozzieFlowable.getGroups())
            .extracting(Group::getId, Group::getName, Group::getType)
            .as("Groups")
            .containsExactlyInAnyOrder(
                tuple("sales", "Sales", "user")
            );
    }

    /**
     * The details object must survive a Java serialization round trip with all state
     * (flags, credentials, authorities, wrapped user and groups) intact — Spring
     * Security may store it in a serialized HTTP session.
     */
    @Test
    public void testSerializingUserDetailsShouldWorkCorrectly() throws IOException, ClassNotFoundException {
        UserDetails kermit = userDetailsService.loadUserByUsername("kermit");
        byte[] serialized;
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        ObjectOutputStream outputStream = new ObjectOutputStream(buffer);
        outputStream.writeObject(kermit);
        outputStream.close();
        serialized = buffer.toByteArray();
        ByteArrayInputStream inputStream = new ByteArrayInputStream(serialized);
        ObjectInputStream stream = new ObjectInputStream(inputStream);
        Object deserialized = stream.readObject();
        stream.close();
        assertThat(deserialized).isInstanceOf(FlowableUserDetails.class);
        kermit = (UserDetails) deserialized;
        assertThat(kermit.isCredentialsNonExpired()).as("credentialsNonExpired").isTrue();
        assertThat(kermit.isAccountNonLocked()).as("accountNonLocked").isTrue();
        assertThat(kermit.isAccountNonExpired()).as("accountNonExpired").isTrue();
        assertThat(kermit.isEnabled()).as("enabled").isTrue();
        assertThat(kermit.getUsername()).as("username").isEqualTo("kermit");
        assertThat(kermit.getPassword()).as("password").isEqualTo("kermit");
        assertThat(kermit.getAuthorities())
            .extracting(GrantedAuthority::getAuthority)
            .as("granted authorities")
            .containsExactly(
                "access admin application",
                "access modeler application",
                "start processes"
            );
        FlowableUserDetails kermitFlowable = (FlowableUserDetails) kermit;
        User user = kermitFlowable.getUser();
        assertThat(user.getId()).isEqualTo("kermit");
        assertThat(user.getFirstName()).isEqualTo("Kermit");
        assertThat(user.getLastName()).isEqualTo("the Frog");
        assertThat(user.getDisplayName()).isEqualTo("Kermit the Frog");
        assertThat(user.getEmail()).isEqualTo("kermit@muppetshow.com");
        assertThat(user.getPassword()).isEqualTo("kermit");
        assertThat(kermitFlowable.getGroups())
            .extracting(Group::getId, Group::getName, Group::getType)
            .as("Groups")
            .containsExactlyInAnyOrder(
                tuple("admins", "Admins", "user"),
                tuple("sales", "Sales", "user"),
                tuple("engineering", "Engineering", "tech")
            );
    }
}
/* * The MIT License (MIT) * * Copyright (c) 2007-2015 Broad Institute * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package org.broad.igv.ui.color; import org.apache.log4j.Logger; import org.broad.igv.util.ObjectCache; import java.awt.*; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.util.*; /** * Miscellaneous utilities for parsing and manipulating colors. 
* * @author Jim Robinson */ public class ColorUtilities { private static Logger log = Logger.getLogger(ColorUtilities.class); public static ObjectCache<Object, Color> colorCache = new ObjectCache<Object, Color>(1000); private static float[] whiteComponents = Color.white.getRGBColorComponents(null); private static Map<Integer, Color> grayscaleColors = new HashMap(); // HTML 4.1 color table, + orange and magenta static Map<String, String> colorSymbols = new HashMap(); private static Map<String, ColorPalette> palettes; public static Map<Color, float[]> componentsCache = Collections.synchronizedMap(new HashMap<Color, float[]>()); static { colorSymbols.put("white", "FFFFFF"); colorSymbols.put("silver", "C0C0C0"); colorSymbols.put("gray", "808080"); colorSymbols.put("black", "000000"); colorSymbols.put("red", "FF0000"); colorSymbols.put("maroon", "800000"); colorSymbols.put("yellow", "FFFF00"); colorSymbols.put("olive", "808000"); colorSymbols.put("lime", "00FF00"); colorSymbols.put("green", "008000"); colorSymbols.put("aqua", "00FFFF"); colorSymbols.put("teal", "008080"); colorSymbols.put("blue", "0000FF"); colorSymbols.put("navy", "000080"); colorSymbols.put("fuchsia", "FF00FF"); colorSymbols.put("purple", "800080"); colorSymbols.put("orange", "FFA500"); colorSymbols.put("magenta", "FF00FF"); } /** * @param idx * @return * @see #randomColor(int, float) */ private static int[] quasiRandomColor(int idx) { int BASE_COL = 40; int RAND_COL = 255 - BASE_COL; idx += 1; // avoid 0 int r = Math.abs(BASE_COL + (idx * 33) % RAND_COL); int g = Math.abs(BASE_COL + (idx * 55) % RAND_COL); int b = Math.abs(BASE_COL + (idx * 77) % RAND_COL); return new int[]{r, g, b}; } /** * Port of DChip function of the same name. * Calls {@link #randomColor(int, float)} with {@code alpha=1.0} * * @param idx * @return */ public static Color randomColor(int idx) { return randomColor(idx, 1.0f); } /** * Generate a color based on {@code idx}. 
Unpredictable but deterministic (like a hash) * Good for generating a set of colors for successive values of {@code idx}. * Alpha value is set as specified * * @param idx * @param alpha alpha value of color, from 0.0-1.0 * @return */ public static Color randomColor(int idx, float alpha) { int[] rgb = quasiRandomColor(idx); int r = rgb[0]; int g = rgb[1]; int b = rgb[2]; // Reject colors too close to white if (r > 200 && g > 200 && b > 200) { int tmp = r % 3; if (tmp == 0) { r = 255 - r; } else if (tmp == 1) { g = 255 - g; } else { b = 255 - b; } } return new Color(r, g, b, (int) (255 * alpha)); } public static void main(String[] args) { for (int i = 200; i < 300; i++) { System.out.println(i % 3); } } public static Color randomDesaturatedColor(float alpha) { float hue = (float) Math.random(); float brightenss = (float) (Math.random() * 0.7); Color base = Color.getHSBColor(hue, 0, brightenss); if (alpha >= 1) return base; else return new Color(base.getRed(), base.getGreen(), base.getBlue(), (int) (alpha * 255)); } /** * Method description * * @param inputColor * @param hue * @param saturation * @param brightness * @return */ public static Color adjustHSB(Color inputColor, float hue, float saturation, float brightness) { float[] hsbvals = new float[3]; Color.RGBtoHSB(inputColor.getRed(), inputColor.getGreen(), inputColor.getBlue(), hsbvals); return Color.getHSBColor(hue * hsbvals[0], saturation * hsbvals[1], brightness * hsbvals[2]); } public static String colorToString(Color color) { StringBuffer buffer = new StringBuffer(); buffer.append(color.getRed()); buffer.append(","); buffer.append(color.getGreen()); buffer.append(","); buffer.append(color.getBlue()); return buffer.toString(); } public static Color stringToColor(String string) { return stringToColor(string, Color.black); } public static Color stringToColor(String string, Color defaultColor) { if(string == null) return defaultColor; try { Color c = stringToColorNoDefault(string); if (c == null) { c = 
defaultColor; } colorCache.put(string, c); return c; } catch (NumberFormatException numberFormatException) { log.error("Error in color string. ", numberFormatException); return defaultColor; } } private static Color stringToColorNoDefault(String string) throws NumberFormatException { // Excel will quote color strings, strip all quotes string = string.replace("\"", "").replace("'", ""); Color c = null; if (string.contains(",")) { String[] rgb = string.split(","); int red = Integer.parseInt(rgb[0]); int green = Integer.parseInt(rgb[1]); int blue = Integer.parseInt(rgb[2]); c = new Color(red, green, blue); } else if (string.startsWith("#")) { c = hexToColor(string.substring(1)); } else { try { int intValue = Integer.parseInt(string); if (intValue >= 0) { c = new Color(intValue); } } catch (NumberFormatException e) { String hexString = colorSymbols.get(string.toLowerCase()); if (hexString != null) { c = hexToColor(hexString); } } } return c; } private static Color hexToColor(String string) { if (string.length() == 6) { int red = Integer.parseInt(string.substring(0, 2), 16); int green = Integer.parseInt(string.substring(2, 4), 16); int blue = Integer.parseInt(string.substring(4, 6), 16); return new Color(red, green, blue); } else { return null; } } public static float[] getRGBColorComponents(Color color) { float[] comps = componentsCache.get(color); if (comps == null) { comps = color.getRGBColorComponents(null); componentsCache.put(color, comps); } return comps; } /** * Return alphas shaded color. This method is used, rather than the Color constructor, so that * the alpha is not lost in postscript output. 
* * @param alpha * @return */ public static Color getCompositeColor(Color backgroundColor, Color foregroundColor, float alpha) { float[] dest = getRGBColorComponents(backgroundColor); float[] source = getRGBColorComponents(foregroundColor); int r = (int) ((alpha * source[0] + (1 - alpha) * dest[0]) * 255 + 0.5); int g = (int) ((alpha * source[1] + (1 - alpha) * dest[1]) * 255 + 0.5); int b = (int) ((alpha * source[2] + (1 - alpha) * dest[2]) * 255 + 0.5); int a = 255; int value = ((a & 0xFF) << 24) | ((r & 0xFF) << 16) | ((g & 0xFF) << 8) | ((b & 0xFF) << 0); Color c = colorCache.get(value); if (c == null) { c = new Color(value); colorCache.put(value, c); } return c; } /** * Return alphas shaded color for a white background. This method is used, rather than the Color constructor, so that * the alpha is not lost in postscript output. * * @param source * @param alpha * @return */ public static Color getCompositeColor(Color source, float alpha) { return getCompositeColor(Color.white, source, alpha); } public static Map<String, ColorPalette> loadPalettes() throws IOException { InputStream is = ColorUtilities.class.getResourceAsStream("resources/colorPalettes.txt"); BufferedReader br = new BufferedReader(new InputStreamReader(is)); String nextLine; palettes = new LinkedHashMap<String, ColorPalette>(); palleteNames = new ArrayList(); String currentPalletName = null; java.util.List<Color> currentColorList = new ArrayList(); while ((nextLine = br.readLine()) != null) { nextLine = nextLine.trim(); if (nextLine.length() == 0) continue; if (nextLine.startsWith("#")) { if (currentPalletName != null) { ColorPalette palette = new ColorPalette(currentPalletName, currentColorList.toArray(new Color[currentColorList.size()])); palettes.put(currentPalletName, palette); palleteNames.add(currentPalletName); currentColorList.clear(); } currentPalletName = nextLine.substring(1); } else { String[] tokens = nextLine.split(";"); for (String s : tokens) { // Remove white space s = 
s.replaceAll(" ", ""); Color c = ColorUtilities.stringToColor(s); currentColorList.add(c); } } } if (!currentColorList.isEmpty()) { ColorPalette palette = new ColorPalette(currentPalletName, currentColorList.toArray(new Color[currentColorList.size()])); palettes.put(currentPalletName, palette); palleteNames.add(currentPalletName); } return palettes; } static int nextPaletteIdx = 0; static ArrayList<String> palleteNames = new ArrayList(); public static ColorPalette getNextPalette() { try { if (palettes == null) loadPalettes(); ColorPalette pallete = palettes.get(palleteNames.get(nextPaletteIdx)); nextPaletteIdx++; if (nextPaletteIdx >= palleteNames.size()) { nextPaletteIdx = 0; } return pallete; } catch (IOException e) { log.error(e); return null; } } public static ColorPalette getPalette(String s) { try { if (palettes == null) loadPalettes(); return palettes.get(s); } catch (IOException e) { log.error(e); return null; } } public static ColorPalette getDefaultPalette() { try { if (palettes == null) { loadPalettes(); } if (palettes.isEmpty()) { return null; } return palettes.values().iterator().next(); } catch (IOException e) { log.error("Error loading color palletes", e); return null; } } public static synchronized Color getGrayscaleColor(int gray) { gray = Math.max(0, Math.min(255, gray)); Color c = grayscaleColors.get(gray); if (c == null) { c = new Color(gray, gray, gray); grayscaleColors.put(gray, c); } return c; } /** * Return a new Color, same as the old, but with a new alpha value * * @param oldColor * @param newAlpha * @return */ public static Color modifyAlpha(Color oldColor, int newAlpha) { return new Color(oldColor.getRed(), oldColor.getGreen(), oldColor.getBlue(), newAlpha); } /** * Converts an HSL color value to RGB. Conversion formula * adapted from http://en.wikipedia.org/wiki/HSL_color_space. 
* * @param h The hue [0, 360] * @param s The saturation [0, 1] * @param l The lightness [0, 1] * @return The RGB representation */ public static int[] hslToRgb(double h, double s, double l) { double c = (1 - Math.abs(2 * l - 1)) * s; double hprime = h / 60; double x = c * (1 - Math.abs(hprime % 2 - 1)); double r, g, b; if (hprime < 1) { r = c; g = x; b = 0; } else if (hprime < 2) { r = x; g = c; b = 0; } else if (hprime < 3) { r = 0; g = c; b = x; } else if (hprime < 4) { r = 0; g = x; b = c; } else if (hprime < 5) { r = x; g = 0; b = c; } else { r = c; g = 0; b = x; } double m = l - 0.5 * c; return new int[]{ (int) ((r + m) * 255), (int) ((g + m) * 255), (int) ((b + m) * 255) }; // int r, g, b; // // if (s == 0) { // r = g = b = (int) (255 * l); // achromatic // } else { // double q = l < 0.5 ? l * (1 + s) : l + s - l * s; // double p = 2 * l - q; // r = (int) (255 * hue2rgb(p, q, h + 1 / 3)); // g = (int) (255 * hue2rgb(p, q, h)); // b = (int) (255 * hue2rgb(p, q, h - 1 / 3)); // } // // return new int[]{r, g, b}; } }
package com.ebay.test; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.math.BigDecimal; import java.math.BigInteger; import java.net.URL; import java.net.URLConnection; import java.util.ArrayList; import java.util.GregorianCalendar; import java.util.List; import javax.xml.datatype.DatatypeConfigurationException; import javax.xml.datatype.DatatypeFactory; import javax.xml.namespace.QName; import junit.framework.Assert; import org.junit.Before; import org.junit.Test; import com.ebay.binding.BindingConstants; import com.ebay.marketplace.services.AttrTypeEnum; import com.ebay.marketplace.services.ComplexTypeSimpleContentWithAttrGp; import com.ebay.marketplace.services.EmptyType; import com.ebay.marketplace.services.GetAnonType; import com.ebay.marketplace.services.GetAnonTypeResponse; import com.ebay.marketplace.services.GetChoiceType; import com.ebay.marketplace.services.GetChoiceTypeResponse; import com.ebay.marketplace.services.GetEmptyAbsPolyType; import com.ebay.marketplace.services.GetEmptyAbsPolyTypeResponse; import com.ebay.marketplace.services.GetRecursionTypeResponse; import com.ebay.marketplace.services.GetRegistrationInfo; import com.ebay.marketplace.services.GetRegistrationInfoResponse; import com.ebay.marketplace.services.GetReservedType; import com.ebay.marketplace.services.GetReservedTypeResponse; import com.ebay.marketplace.services.GetSecurityCredentials; import com.ebay.marketplace.services.GetSecurityCredentialsResponse; import com.ebay.marketplace.services.GetUserInfo; import com.ebay.marketplace.services.GetUserInfoResponse; import com.ebay.marketplace.services.GlobalType; import com.ebay.marketplace.services.Items; import com.ebay.marketplace.services.MultipleNS; import com.ebay.marketplace.services.MyComplexType; import com.ebay.marketplace.services.Pamphlet; import com.ebay.marketplace.services.Plane; import com.ebay.marketplace.services.PolymorphismTest; import 
com.ebay.marketplace.services.PrimitiveTypes; import com.ebay.marketplace.services.RecursionType; import com.ebay.marketplace.services.RegistrationInfoType; import com.ebay.marketplace.services.RestrictedType; import com.ebay.marketplace.services.SampleComplexType; import com.ebay.marketplace.services.SecurityCredentials; import com.ebay.marketplace.services.Test1; import com.ebay.marketplace.services.TestEnum; import com.ebay.marketplace.services.UserInfoType; import com.ebay.marketplace.services.interopmodified.GetUserInfo_faultMsg; import com.ebay.marketplace.services.interopmodified.gen.SharedBlogsInterOpModifiedV1Consumer; import com.ebay.soaframework.common.config.DataBindingConfig; import com.ebay.soaframework.common.config.SerializerConfig; import com.ebay.soaframework.common.exceptions.ServiceException; import com.ebay.soaframework.common.exceptions.ServiceInvocationException; import com.ebay.soaframework.common.types.ByteBufferWrapper; import com.ebay.soaframework.common.types.SOAConstants; import com.ebay.soaframework.sif.service.Service; import com.ebay.soaframework.sif.service.ServiceFactory; import com.ebay.soaframework.sif.service.ServiceInvokerOptions; public class ProtoBufFormatTests { SharedBlogsInterOpModifiedV1Consumer testClient = null; private Service svc = null; @Before public void initService() throws IOException { try { testClient = new SharedBlogsInterOpModifiedV1Consumer( "BlogsInterOpModifiedV1Consumer", "production"); svc = ServiceFactory.create("BlogsInterOpModifiedV1","production","BlogsInterOpModifiedV1Consumer",new URL("http://localhost:8080/_soa_/services/blogs/BlogsInterOpModifiedV1/v1")); } catch (ServiceException e) { // TODO Auto-generated catch block e.printStackTrace(); } } @Test public void testGetProtoFile() throws IOException{ boolean exception = false; URL proto = new URL("http://localhost:8080/_soa_/services/blogs/BlogsInterOpModifiedV1/v1?proto"); URLConnection yc = proto.openConnection(); BufferedReader in = null; try{ 
in = new BufferedReader( new InputStreamReader( yc.getInputStream())); } catch(Exception e){ exception = true; } finally{ in.close(); } Assert.assertFalse("No exception was expected" ,exception); } //http://localhost:8080/services/advertise/UniqueIDService/v2?proto @Test public void testGetProtoFileNonProtobufService() throws IOException{ boolean error =false; URL proto = new URL("http://localhost:8080/services/advertise/UniqueIDService/v2?proto"); URLConnection yc = proto.openConnection(); BufferedReader in = null; try{ in = new BufferedReader( new InputStreamReader( yc.getInputStream())); } catch(Exception e){ error = true; } finally{ if( in!= null) in.close(); } Assert.assertTrue("No error message returned",error); } @Test public void testGetAnonTypeOperationWithProtoBuff() throws ServiceException, DatatypeConfigurationException { GetAnonType anonType = new GetAnonType(); Items value = new Items(); Test1 t = new Test1(); t.setProductName("phone"); t.setQuantity(12); GregorianCalendar greCal = new GregorianCalendar(); greCal.setTimeInMillis(10000); t.setUSPrice(10000.334349d); t.setShipDate(DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); value.getItem().add(t); anonType.setIn1(value); GlobalType gtype = new GlobalType(); gtype.setGlobalName("gtype"); anonType.setIn2(gtype); anonType.setIn3("value"); GetAnonTypeResponse response = testClient.getAnonType(anonType); // Assert here Assert.assertEquals(response.getOut1().getItem().get(0).getProductName(),"SOA"); Assert.assertEquals(response.getOut1().getItem().get(0).getQuantity(),100); Assert.assertEquals(response.getOut2().getGlobalName(),"value"); Assert.assertEquals(response.getOut3(),"value"); } @Test public void testUserInfoOperation() throws GetUserInfo_faultMsg{ GetUserInfo userInfo = new GetUserInfo(); UserInfoType type = new UserInfoType(); type.setUserName("po"); type.getCountry().add("US"); RegistrationInfoType regInfo = new RegistrationInfoType(); regInfo.setEmail("email"); 
regInfo.setFeedback(34.5f); regInfo.setSellerType("star"); regInfo.setSite(10); regInfo.setUserID("user id"); type.getRegInfo().add(regInfo); userInfo.setIn1(type); userInfo.setIn2("value"); //get and retrieve response GetUserInfoResponse res = testClient.getUserInfo(userInfo); UserInfoType info = res.getOut1(); Assert.assertEquals(info.getCountry().get(0),"US"); Assert.assertEquals(info.getRegInfo().get(0).getEmail(),"email"); Assert.assertEquals(info.getRegInfo().get(0).getFeedback(),34.5f); Assert.assertEquals(info.getRegInfo().get(0).getSellerType(),"star"); Assert.assertEquals(info.getRegInfo().get(0).getSite(),10); Assert.assertEquals(info.getRegInfo().get(0).getUserID(),"user id"); Assert.assertEquals(res.getOut2(),"value"); } @Test public void getEmptyAbsPolyTypeTestForProtobuf() throws DatatypeConfigurationException{ GregorianCalendar greCal = new GregorianCalendar(); greCal.setTimeInMillis(10000); GetEmptyAbsPolyType empty = new GetEmptyAbsPolyType(); EmptyType type = new EmptyType(); byte [] bytes ={10,0,11,0}; type.setB64Bi(bytes); type.setBool(true); type.setDa(DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); type.setDay(DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); type.setDec(new BigDecimal("45")); type.setDtime(DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); type.setDur(DatatypeFactory.newInstance().newDuration(1000)); type.setGmday(DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); type.setGmth(DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); type.setGyear(DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); type.setGymth(DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); type.setHexbi(bytes); type.setTi(DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); type.setValue("value"); empty.setIn1(type); Pamphlet pam = new Pamphlet(); pam.setISBN("ISBN"); pam.setTitle("title"); empty.setIn2(pam); empty.setIn3("plane"); Plane pl = new Plane(); 
pl.setInfo("info"); empty.setIn4(pl); //retrieve response GetEmptyAbsPolyTypeResponse res = testClient.getEmptyAbsPolyType(empty); EmptyType emp =res.getOut1(); Assert.assertEquals(emp.getB64Bi()[0],bytes[0]); Assert.assertEquals(emp.getValue(),"value"); Assert.assertEquals(emp.getDa(),DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); Assert.assertEquals(emp.getDay(),DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); Assert.assertEquals(emp.getDec(),new BigDecimal("45")); Assert.assertEquals(emp.getDtime(),DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); Assert.assertEquals(emp.getDur(),DatatypeFactory.newInstance().newDuration(1000)); Assert.assertEquals(emp.getGmday(),DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); Assert.assertEquals(emp.getGmth(),DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); Assert.assertEquals(emp.getGyear(),DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); Assert.assertEquals(emp.getGymth(),DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); Assert.assertEquals(emp.getHexbi()[0],bytes[0]); Assert.assertEquals(emp.getTi(),DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); Assert.assertEquals(res.getOut3(),"value"); Assert.assertEquals(res.getOut2().getISBN(),"ISBN"); Assert.assertEquals(res.getOut2().getTitle(),"title"); Assert.assertEquals(res.getOut4().getInfo(),"info"); } @Test public void testGetResrvedTypeOperationForProtobuf(){ //set request GetReservedType resType = new GetReservedType(); resType.setIn1("value"); resType.setIn2("value"); resType.setIn3("value"); resType.setIn4("value"); GetReservedTypeResponse res = testClient.getReservedType(resType); //retrieve response Assert.assertEquals(res.getOut1(),"value"); Assert.assertEquals(res.getOut2(),"value"); Assert.assertEquals(res.getOut3(),"value"); Assert.assertEquals(res.getOut4(),"value"); } @Test public void testRecursionTypeForProtobuf() throws 
DatatypeConfigurationException{ //set request RecursionType recType = new RecursionType(); RecursionType recType2 = new RecursionType(); recType2.setIn("value"); SampleComplexType samType1 = new SampleComplexType(); samType1.getInt().add(10); samType1.setValue1("value"); samType1.setValue2("value"); samType1.getValue3().add("value"); samType1.setValue4("value"); samType1.getValue5().add("value"); samType1.getValue6().add("value"); samType1.getValue7().add("value"); samType1.setValue8("value"); samType1.setValue9("value"); samType1.setValue10("value"); recType2.setInt2(samType1); recType.setGetRecursionType(recType2); recType.setIn("value"); SampleComplexType samType = new SampleComplexType(); samType.getInt().add(10); samType.setValue1("value"); samType.setValue2("value"); samType.getValue3().add("value"); samType.setValue4("value"); samType.getValue5().add("value"); samType.getValue6().add("value"); samType.getValue7().add("value"); samType.setValue8("value"); samType.setValue9("value"); samType.setValue10("value"); recType.setInt2(samType); GregorianCalendar greCal = new GregorianCalendar(); greCal.setTimeInMillis(10000); GetRecursionTypeResponse res = testClient.getRecursionType(recType); Assert.assertEquals(res.getOut1().getValue(),23.4f); Assert.assertEquals(res.getOut1().isArgB(),true); Assert.assertEquals(res.getOut1().getArgA(),DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); Assert.assertEquals(res.getOut2().isArgB(),true); Assert.assertEquals(res.getOut2().getValue(),RestrictedType.ONE); Assert.assertEquals(res.getOut(),"value"); } @Test public void testGetSecurityCredentialsOperationForProtobuf(){ //set request GetSecurityCredentials sec = new GetSecurityCredentials(); SecurityCredentials cre = new SecurityCredentials(); cre.setAppName("value"); cre.setPwd("value"); cre.setUserName("value"); sec.setIn1(cre); sec.setIn2("value"); //get response GetSecurityCredentialsResponse res = testClient.getSecurityCredentials(sec); 
Assert.assertEquals(res.getOut1().getAppName(),"value"); Assert.assertEquals(res.getOut1().getPwd(),"value"); Assert.assertEquals(res.getOut1().getUserName(),"value"); Assert.assertEquals(res.getOut2(),"value"); } @Test public void testGetChoiceTypeForProtobuf() throws DatatypeConfigurationException{ GregorianCalendar greCal = new GregorianCalendar(); greCal.setTimeInMillis(10000); GetChoiceType choice = new GetChoiceType(); PrimitiveTypes type = new PrimitiveTypes(); byte [] bytes = new byte[10]; type.setB64Bi(bytes); type.setBool(true); type.setDa(DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); type.setDay(DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); type.setDec(new BigDecimal("45")); type.setDtime(DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); type.setDur(DatatypeFactory.newInstance().newDuration(1000)); type.setGmday(DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); type.setGmth(DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); type.setGyear(DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); type.setGymth(DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); type.setHexbi(bytes); type.setTi(DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); type.setId("value"); type.setLan("vallue"); type.setName("value"); type.setNcname("value"); type.setEnt("value"); type.setNegInt(new BigInteger("23")); type.setNonNegInt(new BigInteger("23")); type.setNonPosInt(new BigInteger("23")); type.setNorstr("value"); type.setQname(new QName("qname")); type.setToken("value"); type.setUnbyte(new Short("23")); type.setUnint(222222l); type.getTokens().add("value"); type.setUnlong(new BigInteger("23")); type.setUnshort( new Integer("23")); choice.setIn1(type); choice.setIn2("value"); MultipleNS mul = new MultipleNS(); mul.setCount(10); mul.setName("value"); choice.setIn3(mul); PolymorphismTest test = new PolymorphismTest(); MyComplexType ctype = new MyComplexType(); 
ctype.setElemA(23.4f); ctype.setElemB("value"); test.setElemD(ctype); choice.setInt4(test); GetChoiceTypeResponse res = testClient.getChoiceType(choice); Assert.assertEquals(res.getOut1().getB64Bi().length,bytes.length); Assert.assertEquals(res.getOut1().getDa(),DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); Assert.assertEquals(res.getOut1().getDay(),DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); Assert.assertEquals(res.getOut1().getDec(),new BigDecimal("45")); Assert.assertEquals(res.getOut1().getDtime(),DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); Assert.assertEquals(res.getOut1().getDur(),DatatypeFactory.newInstance().newDuration(1000)); Assert.assertEquals(res.getOut1().getGmday(),DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); Assert.assertEquals(res.getOut1().getGmth(),DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); Assert.assertEquals(res.getOut1().getGyear(),DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); Assert.assertEquals(res.getOut1().getGymth(),DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); Assert.assertEquals(res.getOut1().getHexbi().length,bytes.length); Assert.assertEquals(res.getOut1().getTi(),DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); Assert.assertEquals(res.getOut1().getEnt(),"value"); Assert.assertEquals(res.getOut1().getId(),"value"); Assert.assertEquals(res.getOut1().getLan(),"value"); Assert.assertEquals(res.getOut1().getName(),"value"); Assert.assertEquals(res.getOut1().getNcname(),"value"); Assert.assertEquals(res.getOut1().getNorstr(),"value"); Assert.assertEquals(res.getOut1().getToken(),"value"); Assert.assertEquals(res.getOut1().getNegInt(),new BigInteger("23")); Assert.assertEquals(res.getOut1().getNonNegInt(),new BigInteger("23")); Assert.assertEquals(res.getOut1().getNonPosInt(),new BigInteger("23")); Assert.assertEquals(res.getOut1().getPosInt(),new BigInteger("23")); 
Assert.assertEquals(res.getOut1().getQname(), new QName("qname")); Assert.assertEquals(res.getOut1().getTokens().get(0),"value"); Assert.assertEquals(res.getOut1().getUnbyte(), new Short("23")); Assert.assertEquals(res.getOut1().getUnint(),new Long(222222l)); Assert.assertEquals(res.getOut1().getUnlong(), new BigInteger("23")); Assert.assertEquals(res.getOut1().getUnshort(), new Integer("23")); Assert.assertEquals(res.getOut2(),"value"); Assert.assertEquals(res.getOut3().getCount(),10); Assert.assertEquals(res.getOut3().getName(),"value"); Assert.assertEquals(res.getOut4().getElemB(),"value"); Assert.assertEquals(res.getOut4().getElemA(),23.4f); Assert.assertEquals(res.getOut5().getElemB(),"value"); Assert.assertEquals(res.getOut5().getElemC(),"value"); Assert.assertEquals(res.getOut5().getElemA(),23.4f); Assert.assertEquals(res.getOut6().getElemB(),"value"); Assert.assertEquals(res.getOut6().getElemC(),"value"); Assert.assertEquals(res.getOut6().getElemA(),23.4f); } @Test public void testTransportHTT10() throws ServiceInvocationException, DatatypeConfigurationException{ GetUserInfoResponse response1 = (GetUserInfoResponse) invoke("normal",BindingConstants.PAYLOAD_PROTOBUF,BindingConstants.PAYLOAD_PROTOBUF,"getUserInfo",SOAConstants.TRANSPORT_HTTP_10,null); Assert.assertEquals(response1.getOut2(),"value"); UserInfoType info = response1.getOut1(); Assert.assertEquals(info.getCountry().get(0),"US"); Assert.assertEquals(info.getRegInfo().get(0).getEmail(),"email"); Assert.assertEquals(info.getRegInfo().get(0).getFeedback(),34.5f); Assert.assertEquals(info.getRegInfo().get(0).getSellerType(),"star"); Assert.assertEquals(info.getRegInfo().get(0).getSite(),10); Assert.assertEquals(info.getRegInfo().get(0).getUserID(),"user id"); } @Test public void testDiffFormatsXMLPROTOBUF() throws ServiceInvocationException, DatatypeConfigurationException{ GetUserInfoResponse response1 = (GetUserInfoResponse) 
invoke("normal",BindingConstants.PAYLOAD_XML,BindingConstants.PAYLOAD_PROTOBUF,"getUserInfo",SOAConstants.TRANSPORT_HTTP_11,null); Assert.assertEquals(response1.getOut2(),"value"); UserInfoType info = response1.getOut1(); Assert.assertEquals(info.getCountry().get(0),"US"); Assert.assertEquals(info.getRegInfo().get(0).getEmail(),"email"); Assert.assertEquals(info.getRegInfo().get(0).getFeedback(),34.5f); Assert.assertEquals(info.getRegInfo().get(0).getSellerType(),"star"); Assert.assertEquals(info.getRegInfo().get(0).getSite(),10); Assert.assertEquals(info.getRegInfo().get(0).getUserID(),"user id"); } @Test public void testGetRegistrationInfoForProtobuf() throws DatatypeConfigurationException{ GregorianCalendar greCal = new GregorianCalendar(); greCal.setTimeInMillis(10000); GetRegistrationInfo info = new GetRegistrationInfo(); RegistrationInfoType infoType = new RegistrationInfoType(); infoType.setEmail("value"); infoType.setFeedback(23.4f); infoType.setSellerType("value"); infoType.setSite(10); infoType.setUserID("value"); info.setIn1(infoType); info.setIn2("value"); ComplexTypeSimpleContentWithAttrGp grp = new ComplexTypeSimpleContentWithAttrGp(); grp.setAttrA(DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); grp.setAttrB(new Integer(23)); grp.setValue("value"); info.setInt3(grp); AttrTypeEnum att = new AttrTypeEnum(); att.setArgE("value"); att.setArgF("value"); att.setAttrA(DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); att.setAttrB(TestEnum.ONE); info.setInt4(att); GetRegistrationInfoResponse res = testClient.getRegistrationInfo(info); //retrieve response Assert.assertEquals(res.getOut1().getEmail(),"value"); Assert.assertEquals(res.getOut1().getSellerType(),"value"); Assert.assertEquals(res.getOut1().getSite(),10); Assert.assertEquals(res.getOut1().getUserID(),"value"); Assert.assertEquals(res.getOut1().getFeedback(),23.4f); Assert.assertEquals(res.getOut2(),"value"); Assert.assertEquals(res.getOut3().getArgE(),"value"); 
Assert.assertEquals(res.getOut3().getArgF(),"value"); Assert.assertEquals(res.getOut3().getAttrA(),DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); Assert.assertEquals(res.getOut3().getAttrB(),com.ebay.marketplace.services.TestEnum.ONE); Assert.assertEquals(res.getOut4().getParam(),"value"); Assert.assertEquals(res.getOut4().getParam1(),"value"); Assert.assertEquals(res.getOut4().getValue(),23.4f); Assert.assertEquals(res.getOut4().getArgA(),DatatypeFactory.newInstance().newXMLGregorianCalendar(greCal)); Assert.assertEquals(res.getOut5().getValue(),"value"); } @Test public void testDiffFormatsPROTOBUFXML() throws ServiceInvocationException, DatatypeConfigurationException{ GetUserInfoResponse response1 = (GetUserInfoResponse) invoke("normal",BindingConstants.PAYLOAD_PROTOBUF,BindingConstants.PAYLOAD_XML,"getUserInfo",SOAConstants.TRANSPORT_HTTP_11,null); Assert.assertEquals(response1.getOut2(),"value"); UserInfoType info = response1.getOut1(); Assert.assertEquals(info.getCountry().get(0),"US"); Assert.assertEquals(info.getRegInfo().get(0).getEmail(),"email"); Assert.assertEquals(info.getRegInfo().get(0).getFeedback(),34.5f); Assert.assertEquals(info.getRegInfo().get(0).getSellerType(),"star"); Assert.assertEquals(info.getRegInfo().get(0).getSite(),10); Assert.assertEquals(info.getRegInfo().get(0).getUserID(),"user id"); } @Test public void testDiffFormatsPROTOBUFJSON() throws ServiceInvocationException, DatatypeConfigurationException{ GetUserInfoResponse response1 = (GetUserInfoResponse) invoke("normal",BindingConstants.PAYLOAD_PROTOBUF,BindingConstants.PAYLOAD_JSON,"getUserInfo",SOAConstants.TRANSPORT_HTTP_11,null); Assert.assertEquals(response1.getOut2(),"value"); UserInfoType info = response1.getOut1(); Assert.assertEquals(info.getCountry().get(0),"US"); Assert.assertEquals(info.getRegInfo().get(0).getEmail(),"email"); Assert.assertEquals(info.getRegInfo().get(0).getFeedback(),34.5f); 
Assert.assertEquals(info.getRegInfo().get(0).getSellerType(),"star"); Assert.assertEquals(info.getRegInfo().get(0).getSite(),10); Assert.assertEquals(info.getRegInfo().get(0).getUserID(),"user id"); } @Test public void testDiffFormatsJSONPROTOBUF() throws ServiceInvocationException, DatatypeConfigurationException{ GetUserInfoResponse response1 = (GetUserInfoResponse) invoke("normal",BindingConstants.PAYLOAD_JSON,BindingConstants.PAYLOAD_PROTOBUF,"getUserInfo",SOAConstants.TRANSPORT_HTTP_11,null); Assert.assertEquals(response1.getOut2(),"value"); UserInfoType info = response1.getOut1(); Assert.assertEquals(info.getCountry().get(0),"US"); Assert.assertEquals(info.getRegInfo().get(0).getEmail(),"email"); Assert.assertEquals(info.getRegInfo().get(0).getFeedback(),34.5f); Assert.assertEquals(info.getRegInfo().get(0).getSellerType(),"star"); Assert.assertEquals(info.getRegInfo().get(0).getSite(),10); Assert.assertEquals(info.getRegInfo().get(0).getUserID(),"user id"); } @Test public void testDiffFormatsPROTOBUFNV() throws ServiceInvocationException, DatatypeConfigurationException{ GetUserInfoResponse response1 = (GetUserInfoResponse) invoke("normal",BindingConstants.PAYLOAD_PROTOBUF,BindingConstants.PAYLOAD_NV,"getUserInfo",SOAConstants.TRANSPORT_HTTP_11,null); Assert.assertEquals(response1.getOut2(),"value"); UserInfoType info = response1.getOut1(); Assert.assertEquals(info.getCountry().get(0),"US"); Assert.assertEquals(info.getRegInfo().get(0).getEmail(),"email"); Assert.assertEquals(info.getRegInfo().get(0).getFeedback(),34.5f); Assert.assertEquals(info.getRegInfo().get(0).getSellerType(),"star"); Assert.assertEquals(info.getRegInfo().get(0).getSite(),10); Assert.assertEquals(info.getRegInfo().get(0).getUserID(),"user id"); } @Test public void testDiffFormatsNVPROTOBUF() throws ServiceInvocationException, DatatypeConfigurationException{ GetUserInfoResponse response1 = (GetUserInfoResponse) 
invoke("normal",BindingConstants.PAYLOAD_NV,BindingConstants.PAYLOAD_PROTOBUF,"getUserInfo",SOAConstants.TRANSPORT_HTTP_11,null); Assert.assertEquals(response1.getOut2(),"value"); UserInfoType info = response1.getOut1(); Assert.assertEquals(info.getCountry().get(0),"US"); Assert.assertEquals(info.getRegInfo().get(0).getEmail(),"email"); Assert.assertEquals(info.getRegInfo().get(0).getFeedback(),34.5f); Assert.assertEquals(info.getRegInfo().get(0).getSellerType(),"star"); Assert.assertEquals(info.getRegInfo().get(0).getSite(),10); Assert.assertEquals(info.getRegInfo().get(0).getUserID(),"user id"); } @Test public void testDiffFormatsFASTINFOSETPROTOBUF() throws ServiceInvocationException, DatatypeConfigurationException{ GetUserInfoResponse response1 = (GetUserInfoResponse) invoke("normal",BindingConstants.PAYLOAD_FAST_INFOSET,BindingConstants.PAYLOAD_PROTOBUF,"getUserInfo",SOAConstants.TRANSPORT_HTTP_11,null); Assert.assertEquals(response1.getOut2(),"value"); UserInfoType info = response1.getOut1(); Assert.assertEquals(info.getCountry().get(0),"US"); Assert.assertEquals(info.getRegInfo().get(0).getEmail(),"email"); Assert.assertEquals(info.getRegInfo().get(0).getFeedback(),34.5f); Assert.assertEquals(info.getRegInfo().get(0).getSellerType(),"star"); Assert.assertEquals(info.getRegInfo().get(0).getSite(),10); Assert.assertEquals(info.getRegInfo().get(0).getUserID(),"user id"); } @Test public void testDiffFormatsPROTOBUFFASTINFOSET() throws ServiceInvocationException, DatatypeConfigurationException{ GetUserInfoResponse response1 = (GetUserInfoResponse) invoke("normal",BindingConstants.PAYLOAD_PROTOBUF,BindingConstants.PAYLOAD_FAST_INFOSET,"getUserInfo",SOAConstants.TRANSPORT_HTTP_11,null); Assert.assertEquals(response1.getOut2(),"value"); UserInfoType info = response1.getOut1(); Assert.assertEquals(info.getCountry().get(0),"US"); Assert.assertEquals(info.getRegInfo().get(0).getEmail(),"email"); Assert.assertEquals(info.getRegInfo().get(0).getFeedback(),34.5f); 
Assert.assertEquals(info.getRegInfo().get(0).getSellerType(),"star"); Assert.assertEquals(info.getRegInfo().get(0).getSite(),10); Assert.assertEquals(info.getRegInfo().get(0).getUserID(),"user id"); } public Object invoke(String mode,String reqBinding,String resBinding,String opsName,String transport,String msgProtocol) throws DatatypeConfigurationException, ServiceInvocationException{ ByteBufferWrapper outParam = new ByteBufferWrapper(); Object[] inParam = new Object[1]; GetUserInfo userInfo = new GetUserInfo(); UserInfoType type = new UserInfoType(); type.setUserName("po"); type.getCountry().add("US"); RegistrationInfoType regInfo = new RegistrationInfoType(); regInfo.setEmail("email"); regInfo.setFeedback(34.5f); regInfo.setSellerType("star"); regInfo.setSite(10); regInfo.setUserID("user id"); type.getRegInfo().add(regInfo); userInfo.setIn1(type); userInfo.setIn2("value"); inParam[0] = userInfo; ServiceInvokerOptions options = svc.getInvokerOptions(); options.setTransportName(transport); if(msgProtocol != null) options.setMessageProtocolName(msgProtocol); options.setRequestBinding(reqBinding); options.setResponseBinding(resBinding); DataBindingConfig config = new DataBindingConfig(); SerializerConfig serConfig = new SerializerConfig(); serConfig.setDeserializerFactoryClassName("com.ebay.soaframework.common.impl.binding.protobuf.ProtobufDeserializerFactory"); serConfig.setSerializerFactoryClassName("com.ebay.soaframework.common.impl.binding.protobuf.ProtobufSerializerFactory"); serConfig.setMimeType("application/plain"); config.getDataBinding().add(serConfig); if (mode.contentEquals("raw")) { svc.invoke(opsName, inParam, outParam); String response = new String(outParam.getByteBuffer().array()); return response; } else { List<Object> outParam1 = new ArrayList<Object>(); svc.invoke(opsName, inParam, outParam1); GetUserInfoResponse response1 = (GetUserInfoResponse)outParam1.get(0); return response1; } } }
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.resourcemanager.mobilenetwork.fluent;

import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.Response;
import com.azure.core.management.polling.PollResult;
import com.azure.core.util.Context;
import com.azure.core.util.polling.SyncPoller;
import com.azure.resourcemanager.mobilenetwork.fluent.models.AttachedDataNetworkInner;
import com.azure.resourcemanager.mobilenetwork.models.TagsObject;

/**
 * An instance of this class provides access to all the operations defined in AttachedDataNetworksClient.
 *
 * <p>NOTE: this interface is generated by AutoRest (see the file header); manual edits
 * will be overwritten on regeneration.
 */
public interface AttachedDataNetworksClient {
    /**
     * Deletes the specified attached data network.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param packetCoreControlPlaneName The name of the packet core control plane.
     * @param packetCoreDataPlaneName The name of the packet core data plane.
     * @param attachedDataNetworkName The name of the attached data network.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link SyncPoller} for polling of long-running operation.
     */
    @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
    SyncPoller<PollResult<Void>, Void> beginDelete(
        String resourceGroupName,
        String packetCoreControlPlaneName,
        String packetCoreDataPlaneName,
        String attachedDataNetworkName);

    /**
     * Deletes the specified attached data network.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param packetCoreControlPlaneName The name of the packet core control plane.
     * @param packetCoreDataPlaneName The name of the packet core data plane.
     * @param attachedDataNetworkName The name of the attached data network.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link SyncPoller} for polling of long-running operation.
     */
    @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
    SyncPoller<PollResult<Void>, Void> beginDelete(
        String resourceGroupName,
        String packetCoreControlPlaneName,
        String packetCoreDataPlaneName,
        String attachedDataNetworkName,
        Context context);

    /**
     * Deletes the specified attached data network.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param packetCoreControlPlaneName The name of the packet core control plane.
     * @param packetCoreDataPlaneName The name of the packet core data plane.
     * @param attachedDataNetworkName The name of the attached data network.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    void delete(
        String resourceGroupName,
        String packetCoreControlPlaneName,
        String packetCoreDataPlaneName,
        String attachedDataNetworkName);

    /**
     * Deletes the specified attached data network.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param packetCoreControlPlaneName The name of the packet core control plane.
     * @param packetCoreDataPlaneName The name of the packet core data plane.
     * @param attachedDataNetworkName The name of the attached data network.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    void delete(
        String resourceGroupName,
        String packetCoreControlPlaneName,
        String packetCoreDataPlaneName,
        String attachedDataNetworkName,
        Context context);

    /**
     * Gets information about the specified attached data network.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param packetCoreControlPlaneName The name of the packet core control plane.
     * @param packetCoreDataPlaneName The name of the packet core data plane.
     * @param attachedDataNetworkName The name of the attached data network.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return information about the specified attached data network.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    AttachedDataNetworkInner get(
        String resourceGroupName,
        String packetCoreControlPlaneName,
        String packetCoreDataPlaneName,
        String attachedDataNetworkName);

    /**
     * Gets information about the specified attached data network.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param packetCoreControlPlaneName The name of the packet core control plane.
     * @param packetCoreDataPlaneName The name of the packet core data plane.
     * @param attachedDataNetworkName The name of the attached data network.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return information about the specified attached data network along with {@link Response}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Response<AttachedDataNetworkInner> getWithResponse(
        String resourceGroupName,
        String packetCoreControlPlaneName,
        String packetCoreDataPlaneName,
        String attachedDataNetworkName,
        Context context);

    /**
     * Creates or updates an attached data network.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param packetCoreControlPlaneName The name of the packet core control plane.
     * @param packetCoreDataPlaneName The name of the packet core data plane.
     * @param attachedDataNetworkName The name of the attached data network.
     * @param parameters Parameters supplied to the create or update attached data network operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link SyncPoller} for polling of attached data network resource.
     */
    @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
    SyncPoller<PollResult<AttachedDataNetworkInner>, AttachedDataNetworkInner> beginCreateOrUpdate(
        String resourceGroupName,
        String packetCoreControlPlaneName,
        String packetCoreDataPlaneName,
        String attachedDataNetworkName,
        AttachedDataNetworkInner parameters);

    /**
     * Creates or updates an attached data network.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param packetCoreControlPlaneName The name of the packet core control plane.
     * @param packetCoreDataPlaneName The name of the packet core data plane.
     * @param attachedDataNetworkName The name of the attached data network.
     * @param parameters Parameters supplied to the create or update attached data network operation.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link SyncPoller} for polling of attached data network resource.
     */
    @ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
    SyncPoller<PollResult<AttachedDataNetworkInner>, AttachedDataNetworkInner> beginCreateOrUpdate(
        String resourceGroupName,
        String packetCoreControlPlaneName,
        String packetCoreDataPlaneName,
        String attachedDataNetworkName,
        AttachedDataNetworkInner parameters,
        Context context);

    /**
     * Creates or updates an attached data network.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param packetCoreControlPlaneName The name of the packet core control plane.
     * @param packetCoreDataPlaneName The name of the packet core data plane.
     * @param attachedDataNetworkName The name of the attached data network.
     * @param parameters Parameters supplied to the create or update attached data network operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return attached data network resource.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    AttachedDataNetworkInner createOrUpdate(
        String resourceGroupName,
        String packetCoreControlPlaneName,
        String packetCoreDataPlaneName,
        String attachedDataNetworkName,
        AttachedDataNetworkInner parameters);

    /**
     * Creates or updates an attached data network.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param packetCoreControlPlaneName The name of the packet core control plane.
     * @param packetCoreDataPlaneName The name of the packet core data plane.
     * @param attachedDataNetworkName The name of the attached data network.
     * @param parameters Parameters supplied to the create or update attached data network operation.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return attached data network resource.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    AttachedDataNetworkInner createOrUpdate(
        String resourceGroupName,
        String packetCoreControlPlaneName,
        String packetCoreDataPlaneName,
        String attachedDataNetworkName,
        AttachedDataNetworkInner parameters,
        Context context);

    /**
     * Updates an attached data network update tags.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param packetCoreControlPlaneName The name of the packet core control plane.
     * @param packetCoreDataPlaneName The name of the packet core data plane.
     * @param attachedDataNetworkName The name of the attached data network.
     * @param parameters Parameters supplied to update attached data network tags.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return attached data network resource.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    AttachedDataNetworkInner updateTags(
        String resourceGroupName,
        String packetCoreControlPlaneName,
        String packetCoreDataPlaneName,
        String attachedDataNetworkName,
        TagsObject parameters);

    /**
     * Updates an attached data network update tags.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param packetCoreControlPlaneName The name of the packet core control plane.
     * @param packetCoreDataPlaneName The name of the packet core data plane.
     * @param attachedDataNetworkName The name of the attached data network.
     * @param parameters Parameters supplied to update attached data network tags.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return attached data network resource along with {@link Response}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Response<AttachedDataNetworkInner> updateTagsWithResponse(
        String resourceGroupName,
        String packetCoreControlPlaneName,
        String packetCoreDataPlaneName,
        String attachedDataNetworkName,
        TagsObject parameters,
        Context context);

    /**
     * Gets all the data networks associated with a packet core data plane.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param packetCoreControlPlaneName The name of the packet core control plane.
     * @param packetCoreDataPlaneName The name of the packet core data plane.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all the data networks associated with a packet core data plane as paginated response with {@link
     *     PagedIterable}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedIterable<AttachedDataNetworkInner> listByPacketCoreDataPlane(
        String resourceGroupName, String packetCoreControlPlaneName, String packetCoreDataPlaneName);

    /**
     * Gets all the data networks associated with a packet core data plane.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param packetCoreControlPlaneName The name of the packet core control plane.
     * @param packetCoreDataPlaneName The name of the packet core data plane.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all the data networks associated with a packet core data plane as paginated response with {@link
     *     PagedIterable}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedIterable<AttachedDataNetworkInner> listByPacketCoreDataPlane(
        String resourceGroupName, String packetCoreControlPlaneName, String packetCoreDataPlaneName, Context context);
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/* $Id$ */

package org.apache.fop.layoutmgr.inline;

import java.util.LinkedList;
import java.util.List;

import org.apache.fop.area.Trait;
import org.apache.fop.area.inline.TextArea;
import org.apache.fop.fo.flow.Character;
import org.apache.fop.fo.properties.CommonBorderPaddingBackground;
import org.apache.fop.fonts.Font;
import org.apache.fop.fonts.FontSelector;
import org.apache.fop.layoutmgr.InlineKnuthSequence;
import org.apache.fop.layoutmgr.KnuthElement;
import org.apache.fop.layoutmgr.KnuthGlue;
import org.apache.fop.layoutmgr.KnuthPenalty;
import org.apache.fop.layoutmgr.KnuthSequence;
import org.apache.fop.layoutmgr.LayoutContext;
import org.apache.fop.layoutmgr.LeafPosition;
import org.apache.fop.layoutmgr.Position;
import org.apache.fop.layoutmgr.TraitSetter;
import org.apache.fop.traits.MinOptMax;
import org.apache.fop.traits.SpaceVal;
import org.apache.fop.util.CharUtilities;

/**
 * LayoutManager for the fo:character formatting object
 */
public class CharacterLayoutManager extends LeafNodeLayoutManager {

    // letter-spacing as a min/opt/max range, taken from the FO in initialize()
    private MinOptMax letterSpaceIPD;
    // inline-progression width of the hyphen character for the selected font
    private int hyphIPD;
    // font selected for the single character this LM lays out
    private Font font;
    private CommonBorderPaddingBackground borderProps = null;

    /**
     * Constructor
     *
     * @param node the fo:character formatting object
     */
    public CharacterLayoutManager(Character node) {
        super(node);
    }

    /** {@inheritDoc} */
    @Override
    public void initialize() {
        Character fobj = (Character)this.fobj;
        font = FontSelector.selectFontForCharacter(fobj, this);
        SpaceVal ls = SpaceVal.makeLetterSpacing(fobj.getLetterSpacing());
        letterSpaceIPD = ls.getSpace();
        hyphIPD = fobj.getCommonHyphenation().getHyphIPD(font);
        borderProps = fobj.getCommonBorderPaddingBackground();
        setCommonBorderPaddingBackground(borderProps);
        TextArea chArea = getCharacterInlineArea(fobj);
        chArea.setBaselineOffset(font.getAscender());
        setCurrentArea(chArea);
    }

    /**
     * Builds the TextArea for the single character: spaces go in via addSpace
     * (unless zero-width), everything else via addWord; then producer id,
     * text decoration and structure-tree traits are attached.
     */
    private TextArea getCharacterInlineArea(Character node) {
        TextArea text = new TextArea();
        char ch = node.getCharacter();
        int ipd = font.getCharWidth(ch);
        int blockProgressionOffset = 0;
        int level = node.getBidiLevel();
        if (CharUtilities.isAnySpace(ch)) {
            // add space unless it's zero-width:
            if (!CharUtilities.isZeroWidthSpace(ch)) {
                text.addSpace(ch, ipd, CharUtilities.isAdjustableSpace(ch),
                        blockProgressionOffset, level);
            }
        } else {
            // non-negative bidi level is recorded per glyph; -1 means "no level"
            int[] levels = (level >= 0) ? new int[] {level} : null;
            text.addWord(String.valueOf(ch), ipd, null, levels, null, blockProgressionOffset);
        }
        TraitSetter.setProducerID(text, node.getId());
        TraitSetter.addTextDecoration(text, node.getTextDecoration());
        TraitSetter.addStructureTreeElement(text, node.getStructureTreeElement());
        return text;
    }

    /** {@inheritDoc} */
    @Override
    public List getNextKnuthElements(LayoutContext context, int alignment) {
        MinOptMax ipd;
        curArea = get(context);
        KnuthSequence seq = new InlineKnuthSequence();

        if (curArea == null) {
            setFinished(true);
            return null;
        }

        Character fobj = (Character)this.fobj;

        ipd = MinOptMax.getInstance(curArea.getIPD());

        curArea.setBPD(font.getAscender() - font.getDescender());

        TraitSetter.addFontTraits(curArea, font);
        curArea.addTrait(Trait.COLOR, fobj.getColor());

        // TODO: may need some special handling for fo:character
        alignmentContext = new AlignmentContext(font
                                    , font.getFontSize()
                                    , fobj.getAlignmentAdjust()
                                    , fobj.getAlignmentBaseline()
                                    , fobj.getBaselineShift()
                                    , fobj.getDominantBaseline()
                                    , context.getAlignmentContext());

        addKnuthElementsForBorderPaddingStart(seq);

        // create the AreaInfo object to store the computed values
        areaInfo = new AreaInfo((short) 0, ipd, false, alignmentContext);

        // node is a fo:Character
        if (letterSpaceIPD.isStiff()) {
            // constant letter space, only return a box
            seq.add(new KnuthInlineBox(areaInfo.ipdArea.getOpt(), areaInfo.alignmentContext,
                    notifyPos(new LeafPosition(this, 0)), false));
        } else {
            // adjustable letter space, return a sequence of elements;
            // at the moment the character is supposed to have no letter spaces,
            // but returning this sequence allows us to change only one element
            // if addALetterSpaceTo() is called
            seq.add(new KnuthInlineBox(areaInfo.ipdArea.getOpt(), areaInfo.alignmentContext,
                    notifyPos(new LeafPosition(this, 0)), false));
            seq.add(new KnuthPenalty(0, KnuthElement.INFINITE, false,
                    new LeafPosition(this, -1), true));
            seq.add(new KnuthGlue(0, 0, 0, new LeafPosition(this, -1), true));
            seq.add(new KnuthInlineBox(0, null, notifyPos(new LeafPosition(this, -1)), true));
        }

        addKnuthElementsForBorderPaddingEnd(seq);

        LinkedList<KnuthSequence> returnList = new LinkedList<KnuthSequence>();
        returnList.add(seq);
        setFinished(true);
        return returnList;
    }

    /** {@inheritDoc} */
    @Override
    public String getWordChars(Position pos) {
        return ((TextArea) curArea).getText();
    }

    /** {@inheritDoc} */
    @Override
    public void hyphenate(Position pos, HyphContext hc) {
        if (hc.getNextHyphPoint() == 1) {
            // the character ends a syllable
            areaInfo.isHyphenated = true;
            somethingChanged = true;
        } else {
            // hc.getNextHyphPoint() returned -1 (no more hyphenation points)
            // or a number > 1;
            // the character does not end a syllable
        }
        hc.updateOffset(1);
    }

    /** {@inheritDoc} */
    @Override
    public boolean applyChanges(List oldList) {
        setFinished(false);
        return somethingChanged;
    }

    /** {@inheritDoc} */
    @Override
    public List getChangedKnuthElements(List oldList, int alignment) {
        if (isFinished()) {
            return null;
        }

        LinkedList<KnuthElement> returnList = new LinkedList<KnuthElement>();

        addKnuthElementsForBorderPaddingStart(returnList);

        if (letterSpaceIPD.isStiff() || areaInfo.letterSpaces == 0) {
            // constant letter space, or no letter space
            returnList.add(new KnuthInlineBox(areaInfo.ipdArea.getOpt(),
                    areaInfo.alignmentContext,
                    notifyPos(new LeafPosition(this, 0)), false));
            if (areaInfo.isHyphenated) {
                returnList.add(new KnuthPenalty(hyphIPD, KnuthPenalty.FLAGGED_PENALTY, true,
                        new LeafPosition(this, -1), false));
            }
        } else {
            // adjustable letter space
            returnList.add(new KnuthInlineBox(areaInfo.ipdArea.getOpt()
                    - areaInfo.letterSpaces * letterSpaceIPD.getOpt(),
                    areaInfo.alignmentContext,
                    notifyPos(new LeafPosition(this, 0)), false));
            returnList.add(new KnuthPenalty(0, KnuthElement.INFINITE, false,
                    new LeafPosition(this, -1), true));
            returnList.add(new KnuthGlue(letterSpaceIPD.mult(areaInfo.letterSpaces),
                    new LeafPosition(this, -1), true));
            // NOTE: statement continues on the next line of the file (chunk boundary)
            returnList.add(new KnuthInlineBox(0, null, notifyPos(new
LeafPosition(this, -1)), true)); if (areaInfo.isHyphenated) { returnList.add(new KnuthPenalty(hyphIPD, KnuthPenalty.FLAGGED_PENALTY, true, new LeafPosition(this, -1), false)); } } addKnuthElementsForBorderPaddingEnd(returnList); setFinished(true); return returnList; } }
/*
 * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.wso2.carbon.device.mgt.core.authorization;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.context.CarbonContext;
import org.wso2.carbon.device.mgt.common.Device;
import org.wso2.carbon.device.mgt.common.DeviceIdentifier;
import org.wso2.carbon.device.mgt.common.DeviceManagementException;
import org.wso2.carbon.device.mgt.common.EnrolmentInfo;
import org.wso2.carbon.device.mgt.common.authorization.DeviceAccessAuthorizationException;
import org.wso2.carbon.device.mgt.common.authorization.DeviceAccessAuthorizationService;
import org.wso2.carbon.device.mgt.common.authorization.DeviceAuthorizationResult;
import org.wso2.carbon.device.mgt.common.group.mgt.DeviceGroup;
import org.wso2.carbon.device.mgt.common.group.mgt.GroupManagementException;
import org.wso2.carbon.device.mgt.common.permission.mgt.Permission;
import org.wso2.carbon.device.mgt.common.permission.mgt.PermissionManagementException;
import org.wso2.carbon.device.mgt.core.internal.DeviceManagementDataHolder;
import org.wso2.carbon.device.mgt.core.permission.mgt.PermissionUtils;
import org.wso2.carbon.user.api.UserRealm;
import org.wso2.carbon.user.api.UserStoreException;

import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 * Implementation of DeviceAccessAuthorization service.
 *
 * <p>A user is authorized for a device when any of the following holds:
 * the user carries the device-management admin permission, the user is the
 * enrolled owner of the device, or the user reaches the device through a
 * device group for which the user holds every requested group permission.</p>
 */
public class DeviceAccessAuthorizationServiceImpl implements DeviceAccessAuthorizationService {

    private final static String CDM_ADMIN_PERMISSION =
            "/device-mgt/devices/any-device/permitted-actions-under-owning-device";
    private final static String CDM_ADMIN = "Device Management Administrator";

    private static Log log = LogFactory.getLog(DeviceAccessAuthorizationServiceImpl.class);

    /**
     * Registers the admin permission in the registry on service creation.
     * A failure is logged (not rethrown) so service startup is best-effort.
     */
    public DeviceAccessAuthorizationServiceImpl() {
        try {
            this.addAdminPermissionToRegistry();
        } catch (PermissionManagementException e) {
            log.error("Unable to add the emm-admin permission to the registry.", e);
        }
    }

    /**
     * {@inheritDoc}
     *
     * <p>When {@code username} is empty, access is granted only for device
     * types that do not require authorization. Otherwise admin/ownership is
     * checked first, then all {@code groupPermissions} must pass.</p>
     */
    @Override
    public boolean isUserAuthorized(DeviceIdentifier deviceIdentifier, String username,
                                    String[] groupPermissions)
            throws DeviceAccessAuthorizationException {
        int tenantId = this.getTenantId();
        if (username == null || username.isEmpty()) {
            return !DeviceManagementDataHolder.getInstance()
                    .requireDeviceAuthorization(deviceIdentifier.getType());
        }
        //check for admin and ownership permissions
        if (isAdminOrDeviceOwner(username, tenantId, deviceIdentifier)) {
            return true;
        }
        //check for group permissions
        try {
            if (groupPermissions == null || groupPermissions.length == 0) {
                return false;
            }
            for (String groupPermission : groupPermissions) {
                if (!isAuthorizedViaGroup(username, deviceIdentifier, groupPermission)) {
                    //if at least one fails, authorization fails
                    return false;
                }
            }
            return true;
        } catch (GroupManagementException | UserStoreException e) {
            throw new DeviceAccessAuthorizationException("Unable to authorize the access to device : " +
                    deviceIdentifier.getId() + " for the user : " +
                    username, e);
        }
    }

    /** {@inheritDoc} */
    @Override
    public boolean isUserAuthorized(DeviceIdentifier deviceIdentifier, String username)
            throws DeviceAccessAuthorizationException {
        return isUserAuthorized(deviceIdentifier, username, null);
    }

    /** {@inheritDoc} */
    @Override
    public boolean isUserAuthorized(DeviceIdentifier deviceIdentifier, String[] groupPermissions)
            throws DeviceAccessAuthorizationException {
        return isUserAuthorized(deviceIdentifier, this.getUserName(), groupPermissions);
    }

    /** {@inheritDoc} */
    @Override
    public boolean isUserAuthorized(DeviceIdentifier deviceIdentifier)
            throws DeviceAccessAuthorizationException {
        return isUserAuthorized(deviceIdentifier, this.getUserName(), null);
    }

    /** {@inheritDoc} */
    @Override
    public boolean isDeviceAdminUser() throws DeviceAccessAuthorizationException {
        String username = this.getUserName();
        int tenantId = this.getTenantId();
        try {
            return isAdminUser(username, tenantId);
        } catch (UserStoreException e) {
            throw new DeviceAccessAuthorizationException("Unable to check the admin permissions of user : " +
                    username + " in tenant : " + tenantId, e);
        }
    }

    /**
     * {@inheritDoc}
     *
     * <p>Returns {@code null} for an empty username, and also when group
     * permissions are needed but none were supplied (preserving the original
     * contract). Otherwise partitions the identifiers into authorized and
     * unauthorized sets.</p>
     */
    @Override
    public DeviceAuthorizationResult isUserAuthorized(List<DeviceIdentifier> deviceIdentifiers,
                                                      String username, String[] groupPermissions)
            throws DeviceAccessAuthorizationException {
        int tenantId = this.getTenantId();
        if (username == null || username.isEmpty()) {
            return null;
        }
        DeviceAuthorizationResult deviceAuthorizationResult = new DeviceAuthorizationResult();
        for (DeviceIdentifier deviceIdentifier : deviceIdentifiers) {
            //check for admin and ownership permissions
            if (isAdminOrDeviceOwner(username, tenantId, deviceIdentifier)) {
                deviceAuthorizationResult.addAuthorizedDevice(deviceIdentifier);
            } else {
                try {
                    if (groupPermissions == null || groupPermissions.length == 0) {
                        return null;
                    }
                    //check for group permissions
                    boolean isAuthorized = true;
                    for (String groupPermission : groupPermissions) {
                        if (!isAuthorizedViaGroup(username, deviceIdentifier, groupPermission)) {
                            //if at least one failed, authorizations fails and break the loop
                            isAuthorized = false;
                            break;
                        }
                    }
                    if (isAuthorized) {
                        deviceAuthorizationResult.addAuthorizedDevice(deviceIdentifier);
                    } else {
                        deviceAuthorizationResult.addUnauthorizedDevice(deviceIdentifier);
                    }
                } catch (GroupManagementException | UserStoreException e) {
                    throw new DeviceAccessAuthorizationException("Unable to authorize the access to device : " +
                            deviceIdentifier.getId() + " for the user : " +
                            username, e);
                }
            }
        }
        return deviceAuthorizationResult;
    }

    /** {@inheritDoc} */
    @Override
    public DeviceAuthorizationResult isUserAuthorized(List<DeviceIdentifier> deviceIdentifiers,
                                                      String username)
            throws DeviceAccessAuthorizationException {
        return isUserAuthorized(deviceIdentifiers, username, null);
    }

    /** {@inheritDoc} */
    @Override
    public DeviceAuthorizationResult isUserAuthorized(List<DeviceIdentifier> deviceIdentifiers)
            throws DeviceAccessAuthorizationException {
        return isUserAuthorized(deviceIdentifiers, this.getUserName(), null);
    }

    /** {@inheritDoc} */
    @Override
    public DeviceAuthorizationResult isUserAuthorized(List<DeviceIdentifier> deviceIdentifiers,
                                                      String[] groupPermissions)
            throws DeviceAccessAuthorizationException {
        return isUserAuthorized(deviceIdentifiers, this.getUserName(), groupPermissions);
    }

    /**
     * Checks the two strongest grants for a single device.
     *
     * @return true when the user is a tenant admin or the device's owner
     */
    private boolean isAdminOrDeviceOwner(String username, int tenantId,
                                         DeviceIdentifier deviceIdentifier)
            throws DeviceAccessAuthorizationException {
        try {
            //First Check for admin users. If the user is an admin user we authorize the access to that device.
            //Secondly Check for device ownership. If the user is the owner of the device we allow the access.
            return (isAdminUser(username, tenantId) || isDeviceOwner(deviceIdentifier, username));
        } catch (UserStoreException e) {
            throw new DeviceAccessAuthorizationException("Unable to authorize the access to device : " +
                    deviceIdentifier.getId() + " for the user : " +
                    username, e);
        }
    }

    /**
     * True when any group the user holds {@code groupPermission} on also
     * contains the device (intersection on group id).
     */
    private boolean isAuthorizedViaGroup(String username, DeviceIdentifier deviceIdentifier,
                                         String groupPermission)
            throws GroupManagementException, UserStoreException {
        List<DeviceGroup> authorizedGroups =
                DeviceManagementDataHolder.getInstance().getGroupManagementProviderService()
                        .getGroups(username, groupPermission);
        List<DeviceGroup> groupsWithDevice =
                DeviceManagementDataHolder.getInstance().getGroupManagementProviderService()
                        .getGroups(deviceIdentifier);
        for (DeviceGroup group : authorizedGroups) {
            Iterator<DeviceGroup> groupsWithDeviceIterator = groupsWithDevice.iterator();
            while (groupsWithDeviceIterator.hasNext()) {
                DeviceGroup deviceGroup = groupsWithDeviceIterator.next();
                if (deviceGroup.getId() == group.getId()) {
                    return true;
                }
            }
        }
        return false;
    }

    /** True when {@code username} is the enrolled owner of the device. */
    private boolean isDeviceOwner(DeviceIdentifier deviceIdentifier, String username)
            throws DeviceAccessAuthorizationException {
        //Check for device ownership. If the user is the owner of the device we allow the access.
        try {
            return DeviceManagementDataHolder.getInstance().getDeviceManagementProvider().
                    isEnrolled(deviceIdentifier, username);
        } catch (DeviceManagementException e) {
            throw new DeviceAccessAuthorizationException("Unable to authorize the access to device : " +
                    deviceIdentifier.getId() + " for the user : " +
                    username, e);
        }
    }

    /** True when the user holds the device-management admin UI permission in the tenant. */
    private boolean isAdminUser(String username, int tenantId) throws UserStoreException {
        UserRealm userRealm = DeviceManagementDataHolder.getInstance().getRealmService()
                .getTenantUserRealm(tenantId);
        if (userRealm != null && userRealm.getAuthorizationManager() != null) {
            return userRealm.getAuthorizationManager()
                    .isUserAuthorized(removeTenantDomain(username),
                            PermissionUtils.getAbsolutePermissionPath(CDM_ADMIN_PERMISSION),
                            PermissionMethod.UI_EXECUTE);
        }
        return false;
    }

    /** @return the current user (tenant domain stripped), or null when no user is bound. */
    private String getUserName() {
        String username = CarbonContext.getThreadLocalCarbonContext().getUsername();
        if (username != null && !username.isEmpty()) {
            return removeTenantDomain(username);
        }
        return null;
    }

    /**
     * Strips a trailing {@code @<tenantDomain>} qualifier from a username.
     *
     * <p>Fix: the previous check was {@code username.endsWith(tenantDomain)},
     * which (a) matched bare usernames that merely end with the same string as
     * the tenant domain, and (b) then crashed with
     * {@code StringIndexOutOfBoundsException} because {@code lastIndexOf("@")}
     * returned -1. Requiring the full {@code "@" + tenantDomain} suffix makes
     * the strip both safe and precise.</p>
     */
    private String removeTenantDomain(String username) {
        String tenantDomain = CarbonContext.getThreadLocalCarbonContext().getTenantDomain();
        if (username.endsWith("@" + tenantDomain)) {
            return username.substring(0, username.lastIndexOf("@"));
        }
        return username;
    }

    private int getTenantId() {
        return CarbonContext.getThreadLocalCarbonContext().getTenantId();
    }

    /** Registers the admin permission under its absolute registry path. */
    private boolean addAdminPermissionToRegistry() throws PermissionManagementException {
        Permission permission = new Permission();
        permission.setName(CDM_ADMIN);
        permission.setPath(PermissionUtils.getAbsolutePermissionPath(CDM_ADMIN_PERMISSION));
        return PermissionUtils.putPermission(permission);
    }

    /**
     * Maps device identifier to enrolment owner, skipping devices without
     * enrolment info or with an empty owner.
     * NOTE(review): currently unused within this class — presumably kept for
     * callers elsewhere or future use; confirm before removing.
     */
    private Map<String, String> getOwnershipOfDevices(List<Device> devices) {
        Map<String, String> ownershipData = new HashMap<>();
        EnrolmentInfo enrolmentInfo;
        String owner;
        for (Device device : devices) {
            enrolmentInfo = device.getEnrolmentInfo();
            if (enrolmentInfo != null) {
                owner = enrolmentInfo.getOwner();
                if (owner != null && !owner.isEmpty()) {
                    ownershipData.put(device.getDeviceIdentifier(), owner);
                }
            }
        }
        return ownershipData;
    }

    /** String constants naming registry permission actions. */
    public static final class PermissionMethod {
        public static final String READ = "read";
        public static final String WRITE = "write";
        public static final String DELETE = "delete";
        public static final String ACTION = "action";
        public static final String UI_EXECUTE = "ui.execute";

        private PermissionMethod() {
            throw new AssertionError();
        }
    }
}
package oj;

import oj.dump.LeafDump;
import oj.parse.FastParse;
import oj.parse.FileParserSource;
import oj.parse.Parse;
import org.jcodings.specific.UTF8Encoding;
import org.jruby.Ruby;
import org.jruby.RubyClass;
import org.jruby.RubyModule;
import org.jruby.RubyObject;
import org.jruby.RubyString;
import org.jruby.anno.JRubyMethod;
import org.jruby.runtime.Block;
import org.jruby.runtime.ObjectAllocator;
import org.jruby.runtime.ThreadContext;
import org.jruby.runtime.builtin.IRubyObject;
import org.jruby.util.ByteList;
import org.jruby.util.TypeConverter;

import java.util.List;

import static oj.LeafType.T_ARRAY;
import static oj.LeafType.T_HASH;
import static oj.LeafValue.COL_VAL;

/**
 * Ruby-visible Oj::Doc: a navigable cursor over a parsed JSON leaf tree.
 *
 * <p>{@code wheres[where_path..where]} is the stack of leaves from the
 * document root to the current position; path strings ("/a/1/b") move the
 * cursor. Created by enebo on 8/3/18.</p>
 */
public class Doc extends RubyObject {
    /** Maximum navigation depth (also the fixed size of {@code wheres}). */
    public final int MAX_STACK = 100;
    /** Number of leaves in the document (reported by {@link #size}). */
    public long size = 0;
    /** Leaf stack from the root to the current cursor position. */
    public Leaf[] wheres = new Leaf[MAX_STACK];
    /** Index of the current leaf in {@code wheres}. */
    public int where = 0;
    /** Index of the root leaf in {@code wheres}. */
    public int where_path = 0;
    /** Root of the parsed leaf tree. */
    public Leaf data;
    public IRubyObject self;
    /** Cached "/" string returned for root paths. */
    IRubyObject slash;

    private static ObjectAllocator ALLOCATOR = new ObjectAllocator() {
        @Override
        public IRubyObject allocate(Ruby runtime, RubyClass klass) {
            return new Doc(runtime, klass);
        }
    };

    /** Defines Oj::Doc and binds its annotated methods. */
    public static void createDocClass(Ruby runtime, RubyModule oj) {
        RubyClass clazz = oj.defineClassUnder("Doc", runtime.getObject(), ALLOCATOR);
        clazz.defineAnnotatedMethods(Doc.class);
    }

    public Doc(ThreadContext context) {
        this(context.runtime,
                (RubyClass) ((RubyModule) context.runtime.getObject().getConstantAt("Oj")).getConstantAt("Doc"));
    }

    public Doc(Ruby runtime, RubyClass metaClass) {
        super(runtime, metaClass);
        self = null;
        slash = runtime.newString("/");
        // doc->batches = &doc->batch0;
    }

    /** Oj::Doc.open(json_string) { |doc| ... } — parses a JSON string. */
    @JRubyMethod(meta = true)
    public static IRubyObject open(ThreadContext context, IRubyObject self, IRubyObject str, Block block) {
        ByteList json = ((RubyString) TypeConverter.checkStringType(context.runtime, str)).getByteList();
        return new FastParse(context, json).parse_json(block);
    }

    /** Oj::Doc.open_file(path) { |doc| ... } — parses JSON read from a file. */
    @JRubyMethod(meta = true)
    public static IRubyObject open_file(ThreadContext context, IRubyObject self, IRubyObject filename, Block block) {
        String path = TypeConverter.checkStringType(context.runtime, filename).asJavaString();
        ByteList json = FileParserSource.readFileIntoByteList(context, path);
        return new FastParse(context, json).parse_json(block);
    }

    /** Returns the current location as a "/"-separated path string. */
    @JRubyMethod(name = "where?")
    public IRubyObject where_p(ThreadContext context) {
        if (null == wheres[where_path] || where == where_path) {
            return slash;
        } else {
            int lp;
            int size = 2; // leading / + ???
            // FIXME: try and remove this calculation and see how poorly bytelist autosizing works
            // or even just waste and speculatively overalloc a little bit.
            for (lp = where_path; lp <= where; lp++) {
                Leaf leaf = wheres[lp];
                if (ParentType.Hash == leaf.parentType) {
                    size += leaf.key.realSize() + 1;
                } else if (ParentType.Array == leaf.parentType) {
                    size += leaf.index < 100 ? 3 : 11;
                }
            }
            ByteList path = new ByteList(size);
            path.setEncoding(UTF8Encoding.INSTANCE);
            boolean first = true;
            for (lp = where_path; lp <= where; lp++) {
                if (!first) path.append('/');
                Leaf leaf = wheres[lp];
                if (ParentType.Hash == leaf.parentType) {
                    path.append(leaf.key);
                } else if (ParentType.Array == leaf.parentType) {
                    ulong_fill(path, leaf.index);
                }
                first = false;
            }
            return context.runtime.newString(path);
        }
    }

    /** Hash key or array index of the current leaf (nil at the root). */
    @JRubyMethod
    public IRubyObject local_key(ThreadContext context) {
        Leaf leaf = wheres[where];
        IRubyObject key = context.nil;
        if (ParentType.Hash == leaf.parentType) {
            key = Parse.oj_encode(context.runtime.newString(leaf.key));
        } else if (ParentType.Array == leaf.parentType) {
            key = context.runtime.newFixnum(leaf.index);
        }
        return key;
    }

    /** Resets the cursor to the document root and returns "/". */
    @JRubyMethod
    public IRubyObject home(ThreadContext context) {
        wheres[where_path] = data;
        where = where_path;
        return slash;
    }

    @JRubyMethod
    public IRubyObject type(ThreadContext context) {
        return docTypeCommon(context, null);
    }

    @JRubyMethod
    public IRubyObject type(ThreadContext context, IRubyObject pathArg) {
        return docTypeCommon(context, asPath(context, pathArg));
    }

    /** Coerces a Ruby argument to a path ByteList (raises TypeError otherwise). */
    private ByteList asPath(ThreadContext context, IRubyObject pathArg) {
        return ((RubyString) TypeConverter.checkStringType(context.runtime, pathArg)).getByteList();
    }

    /** Maps the leaf at {@code path} (null = current) to its Ruby class. */
    private IRubyObject docTypeCommon(ThreadContext context, ByteList path) {
        Leaf leaf = get_doc_leaf(context, path);

        if (leaf == null) return context.nil;

        // FIXME: type can just be RubyClass reference and we can use these types directly
        // and eliminate a switch.
        Ruby runtime = context.runtime;
        switch (leaf.rtype) {
            case T_NIL: return runtime.getNilClass();
            case T_TRUE: return runtime.getTrueClass();
            case T_FALSE: return runtime.getFalseClass();
            case T_STRING: return runtime.getString();
            case T_FIXNUM: return runtime.getFixnum();
            case T_FLOAT: return runtime.getFloat();
            case T_ARRAY: return runtime.getArray();
            case T_HASH: return runtime.getHash();
        }

        return context.nil;
    }

    @JRubyMethod
    public IRubyObject fetch(ThreadContext context) {
        Leaf leaf = get_doc_leaf(context, null);
        return leaf != null ? leaf.value(context) : context.nil;
    }

    @JRubyMethod
    public IRubyObject fetch(ThreadContext context, IRubyObject pathArg) {
        return fetch(context, pathArg, context.nil);
    }

    /** Value at {@code path}, or {@code value} when the path does not resolve. */
    @JRubyMethod
    public IRubyObject fetch(ThreadContext context, IRubyObject pathArg, IRubyObject value) {
        Leaf leaf = get_doc_leaf(context, asPath(context, pathArg));
        return leaf != null ? leaf.value(context) : value;
    }

    /**
     * Yields the doc positioned at each terminal leaf under the optional path,
     * restoring the cursor afterwards.
     */
    @JRubyMethod(rest = true)
    public IRubyObject each_leaf(ThreadContext context, IRubyObject[] argv, Block block) {
        if (block.isGiven()) {
            Leaf[] save_path = new Leaf[MAX_STACK];
            ByteList path = null;
            int wlen;

            wlen = where - where_path;
            if (0 < wlen) {
                System.arraycopy(wheres, 0, save_path, 0, wlen + 1);
            }
            if (1 <= argv.length) {
                path = asPath(context, argv[0]);
                if ('/' == path.get(0)) {
                    where = where_path;
                    path = path.makeShared(1, path.realSize() - 1);
                }
                if (0 != move_step(path, 1)) {
                    if (0 < wlen) {
                        System.arraycopy(save_path, 0, wheres, 0, wlen + 1);
                    }
                    return context.nil;
                }
            }
            each_leaf_inner(context, this, block);
            // Fix: the saved stack must be restored into the wheres array;
            // the original passed the int field `where` to arraycopy, which
            // autoboxed and failed with ArrayStoreException at runtime.
            if (0 < wlen) {
                System.arraycopy(save_path, 0, wheres, 0, wlen + 1);
            }
        }
        return context.nil;
    }

    /** Moves the cursor to {@code path}; raises ArgumentError when a step fails. */
    @JRubyMethod
    public IRubyObject move(ThreadContext context, IRubyObject str) {
        ByteList path = asPath(context, str);

        if ('/' == path.get(0)) {
            where = where_path;
            path = path.makeShared(1, path.realSize() - 1);
        }
        int loc = move_step(path, 1);
        if (loc != 0) {
            throw context.runtime.newArgumentError("Failed to locate element " + loc + " of the path " + path + ".");
        }
        return context.nil;
    }

    /**
     * Yields the doc positioned at each direct child of the (optional) path,
     * restoring the cursor afterwards.
     */
    @JRubyMethod(rest = true)
    public IRubyObject each_child(ThreadContext context, IRubyObject[] argv, Block block) {
        if (block.isGiven()) {
            Leaf[] save_path = new Leaf[MAX_STACK];
            ByteList path = null;
            int wlen;

            wlen = where - where_path;
            if (0 < wlen) {
                System.arraycopy(wheres, 0, save_path, 0, wlen + 1);
            }
            if (1 <= argv.length) {
                path = asPath(context, argv[0]);
                if ('/' == path.get(0)) {
                    where = where_path;
                    path = path.makeShared(1, path.realSize() - 1);
                }
                if (0 != move_step(path, 1)) {
                    if (0 < wlen) {
                        System.arraycopy(save_path, 0, wheres, 0, wlen + 1);
                    }
                    return context.nil;
                }
            }
            if (COL_VAL == wheres[where].value_type && null != wheres[where].elements) {
                List<Leaf> elements = wheres[where].elements;
                where++;
                for (Leaf e : elements) {
                    wheres[where] = e;
                    block.yield(context, this);
                }
            }
            if (0 < wlen) {
                System.arraycopy(save_path, 0, wheres, 0, wlen + 1);
            }
        }
        return context.nil;
    }

    @JRubyMethod
    public IRubyObject each_value(ThreadContext context, Block block) {
        if (block.isGiven()) {
            Leaf leaf = get_doc_leaf(context, null);

            if (leaf != null) leaf.each_value(context, block);
        }
        return context.nil;
    }

    @JRubyMethod
    public IRubyObject each_value(ThreadContext context, IRubyObject pathArg, Block block) {
        if (block.isGiven()) {
            Leaf leaf = get_doc_leaf(context, asPath(context, pathArg));

            if (leaf != null) leaf.each_value(context, block);
        }
        return context.nil;
    }

    /**
     * dump(path = nil, filename = nil): serializes the leaf at path to a JSON
     * string, or into {@code filename} when one is given.
     */
    @JRubyMethod(rest = true)
    public IRubyObject dump(ThreadContext context, IRubyObject[] argv) {
        ByteList path = null;
        String filename = null;

        if (1 <= argv.length) {
            if (context.nil != argv[0]) path = asPath(context, argv[0]);
            // Fix: the second argument is the output filename; the original
            // reassigned it to `path`, leaving `filename` always null and the
            // file-output branch below unreachable.
            if (2 <= argv.length) {
                filename = TypeConverter.checkStringType(context.runtime, argv[1]).asJavaString();
            }
        }

        Leaf leaf = get_doc_leaf(context, path);
        if (leaf != null) {
            Options options = OjLibrary.getDefaultOptions(context);

            if (filename == null) {
                LeafDump dump = new LeafDump(context, RubyOj.oj(context), options);
                return context.runtime.newString(dump.leafToJSON(leaf));
            } else {
                new LeafDump(context, RubyOj.oj(context), options).leafToFile(leaf, filename);
            }
        }
        return context.nil;
    }

    @JRubyMethod
    public IRubyObject size(ThreadContext context) {
        return context.runtime.newFixnum(size);
    }

    @JRubyMethod
    public IRubyObject close(ThreadContext context) {
        return context.nil;
    }

    /**
     * Resolves {@code path} to a leaf relative to the current cursor (or the
     * root for absolute paths); null path returns the current leaf.
     */
    Leaf get_doc_leaf(ThreadContext context, ByteList path) {
        Leaf leaf = wheres[where];

        if (null != data && null != path) {
            Leaf[] stack = new Leaf[MAX_STACK];
            int lp = 0;

            if ('/' == path.get(0)) {
                path = path.makeShared(1, path.realSize() - 1);
                stack[0] = data;
            } else if (where == where_path) {
                stack[0] = data;
            } else {
                int cnt = where - where_path;

                stackDepthCheck(context, cnt);

                System.arraycopy(wheres, 0, stack, 0, cnt + 1);
                lp = cnt;
            }
            return get_leaf(context, stack, lp, path);
        }

        return leaf;
    }

    /** Raises Oj::DepthError when a navigation would exceed MAX_STACK levels. */
    private void stackDepthCheck(ThreadContext context, int cnt) {
        if (MAX_STACK <= cnt) {
            RubyClass error = (RubyClass) ((RubyModule) context.runtime.getObject().getConstant("Oj")).getConstantAt("DepthError");
            throw context.runtime.newRaiseException(error, "\"Path too deep. Limit is " + MAX_STACK + " levels.");
        }
    }

    // FIXME: This substring stuff is pretty inefficient
    /** Recursive descent of one path segment at a time over the leaf stack. */
    Leaf get_leaf(ThreadContext context, Leaf[] stack, int lp, ByteList path) {
        Leaf leaf = stack[lp];

        stackDepthCheck(context, lp);

        if (ByteList.EMPTY_BYTELIST.equals(path)) return leaf;

        if ('.' == path.get(0) && '.' == path.get(1)) {
            // ".." segment: pop one level and continue with the rest of the path.
            // FIXME: rescanning past ..
            int slash = path.indexOf('/');
            if (-1 == slash) {
                path = ByteList.EMPTY_BYTELIST;
            } else {
                path = path.makeShared(slash + 1, path.realSize() - slash - 1);
            }
            /*int skip = 2;
            if ('/' == path.get(skip)) {
                skip++;
            }*/
            if (lp > 0) {
                leaf = get_leaf(context, stack, lp - 1, path);
            } else {
                return null;
            }
        } else if (COL_VAL == leaf.value_type && null != leaf.elements) {
            LeafType type = leaf.rtype;
            List<Leaf> elements = leaf.elements;
            leaf = null;

            if (T_ARRAY == type) {
                // Array segment: parse the 1-based decimal index.
                int cnt = 0;
                int i = 0;
                for (int c = path.get(i); '0' <= c && c <= '9'; i++, c = path.get(i + 1)) {
                    cnt = cnt * 10 + (c - '0');
                }
                // FIXME: we are rescanning all the numbers again.
                int slash = path.indexOf('/');
                if (-1 == slash) {
                    path = ByteList.EMPTY_BYTELIST;
                } else {
                    path = path.makeShared(slash + 1, path.realSize() - slash - 1);
                }
                lp++;
                stack[lp] = elements.get(cnt - 1);
                leaf = get_leaf(context, stack, lp, path);
            } else if (T_HASH == type) {
                // Hash segment: match the key up to the next '/'.
                ByteList key;
                int slash = path.indexOf('/');

                if (-1 == slash) {
                    key = path;
                    path = ByteList.EMPTY_BYTELIST;
                } else {
                    key = path.makeShared(0, slash);
                    path = path.makeShared(slash + 1, path.realSize() - slash - 1);
                }

                for (Leaf e : elements) {
                    if (key.equals(e.key)) {
                        lp++;
                        stack[lp] = e;
                        leaf = get_leaf(context, stack, lp, path);
                        break;
                    }
                }
            }
        }

        return leaf;
    }

    /** Depth-first walk yielding at every terminal leaf under the cursor. */
    void each_leaf_inner(ThreadContext context, IRubyObject self, Block block) {
        if (COL_VAL == wheres[where].value_type) {
            if (wheres[where].hasElements()) {
                List<Leaf> elements = wheres[where].elements;

                where++;
                for (Leaf e : elements) {
                    wheres[where] = e;
                    each_leaf_inner(context, self, block);
                }
                where--;
            }
        } else {
            block.yield(context, self);
        }
    }

    /**
     * Advances the cursor along {@code path}; returns 0 on success or the
     * 1-based index of the path element that failed to resolve.
     */
    int move_step(ByteList path, int loc) {
        if (ByteList.EMPTY_BYTELIST.equals(path)) {
            loc = 0;
        } else {
            Leaf leaf = wheres[where];

            if (leaf == null) {
                System.err.println("*** Internal error at " + path);
                return loc;
            }

            if ('.' == path.get(0) && '.' == path.get(1)) {
                // ".." segment: pop one level, recursing on the remainder;
                // on failure the popped leaf is pushed back.
                Leaf init = wheres[where];
                int skip = 2;

                if (where == where_path) {
                    return loc;
                }
                if ('/' == path.get(2)) {
                    skip++;
                }
                wheres[where] = null;
                where--;
                path = path.makeShared(skip, path.realSize() - skip);
                loc = move_step(path, loc + 1);
                if (0 != loc) {
                    wheres[where] = init;
                    where++;
                }
            } else if (COL_VAL == leaf.value_type && null != leaf.elements) {
                if (T_ARRAY == leaf.rtype) {
                    int cnt = 0;
                    int i = 0;

                    for (int c = path.get(i); '0' <= c && c <= '9'; i++, c = path.get(i)) {
                        cnt = cnt * 10 + (c - '0');
                    }

                    if (path.get(i) == '/') {
                        path = path.makeShared(i + 1, path.realSize() - i - 1);
                    } else if (i < path.realSize() - 1 || cnt == 0) {
                        // random chars after digits or no digits at all...
                        return loc;
                    } else {
                        path = ByteList.EMPTY_BYTELIST;
                    }
                    where++;
                    wheres[where] = leaf.elements.get(cnt - 1);
                    loc = move_step(path, loc + 1);
                    if (0 != loc) {
                        wheres[where] = null;
                        where--;
                    }
                } else if (T_HASH == leaf.rtype) {
                    ByteList key;
                    int slash = path.indexOf('/');

                    if (-1 == slash) {
                        key = path;
                        path = ByteList.EMPTY_BYTELIST;
                    } else {
                        key = path.makeShared(0, slash);
                        path = path.makeShared(slash + 1, path.realSize() - slash - 1);
                    }
                    for (Leaf e : leaf.elements) {
                        if (key.equals(e.key)) {
                            where++;
                            wheres[where] = e;
                            loc = move_step(path, loc + 1);
                            if (0 != loc) {
                                wheres[where] = null;
                                where--;
                            }
                            break;
                        }
                    }
                }
            }
        }
        return loc;
    }

    /** Appends the decimal representation of a non-negative int to a ByteList. */
    static void ulong_fill(ByteList value, int num) {
        byte[] buf = new byte[11];
        int b = buf.length - 1;

        for (; 0 < num; num /= 10, b--) {
            buf[b] = (byte) ((num % 10) + '0');
        }
        if (b == buf.length - 1) {
            buf[b] = '0';
            b--;
        }
        int realLength = buf.length - b - 1;
        value.append(buf, b + 1, realLength);
    }
}
/* * Copyright 2002-2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.security.acls.domain; import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; import org.junit.*; import org.springframework.security.acls.model.*; import org.springframework.security.authentication.TestingAuthenticationToken; import org.springframework.security.core.Authentication; import org.springframework.security.core.authority.SimpleGrantedAuthority; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.security.util.FieldUtils; import java.lang.reflect.Field; import java.util.*; /** * Tests for {@link AclImpl}. 
 *
 * @author Andrei Stefan
 */
public class AclImplTests {

	// Fixed domain type used for every ObjectIdentity built in these tests.
	private static final String TARGET_CLASS = "org.springframework.security.acls.TargetObject";

	// Single-element permission lists reused by the granting tests.
	private static final List<Permission> READ = Arrays.asList(BasePermission.READ);
	private static final List<Permission> WRITE = Arrays.asList(BasePermission.WRITE);
	private static final List<Permission> CREATE = Arrays.asList(BasePermission.CREATE);
	private static final List<Permission> DELETE = Arrays.asList(BasePermission.DELETE);

	// Single-element Sid lists for the two principals exercised most often.
	private static final List<Sid> SCOTT = Arrays.asList((Sid) new PrincipalSid("scott"));
	private static final List<Sid> BEN = Arrays.asList((Sid) new PrincipalSid("ben"));

	// Default authenticated principal installed by setUp(); individual tests may
	// replace it with their own token (e.g. "ben" with different roles).
	Authentication auth = new TestingAuthenticationToken("joe", "ignored",
			"ROLE_ADMINISTRATOR");

	AclAuthorizationStrategy authzStrategy;

	PermissionGrantingStrategy pgs;

	AuditLogger mockAuditLogger;

	ObjectIdentity objectIdentity = new ObjectIdentityImpl(TARGET_CLASS, 100);

	// ~ Methods
	// ========================================================================================================

	@Before
	public void setUp() throws Exception {
		SecurityContextHolder.getContext().setAuthentication(auth);
		// Mocked authorization strategy: all ACL mutations are permitted by default.
		authzStrategy = mock(AclAuthorizationStrategy.class);
		mockAuditLogger = mock(AuditLogger.class);
		pgs = new DefaultPermissionGrantingStrategy(mockAuditLogger);
		auth.setAuthenticated(true);
	}

	@After
	public void tearDown() throws Exception {
		// Clear the thread-local security context so tests remain isolated.
		SecurityContextHolder.clearContext();
	}

	// Both AclImpl constructors must reject a null ObjectIdentity: the first call
	// is checked via try/catch, the second via the @Test(expected=...) annotation.
	@Test(expected = IllegalArgumentException.class)
	public void constructorsRejectNullObjectIdentity() throws Exception {
		try {
			new AclImpl(null, 1, authzStrategy, pgs, null, null, true, new PrincipalSid(
					"joe"));
			fail("Should have thrown IllegalArgumentException");
		}
		catch (IllegalArgumentException expected) {
		}
		new AclImpl(null, 1, authzStrategy, mockAuditLogger);
	}

	// Same pattern as above but for a null ACL id.
	@Test(expected = IllegalArgumentException.class)
	public void constructorsRejectNullId() throws Exception {
		try {
			new AclImpl(objectIdentity, null, authzStrategy, pgs, null, null, true,
					new PrincipalSid("joe"));
			fail("Should have thrown IllegalArgumentException");
		}
		catch (IllegalArgumentException expected) {
		}
		new AclImpl(objectIdentity, null, authzStrategy, mockAuditLogger);
	}

	// Same pattern as above but for a null AclAuthorizationStrategy.
	@SuppressWarnings("deprecation")
	@Test(expected = IllegalArgumentException.class)
	public void constructorsRejectNullAclAuthzStrategy() throws Exception {
		try {
			new AclImpl(objectIdentity, 1, null, new DefaultPermissionGrantingStrategy(
					mockAuditLogger), null, null, true, new PrincipalSid("joe"));
			fail("It should have thrown IllegalArgumentException");
		}
		catch (IllegalArgumentException expected) {
		}
		new AclImpl(objectIdentity, 1, null, mockAuditLogger);
	}

	// insertAce must reject a null permission and a null sid.
	@Test
	public void insertAceRejectsNullParameters() throws Exception {
		MutableAcl acl = new AclImpl(objectIdentity, 1, authzStrategy, pgs, null, null,
				true, new PrincipalSid("joe"));
		try {
			acl.insertAce(0, null, new GrantedAuthoritySid("ROLE_IGNORED"), true);
			fail("It should have thrown IllegalArgumentException");
		}
		catch (IllegalArgumentException expected) {
		}
		try {
			acl.insertAce(0, BasePermission.READ, null, true);
			fail("It should have thrown IllegalArgumentException");
		}
		catch (IllegalArgumentException expected) {
		}
	}

	// Verifies ACE ordering: append at the end and insert in the middle both land
	// at the requested index.
	@Test
	public void insertAceAddsElementAtCorrectIndex() throws Exception {
		MutableAcl acl = new AclImpl(objectIdentity, 1, authzStrategy, pgs, null, null,
				true, new PrincipalSid("joe"));
		MockAclService service = new MockAclService();

		// Insert one permission
		acl.insertAce(0, BasePermission.READ, new GrantedAuthoritySid("ROLE_TEST1"),
				true);
		service.updateAcl(acl);
		// Check it was successfully added
		assertThat(acl.getEntries()).hasSize(1);
		assertThat(acl).isEqualTo(acl.getEntries().get(0).getAcl());
		assertThat(BasePermission.READ).isEqualTo(acl.getEntries().get(0).getPermission());
		assertThat(acl.getEntries().get(0).getSid()).isEqualTo(new GrantedAuthoritySid(
				"ROLE_TEST1"));

		// Add a second permission
		acl.insertAce(1, BasePermission.READ, new GrantedAuthoritySid("ROLE_TEST2"),
				true);
		service.updateAcl(acl);
		// Check it was added on the last position
		assertThat(acl.getEntries()).hasSize(2);
		assertThat(acl).isEqualTo(acl.getEntries().get(1).getAcl());
		assertThat(BasePermission.READ).isEqualTo(acl.getEntries().get(1).getPermission());
		assertThat(acl.getEntries().get(1).getSid()).isEqualTo(new GrantedAuthoritySid(
				"ROLE_TEST2"));

		// Add a third permission, after the first one
		acl.insertAce(1, BasePermission.WRITE, new GrantedAuthoritySid("ROLE_TEST3"),
				false);
		service.updateAcl(acl);
		assertThat(acl.getEntries()).hasSize(3);
		// Check the third entry was added between the two existent ones
		assertThat(BasePermission.READ).isEqualTo(acl.getEntries().get(0).getPermission());
		assertThat(acl.getEntries().get(0).getSid()).isEqualTo(new GrantedAuthoritySid(
				"ROLE_TEST1"));
		assertThat(BasePermission.WRITE).isEqualTo(acl.getEntries().get(1).getPermission());
		assertThat(acl.getEntries().get(1).getSid()).isEqualTo(new GrantedAuthoritySid(
				"ROLE_TEST3"));
		assertThat(BasePermission.READ).isEqualTo(acl.getEntries().get(2).getPermission());
		assertThat(acl.getEntries().get(2).getSid()).isEqualTo(new GrantedAuthoritySid(
				"ROLE_TEST2"));
	}

	// Inserting at an index far beyond the current size must fail.
	@Test(expected = NotFoundException.class)
	public void insertAceFailsForNonExistentElement() throws Exception {
		MutableAcl acl = new AclImpl(objectIdentity, 1, authzStrategy, pgs, null, null,
				true, new PrincipalSid("joe"));
		MockAclService service = new MockAclService();

		// Insert one permission
		acl.insertAce(0, BasePermission.READ, new GrantedAuthoritySid("ROLE_TEST1"),
				true);
		service.updateAcl(acl);

		acl.insertAce(55, BasePermission.READ, new GrantedAuthoritySid("ROLE_TEST2"),
				true);
	}

	// Deleting ACEs must preserve the relative order of the remaining entries.
	@Test
	public void deleteAceKeepsInitialOrdering() throws Exception {
		MutableAcl acl = new AclImpl(objectIdentity, 1, authzStrategy, pgs, null, null,
				true, new PrincipalSid("joe"));
		MockAclService service = new MockAclService();

		// Add several permissions
		acl.insertAce(0, BasePermission.READ, new GrantedAuthoritySid("ROLE_TEST1"),
				true);
		acl.insertAce(1, BasePermission.READ, new GrantedAuthoritySid("ROLE_TEST2"),
				true);
		acl.insertAce(2, BasePermission.READ, new GrantedAuthoritySid("ROLE_TEST3"),
				true);
		service.updateAcl(acl);

		// Delete first permission and check the order of the remaining permissions is
		// kept
		acl.deleteAce(0);
		assertThat(acl.getEntries()).hasSize(2);
		assertThat(acl.getEntries().get(0).getSid()).isEqualTo(new GrantedAuthoritySid(
				"ROLE_TEST2"));
		assertThat(acl.getEntries().get(1).getSid()).isEqualTo(new GrantedAuthoritySid(
				"ROLE_TEST3"));

		// Add one more permission and remove the permission in the middle
		acl.insertAce(2, BasePermission.READ, new GrantedAuthoritySid("ROLE_TEST4"),
				true);
		service.updateAcl(acl);
		acl.deleteAce(1);
		assertThat(acl.getEntries()).hasSize(2);
		assertThat(acl.getEntries().get(0).getSid()).isEqualTo(new GrantedAuthoritySid(
				"ROLE_TEST2"));
		assertThat(acl.getEntries().get(1).getSid()).isEqualTo(new GrantedAuthoritySid(
				"ROLE_TEST4"));

		// Remove remaining permissions
		acl.deleteAce(1);
		acl.deleteAce(0);
		assertThat(acl.getEntries()).isEmpty();
	}

	// Uses a real (non-mock) authorization strategy; deletion of a non-existent
	// index must raise NotFoundException.
	@Test
	public void deleteAceFailsForNonExistentElement() throws Exception {
		AclAuthorizationStrategyImpl strategy = new AclAuthorizationStrategyImpl(
				new SimpleGrantedAuthority("ROLE_OWNERSHIP"), new SimpleGrantedAuthority(
						"ROLE_AUDITING"), new SimpleGrantedAuthority("ROLE_GENERAL"));
		MutableAcl acl = new AclImpl(objectIdentity, (1), strategy, pgs, null, null,
				true, new PrincipalSid("joe"));
		try {
			acl.deleteAce(99);
			fail("It should have thrown NotFoundException");
		}
		catch (NotFoundException expected) {
		}
	}

	// isGranted must reject empty permission and empty sid lists.
	@Test
	public void isGrantingRejectsEmptyParameters() throws Exception {
		MutableAcl acl = new AclImpl(objectIdentity, 1, authzStrategy, pgs, null, null,
				true, new PrincipalSid("joe"));
		Sid ben = new PrincipalSid("ben");
		try {
			acl.isGranted(new ArrayList<>(0), Arrays.asList(ben), false);
			fail("It should have thrown IllegalArgumentException");
		}
		catch (IllegalArgumentException expected) {
		}
		try {
			acl.isGranted(READ, new ArrayList<>(0), false);
			fail("It should have thrown IllegalArgumentException");
		}
		catch (IllegalArgumentException expected) {
		}
	}

	// Granting decisions on a standalone (parentless) ACL: first matching ACE wins,
	// and sids with no matching ACE at all produce NotFoundException.
	@Test
	public void isGrantingGrantsAccessForAclWithNoParent() throws Exception {
		Authentication auth = new TestingAuthenticationToken("ben", "ignored",
				"ROLE_GENERAL", "ROLE_GUEST");
		auth.setAuthenticated(true);
		SecurityContextHolder.getContext().setAuthentication(auth);
		ObjectIdentity rootOid = new ObjectIdentityImpl(TARGET_CLASS, 100);

		// Create an ACL which owner is not the authenticated principal
		MutableAcl rootAcl = new AclImpl(rootOid, 1, authzStrategy, pgs, null, null,
				false, new PrincipalSid("joe"));

		// Grant some permissions
		rootAcl.insertAce(0, BasePermission.READ, new PrincipalSid("ben"), false);
		rootAcl.insertAce(1, BasePermission.WRITE, new PrincipalSid("scott"), true);
		rootAcl.insertAce(2, BasePermission.WRITE, new PrincipalSid("rod"), false);
		rootAcl.insertAce(3, BasePermission.WRITE, new GrantedAuthoritySid(
				"WRITE_ACCESS_ROLE"), true);

		// Check permissions granting
		List<Permission> permissions = Arrays.asList(BasePermission.READ,
				BasePermission.CREATE);
		List<Sid> sids = Arrays.asList(new PrincipalSid("ben"), new GrantedAuthoritySid(
				"ROLE_GUEST"));
		assertThat(rootAcl.isGranted(permissions, sids, false)).isFalse();
		try {
			rootAcl.isGranted(permissions, SCOTT, false);
			fail("It should have thrown NotFoundException");
		}
		catch (NotFoundException expected) {
		}
		assertThat(rootAcl.isGranted(WRITE, SCOTT, false)).isTrue();
		assertThat(rootAcl.isGranted(WRITE, Arrays.asList(new PrincipalSid("rod"),
				new GrantedAuthoritySid("WRITE_ACCESS_ROLE")), false)).isFalse();
		assertThat(rootAcl.isGranted(WRITE, Arrays.asList(new GrantedAuthoritySid(
				"WRITE_ACCESS_ROLE"), new PrincipalSid("rod")), false)).isTrue();
		try {
			// Change the type of the Sid and check the granting process
			rootAcl.isGranted(WRITE, Arrays.asList(new GrantedAuthoritySid("rod"),
					new PrincipalSid("WRITE_ACCESS_ROLE")), false);
			fail("It should have thrown NotFoundException");
		}
		catch (NotFoundException expected) {
		}
	}

	// Builds a three-level ACL hierarchy and verifies permissions propagate down
	// only through ACLs whose entriesInheriting flag is true.
	@Test
	public void isGrantingGrantsAccessForInheritableAcls() throws Exception {
		Authentication auth = new TestingAuthenticationToken("ben", "ignored",
				"ROLE_GENERAL");
		auth.setAuthenticated(true);
		SecurityContextHolder.getContext().setAuthentication(auth);
		ObjectIdentity grandParentOid = new ObjectIdentityImpl(TARGET_CLASS, 100);
		ObjectIdentity parentOid1 = new ObjectIdentityImpl(TARGET_CLASS, 101);
		ObjectIdentity parentOid2 = new ObjectIdentityImpl(TARGET_CLASS, 102);
		ObjectIdentity childOid1 = new ObjectIdentityImpl(TARGET_CLASS, 103);
		ObjectIdentity childOid2 = new ObjectIdentityImpl(TARGET_CLASS, 104);

		// Create ACLs
		PrincipalSid joe = new PrincipalSid("joe");
		MutableAcl grandParentAcl = new AclImpl(grandParentOid, 1, authzStrategy, pgs,
				null, null, false, joe);
		MutableAcl parentAcl1 = new AclImpl(parentOid1, 2, authzStrategy, pgs, null,
				null, true, joe);
		MutableAcl parentAcl2 = new AclImpl(parentOid2, 3, authzStrategy, pgs, null,
				null, true, joe);
		MutableAcl childAcl1 = new AclImpl(childOid1, 4, authzStrategy, pgs, null, null,
				true, joe);
		// childAcl2 deliberately does NOT inherit entries from its parent.
		MutableAcl childAcl2 = new AclImpl(childOid2, 4, authzStrategy, pgs, null, null,
				false, joe);

		// Create hierarchies
		childAcl2.setParent(childAcl1);
		childAcl1.setParent(parentAcl1);
		parentAcl2.setParent(grandParentAcl);
		parentAcl1.setParent(grandParentAcl);

		// Add some permissions
		grandParentAcl.insertAce(0, BasePermission.READ, new GrantedAuthoritySid(
				"ROLE_USER_READ"), true);
		grandParentAcl.insertAce(1, BasePermission.WRITE, new PrincipalSid("ben"), true);
		grandParentAcl
				.insertAce(2, BasePermission.DELETE, new PrincipalSid("ben"), false);
		grandParentAcl.insertAce(3, BasePermission.DELETE, new PrincipalSid("scott"),
				true);
		parentAcl1.insertAce(0, BasePermission.READ, new PrincipalSid("scott"), true);
		parentAcl1.insertAce(1, BasePermission.DELETE, new PrincipalSid("scott"), false);
		parentAcl2.insertAce(0, BasePermission.CREATE, new PrincipalSid("ben"), true);
		childAcl1.insertAce(0, BasePermission.CREATE, new PrincipalSid("scott"), true);

		// Check granting process for parent1
		assertThat(parentAcl1.isGranted(READ, SCOTT, false)).isTrue();
		assertThat(parentAcl1.isGranted(READ,
				Arrays.asList((Sid) new GrantedAuthoritySid("ROLE_USER_READ")), false))
						.isTrue();
		assertThat(parentAcl1.isGranted(WRITE, BEN, false)).isTrue();
		assertThat(parentAcl1.isGranted(DELETE, BEN, false)).isFalse();
		assertThat(parentAcl1.isGranted(DELETE, SCOTT, false)).isFalse();

		// Check granting process for parent2
		assertThat(parentAcl2.isGranted(CREATE, BEN, false)).isTrue();
		assertThat(parentAcl2.isGranted(WRITE, BEN, false)).isTrue();
		assertThat(parentAcl2.isGranted(DELETE, BEN, false)).isFalse();

		// Check granting process for child1
		assertThat(childAcl1.isGranted(CREATE, SCOTT, false)).isTrue();
		assertThat(childAcl1.isGranted(READ,
				Arrays.asList((Sid) new GrantedAuthoritySid("ROLE_USER_READ")), false))
						.isTrue();
		assertThat(childAcl1.isGranted(DELETE, BEN, false)).isFalse();

		// Check granting process for child2 (doesn't inherit the permissions from its
		// parent)
		try {
			assertThat(childAcl2.isGranted(CREATE, SCOTT, false)).isTrue();
			fail("It should have thrown NotFoundException");
		}
		catch (NotFoundException expected) {
		}
		try {
			childAcl2.isGranted(CREATE, Arrays.asList((Sid) new PrincipalSid("joe")),
					false);
			fail("It should have thrown NotFoundException");
		}
		catch (NotFoundException expected) {
		}
	}

	// updateAce must replace the permission at the given index in place.
	@Test
	public void updatedAceValuesAreCorrectlyReflectedInAcl() throws Exception {
		Authentication auth = new TestingAuthenticationToken("ben", "ignored",
				"ROLE_GENERAL");
		auth.setAuthenticated(true);
		SecurityContextHolder.getContext().setAuthentication(auth);
		MutableAcl acl = new AclImpl(objectIdentity, 1, authzStrategy, pgs, null, null,
				false, new PrincipalSid("joe"));
		MockAclService service = new MockAclService();

		acl.insertAce(0, BasePermission.READ, new GrantedAuthoritySid("ROLE_USER_READ"),
				true);
		acl.insertAce(1, BasePermission.WRITE, new GrantedAuthoritySid("ROLE_USER_READ"),
				true);
		acl.insertAce(2, BasePermission.CREATE, new PrincipalSid("ben"), true);
		service.updateAcl(acl);

		assertThat(BasePermission.READ).isEqualTo(acl.getEntries().get(0).getPermission());
		assertThat(BasePermission.WRITE).isEqualTo(acl.getEntries().get(1).getPermission());
		assertThat(BasePermission.CREATE).isEqualTo(acl.getEntries().get(2).getPermission());

		// Change each permission
		acl.updateAce(0, BasePermission.CREATE);
		acl.updateAce(1, BasePermission.DELETE);
		acl.updateAce(2, BasePermission.READ);

		// Check the change was successfully made
		assertThat(BasePermission.CREATE).isEqualTo(acl.getEntries().get(0).getPermission());
		assertThat(BasePermission.DELETE).isEqualTo(acl.getEntries().get(1).getPermission());
		assertThat(BasePermission.READ).isEqualTo(acl.getEntries().get(2).getPermission());
	}

	// updateAuditing must flip the auditSuccess/auditFailure flags of an ACE;
	// requires the ROLE_AUDITING authority granted above.
	@Test
	public void auditableEntryFlagsAreUpdatedCorrectly() throws Exception {
		Authentication auth = new TestingAuthenticationToken("ben", "ignored",
				"ROLE_AUDITING", "ROLE_GENERAL");
		auth.setAuthenticated(true);
		SecurityContextHolder.getContext().setAuthentication(auth);
		MutableAcl acl = new AclImpl(objectIdentity, 1, authzStrategy, pgs, null, null,
				false, new PrincipalSid("joe"));
		MockAclService service = new MockAclService();

		acl.insertAce(0, BasePermission.READ, new GrantedAuthoritySid("ROLE_USER_READ"),
				true);
		acl.insertAce(1, BasePermission.WRITE, new GrantedAuthoritySid("ROLE_USER_READ"),
				true);
		service.updateAcl(acl);

		assertThat(((AuditableAccessControlEntry) acl.getEntries().get(0))
				.isAuditFailure()).isFalse();
		assertThat(((AuditableAccessControlEntry) acl.getEntries().get(1))
				.isAuditFailure()).isFalse();
		assertThat(((AuditableAccessControlEntry) acl.getEntries().get(0))
				.isAuditSuccess()).isFalse();
		assertThat(((AuditableAccessControlEntry) acl.getEntries().get(1))
				.isAuditSuccess()).isFalse();

		// Change each permission
		((AuditableAcl) acl).updateAuditing(0, true, true);
		((AuditableAcl) acl).updateAuditing(1, true, true);

		// Check the change was successfully made
		assertThat(acl.getEntries()).extracting("auditSuccess").containsOnly(true, true);
		assertThat(acl.getEntries()).extracting("auditFailure").containsOnly(true, true);
	}

	// Sanity check of the simple accessors/mutators on AclImpl.
	@Test
	public void gettersAndSettersAreConsistent() throws Exception {
		Authentication auth = new TestingAuthenticationToken("ben", "ignored",
				"ROLE_GENERAL");
		auth.setAuthenticated(true);
		SecurityContextHolder.getContext().setAuthentication(auth);
		ObjectIdentity identity = new ObjectIdentityImpl(TARGET_CLASS, (100));
		ObjectIdentity identity2 = new ObjectIdentityImpl(TARGET_CLASS, (101));
		MutableAcl acl = new AclImpl(identity, 1, authzStrategy, pgs, null, null, true,
				new PrincipalSid("joe"));
		MutableAcl parentAcl = new AclImpl(identity2, 2, authzStrategy, pgs, null, null,
				true, new PrincipalSid("joe"));
		MockAclService service = new MockAclService();
		acl.insertAce(0, BasePermission.READ, new GrantedAuthoritySid("ROLE_USER_READ"),
				true);
		acl.insertAce(1, BasePermission.WRITE, new GrantedAuthoritySid("ROLE_USER_READ"),
				true);
		service.updateAcl(acl);
		assertThat(1).isEqualTo(acl.getId());
		assertThat(identity).isEqualTo(acl.getObjectIdentity());
		assertThat(new PrincipalSid("joe")).isEqualTo(acl.getOwner());
		assertThat(acl.getParentAcl()).isNull();
		assertThat(acl.isEntriesInheriting()).isTrue();
		assertThat(acl.getEntries()).hasSize(2);

		acl.setParent(parentAcl);
		assertThat(parentAcl).isEqualTo(acl.getParentAcl());

		acl.setEntriesInheriting(false);
		assertThat(acl.isEntriesInheriting()).isFalse();

		acl.setOwner(new PrincipalSid("ben"));
		assertThat(new PrincipalSid("ben")).isEqualTo(acl.getOwner());
	}

	// isSidLoaded: true when every requested sid is a member of loadedSids
	// (order-insensitive, duplicates allowed, null/empty treated as loaded).
	@Test
	public void isSidLoadedBehavesAsExpected() throws Exception {
		List<Sid> loadedSids = Arrays.asList(new PrincipalSid("ben"),
				new GrantedAuthoritySid("ROLE_IGNORED"));
		MutableAcl acl = new AclImpl(objectIdentity, 1, authzStrategy, pgs, null,
				loadedSids, true, new PrincipalSid("joe"));

		assertThat(acl.isSidLoaded(loadedSids)).isTrue();
		assertThat(acl.isSidLoaded(Arrays.asList(new GrantedAuthoritySid("ROLE_IGNORED"),
				new PrincipalSid("ben")))).isTrue();
		assertThat(acl.isSidLoaded(Arrays.asList((Sid) new GrantedAuthoritySid(
				"ROLE_IGNORED")))).isTrue();
		assertThat(acl.isSidLoaded(BEN)).isTrue();
		assertThat(acl.isSidLoaded(null)).isTrue();
		assertThat(acl.isSidLoaded(new ArrayList<>(0))).isTrue();
		assertThat(acl.isSidLoaded(Arrays.asList((Sid) new GrantedAuthoritySid(
				"ROLE_IGNORED"), new GrantedAuthoritySid("ROLE_IGNORED")))).isTrue();
		assertThat(acl.isSidLoaded(Arrays.asList((Sid) new GrantedAuthoritySid(
				"ROLE_GENERAL"), new GrantedAuthoritySid("ROLE_IGNORED")))).isFalse();
		assertThat(acl.isSidLoaded(Arrays.asList((Sid) new GrantedAuthoritySid(
				"ROLE_IGNORED"), new GrantedAuthoritySid("ROLE_GENERAL")))).isFalse();
	}

	@Test(expected = NotFoundException.class)
	public void insertAceRaisesNotFoundExceptionForIndexLessThanZero()
			throws Exception {
		AclImpl acl = new AclImpl(objectIdentity, 1, authzStrategy, pgs, null, null,
				true, new PrincipalSid("joe"));
		acl.insertAce(-1, mock(Permission.class), mock(Sid.class), true);
	}

	@Test(expected = NotFoundException.class)
	public void deleteAceRaisesNotFoundExceptionForIndexLessThanZero()
			throws Exception {
		AclImpl acl = new AclImpl(objectIdentity, 1, authzStrategy, pgs, null, null,
				true, new PrincipalSid("joe"));
		acl.deleteAce(-1);
	}

	@Test(expected = NotFoundException.class)
	public void insertAceRaisesNotFoundExceptionForIndexGreaterThanSize()
			throws Exception {
		AclImpl acl = new AclImpl(objectIdentity, 1, authzStrategy, pgs, null, null,
				true, new PrincipalSid("joe"));
		// Insert at zero, OK.
		acl.insertAce(0, mock(Permission.class), mock(Sid.class), true);
		// Size is now 1
		acl.insertAce(2, mock(Permission.class), mock(Sid.class), true);
	}

	// SEC-1151
	@Test(expected = NotFoundException.class)
	public void deleteAceRaisesNotFoundExceptionForIndexEqualToSize()
			throws Exception {
		AclImpl acl = new AclImpl(objectIdentity, 1, authzStrategy, pgs, null, null,
				true, new PrincipalSid("joe"));
		acl.insertAce(0, mock(Permission.class), mock(Sid.class), true);
		// Size is now 1
		acl.deleteAce(1);
	}

	// SEC-1795
	@Test
	public void changingParentIsSuccessful() throws Exception {
		AclImpl parentAcl = new AclImpl(objectIdentity, 1L, authzStrategy,
				mockAuditLogger);
		AclImpl childAcl = new AclImpl(objectIdentity, 2L, authzStrategy,
				mockAuditLogger);
		AclImpl changeParentAcl = new AclImpl(objectIdentity, 3L, authzStrategy,
				mockAuditLogger);

		childAcl.setParent(parentAcl);
		childAcl.setParent(changeParentAcl);
	}

	// ~ Inner Classes
	// ==================================================================================================

	// Minimal MutableAclService stub; only updateAcl has real behavior.
	private class MockAclService implements MutableAclService {
		public MutableAcl createAcl(ObjectIdentity objectIdentity)
				throws AlreadyExistsException {
			return null;
		}

		public void deleteAcl(ObjectIdentity objectIdentity, boolean deleteChildren)
				throws ChildrenExistException {
		}

		/*
		 * Mock implementation that populates the aces list with fully initialized
		 * AccessControlEntries
		 *
		 * @see
		 * org.springframework.security.acls.MutableAclService#updateAcl(org.springframework
		 * .security.acls.MutableAcl)
		 */
		@SuppressWarnings("unchecked")
		public MutableAcl updateAcl(MutableAcl acl) throws NotFoundException {
			List<AccessControlEntry> oldAces = acl.getEntries();
			// Reach into AclImpl's private "aces" field via reflection so the stub can
			// rebuild the entries with IDs assigned, mimicking a persisted ACL.
			Field acesField = FieldUtils.getField(AclImpl.class, "aces");
			acesField.setAccessible(true);
			List newAces;
			try {
				newAces = (List) acesField.get(acl);
				newAces.clear();

				for (int i = 0; i < oldAces.size(); i++) {
					AccessControlEntry ac = oldAces.get(i);
					// Just give an ID to all this acl's aces, rest of the fields are just
					// copied
					newAces.add(new AccessControlEntryImpl((i + 1), ac.getAcl(), ac
							.getSid(), ac.getPermission(), ac.isGranting(),
							((AuditableAccessControlEntry) ac).isAuditSuccess(),
							((AuditableAccessControlEntry) ac).isAuditFailure()));
				}
			}
			catch (IllegalAccessException e) {
				e.printStackTrace();
			}

			return acl;
		}

		public List<ObjectIdentity> findChildren(ObjectIdentity parentIdentity) {
			return null;
		}

		public Acl readAclById(ObjectIdentity object) throws NotFoundException {
			return null;
		}

		public Acl readAclById(ObjectIdentity object, List<Sid> sids)
				throws NotFoundException {
			return null;
		}

		public Map<ObjectIdentity, Acl> readAclsById(List<ObjectIdentity> objects)
				throws NotFoundException {
			return null;
		}

		public Map<ObjectIdentity, Acl> readAclsById(List<ObjectIdentity> objects,
				List<Sid> sids) throws NotFoundException {
			return null;
		}
	}
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.converter.crypto; import java.io.BufferedOutputStream; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.PrintWriter; import java.io.UnsupportedEncodingException; import java.security.SecureRandom; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import org.apache.camel.Exchange; import org.apache.camel.Message; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.mock.MockEndpoint; import org.apache.camel.util.IOHelper; import org.bouncycastle.bcpg.BCPGOutputStream; import org.bouncycastle.bcpg.CompressionAlgorithmTags; import org.bouncycastle.bcpg.HashAlgorithmTags; import org.bouncycastle.bcpg.SymmetricKeyAlgorithmTags; import org.bouncycastle.bcpg.sig.KeyFlags; import org.bouncycastle.openpgp.PGPCompressedDataGenerator; import org.bouncycastle.openpgp.PGPEncryptedDataGenerator; import org.bouncycastle.openpgp.PGPException; import 
org.bouncycastle.openpgp.PGPLiteralData; import org.bouncycastle.openpgp.PGPLiteralDataGenerator; import org.bouncycastle.openpgp.PGPPrivateKey; import org.bouncycastle.openpgp.PGPPublicKey; import org.bouncycastle.openpgp.PGPPublicKeyRing; import org.bouncycastle.openpgp.PGPPublicKeyRingCollection; import org.bouncycastle.openpgp.PGPSecretKey; import org.bouncycastle.openpgp.PGPSecretKeyRing; import org.bouncycastle.openpgp.PGPSecretKeyRingCollection; import org.bouncycastle.openpgp.PGPSignature; import org.bouncycastle.openpgp.PGPSignatureGenerator; import org.bouncycastle.openpgp.PGPUtil; import org.bouncycastle.openpgp.operator.bc.BcKeyFingerprintCalculator; import org.bouncycastle.openpgp.operator.jcajce.JcaPGPContentSignerBuilder; import org.bouncycastle.openpgp.operator.jcajce.JcePBEKeyEncryptionMethodGenerator; import org.bouncycastle.openpgp.operator.jcajce.JcePBESecretKeyDecryptorBuilder; import org.bouncycastle.openpgp.operator.jcajce.JcePGPDataEncryptorBuilder; import org.bouncycastle.openpgp.operator.jcajce.JcePublicKeyKeyEncryptionMethodGenerator; import org.junit.Before; import org.junit.Test; public class PGPDataFormatTest extends AbstractPGPDataFormatTest { private static final String PUB_KEY_RING_SUBKEYS_FILE_NAME = "org/apache/camel/component/crypto/pubringSubKeys.gpg"; private static final String SEC_KEY_RING_FILE_NAME = "org/apache/camel/component/crypto/secring.gpg"; private static final String PUB_KEY_RING_FILE_NAME = "org/apache/camel/component/crypto/pubring.gpg"; PGPDataFormat encryptor = new PGPDataFormat(); PGPDataFormat decryptor = new PGPDataFormat(); @Before public void setUpEncryptorAndDecryptor() { // the following keyring contains a primary key with KeyFlag "Certify" and a subkey for signing and a subkey for encryption encryptor.setKeyFileName(PUB_KEY_RING_SUBKEYS_FILE_NAME); encryptor.setSignatureKeyFileName("org/apache/camel/component/crypto/secringSubKeys.gpg"); encryptor.setSignaturePassword("Abcd1234"); 
encryptor.setKeyUserid("keyflag"); encryptor.setSignatureKeyUserid("keyflag"); encryptor.setIntegrity(false); encryptor.setFileName("fileNameABC"); // the following keyring contains a primary key with KeyFlag "Certify" and a subkey for signing and a subkey for encryption decryptor.setKeyFileName("org/apache/camel/component/crypto/secringSubKeys.gpg"); decryptor.setSignatureKeyFileName(PUB_KEY_RING_SUBKEYS_FILE_NAME); decryptor.setPassword("Abcd1234"); decryptor.setSignatureKeyUserid("keyflag"); } protected String getKeyFileName() { return PUB_KEY_RING_FILE_NAME; } protected String getKeyFileNameSec() { return SEC_KEY_RING_FILE_NAME; } protected String getKeyUserId() { return "sdude@nowhere.net"; } protected List<String> getKeyUserIds() { List<String> userids = new ArrayList<String>(2); userids.add("second"); userids.add(getKeyUserId()); return userids; } protected List<String> getSignatureKeyUserIds() { List<String> userids = new ArrayList<String>(2); userids.add("second"); userids.add(getKeyUserId()); return userids; } protected String getKeyPassword() { return "sdude"; } protected String getProvider() { return "BC"; } protected int getAlgorithm() { return SymmetricKeyAlgorithmTags.TRIPLE_DES; } protected int getHashAlgorithm() { return HashAlgorithmTags.SHA256; } protected int getCompressionAlgorithm() { return CompressionAlgorithmTags.BZIP2; } @Test public void testEncryption() throws Exception { doRoundTripEncryptionTests("direct:inline"); } @Test public void testEncryption2() throws Exception { doRoundTripEncryptionTests("direct:inline2"); } @Test public void testEncryptionArmor() throws Exception { doRoundTripEncryptionTests("direct:inline-armor"); } @Test public void testEncryptionSigned() throws Exception { doRoundTripEncryptionTests("direct:inline-sign"); } @Test public void testEncryptionKeyRingByteArray() throws Exception { doRoundTripEncryptionTests("direct:key-ring-byte-array"); } @Test public void testEncryptionSignedKeyRingByteArray() throws 
Exception { doRoundTripEncryptionTests("direct:sign-key-ring-byte-array"); } @Test public void testSeveralSignerKeys() throws Exception { doRoundTripEncryptionTests("direct:several-signer-keys"); } @Test public void testOneUserIdWithServeralKeys() throws Exception { doRoundTripEncryptionTests("direct:one-userid-several-keys"); } @Test public void testKeyAccess() throws Exception { doRoundTripEncryptionTests("direct:key_access"); } @Test public void testVerifyExceptionNoPublicKeyFoundCorrespondingToSignatureUserIds() throws Exception { setupExpectations(context, 1, "mock:encrypted"); MockEndpoint exception = setupExpectations(context, 1, "mock:exception"); String payload = "Hi Alice, Be careful Eve is listening, signed Bob"; Map<String, Object> headers = getHeaders(); template.sendBodyAndHeaders("direct:verify_exception_sig_userids", payload, headers); assertMockEndpointsSatisfied(); checkThrownException(exception, IllegalArgumentException.class, null, "No public key found for the key ID(s)"); } @Test public void testVerifyExceptionNoPassphraseSpecifiedForSignatureKeyUserId() throws Exception { MockEndpoint exception = setupExpectations(context, 1, "mock:exception"); String payload = "Hi Alice, Be careful Eve is listening, signed Bob"; Map<String, Object> headers = new HashMap<String, Object>(); // add signature user id which does not have a passphrase headers.put(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERID, "userIDWithNoPassphrase"); // the following entry is necessary for the dynamic test headers.put(PGPKeyAccessDataFormat.KEY_USERID, "second"); template.sendBodyAndHeaders("direct:several-signer-keys", payload, headers); assertMockEndpointsSatisfied(); checkThrownException(exception, IllegalArgumentException.class, null, "No passphrase specified for signature key user ID"); } /** * You get three keys with the UserId "keyflag", a primary key and its two * sub-keys. 
The sub-key with KeyFlag {@link KeyFlags#SIGN_DATA} should be * used for signing and the sub-key with KeyFlag * {@link KeyFlags#ENCRYPT_COMMS} or {@link KeyFlags#ENCRYPT_COMMS} or * {@link KeyFlags#ENCRYPT_STORAGE} should be used for decryption. * * @throws Exception */ @Test public void testKeyFlagSelectsCorrectKey() throws Exception { MockEndpoint mockKeyFlag = getMockEndpoint("mock:encrypted_keyflag"); mockKeyFlag.setExpectedMessageCount(1); template.sendBody("direct:keyflag", "Test Message"); assertMockEndpointsSatisfied(); List<Exchange> exchanges = mockKeyFlag.getExchanges(); assertEquals(1, exchanges.size()); Exchange exchange = exchanges.get(0); Message inMess = exchange.getIn(); assertNotNull(inMess); // must contain exactly one encryption key and one signature assertEquals(1, inMess.getHeader(PGPKeyAccessDataFormat.NUMBER_OF_ENCRYPTION_KEYS)); assertEquals(1, inMess.getHeader(PGPKeyAccessDataFormat.NUMBER_OF_SIGNING_KEYS)); } /** * You get three keys with the UserId "keyflag", a primary key and its two * sub-keys. The sub-key with KeyFlag {@link KeyFlags#SIGN_DATA} should be * used for signing and the sub-key with KeyFlag * {@link KeyFlags#ENCRYPT_COMMS} or {@link KeyFlags#ENCRYPT_COMMS} or * {@link KeyFlags#ENCRYPT_STORAGE} should be used for decryption. * <p> * Tests also the decryption and verifying part with the subkeys. 
* * @throws Exception */ @Test public void testDecryptVerifyWithSubkey() throws Exception { // do not use doRoundTripEncryptionTests("direct:subkey"); because otherwise you get an error in the dynamic test String payload = "Test Message"; MockEndpoint mockSubkey = getMockEndpoint("mock:unencrypted"); mockSubkey.expectedBodiesReceived(payload); template.sendBody("direct:subkey", payload); assertMockEndpointsSatisfied(); } @Test public void testEmptyBody() throws Exception { String payload = ""; MockEndpoint mockSubkey = getMockEndpoint("mock:unencrypted"); mockSubkey.expectedBodiesReceived(payload); template.sendBody("direct:subkey", payload); assertMockEndpointsSatisfied(); } @Test public void testExceptionDecryptorIncorrectInputFormatNoPGPMessage() throws Exception { String payload = "Not Correct Format"; MockEndpoint mock = getMockEndpoint("mock:exception"); mock.expectedMessageCount(1); template.sendBody("direct:subkeyUnmarshal", payload); assertMockEndpointsSatisfied(); checkThrownException(mock, IllegalArgumentException.class, null, "The input message body has an invalid format."); } @Test public void testExceptionDecryptorIncorrectInputFormatPGPSignedData() throws Exception { ByteArrayOutputStream bos = new ByteArrayOutputStream(); createSignature(bos); MockEndpoint mock = getMockEndpoint("mock:exception"); mock.expectedMessageCount(1); template.sendBody("direct:subkeyUnmarshal", bos.toByteArray()); assertMockEndpointsSatisfied(); checkThrownException(mock, IllegalArgumentException.class, null, "The input message body has an invalid format."); } @Test public void testEncryptSignWithoutCompressedDataPacket() throws Exception { doRoundTripEncryptionTests("direct:encrypt-sign-without-compressed-data-packet"); // ByteArrayOutputStream bos = new ByteArrayOutputStream(); // //// createEncryptedNonCompressedData(bos, PUB_KEY_RING_SUBKEYS_FILE_NAME); // // MockEndpoint mock = getMockEndpoint("mock:exception"); // mock.expectedMessageCount(1); // 
template.sendBody("direct:encrypt-sign-without-compressed-data-packet", bos.toByteArray()); // assertMockEndpointsSatisfied(); // // //checkThrownException(mock, IllegalArgumentException.class, null, "The input message body has an invalid format."); } @Test public void testExceptionDecryptorNoKeyFound() throws Exception { ByteArrayOutputStream bos = new ByteArrayOutputStream(); createEncryptedNonCompressedData(bos, PUB_KEY_RING_FILE_NAME); MockEndpoint mock = getMockEndpoint("mock:exception"); mock.expectedMessageCount(1); template.sendBody("direct:subkeyUnmarshal", bos.toByteArray()); assertMockEndpointsSatisfied(); checkThrownException(mock, PGPException.class, null, "PGP message is encrypted with a key which could not be found in the Secret Keyring"); } void createEncryptedNonCompressedData(ByteArrayOutputStream bos, String keyringPath) throws Exception, IOException, PGPException, UnsupportedEncodingException { PGPEncryptedDataGenerator encGen = new PGPEncryptedDataGenerator(new JcePGPDataEncryptorBuilder(SymmetricKeyAlgorithmTags.CAST5) .setSecureRandom(new SecureRandom()).setProvider(getProvider())); encGen.addMethod(new JcePublicKeyKeyEncryptionMethodGenerator(readPublicKey(keyringPath))); OutputStream encOut = encGen.open(bos, new byte[512]); PGPLiteralDataGenerator litData = new PGPLiteralDataGenerator(); OutputStream litOut = litData.open(encOut, PGPLiteralData.BINARY, PGPLiteralData.CONSOLE, new Date(), new byte[512]); try { litOut.write("Test Message Without Compression".getBytes("UTF-8")); litOut.flush(); } finally { IOHelper.close(litOut); IOHelper.close(encOut, bos); } } private void createSignature(OutputStream out) throws Exception { PGPSecretKey pgpSec = readSecretKey(); PGPPrivateKey pgpPrivKey = pgpSec.extractPrivateKey(new JcePBESecretKeyDecryptorBuilder().setProvider(getProvider()).build( "sdude".toCharArray())); PGPSignatureGenerator sGen = new PGPSignatureGenerator(new JcaPGPContentSignerBuilder(pgpSec.getPublicKey().getAlgorithm(), 
HashAlgorithmTags.SHA1).setProvider(getProvider())); sGen.init(PGPSignature.BINARY_DOCUMENT, pgpPrivKey); BCPGOutputStream bOut = new BCPGOutputStream(out); InputStream fIn = new ByteArrayInputStream("Test Signature".getBytes("UTF-8")); int ch; while ((ch = fIn.read()) >= 0) { sGen.update((byte) ch); } fIn.close(); sGen.generate().encode(bOut); } static PGPSecretKey readSecretKey() throws Exception { InputStream input = new ByteArrayInputStream(getSecKeyRing()); PGPSecretKeyRingCollection pgpSec = new PGPSecretKeyRingCollection(PGPUtil.getDecoderStream(input), new BcKeyFingerprintCalculator()); @SuppressWarnings("rawtypes") Iterator keyRingIter = pgpSec.getKeyRings(); while (keyRingIter.hasNext()) { PGPSecretKeyRing keyRing = (PGPSecretKeyRing) keyRingIter.next(); @SuppressWarnings("rawtypes") Iterator keyIter = keyRing.getSecretKeys(); while (keyIter.hasNext()) { PGPSecretKey key = (PGPSecretKey) keyIter.next(); if (key.isSigningKey()) { return key; } } } throw new IllegalArgumentException("Can't find signing key in key ring."); } static PGPPublicKey readPublicKey(String keyringPath) throws Exception { InputStream input = new ByteArrayInputStream(getKeyRing(keyringPath)); PGPPublicKeyRingCollection pgpPub = new PGPPublicKeyRingCollection(PGPUtil.getDecoderStream(input), new BcKeyFingerprintCalculator()); @SuppressWarnings("rawtypes") Iterator keyRingIter = pgpPub.getKeyRings(); while (keyRingIter.hasNext()) { PGPPublicKeyRing keyRing = (PGPPublicKeyRing) keyRingIter.next(); @SuppressWarnings("rawtypes") Iterator keyIter = keyRing.getPublicKeys(); while (keyIter.hasNext()) { PGPPublicKey key = (PGPPublicKey) keyIter.next(); if (key.isEncryptionKey()) { return key; } } } throw new IllegalArgumentException("Can't find encryption key in key ring."); } @Test public void testExceptionDecryptorIncorrectInputFormatSymmetricEncryptedData() throws Exception { byte[] payload = "Not Correct Format".getBytes("UTF-8"); ByteArrayOutputStream bos = new ByteArrayOutputStream(); 
PGPEncryptedDataGenerator encGen = new PGPEncryptedDataGenerator(new JcePGPDataEncryptorBuilder(SymmetricKeyAlgorithmTags.CAST5) .setSecureRandom(new SecureRandom()).setProvider(getProvider())); encGen.addMethod(new JcePBEKeyEncryptionMethodGenerator("pw".toCharArray())); OutputStream encOut = encGen.open(bos, new byte[1024]); PGPCompressedDataGenerator comData = new PGPCompressedDataGenerator(CompressionAlgorithmTags.ZIP); OutputStream comOut = new BufferedOutputStream(comData.open(encOut)); PGPLiteralDataGenerator litData = new PGPLiteralDataGenerator(); OutputStream litOut = litData.open(comOut, PGPLiteralData.BINARY, PGPLiteralData.CONSOLE, new Date(), new byte[1024]); litOut.write(payload); litOut.flush(); litOut.close(); comOut.close(); encOut.close(); MockEndpoint mock = getMockEndpoint("mock:exception"); mock.expectedMessageCount(1); template.sendBody("direct:subkeyUnmarshal", bos.toByteArray()); assertMockEndpointsSatisfied(); checkThrownException(mock, IllegalArgumentException.class, null, "The input message body has an invalid format."); } @Test public void testExceptionForSignatureVerificationOptionNoSignatureAllowed() throws Exception { decryptor.setSignatureVerificationOption(PGPKeyAccessDataFormat.SIGNATURE_VERIFICATION_OPTION_NO_SIGNATURE_ALLOWED); MockEndpoint mock = getMockEndpoint("mock:exception"); mock.expectedMessageCount(1); template.sendBody("direct:subkey", "Test Message"); assertMockEndpointsSatisfied(); checkThrownException(mock, PGPException.class, null, "PGP message contains a signature although a signature is not expected"); } @Test public void testExceptionForSignatureVerificationOptionRequired() throws Exception { encryptor.setSignatureKeyUserid(null); // no signature decryptor.setSignatureVerificationOption(PGPKeyAccessDataFormat.SIGNATURE_VERIFICATION_OPTION_REQUIRED); MockEndpoint mock = getMockEndpoint("mock:exception"); mock.expectedMessageCount(1); template.sendBody("direct:subkey", "Test Message"); 
assertMockEndpointsSatisfied(); checkThrownException(mock, PGPException.class, null, "PGP message does not contain any signatures although a signature is expected"); } @Test public void testSignatureVerificationOptionIgnore() throws Exception { // encryptor is sending a PGP message with signature! Decryptor is ignoreing the signature decryptor.setSignatureVerificationOption(PGPKeyAccessDataFormat.SIGNATURE_VERIFICATION_OPTION_IGNORE); decryptor.setSignatureKeyUserids(null); decryptor.setSignatureKeyFileName(null); // no public keyring! --> no signature validation possible String payload = "Test Message"; MockEndpoint mock = getMockEndpoint("mock:unencrypted"); mock.expectedBodiesReceived(payload); template.sendBody("direct:subkey", payload); assertMockEndpointsSatisfied(); } protected RouteBuilder[] createRouteBuilders() { return new RouteBuilder[] {new RouteBuilder() { public void configure() throws Exception { onException(Exception.class).handled(true).to("mock:exception"); // START SNIPPET: pgp-format // Public Key FileName String keyFileName = getKeyFileName(); // Private Key FileName String keyFileNameSec = getKeyFileNameSec(); // Keyring Userid Used to Encrypt String keyUserid = getKeyUserId(); // Private key password String keyPassword = getKeyPassword(); from("direct:inline").marshal().pgp(keyFileName, keyUserid).to("mock:encrypted").unmarshal() .pgp(keyFileNameSec, null, keyPassword).to("mock:unencrypted"); // END SNIPPET: pgp-format // START SNIPPET: pgp-format-header PGPDataFormat pgpEncrypt = new PGPDataFormat(); pgpEncrypt.setKeyFileName(keyFileName); pgpEncrypt.setKeyUserid(keyUserid); pgpEncrypt.setProvider(getProvider()); pgpEncrypt.setAlgorithm(getAlgorithm()); pgpEncrypt.setCompressionAlgorithm(getCompressionAlgorithm()); PGPDataFormat pgpDecrypt = new PGPDataFormat(); pgpDecrypt.setKeyFileName(keyFileNameSec); pgpDecrypt.setPassword(keyPassword); pgpDecrypt.setProvider(getProvider()); 
pgpDecrypt.setSignatureVerificationOption(PGPKeyAccessDataFormat.SIGNATURE_VERIFICATION_OPTION_NO_SIGNATURE_ALLOWED); from("direct:inline2").marshal(pgpEncrypt).to("mock:encrypted").unmarshal(pgpDecrypt).to("mock:unencrypted"); from("direct:inline-armor").marshal().pgp(keyFileName, keyUserid, null, true, true).to("mock:encrypted").unmarshal() .pgp(keyFileNameSec, null, keyPassword, true, true).to("mock:unencrypted"); // END SNIPPET: pgp-format-header // START SNIPPET: pgp-format-signature PGPDataFormat pgpSignAndEncrypt = new PGPDataFormat(); pgpSignAndEncrypt.setKeyFileName(keyFileName); pgpSignAndEncrypt.setKeyUserid(keyUserid); pgpSignAndEncrypt.setSignatureKeyFileName(keyFileNameSec); PGPPassphraseAccessor passphraseAccessor = getPassphraseAccessor(); pgpSignAndEncrypt.setSignatureKeyUserid("Super <sdude@nowhere.net>"); // must be the exact user Id because passphrase is searched in accessor pgpSignAndEncrypt.setPassphraseAccessor(passphraseAccessor); pgpSignAndEncrypt.setProvider(getProvider()); pgpSignAndEncrypt.setAlgorithm(getAlgorithm()); pgpSignAndEncrypt.setHashAlgorithm(getHashAlgorithm()); pgpSignAndEncrypt.setCompressionAlgorithm(getCompressionAlgorithm()); PGPDataFormat pgpVerifyAndDecrypt = new PGPDataFormat(); pgpVerifyAndDecrypt.setKeyFileName(keyFileNameSec); pgpVerifyAndDecrypt.setPassword(keyPassword); pgpVerifyAndDecrypt.setSignatureKeyFileName(keyFileName); pgpVerifyAndDecrypt.setProvider(getProvider()); pgpVerifyAndDecrypt.setSignatureKeyUserid(keyUserid); // restrict verification to public keys with certain User ID from("direct:inline-sign").marshal(pgpSignAndEncrypt).to("mock:encrypted").unmarshal(pgpVerifyAndDecrypt) .to("mock:unencrypted"); // END SNIPPET: pgp-format-signature // test verifying exception, no public key found corresponding to signature key userIds from("direct:verify_exception_sig_userids").marshal(pgpSignAndEncrypt).to("mock:encrypted") .setHeader(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERIDS).constant(Arrays.asList(new 
String[] {"wrong1", "wrong2" })) .setHeader(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERID).constant("wrongUserID").unmarshal(pgpVerifyAndDecrypt) .to("mock:unencrypted"); /* ---- key ring as byte array -- */ // START SNIPPET: pgp-format-key-ring-byte-array PGPDataFormat pgpEncryptByteArray = new PGPDataFormat(); pgpEncryptByteArray.setEncryptionKeyRing(getPublicKeyRing()); pgpEncryptByteArray.setKeyUserids(getKeyUserIds()); pgpEncryptByteArray.setProvider(getProvider()); pgpEncryptByteArray.setAlgorithm(SymmetricKeyAlgorithmTags.DES); pgpEncryptByteArray.setCompressionAlgorithm(CompressionAlgorithmTags.UNCOMPRESSED); PGPDataFormat pgpDecryptByteArray = new PGPDataFormat(); pgpDecryptByteArray.setEncryptionKeyRing(getSecKeyRing()); pgpDecryptByteArray.setPassphraseAccessor(passphraseAccessor); pgpDecryptByteArray.setProvider(getProvider()); from("direct:key-ring-byte-array").streamCaching().marshal(pgpEncryptByteArray).to("mock:encrypted") .unmarshal(pgpDecryptByteArray).to("mock:unencrypted"); // END SNIPPET: pgp-format-key-ring-byte-array // START SNIPPET: pgp-format-signature-key-ring-byte-array PGPDataFormat pgpSignAndEncryptByteArray = new PGPDataFormat(); pgpSignAndEncryptByteArray.setKeyUserid(keyUserid); pgpSignAndEncryptByteArray.setSignatureKeyRing(getSecKeyRing()); pgpSignAndEncryptByteArray.setSignatureKeyUserid(keyUserid); pgpSignAndEncryptByteArray.setSignaturePassword(keyPassword); pgpSignAndEncryptByteArray.setProvider(getProvider()); pgpSignAndEncryptByteArray.setAlgorithm(SymmetricKeyAlgorithmTags.BLOWFISH); pgpSignAndEncryptByteArray.setHashAlgorithm(HashAlgorithmTags.RIPEMD160); pgpSignAndEncryptByteArray.setCompressionAlgorithm(CompressionAlgorithmTags.ZLIB); PGPDataFormat pgpVerifyAndDecryptByteArray = new PGPDataFormat(); pgpVerifyAndDecryptByteArray.setPassphraseAccessor(passphraseAccessor); pgpVerifyAndDecryptByteArray.setEncryptionKeyRing(getSecKeyRing()); pgpVerifyAndDecryptByteArray.setProvider(getProvider()); // restrict verification to 
public keys with certain User ID pgpVerifyAndDecryptByteArray.setSignatureKeyUserids(getSignatureKeyUserIds()); pgpVerifyAndDecryptByteArray.setSignatureVerificationOption(PGPKeyAccessDataFormat.SIGNATURE_VERIFICATION_OPTION_REQUIRED); from("direct:sign-key-ring-byte-array").streamCaching() // encryption key ring can also be set as header .setHeader(PGPDataFormat.ENCRYPTION_KEY_RING).constant(getPublicKeyRing()).marshal(pgpSignAndEncryptByteArray) // it is recommended to remove the header immediately when it is no longer needed .removeHeader(PGPDataFormat.ENCRYPTION_KEY_RING).to("mock:encrypted") // signature key ring can also be set as header .setHeader(PGPDataFormat.SIGNATURE_KEY_RING).constant(getPublicKeyRing()).unmarshal(pgpVerifyAndDecryptByteArray) // it is recommended to remove the header immediately when it is no longer needed .removeHeader(PGPDataFormat.SIGNATURE_KEY_RING).to("mock:unencrypted"); // END SNIPPET: pgp-format-signature-key-ring-byte-array // START SNIPPET: pgp-format-several-signer-keys PGPDataFormat pgpSignAndEncryptSeveralSignerKeys = new PGPDataFormat(); pgpSignAndEncryptSeveralSignerKeys.setKeyUserid(keyUserid); pgpSignAndEncryptSeveralSignerKeys.setEncryptionKeyRing(getPublicKeyRing()); pgpSignAndEncryptSeveralSignerKeys.setSignatureKeyRing(getSecKeyRing()); List<String> signerUserIds = new ArrayList<String>(); signerUserIds.add("Third (comment third) <email@third.com>"); signerUserIds.add("Second <email@second.com>"); pgpSignAndEncryptSeveralSignerKeys.setSignatureKeyUserids(signerUserIds); Map<String, String> userId2Passphrase = new HashMap<String, String>(); userId2Passphrase.put("Third (comment third) <email@third.com>", "sdude"); userId2Passphrase.put("Second <email@second.com>", "sdude"); PGPPassphraseAccessor passphraseAccessorSeveralKeys = new DefaultPGPPassphraseAccessor(userId2Passphrase); pgpSignAndEncryptSeveralSignerKeys.setPassphraseAccessor(passphraseAccessorSeveralKeys); PGPDataFormat pgpVerifyAndDecryptSeveralSignerKeys 
= new PGPDataFormat(); pgpVerifyAndDecryptSeveralSignerKeys.setPassphraseAccessor(passphraseAccessor); pgpVerifyAndDecryptSeveralSignerKeys.setEncryptionKeyRing(getSecKeyRing()); pgpVerifyAndDecryptSeveralSignerKeys.setSignatureKeyRing(getPublicKeyRing()); pgpVerifyAndDecryptSeveralSignerKeys.setProvider(getProvider()); // only specify one expected signature List<String> expectedSigUserIds = new ArrayList<String>(); expectedSigUserIds.add("Second <email@second.com>"); pgpVerifyAndDecryptSeveralSignerKeys.setSignatureKeyUserids(expectedSigUserIds); from("direct:several-signer-keys").streamCaching().marshal(pgpSignAndEncryptSeveralSignerKeys).to("mock:encrypted") .unmarshal(pgpVerifyAndDecryptSeveralSignerKeys).to("mock:unencrypted"); // END SNIPPET: pgp-format-several-signer-keys // test encryption by several key and signing by serveral keys where the keys are specified by one User ID part PGPDataFormat pgpSignAndEncryptOneUserIdWithServeralKeys = new PGPDataFormat(); pgpSignAndEncryptOneUserIdWithServeralKeys.setEncryptionKeyRing(getPublicKeyRing()); pgpSignAndEncryptOneUserIdWithServeralKeys.setSignatureKeyRing(getSecKeyRing()); // the two private keys have the same password therefore we do not need a passphrase accessor pgpSignAndEncryptOneUserIdWithServeralKeys.setPassword(getKeyPassword()); PGPDataFormat pgpVerifyAndDecryptOneUserIdWithServeralKeys = new PGPDataFormat(); pgpVerifyAndDecryptOneUserIdWithServeralKeys.setPassword(getKeyPassword()); pgpVerifyAndDecryptOneUserIdWithServeralKeys.setEncryptionKeyRing(getSecKeyRing()); pgpVerifyAndDecryptOneUserIdWithServeralKeys.setSignatureKeyRing(getPublicKeyRing()); pgpVerifyAndDecryptOneUserIdWithServeralKeys.setProvider(getProvider()); pgpVerifyAndDecryptOneUserIdWithServeralKeys.setSignatureKeyUserids(expectedSigUserIds); from("direct:one-userid-several-keys") // there are two keys which have a User ID which contains the string "econd" .setHeader(PGPKeyAccessDataFormat.KEY_USERID) .constant("econd") 
.setHeader(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERID) .constant("econd") .marshal(pgpSignAndEncryptOneUserIdWithServeralKeys) // it is recommended to remove the header immediately when it is no longer needed .removeHeader(PGPKeyAccessDataFormat.KEY_USERID) .removeHeader(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERID) .to("mock:encrypted") // only specify one expected signature key, to check the first signature .setHeader(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERID) .constant("Second <email@second.com>") .unmarshal(pgpVerifyAndDecryptOneUserIdWithServeralKeys) // do it again but now check the second signature key // there are two keys which have a User ID which contains the string "econd" .setHeader(PGPKeyAccessDataFormat.KEY_USERID).constant("econd").setHeader(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERID) .constant("econd").marshal(pgpSignAndEncryptOneUserIdWithServeralKeys) // it is recommended to remove the header immediately when it is no longer needed .removeHeader(PGPKeyAccessDataFormat.KEY_USERID).removeHeader(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERID) // only specify one expected signature key, to check the second signature .setHeader(PGPKeyAccessDataFormat.SIGNATURE_KEY_USERID).constant("Third (comment third) <email@third.com>") .unmarshal(pgpVerifyAndDecryptOneUserIdWithServeralKeys).to("mock:unencrypted"); } }, new RouteBuilder() { public void configure() throws Exception { onException(Exception.class).handled(true).to("mock:exception"); from("direct:keyflag").marshal(encryptor).to("mock:encrypted_keyflag"); // test that the correct subkey is selected during decrypt and verify from("direct:subkey").marshal(encryptor).to("mock:encrypted").unmarshal(decryptor).to("mock:unencrypted"); from("direct:subkeyUnmarshal").unmarshal(decryptor).to("mock:unencrypted"); } }, new RouteBuilder() { public void configure() throws Exception { PGPPublicKeyAccessor publicKeyAccessor = new DefaultPGPPublicKeyAccessor(getPublicKeyRing()); //password cannot be set dynamically! 
PGPSecretKeyAccessor secretKeyAccessor = new DefaultPGPSecretKeyAccessor(getSecKeyRing(), "sdude", getProvider()); PGPKeyAccessDataFormat dfEncryptSignKeyAccess = new PGPKeyAccessDataFormat(); dfEncryptSignKeyAccess.setPublicKeyAccessor(publicKeyAccessor); dfEncryptSignKeyAccess.setSecretKeyAccessor(secretKeyAccessor); dfEncryptSignKeyAccess.setKeyUserid(getKeyUserId()); dfEncryptSignKeyAccess.setSignatureKeyUserid(getKeyUserId()); PGPKeyAccessDataFormat dfDecryptVerifyKeyAccess = new PGPKeyAccessDataFormat(); dfDecryptVerifyKeyAccess.setPublicKeyAccessor(publicKeyAccessor); dfDecryptVerifyKeyAccess.setSecretKeyAccessor(secretKeyAccessor); dfDecryptVerifyKeyAccess.setSignatureKeyUserid(getKeyUserId()); from("direct:key_access").marshal(dfEncryptSignKeyAccess).to("mock:encrypted").unmarshal(dfDecryptVerifyKeyAccess) .to("mock:unencrypted"); } }, new RouteBuilder() { public void configure() throws Exception { // START SNIPPET: pgp-encrypt-sign-without-compressed-data-packet PGPDataFormat pgpEncryptSign = new PGPDataFormat(); pgpEncryptSign.setKeyUserid(getKeyUserId()); pgpEncryptSign.setSignatureKeyRing(getSecKeyRing()); pgpEncryptSign.setSignatureKeyUserid(getKeyUserId()); pgpEncryptSign.setSignaturePassword(getKeyPassword()); pgpEncryptSign.setProvider(getProvider()); pgpEncryptSign.setAlgorithm(SymmetricKeyAlgorithmTags.BLOWFISH); pgpEncryptSign.setHashAlgorithm(HashAlgorithmTags.RIPEMD160); // without compressed data packet pgpEncryptSign.setWithCompressedDataPacket(false); PGPDataFormat pgpVerifyAndDecryptByteArray = new PGPDataFormat(); pgpVerifyAndDecryptByteArray.setPassphraseAccessor(getPassphraseAccessor()); pgpVerifyAndDecryptByteArray.setEncryptionKeyRing(getSecKeyRing()); pgpVerifyAndDecryptByteArray.setProvider(getProvider()); // restrict verification to public keys with certain User ID pgpVerifyAndDecryptByteArray.setSignatureKeyUserids(getSignatureKeyUserIds()); 
pgpVerifyAndDecryptByteArray.setSignatureVerificationOption(PGPKeyAccessDataFormat.SIGNATURE_VERIFICATION_OPTION_REQUIRED); from("direct:encrypt-sign-without-compressed-data-packet").streamCaching() // encryption key ring can also be set as header .setHeader(PGPDataFormat.ENCRYPTION_KEY_RING).constant(getPublicKeyRing()).marshal(pgpEncryptSign) // it is recommended to remove the header immediately when it is no longer needed .removeHeader(PGPDataFormat.ENCRYPTION_KEY_RING).to("mock:encrypted") // signature key ring can also be set as header .setHeader(PGPDataFormat.SIGNATURE_KEY_RING).constant(getPublicKeyRing()).unmarshal(pgpVerifyAndDecryptByteArray) // it is recommended to remove the header immediately when it is no longer needed .removeHeader(PGPDataFormat.SIGNATURE_KEY_RING).to("mock:unencrypted"); // END SNIPPET: pgp-encrypt-sign-without-compressed-data-packet } }}; } public static byte[] getPublicKeyRing() throws Exception { return getKeyRing(PUB_KEY_RING_FILE_NAME); } public static byte[] getSecKeyRing() throws Exception { return getKeyRing(SEC_KEY_RING_FILE_NAME); } private static byte[] getKeyRing(String fileName) throws IOException { InputStream is = PGPDataFormatTest.class.getClassLoader().getResourceAsStream(fileName); ByteArrayOutputStream output = new ByteArrayOutputStream(); IOHelper.copyAndCloseInput(is, output); output.close(); return output.toByteArray(); } public static PGPPassphraseAccessor getPassphraseAccessor() { Map<String, String> userId2Passphrase = Collections.singletonMap("Super <sdude@nowhere.net>", "sdude"); PGPPassphraseAccessor passphraseAccessor = new DefaultPGPPassphraseAccessor(userId2Passphrase); return passphraseAccessor; } public static void checkThrownException(MockEndpoint mock, Class<? extends Exception> cl, Class<? 
extends Exception> expectedCauseClass, String expectedMessagePart) throws Exception { Exception e = (Exception) mock.getExchanges().get(0).getProperty(Exchange.EXCEPTION_CAUGHT); assertNotNull("Expected excpetion " + cl.getName() + " missing", e); if (e.getClass() != cl) { String stackTrace = getStrackTrace(e); fail("Exception " + cl.getName() + " excpected, but was " + e.getClass().getName() + ": " + stackTrace); } if (expectedMessagePart != null) { if (e.getMessage() == null) { fail("Expected excption does not contain a message. Stack trace: " + getStrackTrace(e)); } else { if (!e.getMessage().contains(expectedMessagePart)) { fail("Expected excption message does not contain a expected message part " + expectedMessagePart + ". Stack trace: " + getStrackTrace(e)); } } } if (expectedCauseClass != null) { Throwable cause = e.getCause(); assertNotNull("Expected cause exception" + expectedCauseClass.getName() + " missing", cause); if (expectedCauseClass != cause.getClass()) { fail("Cause exception " + expectedCauseClass.getName() + " expected, but was " + cause.getClass().getName() + ": " + getStrackTrace(e)); } } } public static String getStrackTrace(Exception e) throws UnsupportedEncodingException { ByteArrayOutputStream os = new ByteArrayOutputStream(); PrintWriter w = new PrintWriter(os); e.printStackTrace(w); w.close(); String stackTrace = new String(os.toByteArray(), "UTF-8"); return stackTrace; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.optimizer.dag; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.ExecutionMode; import org.apache.flink.api.common.operators.AbstractUdfOperator; import org.apache.flink.api.common.operators.CompilerHints; import org.apache.flink.api.common.operators.Operator; import org.apache.flink.api.common.operators.SemanticProperties; import org.apache.flink.api.common.operators.util.FieldSet; import org.apache.flink.optimizer.CompilerException; import org.apache.flink.optimizer.DataStatistics; import org.apache.flink.optimizer.costs.CostEstimator; import org.apache.flink.optimizer.dataproperties.InterestingProperties; import org.apache.flink.optimizer.dataproperties.RequestedGlobalProperties; import org.apache.flink.optimizer.dataproperties.RequestedLocalProperties; import org.apache.flink.optimizer.plan.PlanNode; import org.apache.flink.optimizer.plandump.DumpableConnection; import org.apache.flink.optimizer.plandump.DumpableNode; import 
org.apache.flink.runtime.operators.shipping.ShipStrategyType;
import org.apache.flink.util.Visitable;
import org.apache.flink.util.Visitor;

/**
 * The OptimizerNode is the base class of all nodes in the optimizer DAG. The optimizer DAG is the
 * optimizer's representation of a program, created before the actual optimization (which creates different
 * candidate plans and computes their cost).
 * <p>
 * Nodes in the DAG correspond (almost) one-to-one to the operators in a program. The optimizer DAG is constructed
 * to hold the additional information that the optimizer needs:
 * <ul>
 *     <li>Estimates of the data size processed by each operator</li>
 *     <li>Helper structures to track where the data flow "splits" and "joins", to support flows that are
 *         DAGs but not trees.</li>
 *     <li>Tags and weights to differentiate between loop-variant and -invariant parts of an iteration</li>
 *     <li>Interesting properties to be used during the enumeration of candidate plans</li>
 * </ul>
 */
public abstract class OptimizerNode implements Visitable<OptimizerNode>, EstimateProvider, DumpableNode<OptimizerNode> {

	// Upper bound for the cost weight of nodes on dynamic (iteration) paths.
	public static final int MAX_DYNAMIC_PATH_COST_WEIGHT = 100;

	// --------------------------------------------------------------------------------------------
	//                                          Members
	// --------------------------------------------------------------------------------------------

	private final Operator<?> operator; // The operator (Reduce / Join / DataSource / ...)

	private List<String> broadcastConnectionNames = new ArrayList<String>(); // the broadcast inputs names of this node

	private List<DagConnection> broadcastConnections = new ArrayList<DagConnection>(); // the broadcast inputs of this node

	private List<DagConnection> outgoingConnections; // The links to succeeding nodes

	private InterestingProperties intProps; // the interesting properties of this node

	// --------------------------------- Branch Handling  ------------------------------------------

	protected List<UnclosedBranchDescriptor> openBranches; // stack of branches in the sub-graph that are not joined

	protected Set<OptimizerNode> closedBranchingNodes; // stack of branching nodes which have already been closed

	protected List<OptimizerNode> hereJoinedBranches; // the branching nodes (node with multiple outputs)
	                                                  // that are partially joined (through multiple inputs or broadcast vars)

	// ---------------------------- Estimates and Annotations  -------------------------------------

	protected long estimatedOutputSize = -1; // the estimated size of the output (bytes), -1 if unknown

	protected long estimatedNumRecords = -1; // the estimated number of key/value pairs in the output, -1 if unknown

	protected Set<FieldSet> uniqueFields; // set of attributes that will always be unique after this node

	// --------------------------------- General Parameters  ---------------------------------------

	private int parallelism = ExecutionConfig.PARALLELISM_DEFAULT; // the number of parallel instances of this node

	private long minimalMemoryPerSubTask = -1; // minimal memory per subtask, -1 if unknown

	protected int id = -1; // the id for this node, assigned once via initId(); -1 means "not yet assigned"

	protected int costWeight = 1; // factor to weight the costs for dynamic paths

	protected boolean onDynamicPath; // true if this node sits on an iteration's dynamic (loop-variant) path

	protected List<PlanNode> cachedPlans; // cache candidates, because they may be accessed repeatedly

	// ------------------------------------------------------------------------
	//                      Constructor / Setup
	// ------------------------------------------------------------------------

	/**
	 * Creates a new optimizer node that represents the given program operator.
	 *
	 * @param op The operator that the node represents.
	 */
	public OptimizerNode(Operator<?> op) {
		this.operator = op;
		readStubAnnotations();
	}

	/**
	 * Copy constructor. Copies the operator reference and the per-node state (estimates, branch
	 * bookkeeping, parallelism, cost weight), but NOT the connections or cached plans.
	 *
	 * @param toCopy The node to copy from.
	 */
	protected OptimizerNode(OptimizerNode toCopy) {
		this.operator = toCopy.operator;

		this.intProps = toCopy.intProps;

		this.openBranches = toCopy.openBranches;
		this.closedBranchingNodes = toCopy.closedBranchingNodes;

		this.estimatedOutputSize = toCopy.estimatedOutputSize;
		this.estimatedNumRecords = toCopy.estimatedNumRecords;

		this.parallelism = toCopy.parallelism;
		this.minimalMemoryPerSubTask = toCopy.minimalMemoryPerSubTask;

		this.id = toCopy.id;
		this.costWeight = toCopy.costWeight;
		this.onDynamicPath = toCopy.onDynamicPath;
	}

	// ------------------------------------------------------------------------
	//  Methods specific to unary- / binary- / special nodes
	// ------------------------------------------------------------------------

	/**
	 * Gets the name of this node, which is the name of the function/operator, or
	 * data source / data sink.
	 *
	 * @return The node name.
	 */
	public abstract String getOperatorName();

	/**
	 * This function connects the predecessors to this operator.
	 *
	 * @param operatorToNode The map from program operators to optimizer nodes.
	 * @param defaultExchangeMode The data exchange mode to use, if the operator does not
	 *                            specify one.
	 */
	public abstract void setInput(Map<Operator<?>, OptimizerNode> operatorToNode,
									ExecutionMode defaultExchangeMode);

	/**
	 * This function connects the operators that produce the broadcast inputs to this operator.
	 * Operators that are not {@link AbstractUdfOperator}s cannot have broadcast inputs and are
	 * skipped silently.
	 *
	 * @param operatorToNode The map from program operators to optimizer nodes.
	 * @param defaultExchangeMode The data exchange mode to use, if the operator does not
	 *                            specify one.
	 *
	 * @throws CompilerException If the broadcast inputs cannot be connected.
	 */
	public void setBroadcastInputs(Map<Operator<?>, OptimizerNode> operatorToNode, ExecutionMode defaultExchangeMode) {
		// skip for Operators that don't support broadcast variables
		if (!(getOperator() instanceof AbstractUdfOperator<?, ?>)) {
			return;
		}

		// get all broadcast inputs
		AbstractUdfOperator<?, ?> operator = ((AbstractUdfOperator<?, ?>) getOperator());

		// create connections and add them
		for (Map.Entry<String, Operator<?>> input : operator.getBroadcastInputs().entrySet()) {
			OptimizerNode predecessor = operatorToNode.get(input.getValue());
			// broadcast variables always use the BROADCAST ship strategy
			DagConnection connection = new DagConnection(predecessor, this,
															ShipStrategyType.BROADCAST, defaultExchangeMode);
			addBroadcastConnection(input.getKey(), connection);
			predecessor.addOutgoingConnection(connection);
		}
	}

	/**
	 * Gets all incoming connections of this node.
	 * This method needs to be overridden by subclasses to return the children.
	 *
	 * @return The list of incoming connections.
	 */
	public abstract List<DagConnection> getIncomingConnections();

	/**
	 * Tells the node to compute the interesting properties for its inputs. The interesting properties
	 * for the node itself must have been computed before.
	 * The node must then see how many of interesting properties it preserves and add its own.
	 *
	 * @param estimator The {@code CostEstimator} instance to use for plan cost estimation.
	 */
	public abstract void computeInterestingPropertiesForInputs(CostEstimator estimator);

	/**
	 * This method causes the node to compute the description of open branches in its sub-plan. An open branch
	 * describes, that a (transitive) child node had multiple outputs, which have not all been re-joined in the
	 * sub-plan. This method needs to set the <code>openBranches</code> field to a stack of unclosed branches, the
	 * latest one top. A branch is considered closed, if some later node sees all of the branching node's outputs,
	 * no matter if there have been more branches to different paths in the meantime.
	 */
	public abstract void computeUnclosedBranchStack();

	/**
	 * Folds the open-branch stacks of all broadcast inputs into the given stack, closing branches
	 * where possible.
	 *
	 * @param branchesSoFar The open branches accumulated so far (from the regular inputs).
	 * @return The merged list of open branches, including those of the broadcast inputs.
	 */
	protected List<UnclosedBranchDescriptor> computeUnclosedBranchStackForBroadcastInputs(
														List<UnclosedBranchDescriptor> branchesSoFar) {
		// handle the data flow branching for the broadcast inputs
		for (DagConnection broadcastInput : getBroadcastConnections()) {
			OptimizerNode bcSource = broadcastInput.getSource();
			addClosedBranches(bcSource.closedBranchingNodes);

			List<UnclosedBranchDescriptor> bcBranches = bcSource.getBranchesForParent(broadcastInput);

			ArrayList<UnclosedBranchDescriptor> mergedBranches = new ArrayList<UnclosedBranchDescriptor>();
			mergeLists(branchesSoFar, bcBranches, mergedBranches, true);
			branchesSoFar = mergedBranches.isEmpty() ? Collections.<UnclosedBranchDescriptor>emptyList() :
					mergedBranches;
		}
		return branchesSoFar;
	}

	/**
	 * Computes the plan alternatives for this node, and implicitly for all nodes that are children of
	 * this node. This method must determine for each alternative the global and local properties
	 * and the costs. This method may recursively call <code>getAlternatives()</code> on its children
	 * to get their plan alternatives, and build its own alternatives on top of those.
	 *
	 * @param estimator
	 *        The cost estimator used to estimate the costs of each plan alternative.
	 * @return A list containing all plan alternatives.
	 */
	public abstract List<PlanNode> getAlternativePlans(CostEstimator estimator);

	/**
	 * This method implements the visit of a depth-first graph traversing visitor. Implementers must first
	 * call the <code>preVisit()</code> method, then hand the visitor to their children, and finally call
	 * the <code>postVisit()</code> method.
	 *
	 * @param visitor
	 *        The graph traversing visitor.
	 * @see org.apache.flink.util.Visitable#accept(org.apache.flink.util.Visitor)
	 */
	@Override
	public abstract void accept(Visitor<OptimizerNode> visitor);

	public abstract SemanticProperties getSemanticProperties();

	// ------------------------------------------------------------------------
	//                          Getters / Setters
	// ------------------------------------------------------------------------

	@Override
	public Iterable<OptimizerNode> getPredecessors() {
		// predecessors are the sources of both the regular and the broadcast inputs
		List<OptimizerNode> allPredecessors = new ArrayList<OptimizerNode>();

		for (DagConnection dagConnection : getIncomingConnections()) {
			allPredecessors.add(dagConnection.getSource());
		}

		for (DagConnection conn : getBroadcastConnections()) {
			allPredecessors.add(conn.getSource());
		}

		return allPredecessors;
	}

	/**
	 * Gets the ID of this node. If the id has not yet been set, this method returns -1;
	 *
	 * @return This node's id, or -1, if not yet set.
	 */
	public int getId() {
		return this.id;
	}

	/**
	 * Sets the ID of this node. May only be called once; IDs must be positive.
	 *
	 * @param id
	 *        The id for this node.
	 * @throws IllegalArgumentException If the id is not positive.
	 * @throws IllegalStateException If the id has already been set.
	 */
	public void initId(int id) {
		if (id <= 0) {
			throw new IllegalArgumentException();
		}

		if (this.id == -1) {
			this.id = id;
		} else {
			throw new IllegalStateException("Id has already been initialized.");
		}
	}

	/**
	 * Adds the broadcast connection identified by the given {@code name} to this node.
	 *
	 * @param broadcastConnection The connection to add.
	 */
	public void addBroadcastConnection(String name, DagConnection broadcastConnection) {
		// names and connections are kept in two parallel lists, matched by index
		this.broadcastConnectionNames.add(name);
		this.broadcastConnections.add(broadcastConnection);
	}

	/**
	 * Return the list of names associated with broadcast inputs for this node.
	 */
	public List<String> getBroadcastConnectionNames() {
		return this.broadcastConnectionNames;
	}

	/**
	 * Return the list of inputs associated with broadcast variables for this node.
	 */
	public List<DagConnection> getBroadcastConnections() {
		return this.broadcastConnections;
	}

	/**
	 * Adds a new outgoing connection to this node.
	 *
	 * @param connection
	 *        The connection to add.
	 * @throws CompilerException If the node already has 64 outputs (the branch-tracking bit
	 *         vector is a {@code long}, see {@link UnclosedBranchDescriptor}).
	 */
	public void addOutgoingConnection(DagConnection connection) {
		if (this.outgoingConnections == null) {
			this.outgoingConnections = new ArrayList<DagConnection>();
		} else {
			if (this.outgoingConnections.size() == 64) {
				throw new CompilerException("Cannot currently handle nodes with more than 64 outputs.");
			}
		}

		this.outgoingConnections.add(connection);
	}

	/**
	 * The list of outgoing connections from this node to succeeding tasks.
	 *
	 * @return The list of outgoing connections.
	 */
	public List<DagConnection> getOutgoingConnections() {
		return this.outgoingConnections;
	}

	/**
	 * Gets the operator represented by this optimizer node.
	 *
	 * @return This node's operator.
	 */
	public Operator<?> getOperator() {
		return this.operator;
	}

	/**
	 * Gets the parallelism for the operator represented by this optimizer node.
	 * The parallelism denotes how many parallel instances of the operator on will be
	 * spawned during the execution. If this value is {@link ExecutionConfig#PARALLELISM_DEFAULT}
	 * then the system will take the default number of parallel instances.
	 *
	 * @return The parallelism of the operator.
	 */
	public int getParallelism() {
		return this.parallelism;
	}

	/**
	 * Sets the parallelism for this optimizer node.
	 * The parallelism denotes how many parallel instances of the operator will be
	 * spawned during the execution.
	 *
	 * @param parallelism The parallelism to set. If this value is {@link ExecutionConfig#PARALLELISM_DEFAULT}
	 *                    then the system will take the default number of parallel instances.
	 * @throws IllegalArgumentException If the parallelism is smaller than one.
	 */
	public void setParallelism(int parallelism) {
		if (parallelism < 1 && parallelism != ExecutionConfig.PARALLELISM_DEFAULT) {
			throw new IllegalArgumentException("Parallelism of " + parallelism + " is invalid.");
		}
		this.parallelism = parallelism;
	}

	/**
	 * Gets the amount of memory that all subtasks of this task have jointly available.
	 *
	 * @return The total amount of memory across all subtasks, or -1 if unknown.
	 */
	public long getMinimalMemoryAcrossAllSubTasks() {
		return this.minimalMemoryPerSubTask == -1 ? -1 : this.minimalMemoryPerSubTask * this.parallelism;
	}

	public boolean isOnDynamicPath() {
		return this.onDynamicPath;
	}

	/**
	 * Determines from the inputs whether this node lies on a dynamic (iteration) path. If any
	 * input is dynamic, the node becomes dynamic and adopts the given cost weight; connections
	 * from static inputs are then marked as cached.
	 *
	 * @param costWeight The cost weight to assign if this node is on a dynamic path.
	 */
	public void identifyDynamicPath(int costWeight) {
		boolean anyDynamic = false;
		boolean allDynamic = true;

		for (DagConnection conn : getIncomingConnections()) {
			boolean dynamicIn = conn.isOnDynamicPath();
			anyDynamic |= dynamicIn;
			allDynamic &= dynamicIn;
		}

		for (DagConnection conn : getBroadcastConnections()) {
			boolean dynamicIn = conn.isOnDynamicPath();
			anyDynamic |= dynamicIn;
			allDynamic &= dynamicIn;
		}

		if (anyDynamic) {
			this.onDynamicPath = true;
			this.costWeight = costWeight;
			if (!allDynamic) {
				// this node joins static and dynamic path.
				// mark the connections where the source is not dynamic as cached
				for (DagConnection conn : getIncomingConnections()) {
					if (!conn.getSource().isOnDynamicPath()) {
						conn.setMaterializationMode(conn.getMaterializationMode().makeCached());
					}
				}

				// broadcast variables are always cached, because they stay unchanged available in the
				// runtime context of the functions
			}
		}
	}

	public int getCostWeight() {
		return this.costWeight;
	}

	/**
	 * Gets the maximum depth of this node in the DAG, i.e. the longest path (over regular and
	 * broadcast inputs) from any source to this node.
	 */
	public int getMaxDepth() {
		int maxDepth = 0;
		for (DagConnection conn : getIncomingConnections()) {
			maxDepth = Math.max(maxDepth, conn.getMaxDepth());
		}
		for (DagConnection conn : getBroadcastConnections()) {
			maxDepth = Math.max(maxDepth, conn.getMaxDepth());
		}

		return maxDepth;
	}

	/**
	 * Gets the properties that are interesting for this node to produce.
	 *
	 * @return The interesting properties for this node, or null, if not yet computed.
	 */
	public InterestingProperties getInterestingProperties() {
		return this.intProps;
	}

	@Override
	public long getEstimatedOutputSize() {
		return this.estimatedOutputSize;
	}

	@Override
	public long getEstimatedNumRecords() {
		return this.estimatedNumRecords;
	}

	public void setEstimatedOutputSize(long estimatedOutputSize) {
		this.estimatedOutputSize = estimatedOutputSize;
	}

	public void setEstimatedNumRecords(long estimatedNumRecords) {
		this.estimatedNumRecords = estimatedNumRecords;
	}

	@Override
	public float getEstimatedAvgWidthPerOutputRecord() {
		// only meaningful when both estimates are known and positive; -1.0f signals "unknown"
		if (this.estimatedOutputSize > 0 && this.estimatedNumRecords > 0) {
			return ((float) this.estimatedOutputSize) / this.estimatedNumRecords;
		} else {
			return -1.0f;
		}
	}

	/**
	 * Checks whether this node has branching output. A node's output is branched, if it has more
	 * than one output connection.
	 *
	 * @return True, if the node's output branches. False otherwise.
	 */
	public boolean isBranching() {
		return getOutgoingConnections() != null && getOutgoingConnections().size() > 1;
	}

	/**
	 * Marks every outgoing connection of this node as a pipeline breaker.
	 *
	 * @throws IllegalStateException If the outgoing connections have not been set up yet.
	 */
	public void markAllOutgoingConnectionsAsPipelineBreaking() {
		if (this.outgoingConnections == null) {
			throw new IllegalStateException("The outgoing connections have not yet been initialized.");
		}
		for (DagConnection conn : getOutgoingConnections()) {
			conn.markBreaksPipeline();
		}
	}

	// ------------------------------------------------------------------------
	//                              Miscellaneous
	// ------------------------------------------------------------------------

	/**
	 * Checks, if all outgoing connections have their interesting properties set from their target nodes.
	 *
	 * @return True, if on all outgoing connections, the interesting properties are set. False otherwise.
	 */
	public boolean haveAllOutputConnectionInterestingProperties() {
		for (DagConnection conn : getOutgoingConnections()) {
			if (conn.getInterestingProperties() == null) {
				return false;
			}
		}
		return true;
	}

	/**
	 * Computes all the interesting properties that are relevant to this node. The interesting
	 * properties are a union of the interesting properties on each outgoing connection.
	 * However, if two interesting properties on the outgoing connections overlap,
	 * the interesting properties will occur only once in this set. For that, this
	 * method deduplicates and merges the interesting properties.
	 * This method returns copies of the original interesting properties objects and
	 * leaves the original objects, contained by the connections, unchanged.
	 */
	public void computeUnionOfInterestingPropertiesFromSuccessors() {
		List<DagConnection> conns = getOutgoingConnections();
		if (conns.size() == 0) {
			// no outgoing connections, so no successor contributes interesting properties
			this.intProps = new InterestingProperties();
		} else {
			// clone the first so the connection's own object stays untouched, then merge the rest in
			this.intProps = conns.get(0).getInterestingProperties().clone();
			for (int i = 1; i < conns.size(); i++) {
				this.intProps.addInterestingProperties(conns.get(i).getInterestingProperties());
			}
		}
		this.intProps.dropTrivials();
	}

	/**
	 * Clears the interesting properties of this node and of all its incoming and broadcast
	 * connections.
	 */
	public void clearInterestingProperties() {
		this.intProps = null;
		for (DagConnection conn : getIncomingConnections()) {
			conn.clearInterestingProperties();
		}
		for (DagConnection conn : getBroadcastConnections()) {
			conn.clearInterestingProperties();
		}
	}

	/**
	 * Causes this node to compute its output estimates (such as number of rows, size in bytes)
	 * based on the inputs and the compiler hints. The compiler hints are instantiated with conservative
	 * default values which are used if no other values are provided. Nodes may access the statistics to
	 * determine relevant information.
	 *
	 * @param statistics
	 *        The statistics object which may be accessed to get statistical information.
	 *        The parameter may be null, if no statistics are available.
	 */
	public void computeOutputEstimates(DataStatistics statistics) {
		// sanity checking
		for (DagConnection c : getIncomingConnections()) {
			if (c.getSource() == null) {
				throw new CompilerException("Bug: Estimate computation called before inputs have been set.");
			}
		}

		// let every operator do its computation
		computeOperatorSpecificDefaultEstimates(statistics);

		// normalize any negative estimates to the -1 "unknown" marker
		if (this.estimatedOutputSize < 0) {
			this.estimatedOutputSize = -1;
		}
		if (this.estimatedNumRecords < 0) {
			this.estimatedNumRecords = -1;
		}

		// overwrite default estimates with hints, if given
		if (getOperator() == null || getOperator().getCompilerHints() == null) {
			return ;
		}

		CompilerHints hints = getOperator().getCompilerHints();
		if (hints.getOutputSize() >= 0) {
			this.estimatedOutputSize = hints.getOutputSize();
		}

		if (hints.getOutputCardinality() >= 0) {
			this.estimatedNumRecords = hints.getOutputCardinality();
		}

		if (hints.getFilterFactor() >= 0.0f) {
			if (this.estimatedNumRecords >= 0) {
				// scale our own estimates by the filter factor
				this.estimatedNumRecords = (long) (this.estimatedNumRecords * hints.getFilterFactor());

				if (this.estimatedOutputSize >= 0) {
					this.estimatedOutputSize = (long) (this.estimatedOutputSize * hints.getFilterFactor());
				}
			}
			else if (this instanceof SingleInputNode) {
				// no own estimate: derive the record count from the predecessor's estimate
				OptimizerNode pred = ((SingleInputNode) this).getPredecessorNode();
				if (pred != null && pred.getEstimatedNumRecords() >= 0) {
					this.estimatedNumRecords = (long) (pred.getEstimatedNumRecords() * hints.getFilterFactor());
				}
			}
		}

		// use the width to infer the cardinality (given size) and vice versa
		if (hints.getAvgOutputRecordSize() >= 1) {
			// the estimated number of rows based on size
			if (this.estimatedNumRecords == -1 && this.estimatedOutputSize >= 0) {
				this.estimatedNumRecords = (long) (this.estimatedOutputSize / hints.getAvgOutputRecordSize());
			}
			else if (this.estimatedOutputSize == -1 && this.estimatedNumRecords >= 0) {
				this.estimatedOutputSize = (long) (this.estimatedNumRecords * hints.getAvgOutputRecordSize());
			}
		}
	}

	protected abstract void computeOperatorSpecificDefaultEstimates(DataStatistics statistics);

	// ------------------------------------------------------------------------
	// Reading of stub annotations
	// ------------------------------------------------------------------------

	/**
	 * Reads all stub annotations, i.e. which fields remain constant, what cardinality bounds the
	 * functions have, which fields remain unique.
	 */
	protected void readStubAnnotations() {
		readUniqueFieldsAnnotation();
	}

	/**
	 * Reads the unique-fields hint from the operator's compiler hints into {@code uniqueFields}.
	 */
	protected void readUniqueFieldsAnnotation() {
		if (this.operator.getCompilerHints() != null) {
			Set<FieldSet> uniqueFieldSets = operator.getCompilerHints().getUniqueFields();
			if (uniqueFieldSets != null) {
				if (this.uniqueFields == null) {
					this.uniqueFields = new HashSet<FieldSet>();
				}
				this.uniqueFields.addAll(uniqueFieldSets);
			}
		}
	}

	// ------------------------------------------------------------------------
	// Access of stub annotations
	// ------------------------------------------------------------------------

	/**
	 * Gets the FieldSets which are unique in the output of the node.
	 */
	public Set<FieldSet> getUniqueFields() {
		return this.uniqueFields == null ? Collections.<FieldSet>emptySet() : this.uniqueFields;
	}

	// --------------------------------------------------------------------------------------------
	//                                    Pruning
	// --------------------------------------------------------------------------------------------

	/**
	 * Prunes the given list of plan candidates in place, keeping for each group of candidates that
	 * made the same choices at the open branch points only the cheapest ones (with respect to the
	 * interesting properties).
	 *
	 * @param plans The candidate plans; pruned in place.
	 * @throws CompilerException If the list is empty (no plan meets the requirements).
	 */
	protected void prunePlanAlternatives(List<PlanNode> plans) {
		if (plans.isEmpty()) {
			throw new CompilerException("No plan meeting the requirements could be created @ " + this
					+ ". Most likely reason: Too restrictive plan hints.");
		}

		// shortcut for the simple case
		if (plans.size() == 1) {
			return;
		}

		// we can only compare plan candidates that made equal choices
		// at the branching points. for each choice at a branching point,
		// we need to keep the cheapest (wrt. interesting properties).
		// if we do not keep candidates for each branch choice, we might not
		// find branch compatible candidates when joining the branches back.

		// for pruning, we are quasi AFTER the node, so in the presence of
		// branches, we need to form the per-branch-choice groups by the choice
		// they made at the latest un-joined branching node. Note that this is
		// different from the check for branch compatibility of candidates, as
		// this happens on the input sub-plans and hence BEFORE the node (therefore
		// it is relevant to find the latest (partially) joined branch point).

		if (this.openBranches == null || this.openBranches.isEmpty()) {
			prunePlanAlternativesWithCommonBranching(plans);
		} else {
			// partition the candidates into groups that made the same sub-plan candidate
			// choice at the latest unclosed branch point

			final OptimizerNode[] branchDeterminers = new OptimizerNode[this.openBranches.size()];

			// latest branch point first
			for (int i = 0; i < branchDeterminers.length; i++) {
				branchDeterminers[i] = this.openBranches.get(this.openBranches.size() - 1 - i).getBranchingNode();
			}

			// this sorter sorts by the candidate choice at the branch point
			// (by identity hash code, so "same choice" candidates become adjacent)
			Comparator<PlanNode> sorter = new Comparator<PlanNode>() {

				@Override
				public int compare(PlanNode o1, PlanNode o2) {
					for (OptimizerNode branchDeterminer : branchDeterminers) {
						PlanNode n1 = o1.getCandidateAtBranchPoint(branchDeterminer);
						PlanNode n2 = o2.getCandidateAtBranchPoint(branchDeterminer);
						int hash1 = System.identityHashCode(n1);
						int hash2 = System.identityHashCode(n2);

						if (hash1 != hash2) {
							return hash1 - hash2;
						}
					}
					return 0;
				}
			};
			Collections.sort(plans, sorter);

			List<PlanNode> result = new ArrayList<PlanNode>();
			List<PlanNode> turn = new ArrayList<PlanNode>();

			final PlanNode[] determinerChoice = new PlanNode[branchDeterminers.length];

			while (!plans.isEmpty()) {
				// take one as the determiner
				turn.clear();
				PlanNode determiner = plans.remove(plans.size() - 1);
				turn.add(determiner);

				for (int i = 0; i < determinerChoice.length; i++) {
					determinerChoice[i] = determiner.getCandidateAtBranchPoint(branchDeterminers[i]);
				}

				// go backwards through the plans and find all that are equal
				boolean stillEqual = true;
				for (int k = plans.size() - 1; k >= 0 && stillEqual; k--) {
					PlanNode toCheck = plans.get(k);

					for (int i = 0; i < branchDeterminers.length; i++) {
						PlanNode checkerChoice = toCheck.getCandidateAtBranchPoint(branchDeterminers[i]);

						if (checkerChoice != determinerChoice[i]) {
							// not the same anymore
							stillEqual = false;
							break;
						}
					}

					if (stillEqual) {
						// the same
						plans.remove(k);
						turn.add(toCheck);
					}
				}

				// now that we have only plans with the same branch alternatives, prune!
				if (turn.size() > 1) {
					prunePlanAlternativesWithCommonBranching(turn);
				}

				result.addAll(turn);
			}

			// after all turns are complete
			plans.clear();
			plans.addAll(result);
		}
	}

	/**
	 * Prunes candidates that all made the same branch choices: keeps the overall cheapest plan,
	 * plus the cheapest plan for each interesting global property, plus for each met global
	 * property the cheapest plan per interesting local property.
	 *
	 * @param plans The candidate plans; pruned in place.
	 */
	protected void prunePlanAlternativesWithCommonBranching(List<PlanNode> plans) {
		// for each interesting property, which plans are cheapest
		final RequestedGlobalProperties[] gps = this.intProps.getGlobalProperties().toArray(
													new RequestedGlobalProperties[this.intProps.getGlobalProperties().size()]);
		final RequestedLocalProperties[] lps = this.intProps.getLocalProperties().toArray(
													new RequestedLocalProperties[this.intProps.getLocalProperties().size()]);

		final PlanNode[][] toKeep = new PlanNode[gps.length][];
		final PlanNode[] cheapestForGlobal = new PlanNode[gps.length];

		PlanNode cheapest = null; // the overall cheapest plan

		// go over all plans from the list
		for (PlanNode candidate : plans) {
			// check if that plan is the overall cheapest
			if (cheapest == null || (cheapest.getCumulativeCosts().compareTo(candidate.getCumulativeCosts()) > 0)) {
				cheapest = candidate;
			}

			// find the interesting global properties that this plan matches
			for (int i = 0; i < gps.length; i++) {
				if (gps[i].isMetBy(candidate.getGlobalProperties())) {
					// the candidate meets the global property requirements. That means
					// it has a chance that its local properties are re-used (they would be
					// destroyed if global properties need to be established)

					if (cheapestForGlobal[i] == null || (cheapestForGlobal[i].getCumulativeCosts().compareTo(candidate.getCumulativeCosts()) > 0)) {
						cheapestForGlobal[i] = candidate;
					}

					final PlanNode[] localMatches;
					if (toKeep[i] == null) {
						localMatches = new PlanNode[lps.length];
						toKeep[i] = localMatches;
					} else {
						localMatches = toKeep[i];
					}

					for (int k = 0; k < lps.length; k++) {
						if (lps[k].isMetBy(candidate.getLocalProperties())) {
							final PlanNode previous = localMatches[k];
							if (previous == null || previous.getCumulativeCosts().compareTo(candidate.getCumulativeCosts()) > 0) {
								// this one is cheaper!
								localMatches[k] = candidate;
							}
						}
					}
				}
			}
		}

		// all plans are set now
		plans.clear();

		// add the cheapest plan
		if (cheapest != null) {
			plans.add(cheapest);
			cheapest.setPruningMarker(); // remember that that plan is in the set
		}

		// add all others, which are optimal for some interesting properties
		// (the pruning marker prevents duplicates)
		for (int i = 0; i < gps.length; i++) {
			if (toKeep[i] != null) {
				final PlanNode[] localMatches = toKeep[i];
				for (final PlanNode n : localMatches) {
					if (n != null && !n.isPruneMarkerSet()) {
						n.setPruningMarker();
						plans.add(n);
					}
				}
			}
			if (cheapestForGlobal[i] != null) {
				final PlanNode n = cheapestForGlobal[i];
				if (!n.isPruneMarkerSet()) {
					n.setPruningMarker();
					plans.add(n);
				}
			}
		}
	}

	// --------------------------------------------------------------------------------------------
	//                Handling of branches
	// --------------------------------------------------------------------------------------------

	public boolean hasUnclosedBranches() {
		return this.openBranches != null && !this.openBranches.isEmpty();
	}

	public Set<OptimizerNode> getClosedBranchingNodes() {
		return this.closedBranchingNodes;
	}

	public List<UnclosedBranchDescriptor> getOpenBranches() {
		return this.openBranches;
	}

	/**
	 * Computes the open-branch stack as seen by the given parent connection. If this node itself
	 * branches (multiple outputs), a new descriptor for this node is pushed, with the bit set
	 * that corresponds to the output leading to the parent.
	 *
	 * @param toParent The outgoing connection to the parent asking for the branch info.
	 * @return The stack of open branches as seen through the given connection.
	 * @throws CompilerException If the connection is not among this node's outputs, or the node
	 *         has no outputs at all.
	 */
	protected List<UnclosedBranchDescriptor> getBranchesForParent(DagConnection toParent) {
		if (this.outgoingConnections.size() == 1) {
			// return our own stack of open branches, because nothing is added
			if (this.openBranches == null || this.openBranches.isEmpty()) {
				return Collections.emptyList();
			} else {
				return new ArrayList<UnclosedBranchDescriptor>(this.openBranches);
			}
		}
		else if (this.outgoingConnections.size() > 1) {
			// we branch add a branch info to the stack
			List<UnclosedBranchDescriptor> branches = new ArrayList<UnclosedBranchDescriptor>(4);
			if (this.openBranches != null) {
				branches.addAll(this.openBranches);
			}

			// find out which output number the connection to the parent has
			int num;
			for (num = 0; num < this.outgoingConnections.size(); num++) {
				if (this.outgoingConnections.get(num) == toParent) {
					break;
				}
			}
			if (num >= this.outgoingConnections.size()) {
				throw new CompilerException("Error in compiler: "
						+ "Parent to get branch info for is not contained in the outgoing connections.");
			}

			// create the description and add it
			long bitvector = 0x1L << num;
			branches.add(new UnclosedBranchDescriptor(this, bitvector));
			return branches;
		}
		else {
			throw new CompilerException(
					"Error in compiler: Cannot get branch info for successor in a node with no successors.");
		}
	}

	/**
	 * Removes from the given open-branch list all branches whose branching node has already been
	 * closed at this node.
	 *
	 * @param openList The list of open branches; modified in place. May be null.
	 */
	protected void removeClosedBranches(List<UnclosedBranchDescriptor> openList) {
		if (openList == null || openList.isEmpty()
				|| this.closedBranchingNodes == null || this.closedBranchingNodes.isEmpty()) {
			return;
		}

		Iterator<UnclosedBranchDescriptor> it = openList.iterator();
		while (it.hasNext()) {
			if (this.closedBranchingNodes.contains(it.next().getBranchingNode())) {
				//this branch was already closed --> remove it from the list
				it.remove();
			}
		}
	}

	/**
	 * Records a set of branching nodes as closed (merging it into this node's closed set).
	 *
	 * @param alreadyClosed The branching nodes that have been closed. May be null or empty.
	 */
	protected void addClosedBranches(Set<OptimizerNode> alreadyClosed) {
		if (alreadyClosed == null || alreadyClosed.isEmpty()) {
			return;
		}

		if (this.closedBranchingNodes == null) {
			this.closedBranchingNodes = new HashSet<OptimizerNode>(alreadyClosed);
		} else {
			this.closedBranchingNodes.addAll(alreadyClosed);
		}
	}

	/**
	 * Records a single branching node as closed.
	 *
	 * @param alreadyClosed The branching node that has been closed.
	 */
	protected void addClosedBranch(OptimizerNode alreadyClosed) {
		if (this.closedBranchingNodes == null) {
			this.closedBranchingNodes = new HashSet<OptimizerNode>();
		}
		this.closedBranchingNodes.add(alreadyClosed);
	}

	/**
	 * Checks whether two candidate plans for the sub-plan of this node are comparable. The two
	 * alternative plans are comparable, if
	 *
	 * a) There is no branch in the sub-plan of this node
	 * b) Both candidates have the same candidate as the child at the last open branch.
	 *
	 * @param plan1 The root node of the first candidate plan.
	 * @param plan2 The root node of the second candidate plan.
	 * @return True if the nodes are branch compatible in the inputs.
	 */
	protected boolean areBranchCompatible(PlanNode plan1, PlanNode plan2) {
		if (plan1 == null || plan2 == null) {
			throw new NullPointerException();
		}

		// if there is no open branch, the children are always compatible.
		// in most plans, that will be the dominant case
		if (this.hereJoinedBranches == null || this.hereJoinedBranches.isEmpty()) {
			return true;
		}

		for (OptimizerNode joinedBrancher : hereJoinedBranches) {
			final PlanNode branch1Cand = plan1.getCandidateAtBranchPoint(joinedBrancher);
			final PlanNode branch2Cand = plan2.getCandidateAtBranchPoint(joinedBrancher);

			if (branch1Cand != null && branch2Cand != null && branch1Cand != branch2Cand) {
				return false;
			}
		}
		return true;
	}

	/**
	 * Merges the open-branch lists of two children into {@code result}, closing branches whose
	 * outputs are now all joined. The node IDs are assigned in graph-traversal order (pre-order),
	 * hence, each list is sorted by ID in ascending order and all consecutive lists start with
	 * IDs in ascending order.
	 *
	 * @param child1open The open branches of the first child; pruned of closed branches in place.
	 * @param child2open The open branches of the second child; pruned of closed branches in place.
	 * @param result The list to receive the merged open branches; cleared first.
	 * @param markJoinedBranchesAsPipelineBreaking True, if the outgoing connections of branching
	 *        nodes that are (re-)joined here should be marked as pipeline breakers.
	 * @return True, if at least one branch was closed by this merge.
	 */
	protected final boolean mergeLists(List<UnclosedBranchDescriptor> child1open,
											List<UnclosedBranchDescriptor> child2open,
											List<UnclosedBranchDescriptor> result,
											boolean markJoinedBranchesAsPipelineBreaking) {

		//remove branches which have already been closed
		removeClosedBranches(child1open);
		removeClosedBranches(child2open);

		result.clear();

		// check how many open branches we have. the cases:
		// 1) if both are null or empty, the result is empty
		// 2) if one side is null (or empty), the result is the other side.
		// 3) both are set, then we need to merge.
		if (child1open == null || child1open.isEmpty()) {
			if(child2open != null && !child2open.isEmpty()) {
				result.addAll(child2open);
			}
			return false;
		}

		if (child2open == null || child2open.isEmpty()) {
			result.addAll(child1open);
			return false;
		}

		int index1 = child1open.size() - 1;
		int index2 = child2open.size() - 1;

		boolean didCloseABranch = false;

		// as both lists (child1open and child2open) are sorted in ascending ID order
		// we can do a merge-join-like loop which preserved the order in the result list
		// and eliminates duplicates
		while (index1 >= 0 || index2 >= 0) {
			int id1 = -1;
			int id2 = index2 >= 0 ? child2open.get(index2).getBranchingNode().getId() : -1;

			// copy over the entries with the strictly larger id from either side
			while (index1 >= 0 && (id1 = child1open.get(index1).getBranchingNode().getId()) > id2) {
				result.add(child1open.get(index1));
				index1--;
			}
			while (index2 >= 0 && (id2 = child2open.get(index2).getBranchingNode().getId()) > id1) {
				result.add(child2open.get(index2));
				index2--;
			}

			// match: they share a common branching child
			if (id1 == id2) {
				didCloseABranch = true;

				// if this is the latest common child, remember it
				OptimizerNode currBanchingNode = child1open.get(index1).getBranchingNode();

				long vector1 = child1open.get(index1).getJoinedPathsVector();
				long vector2 = child2open.get(index2).getJoinedPathsVector();

				// check if this is the same descriptor, (meaning that it contains the same paths)
				// if it is the same, add it only once, otherwise process the join of the paths
				if (vector1 == vector2) {
					result.add(child1open.get(index1));
				}
				else {
					// we merge (re-join) a branch

					// mark the branch as a point where we break the pipeline
					if (markJoinedBranchesAsPipelineBreaking) {
						currBanchingNode.markAllOutgoingConnectionsAsPipelineBreaking();
					}

					if (this.hereJoinedBranches == null) {
						this.hereJoinedBranches = new ArrayList<OptimizerNode>(2);
					}
					this.hereJoinedBranches.add(currBanchingNode);

					// see, if this node closes the branch
					long joinedInputs = vector1 | vector2;

					// this is 2^size - 1, which is all bits set at positions 0..size-1
					long allInputs = (0x1L << currBanchingNode.getOutgoingConnections().size()) - 1;

					if (joinedInputs == allInputs) {
						// closed - we can remove it from the stack
						addClosedBranch(currBanchingNode);
					} else {
						// not quite closed
						result.add(new UnclosedBranchDescriptor(currBanchingNode, joinedInputs));
					}
				}

				index1--;
				index2--;
			}
		}

		// merged. now we need to reverse the list, because we added the elements in reverse order
		Collections.reverse(result);

		return didCloseABranch;
	}

	@Override
	public OptimizerNode getOptimizerNode() {
		return this;
	}

	@Override
	public PlanNode getPlanNode() {
		return null;
	}

	@Override
	public Iterable<DumpableConnection<OptimizerNode>> getDumpableInputs() {
		// dump both regular and broadcast inputs
		List<DumpableConnection<OptimizerNode>> allInputs = new ArrayList<DumpableConnection<OptimizerNode>>();
		allInputs.addAll(getIncomingConnections());
		allInputs.addAll(getBroadcastConnections());
		return allInputs;
	}

	@Override
	public String toString() {
		StringBuilder bld = new StringBuilder();

		bld.append(getOperatorName());
		bld.append(" (").append(getOperator().getName()).append(") ");

		int i = 1;
		for (DagConnection conn : getIncomingConnections()) {
			String shipStrategyName = conn.getShipStrategy() == null ? "null" : conn.getShipStrategy().name();
			bld.append('(').append(i++).append(":").append(shipStrategyName).append(')');
		}

		return bld.toString();
	}

	// --------------------------------------------------------------------------------------------

	/**
	 * Description of an unclosed branch. An unclosed branch is when the data flow branched (one operator's
	 * result is consumed by multiple targets), but these different branches (targets) have not been joined
	 * together.
	 */
	public static final class UnclosedBranchDescriptor {

		protected OptimizerNode branchingNode; // the node with multiple outputs where the branch started

		protected long joinedPathsVector; // bit i is set if output i of the branching node is tracked

		/**
		 * Creates a new branching descriptor.
		 *
		 * @param branchingNode The node where the branch occurred (the node with multiple outputs).
		 * @param joinedPathsVector A bit vector describing which branches are tracked by this descriptor.
		 *                          The bit vector is one, where the branch is tracked, zero otherwise.
		 */
		protected UnclosedBranchDescriptor(OptimizerNode branchingNode, long joinedPathsVector) {
			this.branchingNode = branchingNode;
			this.joinedPathsVector = joinedPathsVector;
		}

		public OptimizerNode getBranchingNode() {
			return this.branchingNode;
		}

		public long getJoinedPathsVector() {
			return this.joinedPathsVector;
		}

		@Override
		public String toString() {
			return "(" + this.branchingNode.getOperator() + ") [" + this.joinedPathsVector + "]";
		}
	}
}
/*
 * Copyright 2015 Adaptris Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.adaptris.core;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.io.UnsupportedEncodingException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.junit.Test;

import com.adaptris.util.GuidGenerator;
import com.adaptris.util.PseudoRandomIdGenerator;

/**
 * Abstract test case exercising the {@code AdaptrisMessageFactory} contract.
 * <p>
 * Concrete subclasses supply the factory implementation under test via {@link #getMessageFactory()}.
 */
public abstract class AdaptrisMessageFactoryImplCase {

  /** Payload used by all the creation tests. */
  protected static final String TEST_PAYLOAD = "test payload";

  /** @return the concrete factory implementation under test. */
  protected abstract AdaptrisMessageFactory getMessageFactory();

  @Test
  public void testCreateWithEncoding() {
    AdaptrisMessageFactory encodedMF = getMessageFactory();
    encodedMF.setDefaultCharEncoding("ISO-8859-1");
    AdaptrisMessage m1 = encodedMF.newMessage(TEST_PAYLOAD);
    assertNotNull(m1.getContentEncoding());
    // JUnit convention: expected value first, actual second.
    assertEquals("ISO-8859-1", m1.getContentEncoding());
    assertEquals(TEST_PAYLOAD, m1.getContent());

    // Resetting the default encoding to null must yield messages without a content encoding.
    encodedMF.setDefaultCharEncoding(null);
    AdaptrisMessage m3 = encodedMF.newMessage(TEST_PAYLOAD);
    assertNull(m3.getContentEncoding());
  }

  @Test
  public void testCreateBytesSet() {
    byte[] pld = TEST_PAYLOAD.getBytes();
    Set<MetadataElement> mtd = new HashSet<MetadataElement>();
    mtd.add(new MetadataElement("key1", "val1"));
    mtd.add(new MetadataElement("key2", "val2"));
    AdaptrisMessage msg = getMessageFactory().newMessage(pld, mtd);
    // byte[] equals() is identity-based, so compare array contents instead.
    assertTrue(Arrays.equals(pld, msg.getPayload()));
    assertEquals(mtd, msg.getMetadata());
  }

  @Test
  public void testCreateBytes() {
    byte[] pld = TEST_PAYLOAD.getBytes();
    AdaptrisMessage msg = getMessageFactory().newMessage(pld);
    // byte[] equals() is identity-based, so compare array contents instead.
    assertTrue(Arrays.equals(pld, msg.getPayload()));
  }

  @Test
  public void testCreateStringSet() {
    String pld = TEST_PAYLOAD;
    Set<MetadataElement> mtd = new HashSet<MetadataElement>();
    mtd.add(new MetadataElement("key1", "val1"));
    mtd.add(new MetadataElement("key2", "val2"));
    AdaptrisMessage msg = getMessageFactory().newMessage(pld, mtd);
    assertEquals(pld, msg.getContent());
    assertEquals(mtd, msg.getMetadata());
  }

  @Test
  public void testCreateString() {
    String pld = TEST_PAYLOAD;
    AdaptrisMessage msg = getMessageFactory().newMessage(pld);
    assertEquals(pld, msg.getContent());
  }

  @Test
  public void testCreateStringStringSet() throws UnsupportedEncodingException {
    String pld = TEST_PAYLOAD;
    String enc = "ISO-8859-1";
    Set<MetadataElement> mtd = new HashSet<MetadataElement>();
    mtd.add(new MetadataElement("key1", "val1"));
    mtd.add(new MetadataElement("key2", "val2"));
    AdaptrisMessage msg = getMessageFactory().newMessage(pld, enc, mtd);
    // Round-trip the payload bytes through the declared encoding.
    assertEquals(pld, new String(msg.getPayload(), enc));
    assertEquals(mtd, msg.getMetadata());
  }

  @Test
  public void testCreateStringString() throws UnsupportedEncodingException {
    String pld = TEST_PAYLOAD;
    String enc = "ISO-8859-1";
    AdaptrisMessage msg = getMessageFactory().newMessage(pld, enc);
    // Round-trip the payload bytes through the declared encoding.
    assertEquals(pld, new String(msg.getPayload(), enc));
  }

  @Test
  public void testCreateMessageFromSource() throws Exception {
    String pld = TEST_PAYLOAD;
    AdaptrisMessage orig = getMessageFactory().newMessage(pld);
    orig.addMetadata(new MetadataElement("key1", "val1"));
    orig.addMetadata(new MetadataElement("key2", "val2"));
    orig.addEvent(new MessageEventGenerator() {
      @Override
      public String createName() {
        return "event";
      }

      @Override
      public String createQualifier() {
        return "qualifier";
      }

      public Boolean getIsTrackingEndpoint() {
        return Boolean.FALSE;
      }

      @Override
      public boolean isTrackingEndpoint() {
        return false;
      }

      public void setIsTrackingEndpoint(Boolean b) {
      }
    }, true);
    // "key3" is deliberately absent from orig; only "key1" should carry over.
    List<String> keysToKeep = Arrays.asList("key1", "key3");
    AdaptrisMessage dest = getMessageFactory().newMessage(orig, keysToKeep);
    // The derived message has an empty payload; only id, lifecycle event and selected metadata transfer.
    assertEquals(0, dest.getPayload().length);
    assertEquals("Metadata Key Values", orig.getMetadataValue("key1"), dest.getMetadataValue("key1"));
    assertFalse(orig.headersContainsKey("key3"));
    assertFalse(dest.headersContainsKey("key3"));
    assertEquals("MessageId", orig.getUniqueId(), dest.getUniqueId());
    assertEquals("Mle MessageId", orig.getMessageLifecycleEvent().getMessageUniqueId(),
        dest.getMessageLifecycleEvent().getMessageUniqueId());
    assertEquals("MarkerSizes", orig.getMessageLifecycleEvent().getMleMarkers().size(),
        dest.getMessageLifecycleEvent().getMleMarkers().size());
  }

  @Test
  public void testCreateMessageFromSource_NullKeysToKeep() throws Exception {
    String pld = TEST_PAYLOAD;
    AdaptrisMessage orig = getMessageFactory().newMessage(pld);
    orig.addMetadata(new MetadataElement("key1", "val1"));
    orig.addMetadata(new MetadataElement("key2", "val2"));
    orig.addEvent(new MessageEventGenerator() {
      @Override
      public String createName() {
        return "event";
      }

      @Override
      public String createQualifier() {
        return "qualifier";
      }

      public Boolean getIsTrackingEndpoint() {
        return Boolean.FALSE;
      }

      @Override
      public boolean isTrackingEndpoint() {
        return false;
      }

      public void setIsTrackingEndpoint(Boolean b) {
      }
    }, true);
    // A null keysToKeep means every metadata key is preserved.
    AdaptrisMessage dest = getMessageFactory().newMessage(orig, null);
    assertEquals(0, dest.getPayload().length);
    assertEquals("Metadata Key Values", orig.getMetadataValue("key1"), dest.getMetadataValue("key1"));
    assertTrue(orig.headersContainsKey("key2"));
    assertTrue(dest.headersContainsKey("key2"));
    assertEquals("MessageId", orig.getUniqueId(), dest.getUniqueId());
    assertEquals("Mle MessageId", orig.getMessageLifecycleEvent().getMessageUniqueId(),
        dest.getMessageLifecycleEvent().getMessageUniqueId());
    assertEquals("MarkerSizes", orig.getMessageLifecycleEvent().getMleMarkers().size(),
        dest.getMessageLifecycleEvent().getMleMarkers().size());
  }

  @Test
  public void testCreate() {
    AdaptrisMessage msg = getMessageFactory().newMessage();
    assertEquals(0, msg.getPayload().length);
  }

  @Test
  public void testIdGenerator() {
    AdaptrisMessageFactory fac = getMessageFactory();
    // No generator configured: getter is null but the lazy accessor falls back to GuidGenerator.
    assertNull(fac.getUniqueIdGenerator());
    assertEquals(GuidGenerator.class, fac.uniqueIdGenerator().getClass());
    fac.setUniqueIdGenerator(new PseudoRandomIdGenerator("testIdGenerator", false));
    assertNotNull(fac.getUniqueIdGenerator());
    assertEquals(PseudoRandomIdGenerator.class, fac.getUniqueIdGenerator().getClass());
    assertEquals(PseudoRandomIdGenerator.class, fac.uniqueIdGenerator().getClass());
    AdaptrisMessage msg = fac.newMessage();
    // PseudoRandomIdGenerator prefixes generated ids with the configured seed string.
    assertTrue(msg.getUniqueId().startsWith("testIdGenerator"));
  }
}
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.java.psi.formatter.java;

import com.intellij.JavaTestUtil;
import com.intellij.application.options.CodeStyle;
import com.intellij.codeInsight.actions.ReformatCodeProcessor;
import com.intellij.formatting.FormatterTestUtils.Action;
import com.intellij.ide.highlighter.JavaFileType;
import com.intellij.lang.java.JavaLanguage;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.impl.DocumentImpl;
import com.intellij.openapi.roots.LanguageLevelProjectExtension;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CommonCodeStyleSettings;
import com.intellij.psi.codeStyle.DetectableIndentOptionsProvider;
import com.intellij.psi.codeStyle.JavaCodeStyleSettings;
import com.intellij.psi.util.PsiUtil;
import com.intellij.testFramework.LightIdeaTestCase;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.text.LineReader;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.List;

import static com.intellij.formatting.FormatterTestUtils.ACTIONS;
import static com.intellij.formatting.FormatterTestUtils.Action.REFORMAT;

/**
 * Base class for java formatter tests that holds utility methods.
 *
 * @author Denis Zhdanov
 */
public abstract class AbstractJavaFormatterTest extends LightIdeaTestCase {

  /**
   * Prepends {@code i} spaces to every line of {@code initial}.
   *
   * @param initial         text to indent
   * @param i               number of spaces to prepend to each line
   * @param shiftEmptyLines whether empty lines are indented as well
   * @return the indented text, with the original line separators normalized to '\n'
   */
  @NotNull
  public static String shiftIndentInside(@NotNull String initial, final int i, boolean shiftEmptyLines) {
    StringBuilder result = new StringBuilder(initial.length());
    List<byte[]> lines;
    try {
      // Split into raw lines; the text round-trips through UTF-8 bytes because LineReader is byte-based.
      LineReader reader = new LineReader(new ByteArrayInputStream(initial.getBytes(StandardCharsets.UTF_8)));
      lines = reader.readLines();
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }

    boolean first = true;
    for (byte[] line : lines) {
      try {
        if (!first) result.append('\n');
        if (line.length > 0 || shiftEmptyLines) {
          StringUtil.repeatSymbol(result, ' ', i);
        }
        result.append(new String(line, StandardCharsets.UTF_8));
      }
      finally {
        // finally guarantees the flag flips even if appending throws
        first = false;
      }
    }

    return result.toString();
  }

  /** Returns the Java-specific custom code style settings of the current project. */
  public JavaCodeStyleSettings getJavaSettings() {
    return getSettings().getRootSettings().getCustomSettings(JavaCodeStyleSettings.class);
  }

  // Root of the test-data files used by doTest(): <java test data>/psi/formatter/java
  private static final String BASE_PATH = JavaTestUtil.getJavaTestDataPath() + "/psi/formatter/java";

  // Optional character range to restrict formatting to; null means "whole file".
  public TextRange myTextRange;
  // Optional LINE range (start/end line numbers packed into a TextRange); converted to
  // character offsets in replaceAndProcessDocument() and takes precedence over myTextRange.
  public TextRange myLineRange;

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    LanguageLevelProjectExtension.getInstance(getProject()).setLanguageLevel(LanguageLevel.HIGHEST);
  }

  /** Returns the common (language-independent) code style settings for Java in the current project. */
  public CommonCodeStyleSettings getSettings() {
    CodeStyleSettings rootSettings = CodeStyle.getSettings(getProject());
    return rootSettings.getCommonSettings(JavaLanguage.INSTANCE);
  }

  /** Returns the indent options configured for Java files. */
  public CommonCodeStyleSettings.IndentOptions getIndentOptions() {
    return getSettings().getRootSettings().getIndentOptions(JavaFileType.INSTANCE);
  }

  /** Runs a reformat test using "&lt;TestName&gt;.java" as input and "&lt;TestName&gt;_after.java" as expected output. */
  public void doTest() {
    doTest(getTestName(false) + ".java", getTestName(false) + "_after.java");
  }

  /** Runs a reformat test with explicit before/after file names (relative to BASE_PATH). */
  public void doTest(@NotNull String fileNameBefore, @NotNull String fileNameAfter) {
    doTextTest(REFORMAT, loadFile(fileNameBefore), loadFile(fileNameAfter));
  }

  /** Like doTextTest, but with indent-options auto-detection temporarily enabled. */
  public void doTestWithDetectableIndentOptions(@NotNull String text, @NotNull String textAfter) {
    DetectableIndentOptionsProvider provider = DetectableIndentOptionsProvider.getInstance();
    assertNotNull("DetectableIndentOptionsProvider not found", provider);
    provider.setEnabledInTest(true);
    try {
      doTextTest(text, textAfter);
    }
    finally {
      // always restore the global test flag, even on assertion failure
      provider.setEnabledInTest(false);
    }
  }

  /** Reformats {@code text} and asserts the result equals {@code textAfter}. */
  public void doTextTest(@NotNull String text, @NotNull String textAfter) throws IncorrectOperationException {
    doTextTest(REFORMAT, text, textAfter);
  }

  /**
   * Applies the given formatter {@code action} to {@code text} (as file "A.java") and checks that both the
   * document text and the committed PSI text equal {@code textAfter}.
   * NOTE(review): the file language level is pinned to JDK_15_PREVIEW here — presumably so preview syntax
   * in test data parses; confirm when bumping language levels.
   */
  public void doTextTest(@NotNull Action action, @NotNull String text, @NotNull String textAfter) throws IncorrectOperationException {
    final PsiFile file = createFile("A.java", text);
    file.putUserData(PsiUtil.FILE_LANGUAGE_LEVEL_KEY, LanguageLevel.JDK_15_PREVIEW);
    final PsiDocumentManager manager = PsiDocumentManager.getInstance(getProject());
    final Document document = manager.getDocument(file);
    if (document == null) {
      fail("Document is null");
      return;
    }
    replaceAndProcessDocument(action, text, file, document);
    assertEquals(textAfter, document.getText());
    manager.commitDocument(document);
    assertEquals(textAfter, file.getText());
  }

  /**
   * Reformats each input and asserts that every input produces the same output as the first one
   * (i.e. formatting converges to a single canonical form).
   */
  public void formatEveryoneAndCheckIfResultEqual(final String @NotNull ... before) {
    assert before.length > 1;
    final PsiFile file = createFile("A.java", "");
    final PsiDocumentManager manager = PsiDocumentManager.getInstance(getProject());
    final Document document = manager.getDocument(file);
    String afterFirst = replaceAndProcessDocument(REFORMAT, before[0], file, document);
    // before[0] is intentionally processed again here: it must be idempotent too.
    for (String nextBefore: before) {
      assertEquals(afterFirst, replaceAndProcessDocument(REFORMAT, nextBefore, file, document));
    }
  }

  /**
   * Replaces the document content with {@code text} inside a write-action command, commits it,
   * runs the formatter action over myTextRange (or the whole file), and returns the resulting text.
   */
  @NotNull
  private String replaceAndProcessDocument(@NotNull final Action action,
                                           @NotNull final String text,
                                           @NotNull final PsiFile file,
                                           @Nullable final Document document) throws IncorrectOperationException {
    if (document == null) {
      fail("Don't expect the document to be null");
      return null;
    }
    if (myLineRange != null) {
      // translate the requested line range into character offsets against the NEW text
      final DocumentImpl doc = new DocumentImpl(text);
      myTextRange = new TextRange(doc.getLineStartOffset(myLineRange.getStartOffset()), doc.getLineEndOffset(myLineRange.getEndOffset()));
    }
    final PsiDocumentManager manager = PsiDocumentManager.getInstance(getProject());
    CommandProcessor.getInstance().executeCommand(getProject(), () -> ApplicationManager.getApplication().runWriteAction(() -> {
      document.replaceString(0, document.getTextLength(), text);
      // PSI must be in sync with the document before the formatter runs
      manager.commitDocument(document);
      try {
        TextRange rangeToUse = myTextRange;
        if (rangeToUse == null) {
          rangeToUse = file.getTextRange();
        }
        ACTIONS.get(action).run(file, rangeToUse.getStartOffset(), rangeToUse.getEndOffset());
      }
      catch (IncorrectOperationException e) {
        assertTrue(e.getLocalizedMessage(), false);
      }
    }), action == REFORMAT ? ReformatCodeProcessor.getCommandName() : "", "");
    return document.getText();
  }

  /** Wraps {@code before}/{@code after} in a method body of a dummy class and runs a reformat test. */
  public void doMethodTest(@NotNull String before, @NotNull String after) {
    doTextTest(
      REFORMAT,
      "class Foo{\n" + " void foo() {\n" + before + '\n' + " }\n" + "}",
      "class Foo {\n" + " void foo() {\n" + shiftIndentInside(after, 8, false) + '\n' + " }\n" + "}"
    );
  }

  /** Wraps {@code before}/{@code after} in a dummy class body and runs a reformat test. */
  public void doClassTest(@NotNull String before, @NotNull String after) {
    doTextTest(
      REFORMAT,
      "class Foo{\n" + before + '\n' + "}",
      "class Foo {\n" + shiftIndentInside(after, 4, false) + '\n' + "}"
    );
  }

  /** Loads a test-data file from BASE_PATH, normalizing line separators to '\n'. */
  protected static String loadFile(String name) {
    String fullName = BASE_PATH + File.separatorChar + name;
    try {
      String text = FileUtil.loadFile(new File(fullName));
      return StringUtil.convertLineSeparators(text);
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }
}
/* * This file is part of dependency-check-maven. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * Copyright (c) 2014 Jeremy Long. All Rights Reserved. */ package org.owasp.dependencycheck.maven; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.OutputStream; import java.util.List; import java.util.Locale; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.metadata.ArtifactMetadataRetrievalException; import org.apache.maven.artifact.metadata.ArtifactMetadataSource; import org.apache.maven.artifact.repository.ArtifactRepository; import org.apache.maven.artifact.versioning.ArtifactVersion; import org.apache.maven.doxia.sink.Sink; import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.apache.maven.plugins.annotations.Component; import org.apache.maven.plugins.annotations.Parameter; import org.apache.maven.project.MavenProject; import org.apache.maven.reporting.MavenReport; import org.apache.maven.reporting.MavenReportException; import org.apache.maven.settings.Proxy; import org.owasp.dependencycheck.data.nexus.MavenArtifact; import org.owasp.dependencycheck.data.nvdcve.CveDB; import 
org.owasp.dependencycheck.data.nvdcve.DatabaseException; import org.owasp.dependencycheck.data.nvdcve.DatabaseProperties; import org.owasp.dependencycheck.dependency.Confidence; import org.owasp.dependencycheck.dependency.Dependency; import org.owasp.dependencycheck.dependency.Identifier; import org.owasp.dependencycheck.dependency.Vulnerability; import org.owasp.dependencycheck.reporting.ReportGenerator; import org.owasp.dependencycheck.utils.DependencyVersion; import org.owasp.dependencycheck.utils.Settings; /** * * @author Jeremy Long */ public abstract class BaseDependencyCheckMojo extends AbstractMojo implements MavenReport { //<editor-fold defaultstate="collapsed" desc="Private fields"> /** * The properties file location. */ private static final String PROPERTIES_FILE = "mojo.properties"; /** * System specific new line character. */ private static final String NEW_LINE = System.getProperty("line.separator", "\n").intern(); /** * Sets whether or not the external report format should be used. */ @Parameter(property = "metaFileName", defaultValue = "dependency-check.ser", required = true) private String dataFileName; //</editor-fold> // <editor-fold defaultstate="collapsed" desc="Maven bound parameters and components"> /** * The Maven Project Object. */ @Component private MavenProject project; /** * The meta data source for retrieving artifact version information. */ @Component private ArtifactMetadataSource metadataSource; /** * A reference to the local repository. */ @Parameter(property = "localRepository", readonly = true) private ArtifactRepository localRepository; /** * References to the remote repositories. */ @Parameter(property = "project.remoteArtifactRepositories", readonly = true) private List<ArtifactRepository> remoteRepositories; /** * List of Maven project of the current build */ @Parameter(readonly = true, required = true, property = "reactorProjects") private List<MavenProject> reactorProjects; /** * The output directory. 
This generally maps to "target". */ @Parameter(defaultValue = "${project.build.directory}", required = true) private File outputDirectory; /** * Specifies the destination directory for the generated Dependency-Check report. This generally maps to "target/site". */ @Parameter(property = "project.reporting.outputDirectory", required = true) private File reportOutputDirectory; /** * Specifies if the build should be failed if a CVSS score above a specified level is identified. The default is 11 which * means since the CVSS scores are 0-10, by default the build will never fail. */ @SuppressWarnings("CanBeFinal") @Parameter(property = "failBuildOnCVSS", defaultValue = "11", required = true) private float failBuildOnCVSS = 11; /** * Sets whether auto-updating of the NVD CVE/CPE data is enabled. It is not recommended that this be turned to false. Default * is true. */ @SuppressWarnings("CanBeFinal") @Parameter(property = "autoupdate", defaultValue = "true", required = true) private boolean autoUpdate = true; /** * Generate aggregate reports in multi-module projects. * * @deprecated use the aggregate goal instead */ @Parameter(property = "aggregate", defaultValue = "false") @Deprecated private boolean aggregate; /** * The report format to be generated (HTML, XML, VULN, ALL). This configuration option has no affect if using this within the * Site plug-in unless the externalReport is set to true. Default is HTML. */ @SuppressWarnings("CanBeFinal") @Parameter(property = "format", defaultValue = "HTML", required = true) private String format = "HTML"; /** * The Maven settings. */ @Parameter(property = "mavenSettings", defaultValue = "${settings}", required = false) private org.apache.maven.settings.Settings mavenSettings; /** * The maven settings proxy id. */ @SuppressWarnings("CanBeFinal") @Parameter(property = "mavenSettingsProxyId", required = false) private String mavenSettingsProxyId; /** * The Connection Timeout. 
*/ @SuppressWarnings("CanBeFinal") @Parameter(property = "connectionTimeout", defaultValue = "", required = false) private String connectionTimeout = null; /** * The path to the suppression file. */ @SuppressWarnings("CanBeFinal") @Parameter(property = "suppressionFile", defaultValue = "", required = false) private String suppressionFile = null; /** * Flag indicating whether or not to show a summary in the output. */ @SuppressWarnings("CanBeFinal") @Parameter(property = "showSummary", defaultValue = "true", required = false) private boolean showSummary = true; /** * Whether or not the Jar Analyzer is enabled. */ @SuppressWarnings("CanBeFinal") @Parameter(property = "jarAnalyzerEnabled", defaultValue = "true", required = false) private boolean jarAnalyzerEnabled = true; /** * Whether or not the Archive Analyzer is enabled. */ @SuppressWarnings("CanBeFinal") @Parameter(property = "archiveAnalyzerEnabled", defaultValue = "true", required = false) private boolean archiveAnalyzerEnabled = true; /** * Whether or not the .NET Assembly Analyzer is enabled. */ @SuppressWarnings("CanBeFinal") @Parameter(property = "assemblyAnalyzerEnabled", defaultValue = "true", required = false) private boolean assemblyAnalyzerEnabled = true; /** * Whether or not the .NET Nuspec Analyzer is enabled. */ @SuppressWarnings("CanBeFinal") @Parameter(property = "nuspecAnalyzerEnabled", defaultValue = "true", required = false) private boolean nuspecAnalyzerEnabled = true; /** * Whether or not the Central Analyzer is enabled. */ @SuppressWarnings("CanBeFinal") @Parameter(property = "centralAnalyzerEnabled", defaultValue = "true", required = false) private boolean centralAnalyzerEnabled = true; /** * Whether or not the Nexus Analyzer is enabled. */ @SuppressWarnings("CanBeFinal") @Parameter(property = "nexusAnalyzerEnabled", defaultValue = "true", required = false) private boolean nexusAnalyzerEnabled = true; /** * The URL of a Nexus server's REST API end point (http://domain/nexus/service/local). 
*/ @Parameter(property = "nexusUrl", defaultValue = "", required = false) private String nexusUrl; /** * Whether or not the configured proxy is used to connect to Nexus. */ @Parameter(property = "nexusUsesProxy", defaultValue = "true", required = false) private boolean nexusUsesProxy = true; /** * The database connection string. */ @Parameter(property = "connectionString", defaultValue = "", required = false) private String connectionString; /** * The database driver name. An example would be org.h2.Driver. */ @Parameter(property = "databaseDriverName", defaultValue = "", required = false) private String databaseDriverName; /** * The path to the database driver if it is not on the class path. */ @Parameter(property = "databaseDriverPath", defaultValue = "", required = false) private String databaseDriverPath; /** * The database user name. */ @Parameter(property = "databaseUser", defaultValue = "", required = false) private String databaseUser; /** * The password to use when connecting to the database. */ @Parameter(property = "databasePassword", defaultValue = "", required = false) private String databasePassword; /** * A comma-separated list of file extensions to add to analysis next to jar, zip, .... */ @Parameter(property = "zipExtensions", required = false) private String zipExtensions; /** * Skip Analysis for Test Scope Dependencies. */ @SuppressWarnings("CanBeFinal") @Parameter(property = "skipTestScope", defaultValue = "true", required = false) private boolean skipTestScope = true; /** * Skip Analysis for Runtime Scope Dependencies. */ @SuppressWarnings("CanBeFinal") @Parameter(property = "skipRuntimeScope", defaultValue = "false", required = false) private boolean skipRuntimeScope = false; /** * Skip Analysis for Provided Scope Dependencies. */ @SuppressWarnings("CanBeFinal") @Parameter(property = "skipProvidedScope", defaultValue = "false", required = false) private boolean skipProvidedScope = false; /** * The data directory, hold DC SQL DB. 
*/ @Parameter(property = "dataDirectory", defaultValue = "", required = false) private String dataDirectory; /** * Data Mirror URL for CVE 1.2. */ @Parameter(property = "cveUrl12Modified", defaultValue = "", required = false) private String cveUrl12Modified; /** * Data Mirror URL for CVE 2.0. */ @Parameter(property = "cveUrl20Modified", defaultValue = "", required = false) private String cveUrl20Modified; /** * Base Data Mirror URL for CVE 1.2. */ @Parameter(property = "cveUrl12Base", defaultValue = "", required = false) private String cveUrl12Base; /** * Data Mirror URL for CVE 2.0. */ @Parameter(property = "cveUrl20Base", defaultValue = "", required = false) private String cveUrl20Base; /** * The path to mono for .NET Assembly analysis on non-windows systems. */ @Parameter(property = "pathToMono", defaultValue = "", required = false) private String pathToMono; /** * The Proxy URL. * * @deprecated Please use mavenSettings instead */ @SuppressWarnings("CanBeFinal") @Parameter(property = "proxyUrl", defaultValue = "", required = false) @Deprecated private String proxyUrl = null; /** * Sets whether or not the external report format should be used. * * @deprecated the internal report is no longer supported */ @SuppressWarnings("CanBeFinal") @Parameter(property = "externalReport") @Deprecated private String externalReport = null; // </editor-fold> //<editor-fold defaultstate="collapsed" desc="Base Maven implementation"> /** * Executes dependency-check. * * @throws MojoExecutionException thrown if there is an exception executing the mojo * @throws MojoFailureException thrown if dependency-check failed the build */ @Override public void execute() throws MojoExecutionException, MojoFailureException { validateAggregate(); project.setContextValue(getOutputDirectoryContextKey(), this.outputDirectory); runCheck(); } /** * Checks if the aggregate configuration parameter has been set to true. 
If it has a MojoExecutionException is thrown because * the aggregate configuration parameter is no longer supported. * * @throws MojoExecutionException thrown if aggregate is set to true */ private void validateAggregate() throws MojoExecutionException { if (aggregate) { final String msg = "Aggregate configuration detected - as of dependency-check 1.2.8 this no longer supported. " + "Please use the aggregate goal instead."; throw new MojoExecutionException(msg); } } /** * Generates the Dependency-Check Site Report. * * @param sink the sink to write the report to * @param locale the locale to use when generating the report * @throws MavenReportException if a maven report exception occurs * @deprecated use {@link #generate(org.apache.maven.doxia.sink.Sink, java.util.Locale)} instead. */ @Deprecated public final void generate(@SuppressWarnings("deprecation") org.codehaus.doxia.sink.Sink sink, Locale locale) throws MavenReportException { generate((Sink) sink, locale); } /** * Generates the Dependency-Check Site Report. * * @param sink the sink to write the report to * @param locale the locale to use when generating the report * @throws MavenReportException if a maven report exception occurs */ public void generate(Sink sink, Locale locale) throws MavenReportException { try { validateAggregate(); } catch (MojoExecutionException ex) { throw new MavenReportException(ex.getMessage()); } project.setContextValue(getOutputDirectoryContextKey(), getReportOutputDirectory()); try { runCheck(); } catch (MojoExecutionException ex) { throw new MavenReportException(ex.getMessage(), ex); } catch (MojoFailureException ex) { getLog().warn("Vulnerabilities were identifies that exceed the CVSS threshold for failing the build"); } } /** * Returns the correct output directory depending on if a site is being executed or not. 
* * @return the directory to write the report(s) * @throws MojoExecutionException thrown if there is an error loading the file path */ protected File getCorrectOutputDirectory() throws MojoExecutionException { return getCorrectOutputDirectory(this.project); } /** * Returns the correct output directory depending on if a site is being executed or not. * * @param current the Maven project to get the output directory from * @return the directory to write the report(s) */ protected File getCorrectOutputDirectory(MavenProject current) { final Object obj = current.getContextValue(getOutputDirectoryContextKey()); if (obj != null && obj instanceof File) { return (File) obj; } File target = new File(current.getBuild().getDirectory()); if (target.getParentFile() != null && "target".equals(target.getParentFile().getName())) { target = target.getParentFile(); } return target; } /** * Returns the correct output directory depending on if a site is being executed or not. * * @param current the Maven project to get the output directory from * @return the directory to write the report(s) */ protected File getDataFile(MavenProject current) { if (getLog().isDebugEnabled()) { getLog().debug(String.format("Getting data filefor %s using key '%s'", current.getName(), getDataFileContextKey())); } final Object obj = current.getContextValue(getDataFileContextKey()); if (obj != null) { if (obj instanceof File) { return (File) obj; } } else { if (getLog().isDebugEnabled()) { getLog().debug("Context value not found"); } } return null; } /** * Scans the project's artifacts and adds them to the engine's dependency list. 
* * @param project the project to scan the dependencies of * @param engine the engine to use to scan the dependencies */ protected void scanArtifacts(MavenProject project, Engine engine) { for (Artifact a : project.getArtifacts()) { if (excludeFromScan(a)) { continue; } final List<Dependency> deps = engine.scan(a.getFile().getAbsoluteFile()); if (deps != null) { if (deps.size() == 1) { final Dependency d = deps.get(0); if (d != null) { final MavenArtifact ma = new MavenArtifact(a.getGroupId(), a.getArtifactId(), a.getVersion()); d.addAsEvidence("pom", ma, Confidence.HIGHEST); d.addProjectReference(project.getName()); if (getLog().isDebugEnabled()) { getLog().debug(String.format("Adding project reference %s on dependency %s", project.getName(), d.getDisplayFileName())); } if (metadataSource != null) { try { final DependencyVersion currentVersion = new DependencyVersion(a.getVersion()); final List<ArtifactVersion> versions = metadataSource.retrieveAvailableVersions(a, localRepository, remoteRepositories); for (ArtifactVersion av : versions) { final DependencyVersion newVersion = new DependencyVersion(av.toString()); if (currentVersion.compareTo(newVersion) < 0) { d.addAvailableVersion(av.toString()); } } } catch (ArtifactMetadataRetrievalException ex) { getLog().warn( "Unable to check for new versions of dependencies; see the log for more details."); if (getLog().isDebugEnabled()) { getLog().debug("", ex); } } catch (Throwable t) { getLog().warn( "Unexpected error occured checking for new versions; see the log for more details."); if (getLog().isDebugEnabled()) { getLog().debug("", t); } } } } } else { if (getLog().isDebugEnabled()) { final String msg = String.format("More then 1 dependency was identified in first pass scan of '%s:%s:%s'", a.getGroupId(), a.getArtifactId(), a.getVersion()); getLog().debug(msg); } } } } } /** * Executes the dependency-check scan and generates the necassary report. 
     * @throws MojoExecutionException thrown if there is an exception running the scan
     * @throws MojoFailureException thrown if dependency-check is configured to fail the build
     */
    public abstract void runCheck() throws MojoExecutionException, MojoFailureException;

    /**
     * Sets the Reporting output directory.
     *
     * @param directory the output directory
     */
    @Override
    public void setReportOutputDirectory(File directory) {
        reportOutputDirectory = directory;
    }

    /**
     * Returns the report output directory.
     *
     * @return the report output directory
     */
    @Override
    public File getReportOutputDirectory() {
        return reportOutputDirectory;
    }

    /**
     * Returns the output directory.
     *
     * @return the output directory
     */
    public File getOutputDirectory() {
        return outputDirectory;
    }

    /**
     * Returns whether this is an external report. This method always returns true.
     *
     * @return <code>true</code>
     */
    @Override
    public final boolean isExternalReport() {
        return true;
    }

    /**
     * Returns the base output name the Maven site plugin uses to link the generated
     * report, derived from the configured {@code format}.
     *
     * @return the output name
     */
    public String getOutputName() {
        if ("HTML".equalsIgnoreCase(this.format) || "ALL".equalsIgnoreCase(this.format)) {
            return "dependency-check-report";
        } else if ("XML".equalsIgnoreCase(this.format)) {
            // NOTE(review): the trailing '#' looks deliberate (presumably to stop the site
            // plugin appending '.html' to the XML link) -- confirm before changing.
            return "dependency-check-report.xml#";
        } else if ("VULN".equalsIgnoreCase(this.format)) {
            return "dependency-check-vulnerability";
        } else {
            // Unknown format: warn and fall back to the HTML report name.
            getLog().warn("Unknown report format used during site generation.");
            return "dependency-check-report";
        }
    }

    /**
     * Returns the category name.
     *
     * @return the category name
     */
    public String getCategoryName() {
        return MavenReport.CATEGORY_PROJECT_REPORTS;
    }
    //</editor-fold>

    /**
     * Initializes a new <code>Engine</code> that can be used for scanning.
     * @return a newly instantiated <code>Engine</code>
     * @throws DatabaseException thrown if there is a database exception
     */
    protected Engine initializeEngine() throws DatabaseException {
        // Settings must be populated before the engine reads them.
        populateSettings();
        return new Engine(this.project, this.reactorProjects);
    }

    /**
     * Takes the properties supplied and updates the dependency-check settings. Additionally, this sets the system properties
     * required to change the proxy url, port, and connection timeout.
     */
    private void populateSettings() {
        Settings.initialize();
        // Merge the plugin's bundled defaults first; mojo configuration below overrides them.
        InputStream mojoProperties = null;
        try {
            mojoProperties = this.getClass().getClassLoader().getResourceAsStream(PROPERTIES_FILE);
            Settings.mergeProperties(mojoProperties);
        } catch (IOException ex) {
            getLog().warn("Unable to load the dependency-check ant task.properties file.");
            if (getLog().isDebugEnabled()) {
                getLog().debug("", ex);
            }
        } finally {
            if (mojoProperties != null) {
                try {
                    mojoProperties.close();
                } catch (IOException ex) {
                    if (getLog().isDebugEnabled()) {
                        getLog().debug("", ex);
                    }
                }
            }
        }
        Settings.setBoolean(Settings.KEYS.AUTO_UPDATE, autoUpdate);
        if (externalReport != null) {
            getLog().warn("The 'externalReport' option was set; this configuration option has been removed. "
                    + "Please update the dependency-check-maven plugin's configuration");
        }
        if (proxyUrl != null && !proxyUrl.isEmpty()) {
            getLog().warn("Deprecated configuration detected, proxyUrl will be ignored; use the maven settings "
                    + "to configure the proxy instead");
        }
        // Proxy configuration comes from the Maven settings, not plugin configuration.
        final Proxy proxy = getMavenProxy();
        if (proxy != null) {
            Settings.setString(Settings.KEYS.PROXY_SERVER, proxy.getHost());
            Settings.setString(Settings.KEYS.PROXY_PORT, Integer.toString(proxy.getPort()));
            final String userName = proxy.getUsername();
            final String password = proxy.getPassword();
            if (userName != null) {
                Settings.setString(Settings.KEYS.PROXY_USERNAME, userName);
            }
            if (password != null) {
                Settings.setString(Settings.KEYS.PROXY_PASSWORD, password);
            }
        }
        if (connectionTimeout != null && !connectionTimeout.isEmpty()) {
            Settings.setString(Settings.KEYS.CONNECTION_TIMEOUT, connectionTimeout);
        }
        if (suppressionFile != null && !suppressionFile.isEmpty()) {
            Settings.setString(Settings.KEYS.SUPPRESSION_FILE, suppressionFile);
        }

        //File Type Analyzer Settings
        //JAR ANALYZER
        Settings.setBoolean(Settings.KEYS.ANALYZER_JAR_ENABLED, jarAnalyzerEnabled);
        //NUSPEC ANALYZER
        Settings.setBoolean(Settings.KEYS.ANALYZER_NUSPEC_ENABLED, nuspecAnalyzerEnabled);
        //CENTRAL ANALYZER (comment previously mislabeled as "NEXUS ANALYZER")
        Settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED, centralAnalyzerEnabled);
        //NEXUS ANALYZER
        Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_ENABLED, nexusAnalyzerEnabled);
        if (nexusUrl != null && !nexusUrl.isEmpty()) {
            Settings.setString(Settings.KEYS.ANALYZER_NEXUS_URL, nexusUrl);
        }
        Settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_PROXY, nexusUsesProxy);
        //ARCHIVE ANALYZER
        Settings.setBoolean(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, archiveAnalyzerEnabled);
        if (zipExtensions != null && !zipExtensions.isEmpty()) {
            Settings.setString(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS, zipExtensions);
        }
        //ASSEMBLY ANALYZER
        Settings.setBoolean(Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED, assemblyAnalyzerEnabled);
        if (pathToMono != null && !pathToMono.isEmpty()) {
            Settings.setString(Settings.KEYS.ANALYZER_ASSEMBLY_MONO_PATH, pathToMono);
        }

        //Database configuration
        if (databaseDriverName != null && !databaseDriverName.isEmpty()) {
            Settings.setString(Settings.KEYS.DB_DRIVER_NAME, databaseDriverName);
        }
        if (databaseDriverPath != null && !databaseDriverPath.isEmpty()) {
            Settings.setString(Settings.KEYS.DB_DRIVER_PATH, databaseDriverPath);
        }
        if (connectionString != null && !connectionString.isEmpty()) {
            Settings.setString(Settings.KEYS.DB_CONNECTION_STRING, connectionString);
        }
        if (databaseUser != null && !databaseUser.isEmpty()) {
            Settings.setString(Settings.KEYS.DB_USER, databaseUser);
        }
        if (databasePassword != null && !databasePassword.isEmpty()) {
            Settings.setString(Settings.KEYS.DB_PASSWORD, databasePassword);
        }

        // Data Directory
        if (dataDirectory != null && !dataDirectory.isEmpty()) {
            Settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDirectory);
        }

        // Scope Exclusion
        Settings.setBoolean(Settings.KEYS.SKIP_TEST_SCOPE, skipTestScope);
        Settings.setBoolean(Settings.KEYS.SKIP_RUNTIME_SCOPE, skipRuntimeScope);
        Settings.setBoolean(Settings.KEYS.SKIP_PROVIDED_SCOPE, skipProvidedScope);

        // CVE Data Mirroring
        if (cveUrl12Modified != null && !cveUrl12Modified.isEmpty()) {
            Settings.setString(Settings.KEYS.CVE_MODIFIED_12_URL, cveUrl12Modified);
        }
        if (cveUrl20Modified != null && !cveUrl20Modified.isEmpty()) {
            Settings.setString(Settings.KEYS.CVE_MODIFIED_20_URL, cveUrl20Modified);
        }
        if (cveUrl12Base != null && !cveUrl12Base.isEmpty()) {
            Settings.setString(Settings.KEYS.CVE_SCHEMA_1_2, cveUrl12Base);
        }
        if (cveUrl20Base != null && !cveUrl20Base.isEmpty()) {
            Settings.setString(Settings.KEYS.CVE_SCHEMA_2_0, cveUrl20Base);
        }
    }

    /**
     * Returns the maven proxy.
     * @return the maven proxy
     */
    private Proxy getMavenProxy() {
        if (mavenSettings != null) {
            final List<Proxy> proxies = mavenSettings.getProxies();
            if (proxies != null && !proxies.isEmpty()) {
                if (mavenSettingsProxyId != null) {
                    // An explicit id was configured: only that proxy may be used.
                    for (Proxy proxy : proxies) {
                        if (mavenSettingsProxyId.equalsIgnoreCase(proxy.getId())) {
                            return proxy;
                        }
                    }
                } else if (proxies.size() == 1) {
                    // Exactly one proxy defined: unambiguous, use it.
                    return proxies.get(0);
                } else {
                    getLog().warn("Multiple proxy definitions exist in the Maven settings. In the dependency-check "
                            + "configuration set the mavenSettingsProxyId so that the correct proxy will be used.");
                    throw new IllegalStateException("Ambiguous proxy definition");
                }
            }
        }
        return null;
    }

    /**
     * Tests is the artifact should be included in the scan (i.e. is the dependency in a scope that is being scanned).
     *
     * @param a the Artifact to test
     * @return <code>true</code> if the artifact is in an excluded scope; otherwise <code>false</code>
     */
    protected boolean excludeFromScan(Artifact a) {
        if (skipTestScope && Artifact.SCOPE_TEST.equals(a.getScope())) {
            return true;
        }
        if (skipProvidedScope && Artifact.SCOPE_PROVIDED.equals(a.getScope())) {
            return true;
        }
        // NOTE(review): unlike the two checks above, this condition is NEGATED -- with
        // skipRuntimeScope set, every artifact NOT in runtime scope is excluded (including
        // compile scope). Confirm this asymmetry is intentional before changing it.
        if (skipRuntimeScope && !Artifact.SCOPE_RUNTIME.equals(a.getScope())) {
            return true;
        }
        return false;
    }

    /**
     * Returns a reference to the current project. This method is used instead of auto-binding the project via component
     * annotation in concrete implementations of this. If the child has a <code>@Component MavenProject project;</code> defined
     * then the abstract class (i.e. this class) will not have access to the current project (just the way Maven works with the
     * binding).
     *
     * @return returns a reference to the current project
     */
    protected MavenProject getProject() {
        return project;
    }

    /**
     * Returns the list of Maven Projects in this build.
     *
     * @return the list of Maven Projects in this build
     */
    protected List<MavenProject> getReactorProjects() {
        return reactorProjects;
    }

    /**
     * Returns the report format.
     * @return the report format
     */
    protected String getFormat() {
        return format;
    }

    /**
     * Generates the reports for a given dependency-check engine.
     *
     * @param engine a dependency-check engine
     * @param p the maven project
     * @param outputDir the directory path to write the report(s).
     */
    protected void writeReports(Engine engine, MavenProject p, File outputDir) {
        DatabaseProperties prop = null;
        CveDB cve = null;
        // Best effort: the report is still generated if the DB properties cannot be read.
        try {
            cve = new CveDB();
            cve.open();
            prop = cve.getDatabaseProperties();
        } catch (DatabaseException ex) {
            if (getLog().isDebugEnabled()) {
                getLog().debug("Unable to retrieve DB Properties", ex);
            }
        } finally {
            if (cve != null) {
                cve.close();
            }
        }
        final ReportGenerator r = new ReportGenerator(p.getName(), engine.getDependencies(), engine.getAnalyzers(), prop);
        try {
            r.generateReports(outputDir.getAbsolutePath(), format);
        } catch (IOException ex) {
            getLog().error(
                    "Unexpected exception occurred during analysis; please see the verbose error log for more details.");
            if (getLog().isDebugEnabled()) {
                getLog().debug("", ex);
            }
        } catch (Throwable ex) {
            // NOTE(review): this handler is byte-identical to the IOException handler above;
            // the two could be collapsed into a single catch (Throwable) with no behavior change.
            getLog().error(
                    "Unexpected exception occurred during analysis; please see the verbose error log for more details.");
            if (getLog().isDebugEnabled()) {
                getLog().debug("", ex);
            }
        }
    }

    //<editor-fold defaultstate="collapsed" desc="Methods to fail build or show summary">
    /**
     * Checks to see if a vulnerability has been identified with a CVSS score that is above the threshold set in the
     * configuration.
* * @param dependencies the list of dependency objects * @throws MojoFailureException thrown if a CVSS score is found that is higher then the threshold set */ protected void checkForFailure(List<Dependency> dependencies) throws MojoFailureException { if (failBuildOnCVSS <= 10) { final StringBuilder ids = new StringBuilder(); for (Dependency d : dependencies) { boolean addName = true; for (Vulnerability v : d.getVulnerabilities()) { if (v.getCvssScore() >= failBuildOnCVSS) { if (addName) { addName = false; ids.append(NEW_LINE).append(d.getFileName()).append(": "); ids.append(v.getName()); } else { ids.append(", ").append(v.getName()); } } } } if (ids.length() > 0) { final String msg = String.format("%n%nDependency-Check Failure:%n" + "One or more dependencies were identified with vulnerabilities that have a CVSS score greater then '%.1f': %s%n" + "See the dependency-check report for more details.%n%n", failBuildOnCVSS, ids.toString()); throw new MojoFailureException(msg); } } } /** * Generates a warning message listing a summary of dependencies and their associated CPE and CVE entries. 
     * @param mp the Maven project for which the summary is shown
     * @param dependencies a list of dependency objects
     */
    protected void showSummary(MavenProject mp, List<Dependency> dependencies) {
        if (showSummary) {
            final StringBuilder summary = new StringBuilder();
            for (Dependency d : dependencies) {
                boolean firstEntry = true;
                // Comma-separated list of CVE names for this dependency.
                final StringBuilder ids = new StringBuilder();
                for (Vulnerability v : d.getVulnerabilities()) {
                    if (firstEntry) {
                        firstEntry = false;
                    } else {
                        ids.append(", ");
                    }
                    ids.append(v.getName());
                }
                // Only dependencies with at least one vulnerability appear in the summary.
                if (ids.length() > 0) {
                    summary.append(d.getFileName()).append(" (");
                    firstEntry = true;
                    for (Identifier id : d.getIdentifiers()) {
                        if (firstEntry) {
                            firstEntry = false;
                        } else {
                            summary.append(", ");
                        }
                        summary.append(id.getValue());
                    }
                    summary.append(") : ").append(ids).append(NEW_LINE);
                }
            }
            if (summary.length() > 0) {
                final String msg = String.format("%n%n"
                        + "One or more dependencies were identified with known vulnerabilities in %s:%n%n%s"
                        + "%n%nSee the dependency-check report for more details.%n%n", mp.getName(), summary.toString());
                getLog().warn(msg);
            }
        }
    }
    //</editor-fold>

    //<editor-fold defaultstate="collapsed" desc="Methods to read/write the serialized data file">
    /**
     * Returns the key used to store the path to the data file that is saved by <code>writeDataFile()</code>. This key is used in
     * the <code>MavenProject.(set|get)ContextValue</code>.
     *
     * @return the key used to store the path to the data file
     */
    protected String getDataFileContextKey() {
        return "dependency-check-path-" + dataFileName;
    }

    /**
     * Returns the key used to store the path to the output directory. When generating the report in the
     * <code>executeAggregateReport()</code> the output directory should be obtained by using this key.
     *
     * @return the key used to store the path to the output directory
     */
    protected String getOutputDirectoryContextKey() {
        return "dependency-output-dir-" + dataFileName;
    }

    /**
     * Writes the scan data to disk.
This is used to serialize the scan data between the "check" and "aggregate" phase. * * @param mp the mMven project for which the data file was created * @param writeTo the directory to write the data file * @param dependencies the list of dependencies to serialize */ protected void writeDataFile(MavenProject mp, File writeTo, List<Dependency> dependencies) { File file; //check to see if this was already written out if (mp.getContextValue(this.getDataFileContextKey()) == null) { if (writeTo == null) { file = new File(mp.getBuild().getDirectory()); file = new File(file, dataFileName); } else { file = new File(writeTo, dataFileName); } final File parent = file.getParentFile(); if (!parent.isDirectory()) { if (parent.mkdirs()) { getLog().error(String.format("Directory '%s' does not exist and cannot be created; unable to write data file.", parent.getAbsolutePath())); } } OutputStream os = null; OutputStream bos = null; ObjectOutputStream out = null; try { if (dependencies != null) { os = new FileOutputStream(file); bos = new BufferedOutputStream(os); out = new ObjectOutputStream(bos); out.writeObject(dependencies); out.flush(); //call reset to prevent resource leaks per //https://www.securecoding.cert.org/confluence/display/java/SER10-J.+Avoid+memory+and+resource+leaks+during+serialization out.reset(); } if (getLog().isDebugEnabled()) { getLog().debug(String.format("Serialized data file written to '%s' for %s, referenced by key %s", file.getAbsolutePath(), mp.getName(), this.getDataFileContextKey())); } mp.setContextValue(this.getDataFileContextKey(), file.getAbsolutePath()); } catch (IOException ex) { getLog().warn("Unable to create data file used for report aggregation; " + "if report aggregation is being used the results may be incomplete."); if (getLog().isDebugEnabled()) { getLog().debug(ex.getMessage(), ex); } } finally { if (out != null) { try { out.close(); } catch (IOException ex) { if (getLog().isDebugEnabled()) { getLog().debug("ignore", ex); } } } if (bos != 
null) { try { bos.close(); } catch (IOException ex) { if (getLog().isDebugEnabled()) { getLog().debug("ignore", ex); } } } if (os != null) { try { os.close(); } catch (IOException ex) { if (getLog().isDebugEnabled()) { getLog().debug("ignore", ex); } } } } } } /** * Reads the serialized scan data from disk. This is used to serialize the scan data between the "check" and "aggregate" * phase. * * @param project the Maven project to read the data file from * @return a <code>Engine</code> object populated with dependencies if the serialized data file exists; otherwise * <code>null</code> is returned */ protected List<Dependency> readDataFile(MavenProject project) { final Object oPath = project.getContextValue(this.getDataFileContextKey()); if (oPath == null) { return null; } List<Dependency> ret = null; final String path = (String) oPath; ObjectInputStream ois = null; try { ois = new ObjectInputStream(new FileInputStream(path)); ret = (List<Dependency>) ois.readObject(); } catch (FileNotFoundException ex) { //TODO fix logging getLog().error("", ex); } catch (IOException ex) { getLog().error("", ex); } catch (ClassNotFoundException ex) { getLog().error("", ex); } finally { if (ois != null) { try { ois.close(); } catch (IOException ex) { getLog().error("", ex); } } } return ret; } //</editor-fold> }
/*
 * Copyright 2016 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.storage.contrib.nio;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;

import com.google.cloud.storage.BlobId;
import com.google.common.collect.UnmodifiableIterator;
import java.io.File;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.LinkOption;
import java.nio.file.Path;
import java.nio.file.WatchEvent.Kind;
import java.nio.file.WatchEvent.Modifier;
import java.nio.file.WatchKey;
import java.nio.file.WatchService;
import java.util.Collections;
import java.util.Iterator;
import java.util.Objects;
import java.util.regex.Pattern;
import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;

/**
 * Google Cloud Storage {@link Path}.
 *
 * @see UnixPath
 */
@Immutable
public final class CloudStoragePath implements Path {

  // Matches leading/trailing/embedded "." or ".." components and empty ("//") components.
  private static final Pattern EXTRA_SLASHES_OR_DOT_DIRS_PATTERN =
      Pattern.compile("^\\.\\.?/|//|/\\.\\.?/|/\\.\\.?$");

  private final CloudStorageFileSystem fileSystem;
  private final UnixPath path;

  private CloudStoragePath(CloudStorageFileSystem fileSystem, UnixPath path) {
    this.fileSystem = fileSystem;
    this.path = path;
  }

  // Static factory used throughout the provider instead of the private constructor.
  static CloudStoragePath getPath(CloudStorageFileSystem fileSystem, String path, String... more) {
    return new CloudStoragePath(
        fileSystem, UnixPath.getPath(fileSystem.config().permitEmptyPathComponents(), path, more));
  }

  /**
   * Returns the Cloud Storage bucket name being served by this file system.
   */
  public String bucket() {
    return fileSystem.bucket();
  }

  /**
   * Returns path converted to a {@link BlobId} so I/O can be performed.
   */
  BlobId getBlobId() {
    checkArgument(!path.toString().isEmpty(), "Object names cannot be empty.");
    return BlobId.of(bucket(), toRealPath().path.toString());
  }

  boolean seemsLikeADirectory() {
    return path.seemsLikeADirectory();
  }

  boolean seemsLikeADirectoryAndUsePseudoDirectories() {
    return path.seemsLikeADirectory() && fileSystem.config().usePseudoDirectories();
  }

  @Override
  public CloudStorageFileSystem getFileSystem() {
    return fileSystem;
  }

  @Nullable
  @Override
  public CloudStoragePath getRoot() {
    return newPath(path.getRoot());
  }

  @Override
  public boolean isAbsolute() {
    return path.isAbsolute();
  }

  /**
   * Changes relative path to be absolute, using
   * {@link CloudStorageConfiguration#workingDirectory() workingDirectory} as current dir.
   */
  @Override
  public CloudStoragePath toAbsolutePath() {
    return newPath(path.toAbsolutePath(getWorkingDirectory()));
  }

  /**
   * Returns this path rewritten to the Cloud Storage object name that'd be used to perform i/o.
   *
   * <p>This method makes path {@link #toAbsolutePath() absolute} and removes the prefix slash from
   * the absolute path when {@link CloudStorageConfiguration#stripPrefixSlash() stripPrefixSlash}
   * is {@code true}.
   *
   * @throws IllegalArgumentException if path contains extra slashes or dot-dirs when
   *     {@link CloudStorageConfiguration#permitEmptyPathComponents() permitEmptyPathComponents}
   *     is {@code false}, or if the resulting path is empty.
   */
  @Override
  public CloudStoragePath toRealPath(LinkOption... options) {
    CloudStorageUtil.checkNotNullArray(options);
    return newPath(toRealPathInternal(true));
  }

  // errorCheck == false is used by compareTo/equals/hashCode, which must never throw.
  private UnixPath toRealPathInternal(boolean errorCheck) {
    UnixPath objectName = path.toAbsolutePath(getWorkingDirectory());
    if (errorCheck && !fileSystem.config().permitEmptyPathComponents()) {
      checkArgument(
          !EXTRA_SLASHES_OR_DOT_DIRS_PATTERN.matcher(objectName).find(),
          "I/O not allowed on dot-dirs or extra slashes when !permitEmptyPathComponents: %s",
          objectName);
    }
    if (fileSystem.config().stripPrefixSlash()) {
      objectName = objectName.removeBeginningSeparator();
    }
    return objectName;
  }

  /**
   * Returns path without extra slashes or {@code .} and {@code ..} and preserves trailing slash.
   */
  @Override
  public CloudStoragePath normalize() {
    return newPath(path.normalize());
  }

  @Override
  public CloudStoragePath resolve(Path object) {
    return newPath(path.resolve(CloudStorageUtil.checkPath(object).path));
  }

  @Override
  public CloudStoragePath resolve(String other) {
    return newPath(path.resolve(getUnixPath(other)));
  }

  @Override
  public CloudStoragePath resolveSibling(Path other) {
    return newPath(path.resolveSibling(CloudStorageUtil.checkPath(other).path));
  }

  @Override
  public CloudStoragePath resolveSibling(String other) {
    return newPath(path.resolveSibling(getUnixPath(other)));
  }

  @Override
  public CloudStoragePath relativize(Path object) {
    return newPath(path.relativize(CloudStorageUtil.checkPath(object).path));
  }

  @Nullable
  @Override
  public CloudStoragePath getParent() {
    return newPath(path.getParent());
  }

  @Nullable
  @Override
  public CloudStoragePath getFileName() {
    return newPath(path.getFileName());
  }

  @Override
  public CloudStoragePath subpath(int beginIndex, int endIndex) {
    return newPath(path.subpath(beginIndex, endIndex));
  }

  @Override
  public int getNameCount() {
    return path.getNameCount();
  }

  @Override
  public CloudStoragePath getName(int index) {
    return newPath(path.getName(index));
  }

  @Override
  public boolean startsWith(Path other) {
    // Paths in different buckets never share a prefix.
    if (!(checkNotNull(other) instanceof CloudStoragePath)) {
      return false;
    }
    CloudStoragePath that = (CloudStoragePath) other;
    if (!bucket().equals(that.bucket())) {
      return false;
    }
    return path.startsWith(that.path);
  }

  @Override
  public boolean startsWith(String other) {
    return path.startsWith(getUnixPath(other));
  }

  @Override
  public boolean endsWith(Path other) {
    if (!(checkNotNull(other) instanceof CloudStoragePath)) {
      return false;
    }
    CloudStoragePath that = (CloudStoragePath) other;
    if (!bucket().equals(that.bucket())) {
      return false;
    }
    return path.endsWith(that.path);
  }

  @Override
  public boolean endsWith(String other) {
    return path.endsWith(getUnixPath(other));
  }

  /**
   * Throws {@link UnsupportedOperationException} because this feature hasn't been implemented yet.
   */
  @Override
  public WatchKey register(WatchService watcher, Kind<?>[] events, Modifier... modifiers) {
    // TODO: Implement me.
    throw new UnsupportedOperationException();
  }

  /**
   * Throws {@link UnsupportedOperationException} because this feature hasn't been implemented yet.
   */
  @Override
  public WatchKey register(WatchService watcher, Kind<?>... events) {
    // TODO: Implement me.
    throw new UnsupportedOperationException();
  }

  /**
   * Throws {@link UnsupportedOperationException} because Google Cloud Storage files are not backed
   * by the local file system.
   */
  @Override
  public File toFile() {
    throw new UnsupportedOperationException("GCS objects aren't available locally");
  }

  @Override
  public Iterator<Path> iterator() {
    if (path.isEmpty()) {
      // An empty path still has one name element: itself.
      return Collections.<Path>singleton(this).iterator();
    } else if (path.isRoot()) {
      return Collections.emptyIterator();
    } else {
      return new PathIterator();
    }
  }

  @Override
  public int compareTo(Path other) {
    // Documented to throw CCE if other is associated with a different FileSystemProvider.
    CloudStoragePath that = (CloudStoragePath) other;
    int res = bucket().compareTo(that.bucket());
    if (res != 0) {
      return res;
    }
    return toRealPathInternal(false).compareTo(that.toRealPathInternal(false));
  }

  @Override
  public boolean equals(Object other) {
    // Compares the real (absolute, slash-stripped) object names, so "a" and "/a" can be equal.
    return this == other
        || other instanceof CloudStoragePath
            && Objects.equals(bucket(), ((CloudStoragePath) other).bucket())
            && Objects.equals(
                toRealPathInternal(false), ((CloudStoragePath) other).toRealPathInternal(false));
  }

  @Override
  public int hashCode() {
    return Objects.hash(bucket(), toRealPathInternal(false));
  }

  @Override
  public String toString() {
    return path.toString();
  }

  @Override
  public URI toUri() {
    try {
      return new URI(
          CloudStorageFileSystem.URI_SCHEME, bucket(), path.toAbsolutePath().toString(), null);
    } catch (URISyntaxException e) {
      // Bucket and path were validated on construction, so this should be unreachable.
      throw new AssertionError(e);
    }
  }

  // Wraps a UnixPath in a CloudStoragePath on this file system, preserving identity and null.
  @Nullable
  private CloudStoragePath newPath(@Nullable UnixPath newPath) {
    if (newPath == path) { // Nonuse of equals is intentional.
      return this;
    } else if (newPath != null) {
      return new CloudStoragePath(fileSystem, newPath);
    } else {
      return null;
    }
  }

  // Parses a string using this file system's empty-path-component policy.
  private UnixPath getUnixPath(String newPath) {
    return UnixPath.getPath(fileSystem.config().permitEmptyPathComponents(), newPath);
  }

  private UnixPath getWorkingDirectory() {
    return getUnixPath(fileSystem.config().workingDirectory());
  }

  /**
   * Transform iterator providing a slight performance boost over {@code FluentIterable}.
   */
  private final class PathIterator extends UnmodifiableIterator<Path> {
    private final Iterator<String> delegate = path.split();

    @Override
    public Path next() {
      return newPath(getUnixPath(delegate.next()));
    }

    @Override
    public boolean hasNext() {
      return delegate.hasNext();
    }
  }
}
package com.azavea.prs.driver; import android.app.ActivityManager; import android.net.Uri; import android.os.Bundle; import android.support.design.widget.FloatingActionButton; import android.support.design.widget.Snackbar; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.Toolbar; import android.support.v7.widget.RecyclerView; import android.util.Log; import android.view.View; import com.google.android.gms.appindexing.Action; import com.google.android.gms.appindexing.AppIndex; import com.google.android.gms.common.api.GoogleApiClient; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.stream.JsonReader; import com.sun.codemodel.CodeWriter; import com.sun.codemodel.JCodeModel; import org.hibernate.validator.HibernateValidator; import org.hibernate.validator.HibernateValidatorConfiguration; import org.hibernate.validator.HibernateValidatorFactory; import org.jsonschema2pojo.*; import org.jsonschema2pojo.rules.RuleFactory; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.io.InputStreamReader; import java.lang.annotation.Annotation; import java.lang.reflect.Field; import java.net.MalformedURLException; import java.net.URL; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.text.DecimalFormat; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.Set; import com.azavea.prs.driver.schemas.*; import javax.validation.ConstraintViolation; import javax.validation.MessageInterpolator; import javax.validation.Validation; import javax.validation.ValidationProviderResolver; import javax.validation.Validator; import javax.validation.ValidatorFactory; import javax.validation.constraints.NotNull; import javax.validation.spi.BootstrapState; import javax.validation.spi.ValidationProvider; 
public class MainActivity extends AppCompatActivity {

    // RecyclerView plumbing for the record list shown in content_main.
    private RecyclerView mRecyclerView;
    private RecyclerView.Adapter mAdapter;
    private RecyclerView.LayoutManager mLayoutManager;

    /**
     * ATTENTION: This was auto-generated to implement the App Indexing API.
     * See https://g.co/AppIndexing/AndroidStudio for more information.
     */
    private GoogleApiClient client;

    //ExampleSchema mySchema;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);

        ///////////////////////////////////////////////////////// debug-only memory diagnostics
        Runtime rt = Runtime.getRuntime();
        long maxMemory = rt.maxMemory(); // total bytes of heap allowed to use before hard error
        Log.v("onCreate", "maxMemory:" + Long.toString(maxMemory));
        Log.v("onCreate", "total memory: " + Long.toString(rt.totalMemory()));
        Log.v("onCreate", "free memory: " + Long.toString(rt.freeMemory()));
        //for HTC Incredible (1st gen)
        //maxMemory:33554432
        //memoryClass:32
        ActivityManager am = (ActivityManager) getSystemService(ACTIVITY_SERVICE);
        int memoryClass = am.getMemoryClass(); // approx. mb of heap should use to respect device limitations
        Log.v("onCreate", "memoryClass:" + Integer.toString(memoryClass));
        ////////////////////////////////////////////////////////

        // FAB triggers a synchronous read+parse of the sample JSON and shows the result.
        FloatingActionButton fab = (FloatingActionButton) findViewById(R.id.fab);
        fab.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                String response = loadRecord();
                Snackbar.make(view, response, Snackbar.LENGTH_SHORT)
                        .setAction("Action", null).show();
            }
        });

        //View mainContent = findViewById(R.id.content_main);
        mRecyclerView = (RecyclerView) findViewById(R.id.content_main);
        // let layout size can change dynamically with content
        mRecyclerView.setHasFixedSize(false);
        // use a linear layout manager
        mLayoutManager = new LinearLayoutManager(this);
        mRecyclerView.setLayoutManager(mLayoutManager);
        // specify an adapter (placeholder strings for now)
        String[] recordInfo = {"thing one", "thing two", "foo", "bar", "baz", "jazzy jeff",
                "fresh prince", "stravinsky", "beethoven", "Indiana", "New Hampshire", "Ohio",
                "gingerbread", "ice cream sandwich", "Finland", "Australia", "penguins"
        };
        mAdapter = new RecordAdapter(recordInfo);
        mRecyclerView.setAdapter(mAdapter);

        // ATTENTION: This was auto-generated to implement the App Indexing API.
        // See https://g.co/AppIndexing/AndroidStudio for more information.
        client = new GoogleApiClient.Builder(this).addApi(AppIndex.API).build();
    }

    // Reads the bundled sample DriverRecord JSON from assets, parses it with Gson, logs a
    // few fields, and kicks off a chain of ValidationTasks. Returns the severity string on
    // success or a short error string on failure.
    // NOTE(review): log tags like "MainActivity:loadRecord" exceed the 23-char limit
    // enforced on older Android versions -- presumably harmless here, but confirm.
    String loadRecord() {
        // open JSON record example file in assets dir
        try {
            BufferedReader ir = new BufferedReader(new InputStreamReader(getAssets()
                    .open("json/data/DriverRecord.json"), "UTF-8"));
            StringBuilder stringBuilder = new StringBuilder();
            String line;
            while ((line = ir.readLine()) != null) {
                stringBuilder.append(line);
            }
            ir.close();
            String responseStr = stringBuilder.toString();
            Log.d("MainActivity:loadRecord", responseStr);

            Gson gson = new GsonBuilder().create();
            DriverSchema record = gson.fromJson(responseStr, DriverSchema.class);
            if (record == null) {
                Log.e("MainActivity:loadRecord", "NO RECORD FOUND GAAAAAH!");
                return "Got nuthin";
            }
            //AccidentDetails deets = record.getAccidentDetails();
            final AccidentDetails deets = record.AccidentDetails;
            //String wat = gson.toJson(record, DriverSchema.class);
            //Log.d("MainActivity:loadRecord", wat);

            // NOTE(review): record.Vehicle is dereferenced before the deets null-check below;
            // a record without vehicles would NPE here rather than return a friendly message.
            //Vehicle vehicle = record.getVehicle().get(0);
            final Vehicle vehicle = record.Vehicle.get(0);
            if (vehicle != null) {
                //String plateNo = vehicle.getPlateNumber();
                String plateNo = vehicle.PlateNumber;
                if (plateNo != null) {
                    Log.d("MainActivity:loadRecord", "Got vehicle plate #" + plateNo);
                }
            }
            if (deets == null) {
                Log.e("MainActivity:loadRecord", "NO ACCIDENT DETAILS FOUND GAAAAAH!");
                return "Got no deets?!?";
            }
            //AccidentDetails.Severity severity = deets.getSeverity();
            // NOTE(review): this null-check is dead code -- Enum.name() never returns null,
            // and if deets.Severity itself were null the call above would already have thrown
            // an NPE. The null-check belongs on deets.Severity, before calling .name().
            String severity = deets.Severity.name();
            if (severity == null) {
                Log.e("MainActivity:loadRecord", "NO SEVERITY FOUND GAAAAAH!");
                return "Got no severity?!?";
            }
            Log.d("MainActivity:loadRecord", "Read accident with severity: " + severity);

            /*
            Field[] deetFields = AccidentDetails.class.getDeclaredFields();
            Log.d("loadrecord", "Looking into deets...");
            if (deetFields.length == 0) {
                Log.d("loadrecord", "No fields on deets?");
            }
            for (Field fld : deetFields) {
                String name = fld.getName();
                Annotation[] annotations = fld.getDeclaredAnnotations();
                for (Annotation annotation : annotations) {
                    Log.d("MainActivity", "Details Field " + name + " has annotation " + annotation.toString());
                }
            }
            */

            // Validation runs as a chain: deets (listener1) -> deets-with-bad-id (listener2)
            // -> vehicle (listener3), each posting a YAY/BOO snackbar.
            final ValidationTask.ValidationCallbackListener listener3 =
                    new ValidationTask.ValidationCallbackListener() {
                @Override
                public void callback(boolean haveErrors) {
                    String response = "YAY";
                    if (haveErrors) {
                        response = "BOO";
                    }
                    Snackbar.make(findViewById(R.id.fab), response, Snackbar.LENGTH_LONG)
                            .setAction("Action", null).show();
                }
            };
            final ValidationTask.ValidationCallbackListener listener2 =
                    new ValidationTask.ValidationCallbackListener() {
                @Override
                public void callback(boolean haveErrors) {
                    String response = "YAY";
                    if (haveErrors) {
                        response = "BOO";
                    }
                    Snackbar.make(findViewById(R.id.fab), response, Snackbar.LENGTH_LONG)
                            .setAction("Action", null).show();
                    new ValidationTask<Vehicle>(listener3).execute(vehicle);
                }
            };
            ValidationTask.ValidationCallbackListener listener1 =
                    new ValidationTask.ValidationCallbackListener() {
                @Override
                public void callback(boolean haveErrors) {
                    String response = "YAY";
                    if (haveErrors) {
                        response = "BOO";
                    }
                    Snackbar.make(findViewById(R.id.fab), response, Snackbar.LENGTH_LONG)
                            .setAction("Action", null).show();
                    // introduce error (deliberately invalid id, so the next validation fails)
                    deets.LocalId = "IAMNOTAVALIDID";
                    new ValidationTask<AccidentDetails>(listener2).execute(deets);
                }
            };
            //new ValidationTask<DriverSchema>(listener).execute(record);
            new ValidationTask<AccidentDetails>(listener1).execute(deets);

            return severity;
        } catch (IOException e) {
            e.printStackTrace();
            return "Something broke.";
        }
    }

    // Placeholder for jsonschema2pojo-based schema generation; body is fully commented out.
    void buildSchema() {
        JCodeModel codeModel = new JCodeModel();
        /*
        URL source = null;
        try {
            source = new URL("file:///res/values/json/schemas/example_schema.json");
            GenerationConfig config = new DefaultGenerationConfig() {
                @Override
                public boolean isGenerateBuilders() { // set config option by overriding method
                    return true;
                }
            };
            SchemaMapper mapper = new SchemaMapper(new RuleFactory(config, new Jackson2Annotator(), new SchemaStore()),
                    new SchemaGenerator());
            mapper.generate(codeModel, "MySchema", "com.azavea.prs", source);
            codeModel.build(new File(getFilesDir() + "/MyFooOutput"));
        } catch (MalformedURLException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
        */
    }

    @Override
    public void onStart() {
        super.onStart();

        // ATTENTION: This was auto-generated to implement the App Indexing API.
        // See https://g.co/AppIndexing/AndroidStudio for more information.
        client.connect();
        Action viewAction = Action.newAction(
                Action.TYPE_VIEW, // TODO: choose an action type.
                "Main Page", // TODO: Define a title for the content shown.
                // TODO: If you have web page content that matches this app activity's content,
                // make sure this auto-generated web page URL is correct.
                // Otherwise, set the URL to null.
                Uri.parse("http://host/path"),
                // TODO: Make sure this auto-generated app deep link URI is correct.
                Uri.parse("android-app://com.azavea.prs.driver/http/host/path")
        );
        AppIndex.AppIndexApi.start(client, viewAction);
    }

    @Override
    public void onStop() {
        super.onStop();

        // ATTENTION: This was auto-generated to implement the App Indexing API.
        // See https://g.co/AppIndexing/AndroidStudio for more information.
        Action viewAction = Action.newAction(
                Action.TYPE_VIEW, // TODO: choose an action type.
                "Main Page", // TODO: Define a title for the content shown.
                // TODO: If you have web page content that matches this app activity's content,
                // make sure this auto-generated web page URL is correct.
                // Otherwise, set the URL to null.
                Uri.parse("http://host/path"),
                // TODO: Make sure this auto-generated app deep link URI is correct.
                Uri.parse("android-app://com.azavea.prs.driver/http/host/path")
        );
        AppIndex.AppIndexApi.end(client, viewAction);
        client.disconnect();
    }
}
/*
 * Copyright 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.exoplayer2.extractor.jpeg;

import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static java.lang.annotation.ElementType.TYPE_USE;

import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.extractor.Extractor;
import com.google.android.exoplayer2.extractor.ExtractorInput;
import com.google.android.exoplayer2.extractor.ExtractorOutput;
import com.google.android.exoplayer2.extractor.PositionHolder;
import com.google.android.exoplayer2.extractor.SeekMap;
import com.google.android.exoplayer2.extractor.TrackOutput;
import com.google.android.exoplayer2.extractor.mp4.Mp4Extractor;
import com.google.android.exoplayer2.metadata.Metadata;
import com.google.android.exoplayer2.metadata.mp4.MotionPhotoMetadata;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.ParsableByteArray;
import java.io.IOException;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;

/**
 * Extracts JPEG image using the Exif format.
 *
 * <p>Walks the JPEG marker segments as a small state machine. If an APP1/XMP segment
 * describes a motion photo, the embedded MP4 video is delegated to an {@link Mp4Extractor}
 * reading from the video's start offset; otherwise only an image track is output.
 */
public final class JpegExtractor implements Extractor {

  /** Parser states. */
  @Documented
  @Retention(RetentionPolicy.SOURCE)
  @Target(TYPE_USE)
  @IntDef({
    STATE_READING_MARKER,
    STATE_READING_SEGMENT_LENGTH,
    STATE_READING_SEGMENT,
    STATE_SNIFFING_MOTION_PHOTO_VIDEO,
    STATE_READING_MOTION_PHOTO_VIDEO,
    STATE_ENDED,
  })
  private @interface State {}

  // State constants (value 3 is unused).
  private static final int STATE_READING_MARKER = 0;
  private static final int STATE_READING_SEGMENT_LENGTH = 1;
  private static final int STATE_READING_SEGMENT = 2;
  private static final int STATE_SNIFFING_MOTION_PHOTO_VIDEO = 4;
  private static final int STATE_READING_MOTION_PHOTO_VIDEO = 5;
  private static final int STATE_ENDED = 6;

  private static final int EXIF_ID_CODE_LENGTH = 6;
  private static final long EXIF_HEADER = 0x45786966; // Exif
  private static final int MARKER_SOI = 0xFFD8; // Start of image marker
  private static final int MARKER_SOS = 0xFFDA; // Start of scan (image data) marker
  private static final int MARKER_APP0 = 0xFFE0; // Application data 0 marker
  private static final int MARKER_APP1 = 0xFFE1; // Application data 1 marker
  private static final String HEADER_XMP_APP1 = "http://ns.adobe.com/xap/1.0/";

  /**
   * The identifier to use for the image track. Chosen to avoid colliding with track IDs used by
   * {@link Mp4Extractor} for motion photos.
   */
  private static final int IMAGE_TRACK_ID = 1024;

  // Reusable buffer for reading/peeking marker bytes and segment headers.
  private final ParsableByteArray scratch;

  private @MonotonicNonNull ExtractorOutput extractorOutput;
  private @State int state;
  private int marker;
  private int segmentLength;
  // Byte offset of the embedded MP4 video, or C.POSITION_UNSET if none was found.
  private long mp4StartPosition;
  @Nullable private MotionPhotoMetadata motionPhotoMetadata;
  private @MonotonicNonNull ExtractorInput lastExtractorInput;
  private @MonotonicNonNull StartOffsetExtractorInput mp4ExtractorStartOffsetExtractorInput;
  @Nullable private Mp4Extractor mp4Extractor;

  public JpegExtractor() {
    scratch = new ParsableByteArray(EXIF_ID_CODE_LENGTH);
    mp4StartPosition = C.POSITION_UNSET;
  }

  @Override
  public boolean sniff(ExtractorInput input) throws IOException {
    // See ITU-T.81 (1992) subsection B.1.1.3 and Exif version 2.2 (2002) subsection 4.5.4.
    if (peekMarker(input) != MARKER_SOI) {
      return false;
    }
    marker = peekMarker(input);
    // Even though JFIF and Exif standards are incompatible in theory, Exif files often contain a
    // JFIF APP0 marker segment preceding the Exif APP1 marker segment. Skip the JFIF segment if
    // present.
    if (marker == MARKER_APP0) {
      advancePeekPositionToNextSegment(input);
      marker = peekMarker(input);
    }
    if (marker != MARKER_APP1) {
      return false;
    }
    input.advancePeekPosition(2); // Unused segment length
    scratch.reset(/* limit= */ EXIF_ID_CODE_LENGTH);
    input.peekFully(scratch.getData(), /* offset= */ 0, EXIF_ID_CODE_LENGTH);
    return scratch.readUnsignedInt() == EXIF_HEADER && scratch.readUnsignedShort() == 0; // Exif\0\0
  }

  @Override
  public void init(ExtractorOutput output) {
    extractorOutput = output;
  }

  @Override
  public @ReadResult int read(ExtractorInput input, PositionHolder seekPosition)
      throws IOException {
    // Dispatch on the current parser state; each helper advances the state machine.
    switch (state) {
      case STATE_READING_MARKER:
        readMarker(input);
        return RESULT_CONTINUE;
      case STATE_READING_SEGMENT_LENGTH:
        readSegmentLength(input);
        return RESULT_CONTINUE;
      case STATE_READING_SEGMENT:
        readSegment(input);
        return RESULT_CONTINUE;
      case STATE_SNIFFING_MOTION_PHOTO_VIDEO:
        // Request a seek to the video start position before sniffing, if not there yet.
        if (input.getPosition() != mp4StartPosition) {
          seekPosition.position = mp4StartPosition;
          return RESULT_SEEK;
        }
        sniffMotionPhotoVideo(input);
        return RESULT_CONTINUE;
      case STATE_READING_MOTION_PHOTO_VIDEO:
        if (mp4ExtractorStartOffsetExtractorInput == null || input != lastExtractorInput) {
          lastExtractorInput = input;
          mp4ExtractorStartOffsetExtractorInput =
              new StartOffsetExtractorInput(input, mp4StartPosition);
        }
        @ReadResult
        int readResult =
            checkNotNull(mp4Extractor).read(mp4ExtractorStartOffsetExtractorInput, seekPosition);
        if (readResult == RESULT_SEEK) {
          // Translate the MP4 extractor's relative seek position back to file coordinates.
          seekPosition.position += mp4StartPosition;
        }
        return readResult;
      case STATE_ENDED:
        return RESULT_END_OF_INPUT;
      default:
        throw new IllegalStateException();
    }
  }

  @Override
  public void seek(long position, long timeUs) {
    if (position == 0) {
      // Seek to the start restarts JPEG parsing from scratch.
      state = STATE_READING_MARKER;
      mp4Extractor = null;
    } else if (state == STATE_READING_MOTION_PHOTO_VIDEO) {
      checkNotNull(mp4Extractor).seek(position, timeUs);
    }
  }

  @Override
  public void release() {
    if (mp4Extractor != null) {
      mp4Extractor.release();
    }
  }

  /** Peeks the next two bytes as an unsigned marker value without consuming them. */
  private int peekMarker(ExtractorInput input) throws IOException {
    scratch.reset(/* limit= */ 2);
    input.peekFully(scratch.getData(), /* offset= */ 0, /* length= */ 2);
    return scratch.readUnsignedShort();
  }

  /** Advances the peek position past the current segment (length field includes itself). */
  private void advancePeekPositionToNextSegment(ExtractorInput input) throws IOException {
    scratch.reset(/* limit= */ 2);
    input.peekFully(scratch.getData(), /* offset= */ 0, /* length= */ 2);
    int segmentLength = scratch.readUnsignedShort() - 2;
    input.advancePeekPosition(segmentLength);
  }

  private void readMarker(ExtractorInput input) throws IOException {
    scratch.reset(/* limit= */ 2);
    input.readFully(scratch.getData(), /* offset= */ 0, /* length= */ 2);
    marker = scratch.readUnsignedShort();
    if (marker == MARKER_SOS) { // Start of scan.
      if (mp4StartPosition != C.POSITION_UNSET) {
        // XMP indicated an embedded video: go sniff it.
        state = STATE_SNIFFING_MOTION_PHOTO_VIDEO;
      } else {
        endReadingWithImageTrack();
      }
    } else if ((marker < 0xFFD0 || marker > 0xFFD9) && marker != 0xFF01) {
      // Markers 0xFFD0-0xFFD9 and 0xFF01 are standalone (no segment length follows),
      // so only read a segment length for everything else.
      state = STATE_READING_SEGMENT_LENGTH;
    }
  }

  private void readSegmentLength(ExtractorInput input) throws IOException {
    scratch.reset(2);
    input.readFully(scratch.getData(), /* offset= */ 0, /* length= */ 2);
    // Stored length includes the two length bytes themselves.
    segmentLength = scratch.readUnsignedShort() - 2;
    state = STATE_READING_SEGMENT;
  }

  private void readSegment(ExtractorInput input) throws IOException {
    if (marker == MARKER_APP1) {
      // APP1 may carry XMP; the first XMP segment found is parsed for motion photo metadata.
      ParsableByteArray payload = new ParsableByteArray(segmentLength);
      input.readFully(payload.getData(), /* offset= */ 0, /* length= */ segmentLength);
      if (motionPhotoMetadata == null
          && HEADER_XMP_APP1.equals(payload.readNullTerminatedString())) {
        @Nullable String xmpString = payload.readNullTerminatedString();
        if (xmpString != null) {
          motionPhotoMetadata = getMotionPhotoMetadata(xmpString, input.getLength());
          if (motionPhotoMetadata != null) {
            mp4StartPosition = motionPhotoMetadata.videoStartPosition;
          }
        }
      }
    } else {
      input.skipFully(segmentLength);
    }
    state = STATE_READING_MARKER;
  }

  private void sniffMotionPhotoVideo(ExtractorInput input) throws IOException {
    // Check if the file is truncated.
    boolean peekedData =
        input.peekFully(
            scratch.getData(), /* offset= */ 0, /* length= */ 1, /* allowEndOfInput= */ true);
    if (!peekedData) {
      endReadingWithImageTrack();
    } else {
      input.resetPeekPosition();
      if (mp4Extractor == null) {
        mp4Extractor = new Mp4Extractor();
      }
      mp4ExtractorStartOffsetExtractorInput =
          new StartOffsetExtractorInput(input, mp4StartPosition);
      if (mp4Extractor.sniff(mp4ExtractorStartOffsetExtractorInput)) {
        mp4Extractor.init(
            new StartOffsetExtractorOutput(mp4StartPosition, checkNotNull(extractorOutput)));
        startReadingMotionPhoto();
      } else {
        // Not a valid MP4 at the declared offset: fall back to image-only output.
        endReadingWithImageTrack();
      }
    }
  }

  private void startReadingMotionPhoto() {
    outputImageTrack(checkNotNull(motionPhotoMetadata));
    state = STATE_READING_MOTION_PHOTO_VIDEO;
  }

  private void endReadingWithImageTrack() {
    outputImageTrack();
    checkNotNull(extractorOutput).endTracks();
    extractorOutput.seekMap(new SeekMap.Unseekable(/* durationUs= */ C.TIME_UNSET));
    state = STATE_ENDED;
  }

  /** Outputs the JPEG image track, attaching any supplied metadata entries. */
  private void outputImageTrack(Metadata.Entry... metadataEntries) {
    TrackOutput imageTrackOutput =
        checkNotNull(extractorOutput).track(IMAGE_TRACK_ID, C.TRACK_TYPE_IMAGE);
    imageTrackOutput.format(
        new Format.Builder()
            .setContainerMimeType(MimeTypes.IMAGE_JPEG)
            .setMetadata(new Metadata(metadataEntries))
            .build());
  }

  /**
   * Attempts to parse the specified XMP data describing the motion photo, returning the resulting
   * {@link MotionPhotoMetadata} or {@code null} if it wasn't possible to derive motion photo
   * metadata.
   *
   * @param xmpString A string of XML containing XMP motion photo metadata to attempt to parse.
   * @param inputLength The length of the input stream in bytes, or {@link C#LENGTH_UNSET} if
   *     unknown.
   * @return The {@link MotionPhotoMetadata}, or {@code null} if it wasn't possible to derive motion
   *     photo metadata.
   * @throws IOException If an error occurs parsing the XMP string.
   */
  @Nullable
  private static MotionPhotoMetadata getMotionPhotoMetadata(String xmpString, long inputLength)
      throws IOException {
    // Metadata defines offsets from the end of the stream, so we need the stream length to
    // determine start offsets.
    if (inputLength == C.LENGTH_UNSET) {
      return null;
    }

    // Motion photos have (at least) a primary image media item and a secondary video media item.
    @Nullable
    MotionPhotoDescription motionPhotoDescription = XmpMotionPhotoDescriptionParser.parse(xmpString);
    if (motionPhotoDescription == null) {
      return null;
    }
    return motionPhotoDescription.getMotionPhotoMetadata(inputLength);
  }
}
/* Copyright 2011-2013 Frederic Langlet Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. you may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package kanzi.function.wavelet; // Works on images post wavelet transform // Uses oriented raster scan (horizontal, then vertical, then diagonal) // Sub-bands: // LL HL (low) (horizontal) // LH HH (vertical) (diagonal) // Example: // 0 1 2 3 // 4 5 6 7 // 8 9 10 11 // 12 13 14 15 // scanner => 2 3 6 7 8 12 9 13 10 14 11 15 // Ignore L0 (0 1 4 5) // Horizontal (HL) 2 3 6 7 // Vertical (LH) 8 12 9 13 // Diagonal (HH) 10 14 11 15 public class WaveletBandScanner { public static final int HL_BAND = 1; public static final int LH_BAND = 2; public static final int HH_BAND = 4; public static final int ALL_BANDS = HL_BAND | HH_BAND | LH_BAND; private final int width; private final int height; private final int levels; private final int bandType; private final int size; // levels is used to limit the scanning to a subset of all bands public WaveletBandScanner(int width, int height, int bandType, int levels) { if (height < 2) throw new IllegalArgumentException("Invalid height parameter (must be at least 8)"); if (width < 8) throw new IllegalArgumentException("Invalid width parameter (must be at least 8)"); if (((bandType & HL_BAND) == 0) && ((bandType & LH_BAND) == 0) && ((bandType & HH_BAND) == 0)) throw new IllegalArgumentException("Invalid bandType parameter"); if (levels < 1) throw new IllegalArgumentException("Invalid levels parameter (must be at least 1)"); this.width = width; this.height = height; this.bandType = 
bandType; this.levels = levels; int sz = 0; int subtreeSize = 0; int x0 = this.width >> this.levels; int y0 = this.height >> this.levels; int x = x0; int y = y0; for (int i=0; i<levels; i++) { subtreeSize += (x * y); x <<= 1; y <<= 1; } if ((this.bandType & HL_BAND) != 0) sz += subtreeSize; if ((this.bandType & LH_BAND) != 0) sz += subtreeSize; if ((this.bandType & HH_BAND) != 0) sz += subtreeSize; this.size = sz; } public int getSize() { return this.size; } // Read a chunk of the subtree of size length // Allows the use of an array much smaller than the tree // Return the number of integers put in the provided array public int getIndexes(int[] block, int length, int offset) { if (offset >= this.size) return 0; if (length > block.length) length = block.length; final int initialW = this.width >> this.levels; final int initialH = this.height >> this.levels; int w = initialW; int h = initialH; int offsetInBand = 0; int level = 0; // Find offset in band if (offset > 0) { int count = 0; int previousCount = 0; // Remove already scanned bands for (level=0; level<this.levels; level++) { if ((this.bandType & HL_BAND) != 0) count += (w * h); if ((this.bandType & HH_BAND) != 0) count += (w * h); if ((this.bandType & LH_BAND) != 0) count += (w * h); if (count > offset) break; w <<= 1; h <<= 1; previousCount = count; } offsetInBand = offset - previousCount; } int count = 0; while (level < this.levels) { // Scan sub-band by sub-band with increasing dimension count += this.getBandIndexes(block, w, h, count, offsetInBand); offsetInBand = 0; if (count >= length) break; w <<= 1; h <<= 1; level++; } return count; } // Read chunk of band of dimension 'dim' filtered by band type // Return the number of integers put in the provided array protected int getBandIndexes(int[] block, int w, int h, int blockIdx, int offsetInBand) { if ((w >= this.width) || (h >= this.height)) return 0; int idx = blockIdx; int mult = h * this.width; int count = 0; // HL band: horizontal scan if 
(((this.bandType & HL_BAND) != 0) && (idx < block.length)) { final int end = w + mult; for (int offs=w; offs<end; offs+=this.width) { if (count + w < offsetInBand) { count += w; continue; } final int endStep = offs + w; for (int i=offs; i<endStep; i++, count++) { if (count < offsetInBand) continue; if (idx == block.length) return idx - blockIdx; block[idx++] = i; } } } // LH band: vertical scan if (((this.bandType & LH_BAND) != 0) && (idx < block.length)) { final int end = w + mult; for (int offs=mult; offs<end; offs++) { if (count + h < offsetInBand) { count += h; continue; } final int endStep = offs + mult; for (int i=offs; i<endStep; i+=this.width, count++) { if (count < offsetInBand) continue; if (idx == block.length) return idx - blockIdx; block[idx++] = i; } } } // HH band: diagonal scan (from lower left to higher right) if (((this.bandType & HH_BAND) != 0) && (idx < block.length)) { final int min = (w < h) ? w : h; int offset = w + mult; for (int j=0; j<min; j++) { int offs = offset; for (int i=0; i<=j; i++, count++) { if (count < offsetInBand) { offs -= this.width; continue; } if (idx == block.length) return idx - blockIdx; block[idx++] = offs + i; offs -= this.width; } offset += this.width; } for (int j=min; j<h; j++) { int offs = offset; for (int i=0; i<w; i++, count++) { if (count < offsetInBand) { offs -= this.width; continue; } if (idx == block.length) return idx - blockIdx; block[idx++] = offs + i; offs -= this.width; } offset += this.width; } offset = w + mult + mult - this.width + 1; for (int i=min; i<w; i++) { int offs = offset; for (int j=0; j<h; j++, count++) { if (count < offsetInBand) { offs -= (this.width - 1); continue; } if (idx == block.length) return idx - blockIdx; block[idx++] = offs; offs -= (this.width - 1); } offset++; } for (int i=1; i<min; i++) { int offs = offset; for (int j=0; j<min-i; j++, count++) { if (count < offsetInBand) { offs -= (this.width - 1); continue; } if (idx == block.length) return idx - blockIdx; block[idx++] = 
offs; offs -= (this.width - 1); } offset++; } } return idx - blockIdx; } // Read whole tree (except top LL band) filtered by band type // Max speed compared to partial scan // Return the number of integers put in the provided array public int getIndexes(int[] block) { int w = this.width >> this.levels; int h = this.height >> this.levels; int count = 0; for (int i=0; i<this.levels; i++) { // Scan sub-band by sub-band with increasing dimension count += this.getBandIndexes(block, w, h, count, 0); if (count >= block.length) break; w <<= 1; h <<= 1; } return count; } // Read band of dimensions w & h filtered by band type // Return the number of integers put in the provided array public int getBandIndexes(int[] block, int w, int h, int blockIdx) { return this.getBandIndexes(block, w, h, blockIdx, 0); } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search;

import com.carrotsearch.hppc.ObjectOpenHashSet;
import com.carrotsearch.hppc.ObjectSet;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableMap;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.search.TopDocs;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.util.concurrent.ConcurrentMapLong;
import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMapper.Loading;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.TemplateQueryParser;
import org.elasticsearch.index.search.stats.StatsGroupsParseElement;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesLifecycle;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.indices.IndicesWarmer.TerminationHandle;
import org.elasticsearch.indices.IndicesWarmer.WarmerContext;
import org.elasticsearch.indices.cache.query.IndicesQueryCache;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.dfs.CachedDfSource;
import org.elasticsearch.search.dfs.DfsPhase;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.*;
import org.elasticsearch.search.internal.*;
import org.elasticsearch.search.internal.SearchContext.Lifetime;
import org.elasticsearch.search.query.*;
import org.elasticsearch.search.warmer.IndexWarmersMetaData;
import org.elasticsearch.threadpool.ThreadPool;

import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.atomic.AtomicLong;

import static org.elasticsearch.common.Strings.hasLength;
import static org.elasticsearch.common.unit.TimeValue.timeValueMinutes;

/**
 * Node-level service that executes the phases of a search request (dfs, query, fetch,
 * scan/scroll) against local shards, and manages the lifecycle of the associated
 * {@link SearchContext} instances (creation, keep-alive reaping, and cleanup).
 */
public class SearchService extends AbstractLifecycleComponent<SearchService> {

    public static final String NORMS_LOADING_KEY = "index.norms.loading";
    // NOTE(review): "DEFAUTL" is a long-standing typo in these identifiers; the setting
    // *values* ("search.default_keep_alive" etc.) are spelled correctly, so the names are
    // kept for source compatibility.
    private static final String DEFAUTL_KEEPALIVE_COMPONENENT_KEY = "default_keep_alive";
    public static final String DEFAUTL_KEEPALIVE_KEY = "search." + DEFAUTL_KEEPALIVE_COMPONENENT_KEY;
    private static final String KEEPALIVE_INTERVAL_COMPONENENT_KEY = "keep_alive_interval";
    public static final String KEEPALIVE_INTERVAL_KEY = "search." + KEEPALIVE_INTERVAL_COMPONENENT_KEY;

    private final ThreadPool threadPool;

    private final ClusterService clusterService;

    private final IndicesService indicesService;

    private final IndicesWarmer indicesWarmer;

    private final ScriptService scriptService;

    private final CacheRecycler cacheRecycler;

    private final PageCacheRecycler pageCacheRecycler;

    private final BigArrays bigArrays;

    private final DfsPhase dfsPhase;

    private final QueryPhase queryPhase;

    private final FetchPhase fetchPhase;

    private final IndicesQueryCache indicesQueryCache;

    // Milliseconds a context is kept alive when the request specifies no scroll keep-alive.
    private final long defaultKeepAlive;

    // Periodic task that frees contexts whose keep-alive has expired.
    private final ScheduledFuture<?> keepAliveReaper;

    private final AtomicLong idGenerator = new AtomicLong();

    // All live search contexts on this node, keyed by context id.
    private final ConcurrentMapLong<SearchContext> activeContexts = ConcurrentCollections.newConcurrentMapLongWithAggressiveConcurrency();

    private final ImmutableMap<String, SearchParseElement> elementParsers;

    @Inject
    public SearchService(Settings settings, ClusterService clusterService, IndicesService indicesService, IndicesWarmer indicesWarmer, ThreadPool threadPool,
                         ScriptService scriptService, CacheRecycler cacheRecycler, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, DfsPhase dfsPhase,
                         QueryPhase queryPhase, FetchPhase fetchPhase, IndicesQueryCache indicesQueryCache) {
        super(settings);
        this.threadPool = threadPool;
        this.clusterService = clusterService;
        this.indicesService = indicesService;
        indicesService.indicesLifecycle().addListener(new IndicesLifecycle.Listener() {

            @Override
            public void afterIndexDeleted(Index index, @IndexSettings Settings indexSettings) {
                // once an index is closed we can just clean up all the pending search context information
                // to release memory and let references to the filesystem go etc.
                freeAllContextForIndex(index);
            }
        });
        this.indicesWarmer = indicesWarmer;
        this.scriptService = scriptService;
        this.cacheRecycler = cacheRecycler;
        this.pageCacheRecycler = pageCacheRecycler;
        this.bigArrays = bigArrays;
        this.dfsPhase = dfsPhase;
        this.queryPhase = queryPhase;
        this.fetchPhase = fetchPhase;
        this.indicesQueryCache = indicesQueryCache;

        TimeValue keepAliveInterval = componentSettings.getAsTime(KEEPALIVE_INTERVAL_COMPONENENT_KEY, timeValueMinutes(1));
        // we can have 5 minutes here, since we make sure to clean with search requests and when shard/index closes
        this.defaultKeepAlive = componentSettings.getAsTime(DEFAUTL_KEEPALIVE_COMPONENENT_KEY, timeValueMinutes(5)).millis();

        // Aggregate the parse elements contributed by each phase into a single lookup table.
        Map<String, SearchParseElement> elementParsers = new HashMap<>();
        elementParsers.putAll(dfsPhase.parseElements());
        elementParsers.putAll(queryPhase.parseElements());
        elementParsers.putAll(fetchPhase.parseElements());
        elementParsers.put("stats", new StatsGroupsParseElement());
        this.elementParsers = ImmutableMap.copyOf(elementParsers);

        this.keepAliveReaper = threadPool.scheduleWithFixedDelay(new Reaper(), keepAliveInterval);

        this.indicesWarmer.addListener(new NormsWarmer());
        this.indicesWarmer.addListener(new FieldDataWarmer());
        this.indicesWarmer.addListener(new SearchWarmer());
    }

    @Override
    protected void doStart() throws ElasticsearchException {
    }

    @Override
    protected void doStop() throws ElasticsearchException {
        // Free every live context before shutdown.
        for (final SearchContext context : activeContexts.values()) {
            freeContext(context.id());
        }
        activeContexts.clear();
    }

    @Override
    protected void doClose() throws ElasticsearchException {
        doStop();
        FutureUtils.cancel(keepAliveReaper);
    }

    /**
     * Executes the dfs phase for the given shard-level request, creating a new search
     * context; the context is freed on failure and always cleaned after the call.
     */
    public DfsSearchResult executeDfsPhase(ShardSearchRequest request) throws ElasticsearchException {
        final SearchContext context = createAndPutContext(request);
        try {
            contextProcessing(context);
            dfsPhase.execute(context);
            contextProcessedSuccessfully(context);
            return context.dfsResult();
        } catch (Throwable e) {
            logger.trace("Dfs phase failed", e);
            freeContext(context.id());
            throw ExceptionsHelper.convertToRuntime(e);
        } finally {
            cleanContext(context);
        }
    }

    /**
     * Starts a scan: runs the query phase in COUNT mode; subsequent scroll requests switch
     * back to SCAN. Requires a scroll and rejects aggregations.
     */
    public QuerySearchResult executeScan(ShardSearchRequest request) throws ElasticsearchException {
        final SearchContext context = createAndPutContext(request);
        try {
            if (context.aggregations() != null) {
                throw new ElasticsearchIllegalArgumentException("aggregations are not supported with search_type=scan");
            }
            assert context.searchType() == SearchType.SCAN;
            context.searchType(SearchType.COUNT); // move to COUNT, and then, when scrolling, move to SCAN
            assert context.searchType() == SearchType.COUNT;
            if (context.scroll() == null) {
                throw new ElasticsearchException("Scroll must be provided when scanning...");
            }
            contextProcessing(context);
            queryPhase.execute(context);
            contextProcessedSuccessfully(context);
            return context.queryResult();
        } catch (Throwable e) {
            logger.trace("Scan phase failed", e);
            freeContext(context.id());
            throw ExceptionsHelper.convertToRuntime(e);
        } finally {
            cleanContext(context);
        }
    }

    /**
     * Continues a scan for an existing context: runs query + fetch, and frees the context
     * once the scroll ends or the shard is exhausted.
     */
    public ScrollQueryFetchSearchResult executeScan(InternalScrollSearchRequest request) throws ElasticsearchException {
        final SearchContext context = findContext(request.id());
        contextProcessing(context);
        try {
            processScroll(request, context);
            if (context.searchType() == SearchType.COUNT) {
                // first scanning, reset the from to 0
                context.searchType(SearchType.SCAN);
                context.from(0);
            }
            queryPhase.execute(context);
            shortcutDocIdsToLoadForScanning(context);
            fetchPhase.execute(context);
            // Fewer hits than the page size means the shard is exhausted: release the context.
            if (context.scroll() == null || context.fetchResult().hits().hits().length < context.size()) {
                freeContext(request.id());
            } else {
                contextProcessedSuccessfully(context);
            }
            return new ScrollQueryFetchSearchResult(new QueryFetchSearchResult(context.queryResult(), context.fetchResult()), context.shardTarget());
        } catch (Throwable e) {
            logger.trace("Scan phase failed", e);
            freeContext(context.id());
            throw ExceptionsHelper.convertToRuntime(e);
        } finally {
            cleanContext(context);
        }
    }

    /**
     * Try to
load the query results from the cache or execute the query phase directly if the cache cannot be used. */ private void loadOrExecuteQueryPhase(final ShardSearchRequest request, final SearchContext context, final QueryPhase queryPhase) throws Exception { final boolean canCache = indicesQueryCache.canCache(request, context); if (canCache) { indicesQueryCache.loadIntoContext(request, context, queryPhase); } else { queryPhase.execute(context); } } public QuerySearchResultProvider executeQueryPhase(ShardSearchRequest request) throws ElasticsearchException { final SearchContext context = createAndPutContext(request); try { context.indexShard().searchService().onPreQueryPhase(context); long time = System.nanoTime(); contextProcessing(context); loadOrExecuteQueryPhase(request, context, queryPhase); if (context.searchType() == SearchType.COUNT) { freeContext(context.id()); } else { contextProcessedSuccessfully(context); } context.indexShard().searchService().onQueryPhase(context, System.nanoTime() - time); return context.queryResult(); } catch (Throwable e) { // execution exception can happen while loading the cache, strip it if (e instanceof ExecutionException) { e = e.getCause(); } context.indexShard().searchService().onFailedQueryPhase(context); logger.trace("Query phase failed", e); freeContext(context.id()); throw ExceptionsHelper.convertToRuntime(e); } finally { cleanContext(context); } } public ScrollQuerySearchResult executeQueryPhase(InternalScrollSearchRequest request) throws ElasticsearchException { final SearchContext context = findContext(request.id()); try { context.indexShard().searchService().onPreQueryPhase(context); long time = System.nanoTime(); contextProcessing(context); processScroll(request, context); queryPhase.execute(context); contextProcessedSuccessfully(context); context.indexShard().searchService().onQueryPhase(context, System.nanoTime() - time); return new ScrollQuerySearchResult(context.queryResult(), context.shardTarget()); } catch (Throwable 
e) {
    context.indexShard().searchService().onFailedQueryPhase(context);
    logger.trace("Query phase failed", e);
    freeContext(context.id());
    throw ExceptionsHelper.convertToRuntime(e);
} finally {
    cleanContext(context);
}
}

/**
 * Executes the query phase for a DFS-then-fetch search: installs the aggregated
 * document frequencies carried by the request on the context's searcher, then runs
 * the query phase and returns the shard-local query result.
 *
 * On failure to install the df source the context is freed and cleaned before the
 * exception propagates; on query failure the context is freed and the original
 * exception is rethrown as a runtime exception.
 */
public QuerySearchResult executeQueryPhase(QuerySearchRequest request) throws ElasticsearchException {
    final SearchContext context = findContext(request.id());
    contextProcessing(context);
    try {
        // Wrap the searcher so term statistics come from the pre-aggregated DFS results.
        context.searcher().dfSource(new CachedDfSource(context.searcher().getIndexReader(), request.dfs(), context.similarityService().similarity()));
    } catch (Throwable e) {
        freeContext(context.id());
        cleanContext(context);
        throw new QueryPhaseExecutionException(context, "Failed to set aggregated df", e);
    }
    try {
        context.indexShard().searchService().onPreQueryPhase(context);
        long time = System.nanoTime();
        queryPhase.execute(context);
        contextProcessedSuccessfully(context);
        context.indexShard().searchService().onQueryPhase(context, System.nanoTime() - time);
        return context.queryResult();
    } catch (Throwable e) {
        context.indexShard().searchService().onFailedQueryPhase(context);
        logger.trace("Query phase failed", e);
        freeContext(context.id());
        throw ExceptionsHelper.convertToRuntime(e);
    } finally {
        cleanContext(context);
    }
}

/**
 * Executes query and fetch phases back-to-back on a freshly created context
 * (query-and-fetch search type). The context is freed immediately unless the
 * request is a scroll, in which case it is kept alive for follow-up requests.
 */
public QueryFetchSearchResult executeFetchPhase(ShardSearchRequest request) throws ElasticsearchException {
    final SearchContext context = createAndPutContext(request);
    contextProcessing(context);
    try {
        context.indexShard().searchService().onPreQueryPhase(context);
        long time = System.nanoTime();
        try {
            queryPhase.execute(context);
        } catch (Throwable e) {
            context.indexShard().searchService().onFailedQueryPhase(context);
            throw ExceptionsHelper.convertToRuntime(e);
        }
        long time2 = System.nanoTime();
        context.indexShard().searchService().onQueryPhase(context, time2 - time);
        context.indexShard().searchService().onPreFetchPhase(context);
        try {
            shortcutDocIdsToLoad(context);
            fetchPhase.execute(context);
            if (context.scroll() == null) {
                // NOTE(review): context.id() is used here (not request.id()) because the
                // context was just created with a generator-assigned id.
                freeContext(context.id());
            } else {
                contextProcessedSuccessfully(context);
            }
        } catch (Throwable e) {
            context.indexShard().searchService().onFailedFetchPhase(context);
            throw ExceptionsHelper.convertToRuntime(e);
        }
        context.indexShard().searchService().onFetchPhase(context, System.nanoTime() - time2);
        return new QueryFetchSearchResult(context.queryResult(), context.fetchResult());
    } catch (Throwable e) {
        logger.trace("Fetch phase failed", e);
        freeContext(context.id());
        throw ExceptionsHelper.convertToRuntime(e);
    } finally {
        cleanContext(context);
    }
}

/**
 * Executes query and fetch phases for a DFS-then-fetch search on an existing
 * context: first installs the aggregated document frequencies from the request,
 * then runs query + fetch and returns the combined result.
 */
public QueryFetchSearchResult executeFetchPhase(QuerySearchRequest request) throws ElasticsearchException {
    final SearchContext context = findContext(request.id());
    contextProcessing(context);
    try {
        context.searcher().dfSource(new CachedDfSource(context.searcher().getIndexReader(), request.dfs(), context.similarityService().similarity()));
    } catch (Throwable e) {
        freeContext(context.id());
        cleanContext(context);
        throw new QueryPhaseExecutionException(context, "Failed to set aggregated df", e);
    }
    try {
        context.indexShard().searchService().onPreQueryPhase(context);
        long time = System.nanoTime();
        try {
            queryPhase.execute(context);
        } catch (Throwable e) {
            context.indexShard().searchService().onFailedQueryPhase(context);
            throw ExceptionsHelper.convertToRuntime(e);
        }
        long time2 = System.nanoTime();
        context.indexShard().searchService().onQueryPhase(context, time2 - time);
        context.indexShard().searchService().onPreFetchPhase(context);
        try {
            shortcutDocIdsToLoad(context);
            fetchPhase.execute(context);
            if (context.scroll() == null) {
                freeContext(request.id());
            } else {
                contextProcessedSuccessfully(context);
            }
        } catch (Throwable e) {
            context.indexShard().searchService().onFailedFetchPhase(context);
            throw ExceptionsHelper.convertToRuntime(e);
        }
        context.indexShard().searchService().onFetchPhase(context, System.nanoTime() - time2);
        return new QueryFetchSearchResult(context.queryResult(), context.fetchResult());
    } catch (Throwable e) {
        logger.trace("Fetch phase failed", e);
        freeContext(context.id());
        throw ExceptionsHelper.convertToRuntime(e);
    } finally {
        cleanContext(context);
    }
}

/**
 * Executes the next round of a scroll search: advances the context's "from" by
 * "size" via processScroll, then runs query + fetch and returns the result wrapped
 * with the shard target. The context is freed only when scrolling has ended.
 */
public ScrollQueryFetchSearchResult executeFetchPhase(InternalScrollSearchRequest request) throws ElasticsearchException {
    final SearchContext context = findContext(request.id());
    contextProcessing(context);
    try {
        processScroll(request, context);
        context.indexShard().searchService().onPreQueryPhase(context);
        long time = System.nanoTime();
        try {
            queryPhase.execute(context);
        } catch (Throwable e) {
            context.indexShard().searchService().onFailedQueryPhase(context);
            throw ExceptionsHelper.convertToRuntime(e);
        }
        long time2 = System.nanoTime();
        context.indexShard().searchService().onQueryPhase(context, time2 - time);
        context.indexShard().searchService().onPreFetchPhase(context);
        try {
            shortcutDocIdsToLoad(context);
            fetchPhase.execute(context);
            if (context.scroll() == null) {
                freeContext(request.id());
            } else {
                contextProcessedSuccessfully(context);
            }
        } catch (Throwable e) {
            context.indexShard().searchService().onFailedFetchPhase(context);
            throw ExceptionsHelper.convertToRuntime(e);
        }
        context.indexShard().searchService().onFetchPhase(context, System.nanoTime() - time2);
        return new ScrollQueryFetchSearchResult(new QueryFetchSearchResult(context.queryResult(), context.fetchResult()), context.shardTarget());
    } catch (Throwable e) {
        logger.trace("Fetch phase failed", e);
        freeContext(context.id());
        throw ExceptionsHelper.convertToRuntime(e);
    } finally {
        cleanContext(context);
    }
}

/**
 * Executes a standalone fetch phase for the doc ids carried by the request
 * (the second half of query-then-fetch). Records the last emitted doc when
 * present so subsequent scroll pages can continue from it.
 */
public FetchSearchResult executeFetchPhase(ShardFetchRequest request) throws ElasticsearchException {
    final SearchContext context = findContext(request.id());
    contextProcessing(context);
    try {
        if (request.lastEmittedDoc() != null) {
            context.lastEmittedDoc(request.lastEmittedDoc());
        }
        context.docIdsToLoad(request.docIds(), 0, request.docIdsSize());
        context.indexShard().searchService().onPreFetchPhase(context);
        long time = System.nanoTime();
        fetchPhase.execute(context);
        if (context.scroll() == null) {
            freeContext(request.id());
        } else {
            contextProcessedSuccessfully(context);
        }
        context.indexShard().searchService().onFetchPhase(context, System.nanoTime() - time);
        return context.fetchResult();
    } catch (Throwable e) {
        context.indexShard().searchService().onFailedFetchPhase(context);
        logger.trace("Fetch phase failed", e);
        // we just try to make sure this is freed - rethrow orig exception.
        freeContext(context.id());
        throw ExceptionsHelper.convertToRuntime(e);
    } finally {
        cleanContext(context);
    }
}

/**
 * Looks up an active context by id, installs it as the thread-local current
 * context, and returns it. Throws SearchContextMissingException when the id
 * is unknown (e.g. already reaped or freed).
 */
private SearchContext findContext(long id) throws SearchContextMissingException {
    SearchContext context = activeContexts.get(id);
    if (context == null) {
        throw new SearchContextMissingException(id);
    }
    SearchContext.setCurrent(context);
    return context;
}

/**
 * Creates a context for the request and registers it in the active map.
 * If registration or the onNewContext callback fails, the context is freed
 * so no half-registered context leaks.
 */
final SearchContext createAndPutContext(ShardSearchRequest request) throws ElasticsearchException {
    SearchContext context = createContext(request, null);
    boolean success = false;
    try {
        activeContexts.put(context.id(), context);
        context.indexShard().searchService().onNewContext(context);
        success = true;
        return context;
    } finally {
        if (!success) {
            freeContext(context.id());
        }
    }
}

/**
 * Builds a fully initialized SearchContext for the request: acquires a searcher
 * (unless one is supplied, e.g. by a warmer), parses template and source,
 * applies defaults for from/size, pre-processes all phases and computes the
 * keep-alive. Closes the context on any initialization failure.
 *
 * @param searcher an already-acquired searcher to reuse, or null to acquire one
 */
final SearchContext createContext(ShardSearchRequest request, @Nullable Engine.Searcher searcher) throws ElasticsearchException {
    IndexService indexService = indicesService.indexServiceSafe(request.index());
    IndexShard indexShard = indexService.shardSafe(request.shardId());
    SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().id(), request.index(), request.shardId());
    Engine.Searcher engineSearcher = searcher == null ? indexShard.acquireSearcher("search") : searcher;
    final SearchContext context = new DefaultSearchContext(idGenerator.incrementAndGet(), request, shardTarget, engineSearcher, indexService, indexShard, scriptService, cacheRecycler, pageCacheRecycler, bigArrays, threadPool.estimatedTimeInMillisCounter());
    SearchContext.setCurrent(context);
    try {
        context.scroll(request.scroll());
        context.useSlowScroll(request.useSlowScroll());
        parseTemplate(request);
        parseSource(context, request.source());
        parseSource(context, request.extraSource());
        // if the from and size are still not set, default them
        if (context.from() == -1) {
            context.from(0);
        }
        if (context.size() == -1) {
            context.size(10);
        }
        // pre process
        dfsPhase.preProcess(context);
        queryPhase.preProcess(context);
        fetchPhase.preProcess(context);
        // compute the context keep alive
        long keepAlive = defaultKeepAlive;
        if (request.scroll() != null && request.scroll().keepAlive() != null) {
            keepAlive = request.scroll().keepAlive().millis();
        }
        context.keepAlive(keepAlive);
    } catch (Throwable e) {
        context.close();
        throw ExceptionsHelper.convertToRuntime(e);
    }
    return context;
}

/** Frees every active context belonging to the given index (e.g. on index removal). */
private void freeAllContextForIndex(Index index) {
    assert index != null;
    for (SearchContext ctx : activeContexts.values()) {
        if (index.equals(ctx.indexShard().shardId().index())) {
            freeContext(ctx.id());
        }
    }
}

/**
 * Removes and closes the context with the given id.
 *
 * @return true if a context was found and freed, false if the id was unknown
 */
public boolean freeContext(long id) {
    final SearchContext context = activeContexts.remove(id);
    if (context != null) {
        try {
            context.indexShard().searchService().onFreeContext(context);
        } finally {
            context.close();
        }
        return true;
    }
    return false;
}

/** Frees every active context that belongs to a scroll (used on clear-scroll "_all"). */
public void freeAllScrollContexts() {
    for (SearchContext searchContext : activeContexts.values()) {
        if (searchContext.scroll() != null) {
            freeContext(searchContext.id());
        }
    }
}

/** Marks a context as in-flight so the Reaper skips it (lastAccessTime of -1 disables the timeout). */
private void contextProcessing(SearchContext context) {
    // disable timeout while executing a search
    context.accessed(-1);
}

/** Re-arms the context's keep-alive clock after a phase completes successfully. */
private void contextProcessedSuccessfully(SearchContext context) {
    context.accessed(threadPool.estimatedTimeInMillis());
}

/** Releases phase-scoped resources and clears the thread-local current context. */
private void cleanContext(SearchContext context) {
    assert context == SearchContext.current();
    context.clearReleasables(Lifetime.PHASE);
    SearchContext.removeCurrent();
}

/**
 * Resolves a search template (by name or inline source) via the mustache script
 * engine and replaces the request's source with the rendered query. Inline
 * templates are parsed twice to support a nested template id/file reference;
 * a parse failure on the second pass is treated as a plain file reference for
 * backwards compatibility.
 */
private void parseTemplate(ShardSearchRequest request) {
    final ExecutableScript executable;
    if (hasLength(request.templateName())) {
        executable = this.scriptService.executable("mustache", request.templateName(), request.templateType(), request.templateParams());
    } else {
        if (!hasLength(request.templateSource())) {
            return;
        }
        XContentParser parser = null;
        TemplateQueryParser.TemplateContext templateContext = null;
        try {
            parser = XContentFactory.xContent(request.templateSource()).createParser(request.templateSource());
            templateContext = TemplateQueryParser.parse(parser, "params", "template");
            if (templateContext.scriptType().equals(ScriptService.ScriptType.INLINE)) {
                //Try to double parse for nested template id/file
                parser = null;
                try {
                    byte[] templateBytes = templateContext.template().getBytes(Charsets.UTF_8);
                    parser = XContentFactory.xContent(templateBytes).createParser(templateBytes);
                } catch (ElasticsearchParseException epe) {
                    //This was an non-nested template, the parse failure was due to this, it is safe to assume this refers to a file
                    //for backwards compatibility and keep going
                    templateContext = new TemplateQueryParser.TemplateContext(ScriptService.ScriptType.FILE, templateContext.template(), templateContext.params());
                }
                if (parser != null) {
                    TemplateQueryParser.TemplateContext innerContext = TemplateQueryParser.parse(parser, "params");
                    if (hasLength(innerContext.template()) && !innerContext.scriptType().equals(ScriptService.ScriptType.INLINE)) {
                        //An inner template referring to a filename or id
                        templateContext = new TemplateQueryParser.TemplateContext(innerContext.scriptType(), innerContext.template(), templateContext.params());
                    }
                }
            }
        } catch (IOException e) {
            throw new ElasticsearchParseException("Failed to parse template", e);
        } finally {
            Releasables.closeWhileHandlingException(parser);
        }
        if (templateContext == null || !hasLength(templateContext.template())) {
            throw new ElasticsearchParseException("Template must have [template] field configured");
        }
        executable = this.scriptService.executable("mustache", templateContext.template(), templateContext.scriptType(), templateContext.params());
    }
    BytesReference processedQuery = (BytesReference) executable.run();
    request.source(processedQuery);
}

/**
 * Parses a search source into the context by dispatching each top-level field
 * to its registered SearchParseElement. Wraps any failure in a
 * SearchParseException carrying the (best-effort JSON-rendered) source.
 */
private void parseSource(SearchContext context, BytesReference source) throws SearchParseException {
    // nothing to parse...
    if (source == null || source.length() == 0) {
        return;
    }
    XContentParser parser = null;
    try {
        parser = XContentFactory.xContent(source).createParser(source);
        XContentParser.Token token;
        token = parser.nextToken();
        if (token != XContentParser.Token.START_OBJECT) {
            throw new ElasticsearchParseException("Expected START_OBJECT but got " + token.name() + " " + parser.currentName());
        }
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                String fieldName = parser.currentName();
                parser.nextToken();
                SearchParseElement element = elementParsers.get(fieldName);
                if (element == null) {
                    throw new SearchParseException(context, "No parser for element [" + fieldName + "]");
                }
                element.parse(parser, context);
            } else {
                if (token == null) {
                    throw new ElasticsearchParseException("End of query source reached but query is not complete.");
                } else {
                    throw new ElasticsearchParseException("Expected field name but got " + token.name() + " \"" + parser.currentName() + "\"");
                }
            }
        }
    } catch (Throwable e) {
        String sSource = "_na_";
        try {
            sSource = XContentHelper.convertToJson(source, false);
        } catch (Throwable e1) {
            // ignore
        }
        throw new SearchParseException(context, "Failed to parse source [" + sSource + "]", e);
    } finally {
        if (parser != null) {
            parser.close();
        }
    }
}

// Shared empty result for "nothing to load" cases below.
private static final int[] EMPTY_DOC_IDS = new int[0];

/**
 * Shortcut ids to load, we load only "from" and up to "size". The phase controller
 * handles this as well since the result is always size * shards for Q_A_F
 */
private void shortcutDocIdsToLoad(SearchContext context) {
    if (!context.useSlowScroll() && context.request().scroll() != null) {
        // Fast scroll: load every doc the query phase returned.
        TopDocs topDocs = context.queryResult().topDocs();
        int[] docIdsToLoad = new int[topDocs.scoreDocs.length];
        for (int i = 0; i < topDocs.scoreDocs.length; i++) {
            docIdsToLoad[i] = topDocs.scoreDocs[i].doc;
        }
        context.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length);
    } else {
        TopDocs topDocs = context.queryResult().topDocs();
        if (topDocs.scoreDocs.length < context.from()) {
            // no more docs...
            context.docIdsToLoad(EMPTY_DOC_IDS, 0, 0);
            return;
        }
        int totalSize = context.from() + context.size();
        int[] docIdsToLoad = new int[Math.min(topDocs.scoreDocs.length - context.from(), context.size())];
        int counter = 0;
        for (int i = context.from(); i < totalSize; i++) {
            if (i < topDocs.scoreDocs.length) {
                docIdsToLoad[counter] = topDocs.scoreDocs[i].doc;
            } else {
                break;
            }
            counter++;
        }
        context.docIdsToLoad(docIdsToLoad, 0, counter);
    }
}

/** Loads every doc id from the query result (scan mode); empty result short-circuits. */
private void shortcutDocIdsToLoadForScanning(SearchContext context) {
    TopDocs topDocs = context.queryResult().topDocs();
    if (topDocs.scoreDocs.length == 0) {
        // no more docs...
        context.docIdsToLoad(EMPTY_DOC_IDS, 0, 0);
        return;
    }
    int[] docIdsToLoad = new int[topDocs.scoreDocs.length];
    for (int i = 0; i < docIdsToLoad.length; i++) {
        docIdsToLoad[i] = topDocs.scoreDocs[i].doc;
    }
    context.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length);
}

/** Advances the context to the next scroll page and refreshes its keep-alive. */
private void processScroll(InternalScrollSearchRequest request, SearchContext context) {
    // process scroll
    context.from(context.from() + context.size());
    context.scroll(request.scroll());
    // update the context keep alive based on the new scroll value
    if (request.scroll() != null && request.scroll().keepAlive() != null) {
        context.keepAlive(request.scroll().keepAlive().millis());
    }
}

/**
 * Returns the number of active contexts in this
 * SearchService
 */
public int getActiveContexts() {
    return this.activeContexts.size();
}

/**
 * Warmer that eagerly loads norms for fields configured with eager norms
 * loading, so the first search on a new reader does not pay the loading cost.
 */
static class NormsWarmer extends IndicesWarmer.Listener {

    @Override
    public TerminationHandle warmNewReaders(final IndexShard indexShard, IndexMetaData indexMetaData, final WarmerContext context, ThreadPool threadPool) {
        final Loading defaultLoading = Loading.parse(indexMetaData.settings().get(NORMS_LOADING_KEY), Loading.LAZY);
        final MapperService mapperService = indexShard.mapperService();
        // Collect the index names of all fields whose norms should be warmed eagerly.
        final ObjectSet<String> warmUp = new ObjectOpenHashSet<>();
        for (DocumentMapper docMapper : mapperService.docMappers(false)) {
            for (FieldMapper<?> fieldMapper : docMapper.mappers()) {
                final String indexName = fieldMapper.names().indexName();
                if (fieldMapper.fieldType().indexed() && !fieldMapper.fieldType().omitNorms() && fieldMapper.normsLoading(defaultLoading) == Loading.EAGER) {
                    warmUp.add(indexName);
                }
            }
        }
        final CountDownLatch latch = new CountDownLatch(1);
        // Norms loading may be I/O intensive but is not CPU intensive, so we execute it in a single task
        threadPool.executor(executor()).execute(new Runnable() {
            @Override
            public void run() {
                try {
                    for (Iterator<ObjectCursor<String>> it = warmUp.iterator(); it.hasNext(); ) {
                        final String indexName = it.next().value;
                        final long start = System.nanoTime();
                        for (final AtomicReaderContext ctx : context.searcher().reader().leaves()) {
                            final NumericDocValues values = ctx.reader().getNormValues(indexName);
                            if (values != null) {
                                // Touch the first value to force the norms to load.
                                values.get(0);
                            }
                        }
                        if (indexShard.warmerService().logger().isTraceEnabled()) {
                            indexShard.warmerService().logger().trace("warmed norms for [{}], took [{}]", indexName, TimeValue.timeValueNanos(System.nanoTime() - start));
                        }
                    }
                } catch (Throwable t) {
                    indexShard.warmerService().logger().warn("failed to warm-up norms", t);
                } finally {
                    latch.countDown();
                }
            }
        });
        return new TerminationHandle() {
            @Override
            public void awaitTermination() throws InterruptedException {
                latch.await();
            }
        };
    }

    @Override
    public TerminationHandle warmTopReader(IndexShard indexShard, IndexMetaData indexMetaData, WarmerContext context, ThreadPool threadPool) {
        // Norms are per-segment; nothing to do at the top-reader level.
        return TerminationHandle.NO_WAIT;
    }
}

/**
 * Warmer that eagerly loads field data (per new segment) and global ordinals
 * (per top-level reader) for fields configured with eager loading.
 */
static class FieldDataWarmer extends IndicesWarmer.Listener {

    @Override
    public TerminationHandle warmNewReaders(final IndexShard indexShard, IndexMetaData indexMetaData, final WarmerContext context, ThreadPool threadPool) {
        final MapperService mapperService = indexShard.mapperService();
        // One entry per distinct index name whose field data is not lazy-loaded.
        final Map<String, FieldMapper<?>> warmUp = new HashMap<>();
        for (DocumentMapper docMapper : mapperService.docMappers(false)) {
            for (FieldMapper<?> fieldMapper : docMapper.mappers()) {
                final FieldDataType fieldDataType = fieldMapper.fieldDataType();
                if (fieldDataType == null) {
                    continue;
                }
                if (fieldDataType.getLoading() == Loading.LAZY) {
                    continue;
                }
                final String indexName = fieldMapper.names().indexName();
                if (warmUp.containsKey(indexName)) {
                    continue;
                }
                warmUp.put(indexName, fieldMapper);
            }
        }
        final IndexFieldDataService indexFieldDataService = indexShard.indexFieldDataService();
        final Executor executor = threadPool.executor(executor());
        // One latch count per (segment, field) task submitted below.
        final CountDownLatch latch = new CountDownLatch(context.searcher().reader().leaves().size() * warmUp.size());
        for (final AtomicReaderContext ctx : context.searcher().reader().leaves()) {
            for (final FieldMapper<?> fieldMapper : warmUp.values()) {
                executor.execute(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            final long start = System.nanoTime();
                            indexFieldDataService.getForField(fieldMapper).load(ctx);
                            if (indexShard.warmerService().logger().isTraceEnabled()) {
                                indexShard.warmerService().logger().trace("warmed fielddata for [{}], took [{}]", fieldMapper.names().name(), TimeValue.timeValueNanos(System.nanoTime() - start));
                            }
                        } catch (Throwable t) {
                            indexShard.warmerService().logger().warn("failed to warm-up fielddata for [{}]", t, fieldMapper.names().name());
                        } finally {
                            latch.countDown();
                        }
                    }
                });
            }
        }
        return new TerminationHandle() {
            @Override
            public void awaitTermination() throws InterruptedException {
                latch.await();
            }
        };
    }

    @Override
    public TerminationHandle warmTopReader(final IndexShard indexShard, IndexMetaData indexMetaData, final WarmerContext context, ThreadPool threadPool) {
        final MapperService mapperService = indexShard.mapperService();
        // One entry per distinct index name configured for eager global ordinals.
        final Map<String, FieldMapper<?>> warmUpGlobalOrdinals = new HashMap<>();
        for (DocumentMapper docMapper : mapperService.docMappers(false)) {
            for (FieldMapper<?> fieldMapper : docMapper.mappers()) {
                final FieldDataType fieldDataType = fieldMapper.fieldDataType();
                if (fieldDataType == null) {
                    continue;
                }
                if (fieldDataType.getLoading() != Loading.EAGER_GLOBAL_ORDINALS) {
                    continue;
                }
                final String indexName = fieldMapper.names().indexName();
                if (warmUpGlobalOrdinals.containsKey(indexName)) {
                    continue;
                }
                warmUpGlobalOrdinals.put(indexName, fieldMapper);
            }
        }
        final IndexFieldDataService indexFieldDataService = indexShard.indexFieldDataService();
        final Executor executor = threadPool.executor(executor());
        final CountDownLatch latch = new CountDownLatch(warmUpGlobalOrdinals.size());
        for (final FieldMapper<?> fieldMapper : warmUpGlobalOrdinals.values()) {
            executor.execute(new Runnable() {
                @Override
                public void run() {
                    try {
                        final long start = System.nanoTime();
                        IndexFieldData.Global ifd = indexFieldDataService.getForField(fieldMapper);
                        ifd.loadGlobal(context.reader());
                        if (indexShard.warmerService().logger().isTraceEnabled()) {
                            indexShard.warmerService().logger().trace("warmed global ordinals for [{}], took [{}]", fieldMapper.names().name(), TimeValue.timeValueNanos(System.nanoTime() - start));
                        }
                    } catch (Throwable t) {
                        indexShard.warmerService().logger().warn("failed to warm-up global ordinals for [{}]", t, fieldMapper.names().name());
                    } finally {
                        latch.countDown();
                    }
                }
            });
        }
        return new TerminationHandle() {
            @Override
            public void awaitTermination() throws InterruptedException {
                latch.await();
            }
        };
    }
}

/**
 * Warmer that runs the index's registered warmer queries (IndexWarmersMetaData)
 * against new and top-level readers so caches are populated before real traffic.
 */
class SearchWarmer extends IndicesWarmer.Listener {

    @Override
    public TerminationHandle warmNewReaders(IndexShard indexShard, IndexMetaData indexMetaData, WarmerContext context, ThreadPool threadPool) {
        return internalWarm(indexShard, indexMetaData, context, threadPool, false);
    }

    @Override
    public TerminationHandle warmTopReader(IndexShard indexShard, IndexMetaData indexMetaData, WarmerContext context, ThreadPool threadPool) {
        return internalWarm(indexShard, indexMetaData, context, threadPool, true);
    }

    /**
     * Runs every configured warmer entry on the warmer's searcher.
     *
     * @param top true when warming the top-level reader (used to decide whether
     *            a cacheable query should run now or was already run per-segment)
     */
    public TerminationHandle internalWarm(final IndexShard indexShard, final IndexMetaData indexMetaData, final IndicesWarmer.WarmerContext warmerContext, ThreadPool threadPool, final boolean top) {
        IndexWarmersMetaData custom = indexMetaData.custom(IndexWarmersMetaData.TYPE);
        if (custom == null) {
            return TerminationHandle.NO_WAIT;
        }
        final Executor executor = threadPool.executor(executor());
        final CountDownLatch latch = new CountDownLatch(custom.entries().size());
        for (final IndexWarmersMetaData.Entry entry : custom.entries()) {
            executor.execute(new Runnable() {
                @Override
                public void run() {
                    SearchContext context = null;
                    try {
                        long now = System.nanoTime();
                        ShardSearchRequest request = new ShardSearchLocalRequest(indexShard.shardId(), indexMetaData.numberOfShards(), SearchType.QUERY_THEN_FETCH, entry.source(), entry.types(), entry.queryCache());
                        context = createContext(request, warmerContext.searcher());
                        // if we use sort, we need to do query to sort on it and load relevant field data
                        // if not, we might as well use COUNT (and cache if needed)
                        if (context.sort() == null) {
                            context.searchType(SearchType.COUNT);
                        }
                        boolean canCache = indicesQueryCache.canCache(request, context);
                        // early terminate when we can cache, since we can only do proper caching on top level searcher
                        // also, if we can't cache, and its top, we don't need to execute it, since we already did when its not top
                        if (canCache != top) {
                            return;
                        }
                        loadOrExecuteQueryPhase(request, context, queryPhase);
                        long took = System.nanoTime() - now;
                        if (indexShard.warmerService().logger().isTraceEnabled()) {
                            indexShard.warmerService().logger().trace("warmed [{}], took [{}]", entry.name(), TimeValue.timeValueNanos(took));
                        }
                    } catch (Throwable t) {
                        indexShard.warmerService().logger().warn("warmer [{}] failed", t, entry.name());
                    } finally {
                        try {
                            if (context != null) {
                                freeContext(context.id());
                                cleanContext(context);
                            }
                        } finally {
                            latch.countDown();
                        }
                    }
                }
            });
        }
        return new TerminationHandle() {
            @Override
            public void awaitTermination() throws InterruptedException {
                latch.await();
            }
        };
    }
}

/**
 * Periodic task that frees contexts whose keep-alive has expired. Contexts
 * currently executing a phase report a lastAccessTime of -1 and are skipped.
 */
class Reaper implements Runnable {
    @Override
    public void run() {
        final long time = threadPool.estimatedTimeInMillis();
        for (SearchContext context : activeContexts.values()) {
            // Use the same value for both checks since lastAccessTime can
            // be modified by another thread between checks!
            final long lastAccessTime = context.lastAccessTime();
            if (lastAccessTime == -1l) {
                // its being processed or timeout is disabled
                continue;
            }
            if ((time - lastAccessTime > context.keepAlive())) {
                logger.debug("freeing search context [{}], time [{}], lastAccessTime [{}], keepAlive [{}]", context.id(), time, lastAccessTime, context.keepAlive());
                freeContext(context.id());
            }
        }
    }
}
}
/**
 * Generated with Acceleo
 */
package org.wso2.developerstudio.eclipse.gmf.esb.parts.impl;

// Start of user code for imports
import java.util.ArrayList;
import java.util.List;

import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.eef.runtime.api.component.IPropertiesEditionComponent;
import org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent;
import org.eclipse.emf.eef.runtime.api.parts.ISWTPropertiesEditionPart;
import org.eclipse.emf.eef.runtime.impl.notify.PropertiesEditionEvent;
import org.eclipse.emf.eef.runtime.impl.parts.CompositePropertiesEditionPart;
import org.eclipse.emf.eef.runtime.ui.parts.PartComposer;
import org.eclipse.emf.eef.runtime.ui.parts.sequence.BindingCompositionSequence;
import org.eclipse.emf.eef.runtime.ui.parts.sequence.CompositionSequence;
import org.eclipse.emf.eef.runtime.ui.widgets.ReferencesTable;
import org.eclipse.emf.eef.runtime.ui.widgets.ReferencesTable.ReferencesTableListener;
import org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableContentProvider;
import org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableSettings;
import org.eclipse.jface.viewers.ViewerFilter;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Group;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.EsbViewsRepository;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.ThrottleMediatorOutputConnectorPropertiesEditionPart;
import org.wso2.developerstudio.eclipse.gmf.esb.providers.EsbMessages;
// End of user code

/**
 * EEF-generated SWT properties-edition part for the ThrottleMediatorOutputConnector
 * element: renders a properties group containing a "comment mediators" references
 * table and forwards user actions to the owning properties-edition component.
 */
public class ThrottleMediatorOutputConnectorPropertiesEditionPartImpl extends CompositePropertiesEditionPart implements ISWTPropertiesEditionPart, ThrottleMediatorOutputConnectorPropertiesEditionPart {

    // Table widget listing the comment mediators reference.
    protected ReferencesTable commentMediators;
    // Business-level filters registered before/after the table exists (applied by EEF, not by this part).
    protected List<ViewerFilter> commentMediatorsBusinessFilters = new ArrayList<ViewerFilter>();
    // Viewer filters applied directly to the table widget.
    protected List<ViewerFilter> commentMediatorsFilters = new ArrayList<ViewerFilter>();

    /**
     * Default constructor
     * @param editionComponent the {@link IPropertiesEditionComponent} that manage this part
     *
     */
    public ThrottleMediatorOutputConnectorPropertiesEditionPartImpl(IPropertiesEditionComponent editionComponent) {
        super(editionComponent);
    }

    /**
     * {@inheritDoc}
     *
     * @see org.eclipse.emf.eef.runtime.api.parts.ISWTPropertiesEditionPart#
     *      createFigure(org.eclipse.swt.widgets.Composite)
     *
     */
    public Composite createFigure(final Composite parent) {
        view = new Composite(parent, SWT.NONE);
        GridLayout layout = new GridLayout();
        layout.numColumns = 3;
        view.setLayout(layout);
        createControls(view);
        return view;
    }

    /**
     * {@inheritDoc}
     *
     * @see org.eclipse.emf.eef.runtime.api.parts.ISWTPropertiesEditionPart#
     *      createControls(org.eclipse.swt.widgets.Composite)
     *
     */
    public void createControls(Composite view) {
        CompositionSequence throttleMediatorOutputConnectorStep = new BindingCompositionSequence(propertiesEditionComponent);
        throttleMediatorOutputConnectorStep
            .addStep(EsbViewsRepository.ThrottleMediatorOutputConnector.Properties.class)
            .addStep(EsbViewsRepository.ThrottleMediatorOutputConnector.Properties.commentMediators);

        composer = new PartComposer(throttleMediatorOutputConnectorStep) {

            @Override
            public Composite addToPart(Composite parent, Object key) {
                if (key == EsbViewsRepository.ThrottleMediatorOutputConnector.Properties.class) {
                    return createPropertiesGroup(parent);
                }
                if (key == EsbViewsRepository.ThrottleMediatorOutputConnector.Properties.commentMediators) {
                    return createCommentMediatorsAdvancedTableComposition(parent);
                }
                return parent;
            }
        };
        composer.compose(view);
    }

    /**
     * Creates the "Properties" group container that hosts the editors below.
     */
    protected Composite createPropertiesGroup(Composite parent) {
        Group propertiesGroup = new Group(parent, SWT.NONE);
        propertiesGroup.setText(EsbMessages.ThrottleMediatorOutputConnectorPropertiesEditionPart_PropertiesGroupLabel);
        GridData propertiesGroupData = new GridData(GridData.FILL_HORIZONTAL);
        propertiesGroupData.horizontalSpan = 3;
        propertiesGroup.setLayoutData(propertiesGroupData);
        GridLayout propertiesGroupLayout = new GridLayout();
        propertiesGroupLayout.numColumns = 3;
        propertiesGroup.setLayout(propertiesGroupLayout);
        return propertiesGroup;
    }

    /**
     * Creates the comment-mediators references table and wires its add/edit/move/
     * remove/selection callbacks to fire COMMIT/CHANGE properties-edition events.
     * @param parent the container to create the table in
     *
     */
    protected Composite createCommentMediatorsAdvancedTableComposition(Composite parent) {
        this.commentMediators = new ReferencesTable(getDescription(EsbViewsRepository.ThrottleMediatorOutputConnector.Properties.commentMediators, EsbMessages.ThrottleMediatorOutputConnectorPropertiesEditionPart_CommentMediatorsLabel), new ReferencesTableListener() {
            public void handleAdd() {
                propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(ThrottleMediatorOutputConnectorPropertiesEditionPartImpl.this, EsbViewsRepository.ThrottleMediatorOutputConnector.Properties.commentMediators, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.ADD, null, null));
                commentMediators.refresh();
            }
            public void handleEdit(EObject element) {
                propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(ThrottleMediatorOutputConnectorPropertiesEditionPartImpl.this, EsbViewsRepository.ThrottleMediatorOutputConnector.Properties.commentMediators, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.EDIT, null, element));
                commentMediators.refresh();
            }
            public void handleMove(EObject element, int oldIndex, int newIndex) {
                propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(ThrottleMediatorOutputConnectorPropertiesEditionPartImpl.this, EsbViewsRepository.ThrottleMediatorOutputConnector.Properties.commentMediators, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.MOVE, element, newIndex));
                commentMediators.refresh();
            }
            public void handleRemove(EObject element) {
                propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(ThrottleMediatorOutputConnectorPropertiesEditionPartImpl.this, EsbViewsRepository.ThrottleMediatorOutputConnector.Properties.commentMediators, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.REMOVE, null, element));
                commentMediators.refresh();
            }
            public void navigateTo(EObject element) { }
        });
        for (ViewerFilter filter : this.commentMediatorsFilters) {
            this.commentMediators.addFilter(filter);
        }
        this.commentMediators.setHelpText(propertiesEditionComponent.getHelpContent(EsbViewsRepository.ThrottleMediatorOutputConnector.Properties.commentMediators, EsbViewsRepository.SWT_KIND));
        this.commentMediators.createControls(parent);
        this.commentMediators.addSelectionListener(new SelectionAdapter() {

            public void widgetSelected(SelectionEvent e) {
                if (e.item != null && e.item.getData() instanceof EObject) {
                    propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(ThrottleMediatorOutputConnectorPropertiesEditionPartImpl.this, EsbViewsRepository.ThrottleMediatorOutputConnector.Properties.commentMediators, PropertiesEditionEvent.CHANGE, PropertiesEditionEvent.SELECTION_CHANGED, null, e.item.getData()));
                }
            }
        });
        GridData commentMediatorsData = new GridData(GridData.FILL_HORIZONTAL);
        commentMediatorsData.horizontalSpan = 3;
        this.commentMediators.setLayoutData(commentMediatorsData);
        this.commentMediators.setLowerBound(0);
        this.commentMediators.setUpperBound(-1);
        commentMediators.setID(EsbViewsRepository.ThrottleMediatorOutputConnector.Properties.commentMediators);
        commentMediators.setEEFType("eef::AdvancedTableComposition"); //$NON-NLS-1$
        // Start of user code for createCommentMediatorsAdvancedTableComposition

        // End of user code
        return parent;
    }

    /**
     * {@inheritDoc}
     *
     * @see org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionListener#firePropertiesChanged(org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent)
     *
     */
    public void firePropertiesChanged(IPropertiesEditionEvent event) {
        // Start of user code for tab synchronization

        // End of user code
    }

    /**
     * {@inheritDoc}
     *
     * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.ThrottleMediatorOutputConnectorPropertiesEditionPart#initCommentMediators(ReferencesTableSettings settings)
     */
    public void initCommentMediators(ReferencesTableSettings settings) {
        if (current.eResource() != null && current.eResource().getResourceSet() != null)
            this.resourceSet = current.eResource().getResourceSet();
        ReferencesTableContentProvider contentProvider = new ReferencesTableContentProvider();
        commentMediators.setContentProvider(contentProvider);
        commentMediators.setInput(settings);
        boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.ThrottleMediatorOutputConnector.Properties.commentMediators);
        if (eefElementEditorReadOnlyState && commentMediators.isEnabled()) {
            commentMediators.setEnabled(false);
            commentMediators.setToolTipText(EsbMessages.ThrottleMediatorOutputConnector_ReadOnly);
        } else if (!eefElementEditorReadOnlyState && !commentMediators.isEnabled()) {
            commentMediators.setEnabled(true);
        }
    }

    /**
     * {@inheritDoc}
     *
     * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.ThrottleMediatorOutputConnectorPropertiesEditionPart#updateCommentMediators()
     *
     */
    public void updateCommentMediators() {
        commentMediators.refresh();
    }

    /**
     * {@inheritDoc}
     *
     * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.ThrottleMediatorOutputConnectorPropertiesEditionPart#addFilterToCommentMediators(ViewerFilter filter)
     *
     */
    public void addFilterToCommentMediators(ViewerFilter filter) {
        commentMediatorsFilters.add(filter);
        if (this.commentMediators != null) {
            this.commentMediators.addFilter(filter);
        }
    }

    /**
     * {@inheritDoc}
     *
     * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.ThrottleMediatorOutputConnectorPropertiesEditionPart#addBusinessFilterToCommentMediators(ViewerFilter filter)
     *
     */
    public void addBusinessFilterToCommentMediators(ViewerFilter filter) {
        commentMediatorsBusinessFilters.add(filter);
    }

    /**
     * {@inheritDoc}
     *
     * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.ThrottleMediatorOutputConnectorPropertiesEditionPart#isContainedInCommentMediatorsTable(EObject element)
     *
     */
    public boolean isContainedInCommentMediatorsTable(EObject element) {
        return ((ReferencesTableSettings)commentMediators.getInput()).contains(element);
    }

    /**
     * {@inheritDoc}
     *
     * @see org.eclipse.emf.eef.runtime.api.parts.IPropertiesEditionPart#getTitle()
     *
     */
    public String getTitle() {
        return EsbMessages.ThrottleMediatorOutputConnector_Part_Title;
    }

    // Start of user code additional methods

    // End of user code

}
package com.bukkit.cppchriscpp.TravelPortals; /** * @(#)WarpLocation.java * * Stores warp locations for users. * * @author cppchriscpp * @version 1.10 */ /** * A quick serializable storage medium for warping points. * REPLACES WarpPoint * Replaced by the one in the right package... * @Deprecated * @author cppchriscpp */ public class WarpLocation implements java.io.Serializable { static final long serialVersionUID = 4523543646L; /** * Used to store the position of the warp. */ private int x,y,z; /** * Stores the name of the warp. */ private String name; /** * Stores the destination of the warp. */ private String destination; /** * The time this was last used. */ private transient long lastused; /** * Is this portal hidden? */ private boolean hidden = false; /** * Portal owner. */ private String owner = ""; /** * Where is the door? * 0: Unknown * 1: X-1 Y-1 * 2: X+1 Y-1 * 3: X-1 Y+1 * 4: X+1 Y+1 */ private int doorpos = 0; /** * The amount of time it takes for this to cool down so it can be * used again. */ // private static final transient /* enough keywords? */ int cooldown = 5000; /** * What world is this? */ private String world = ""; /** * Default constructor. I suggest against using this. */ public WarpLocation() { x=0; y=0; z=0; name = ""; lastused = 0; world = ""; } /** * Creates a warp point at a position. This is the most likely constructor you'll use. * @param _x The X coordinate of the warp point's position. * @param _y The Y coordinate of the warp point's position. * @param _z The Z coordinate of the warp point's position. * @param _world The world that this warp point is in. */ public WarpLocation(int _x, int _y, int _z, String _world) { x = _x; y = _y; z = _z; name = ""; destination = ""; lastused = 0; world = _world; } /** * Creates a warp point at a position. * @param _x The X coordinate of the warp point's position. * @param _y The Y coordinate of the warp point's position. * @param _z The Z coordinate of the warp point's position. 
* @param _doorpos The position of the door. * @param _world The world that this location is in. * @Deprecated */ public WarpLocation(int _x, int _y, int _z, int _doorpos, String _world) { x = _x; y = _y; z = _z; name = ""; destination = ""; lastused = 0; doorpos = _doorpos; world = _world; } /** * Creates a warp point at a position. This is the most likely constructor you'll use. * @param _x The X coordinate of the warp point's position. * @param _y The Y coordinate of the warp point's position. * @param _z The Z coordinate of the warp point's position. * @param _doorpos The position of the door. * @param _world The world to position this portal within. * @param _owner The person who owns this. */ public WarpLocation(int _x, int _y, int _z, int _doorpos, String _world, String _owner) { x = _x; y = _y; z = _z; name = ""; destination = ""; lastused = 0; doorpos = _doorpos; world = _world; owner = _owner; } /** * Check to see if this is a valid warp for use. (Has a name and destination) * @return true if the destination and name are both set, otherwise false. */ public boolean isValid() { return (!(name.equals("") && destination.equals(""))); } /** * Get the X coordinate of this point. * @return The X coordinate of this point. */ public int getX() { return x; } /** * Get the Y coordinate of this point. * @return The Y coordinate of this point. */ public int getY() { return y; } /** * Get the Z coordinate of this point. * @return The Z coordinate of this point. */ public int getZ() { return z; } /** * Retrieve the name of this point. * @return The name given to this point. */ public String getName() { return name; } /** * Sets the name of this point. * @param n The new name for this point. */ public void setName(String n) { name = n; } /** * Gets the current owner of the portal. * @return The owner of the portal. */ public String getOwner() { if (owner == null) owner = ""; return owner; } /** * Sets the current owner of the portal. * @param _owner The new owner. 
*/ public void setOwner(String _owner) { owner = _owner; } /** * Gets the name of the destination portal for this point. * @return The name of the destination portal. */ public String getDestination() { return destination; } /** * Sets the name of the destination portal. * @param n New name for this portal's destination. */ public void setDestination(String n) { destination = n; } /** * Tests to see if this point has a destination set. * @return true if this point has a destination; false otherwise. */ public boolean hasDestination() { return !destination.equals(""); } /** * Checks to see if this point has a name. * @return true if this point has a name; false otherwise. */ public boolean hasName() { return !name.equals(""); } /** * Gets the last used time. * @return The last time it was used (ms) */ public long getLastUsed() { return lastused; } /** * Set the lastused time to now. */ public void setLastUsed() { lastused = System.currentTimeMillis(); } /** * Set the time this was last used * @param time the new time this was used. */ public void setLastUsed(long time) { lastused = time; } /** * Get the position of the door. * @return The position of the door. */ public int getDoorPosition() { return doorpos; } /** * Set the position of the door * @param dp The position of the door */ public void setDoorPosition(int dp) { doorpos = dp; } /** * Set whether the warp's name is hidden. * @param dp whether to show the warp. */ public void setHidden(boolean dp) { hidden = dp; } /** * Figure out whether this warp is hidden. * @return true to suppress name, false otherwise. */ public boolean getHidden() { return hidden; } /** * What world is this in? * @return The name of the world. */ public String getWorld() { return world; } /** * Set what world this portal is in. * @param w The world. */ public void setWorld(String w) { world = w; } /** * Check if this portal is usable. (Time limit) * @param cooldown How long of a cooldown to allow for. 
* @return true if this WarpLocation is usable; false otherwise. */ public boolean isUsable(int cooldown) { return (lastused+cooldown < System.currentTimeMillis()); } }
package com.psddev.dari.util;

import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.concurrent.atomic.AtomicLong;

import com.google.common.base.Preconditions;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.util.concurrent.AtomicDouble;

/**
 * Gathers statistics about arbitrary operations.
 *
 * <p>To use, first create an instance (usually as a {@code static}):
 *
 * <p><blockquote><pre>
 * private static final Stats STATS = new Stats("Stats Name");
 * </pre></blockquote></p>
 *
 * <p>Then wrap the code you want to measure with the timer methods:
 *
 * <p><blockquote><pre>
 * Stats.Timer timer = STATS.startTimer();
 * try {
 * &nbsp; ...
 * } finally {
 * &nbsp; timer.stop("Operation Name");
 * }
 * </pre></blockquote></p>
 *
 * <p>Measurements will automatically be displayed through {@link
 * StatsDebugServlet}, which is typically available at {@code /_debug/stats}.
 */
public class Stats {

    // Global registry of every Stats instance ever created. References are
    // weak so instances can still be garbage-collected; dead entries are
    // pruned lazily in Static#getAll.
    // NOTE(review): this ArrayList is mutated by the instance initializer
    // below and iterated (with removal) in Static#getAll without any
    // synchronization -- concurrent construction during getAll() could throw
    // ConcurrentModificationException. Confirm whether all construction is
    // expected to happen during single-threaded class initialization.
    private static final List<WeakReference<Stats>> STATS_REFERENCES = new ArrayList<WeakReference<Stats>>();

    // Instance initializer: registers each new instance in the global list.
    {
        STATS_REFERENCES.add(new WeakReference<Stats>(this));
    }

    // Display name for this stats group (see getName()).
    private final String name;
    // How long moving averages are retained, in seconds.
    private final double keepDuration;
    // How often the moving averages are sampled, in seconds.
    private final double measureInterval;
    // Unmodifiable list of moving-average window lengths, in seconds.
    private final List<Double> averageIntervals;
    // Creation timestamp (System.currentTimeMillis), used by getUptime().
    private final long start;
    // Aggregate measurement across all operations; stored under the
    // "Total" key in the measurements cache.
    private final Measurement totalMeasurement;

    // Lazily creates one Measurement per operation name on first access.
    private final LoadingCache<String, Measurement> measurements = CacheBuilder
            .newBuilder()
            .build(new CacheLoader<String, Measurement>() {

        @Override
        public Measurement load(String operation) {
            return new Measurement();
        }
    });

    /**
     * Creates an instance with the given {@code name} and {@linkplain
     * ExponentialMovingAverage moving averages} with the given
     * {@code averageIntervals}.
     *
     * @param name Must not be blank.
     * @param keepDuration In seconds. Must be positive.
     * @param measureInterval In seconds. Must be positive.
     * @param averageIntervals In seconds. Must not be blank.
     * Must all be positive.
     * @throws IllegalArgumentException If any argument is out of range.
     */
    public Stats(String name, double keepDuration, double measureInterval, double... averageIntervals) {
        Preconditions.checkArgument(!ObjectUtils.isBlank(name));
        Preconditions.checkArgument(keepDuration > 0);
        Preconditions.checkArgument(measureInterval > 0);
        Preconditions.checkNotNull(averageIntervals);
        Preconditions.checkArgument(averageIntervals.length > 0);

        // Defensive copy of the varargs array into an immutable list,
        // validating each interval along the way.
        List<Double> newAverageIntervals = new ArrayList<Double>();

        for (int i = 0, length = averageIntervals.length; i < length; ++ i) {
            double averageInterval = averageIntervals[i];
            Preconditions.checkArgument(averageInterval > 0);
            newAverageIntervals.add(averageInterval);
        }

        this.name = name;
        this.keepDuration = keepDuration;
        this.measureInterval = measureInterval;
        this.averageIntervals = Collections.unmodifiableList(newAverageIntervals);
        this.start = System.currentTimeMillis();
        // Eagerly materialize the "Total" entry so totalMeasurement and the
        // cache always refer to the same object.
        this.totalMeasurement = getMeasurements().get("Total");
    }

    /**
     * Creates an instance with the given {@code name} with 2 moving averages
     * at 1 and 5 minute intervals.
     *
     * @see #Stats(String, double, double, double...)
     */
    public Stats(String name) {
        // keep 1 hour of data, sample every 5 seconds, 60s and 600s windows.
        this(name, 3600.0, 5.0, 60.0, 600.0);
    }

    /**
     * Returns the name.
     *
     * @return Never blank.
     */
    public String getName() {
        return name;
    }

    /**
     * Returns how long the moving averages should be kept for.
     *
     * @return Always positive. In seconds.
     */
    public double getKeepDuration() {
        return keepDuration;
    }

    /**
     * Returns how often the moving averages are measured.
     *
     * @return Always positive. In seconds.
     */
    public double getMeasureInterval() {
        return measureInterval;
    }

    /**
     * Returns all moving average intervals.
     *
     * @return Never blank. All positives. Unmodifiable.
     */
    public List<Double> getAverageIntervals() {
        return averageIntervals;
    }

    /**
     * Returns how long this stats instance has been gathering data.
     *
     * @return In seconds.
     */
    public double getUptime() {
        // Millis since construction converted to seconds.
        return (System.currentTimeMillis() - start) / 1e3;
    }

    /**
     * Starts timing an operation.
     *
     * @return Never {@code null}.
     * @see Timer#stop
     */
    public Timer startTimer() {
        return new Timer();
    }

    /**
     * Returns the total measurement.
     *
     * @return Never {@code null}.
     */
    public Measurement getTotalMeasurement() {
        return totalMeasurement;
    }

    /**
     * Returns the map of all measurements, including the total.
     *
     * @return Never blank. Backed by the loading cache, so reads of absent
     * keys create new measurements.
     */
    public Map<String, Measurement> getMeasurements() {
        return new LoadingCacheMap<String, Measurement>(String.class, measurements);
    }

    /** Timer for measuring the duration of an operation. */
    public class Timer {

        // Captured at construction; durations are computed from nanoTime
        // deltas in stop().
        private final long start = System.nanoTime();

        /**
         * Stops timing and names the given {@code operation}.
         *
         * @return Duration in seconds.
         * @see Stats#startTimer
         */
        public double stop(String operation) {
            return stop(operation, 1L);
        }

        /**
         * Stops timing given {@code count} number of events and names the
         * given {@code operation}.
         *
         * @return Duration in seconds.
         * @see Stats#startTimer
         */
        public double stop(String operation, long count) {
            long end = System.nanoTime();
            double duration = (end - start) / 1e9;

            // Guard against a (rare) negative nanoTime delta; such samples
            // are discarded rather than recorded.
            if (duration < 0.0) {
                return 0.0;

            } else {
                // Update both the aggregate and the per-operation entry.
                getTotalMeasurement().update(end, duration, count);
                getMeasurements().get(operation).update(end, duration, count);
                return duration;
            }
        }
    }

    /**
     * Specific measurement of an operation within {@link Stats}.
     * Most methods may return a {@link Double#NaN} when the measurement
     * isn't available.
     */
    public class Measurement {

        // Lifetime totals across all updates.
        private final AtomicLong totalCount = new AtomicLong();
        private final AtomicDouble totalDuration = new AtomicDouble();
        // One moving average per configured interval, index-aligned with
        // getAverageIntervals().
        private final List<ExponentialMovingAverage> countAverages = new ArrayList<ExponentialMovingAverage>();
        private final List<ExponentialMovingAverage> durationAverages = new ArrayList<ExponentialMovingAverage>();

        // Instance initializer: builds the moving averages from the
        // enclosing Stats' configuration.
        {
            double keepDuration = getKeepDuration();
            double measureInterval = getMeasureInterval();

            for (double averageInterval : getAverageIntervals()) {
                countAverages.add(new ExponentialMovingAverage(keepDuration, measureInterval, averageInterval));
                durationAverages.add(new ExponentialMovingAverage(keepDuration, measureInterval, averageInterval));
            }
        }

        /** Returns the overall total count. */
        public long getOverallTotalCount() {
            return totalCount.get();
        }

        /** Returns the overall count average. */
        public double getOverallCountAverage() {
            // Events per second of uptime.
            return getOverallTotalCount() / getUptime();
        }

        /** Returns the overall duration average. */
        public double getOverallDurationAverage() {
            // NaN when no events have been recorded yet (0.0 / 0).
            return totalDuration.get() / getOverallTotalCount();
        }

        /**
         * Returns the current moving count average with the interval
         * identified by the given {@code intervalIndex}.
         */
        public double getCurrentCountAverage(int intervalIndex) {
            return countAverages.get(intervalIndex).getCurrentAverage();
        }

        /**
         * Returns the current moving duration average with the interval
         * identified by the given {@code intervalIndex}.
         */
        public double getCurrentDurationAverage(int intervalIndex) {
            // Average duration per event = duration average / count average.
            return durationAverages.get(intervalIndex).getCurrentAverage() / getCurrentCountAverage(intervalIndex);
        }

        /**
         * Returns all count averages.
         *
         * @return Never {@code null}.
         */
        public Iterable<Double> getCountAverages(int intervalIndex, long begin, long end) {
            return countAverages.get(intervalIndex).getAverages().subIterable(begin, end);
        }

        /**
         * Returns all duration averages.
         *
         * @return Never {@code null}. Each element is a per-event duration
         * (duration sample divided by the matching count sample).
         */
        public Iterable<Double> getDurationAverages(int intervalIndex, long begin, long end) {
            return new DurationIterable(
                    durationAverages.get(intervalIndex).getAverages().subIterable(begin, end),
                    countAverages.get(intervalIndex).getAverages().subIterable(begin, end));
        }

        /**
         * Updates all count and duration averages based on the given
         * {@code end} and {@code duration}.
         */
        protected void update(long end, double duration) {
            update(end, duration, 1L);
        }

        /**
         * Updates all count and duration averages based on the given
         * {@code end}, {@code duration} and {@code count}.
         */
        protected void update(long end, double duration, long count) {
            totalCount.addAndGet(count);
            totalDuration.addAndGet(duration);

            for (ExponentialMovingAverage countAverage : countAverages) {
                countAverage.updateAt(end, count);
            }

            for (ExponentialMovingAverage durationAverage : durationAverages) {
                durationAverage.updateAt(end, duration);
            }
        }
    }

    // Lazily divides a stream of duration samples by the matching count
    // samples, yielding per-event durations.
    private static class DurationIterable implements Iterable<Double> {

        private final Iterable<Double> durations;
        private final Iterable<Double> counts;

        public DurationIterable(Iterable<Double> durations, Iterable<Double> counts) {
            this.durations = durations;
            this.counts = counts;
        }

        @Override
        public Iterator<Double> iterator() {
            return new DurationIterator(durations.iterator(), counts.iterator());
        }
    }

    // Iterator companion to DurationIterable; read-only.
    private static class DurationIterator implements Iterator<Double> {

        private final Iterator<Double> durations;
        private final Iterator<Double> counts;

        public DurationIterator(Iterator<Double> durations, Iterator<Double> counts) {
            this.durations = durations;
            this.counts = counts;
        }

        @Override
        public boolean hasNext() {
            return durations.hasNext();
        }

        @Override
        public Double next() {
            if (hasNext()) {
                // NOTE(review): when the counts iterator runs out, this
                // divides by 0.0, producing Infinity (or NaN for a 0.0
                // duration) -- presumably the class javadoc's "may return
                // NaN" covers this; confirm intended.
                return durations.next() / (counts.hasNext() ? counts.next() : 0.0);

            } else {
                throw new NoSuchElementException();
            }
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
    }

    /** {@link Stats} utility methods. */
    public static final class Static {

        /**
         * Returns all active stats instances.
         *
         * @return Never {@code null}. Mutable.
         */
        public static List<Stats> getAll() {
            List<Stats> statsInstances = new ArrayList<Stats>();

            // Collect live instances and prune any weak references whose
            // referents have been garbage-collected.
            for (Iterator<WeakReference<Stats>> i = STATS_REFERENCES.iterator(); i.hasNext();) {
                WeakReference<Stats> ref = i.next();
                Stats stats = ref.get();

                if (stats != null) {
                    statsInstances.add(stats);

                } else {
                    i.remove();
                }
            }

            return statsInstances;
        }
    }
}
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.xdebugger.impl.evaluate.quick;

import com.intellij.codeInsight.hint.HintUtil;
import com.intellij.execution.console.LanguageConsoleView;
import com.intellij.execution.impl.ConsoleViewImpl;
import com.intellij.execution.ui.ConsoleView;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.ShortcutSet;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.keymap.KeymapUtil;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.vcs.changes.issueLinks.LinkMouseListenerBase;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.SimpleColoredComponent;
import com.intellij.ui.SimpleColoredText;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.util.Consumer;
import com.intellij.util.concurrency.EdtExecutorService;
import com.intellij.xdebugger.XDebugSession;
import com.intellij.xdebugger.XDebuggerUtil;
import com.intellij.xdebugger.XSourcePosition;
import com.intellij.xdebugger.evaluation.ExpressionInfo;
import com.intellij.xdebugger.evaluation.XDebuggerEditorsProvider;
import com.intellij.xdebugger.evaluation.XDebuggerEvaluator;
import com.intellij.xdebugger.frame.XDebuggerTreeNodeHyperlink;
import com.intellij.xdebugger.frame.XFullValueEvaluator;
import com.intellij.xdebugger.frame.XValue;
import com.intellij.xdebugger.frame.XValuePlace;
import com.intellij.xdebugger.frame.presentation.XValuePresentation;
import com.intellij.xdebugger.impl.XDebugSessionImpl;
import com.intellij.xdebugger.impl.actions.handlers.XDebuggerEvaluateActionHandler;
import com.intellij.xdebugger.impl.evaluate.quick.common.AbstractValueHint;
import com.intellij.xdebugger.impl.evaluate.quick.common.ValueHintType;
import com.intellij.xdebugger.impl.frame.XValueMarkers;
import com.intellij.xdebugger.impl.ui.DebuggerUIUtil;
import com.intellij.xdebugger.impl.ui.XDebuggerUIConstants;
import com.intellij.xdebugger.impl.ui.tree.nodes.XEvaluationCallbackBase;
import com.intellij.xdebugger.impl.ui.tree.nodes.XValueNodeImpl;
import com.intellij.xdebugger.impl.ui.tree.nodes.XValueNodePresentationConfigurator;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;
import java.awt.event.MouseEvent;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * Debugger value hint shown over an expression in the editor: evaluates the
 * expression via an {@link XDebuggerEvaluator} and displays the result as a
 * lightweight hint or an expandable tree popup.
 *
 * @author nik
 */
public class XValueHint extends AbstractValueHint {
  private static final Logger LOG = Logger.getInstance(XValueHint.class);

  // Provides editors for typing/rendering debugger expressions.
  private final XDebuggerEditorsProvider myEditorsProvider;
  // Evaluates myExpression asynchronously; results arrive via callback.
  private final XDebuggerEvaluator myEvaluator;
  // May be null when created without a session (see the protected ctor).
  private final XDebugSession myDebugSession;
  // Whether the hint was triggered from the keyboard (affects hint text).
  private final boolean myFromKeyboard;
  // Text of the expression to evaluate.
  private final String myExpression;
  // Display name used as the root label in the tree popup.
  private final String myValueName;
  // Source position of the expression, or null if no backing file.
  private final XSourcePosition myExpressionPosition;
  // Disposable owning the "ShowErrorDescription" shortcut registered while
  // a hint is visible; released in disposeVisibleHint().
  private Disposable myDisposable;
  // Editor user-data key enforcing the at-most-one-hint-per-editor invariant
  // (see showHint/onHintHidden).
  private static final Key<XValueHint> HINT_KEY = Key.create("allows only one value hint per editor");

  public XValueHint(@NotNull Project project,
                    @NotNull Editor editor,
                    @NotNull Point point,
                    @NotNull ValueHintType type,
                    @NotNull ExpressionInfo expressionInfo,
                    @NotNull XDebuggerEvaluator evaluator,
                    @NotNull XDebugSession session,
                    boolean fromKeyboard) {
    // Editors provider is taken from the session's debug process.
    this(project, session.getDebugProcess().getEditorsProvider(), editor, point, type, expressionInfo, evaluator, session, fromKeyboard);
  }

  protected XValueHint(@NotNull Project project,
                       @NotNull XDebuggerEditorsProvider editorsProvider,
                       @NotNull Editor editor,
                       @NotNull Point point,
                       @NotNull ValueHintType type,
                       @NotNull ExpressionInfo expressionInfo,
                       @NotNull XDebuggerEvaluator evaluator,
                       boolean fromKeyboard) {
    // No session: tree popup will have no value markers or current position.
    this(project, editorsProvider, editor, point, type, expressionInfo, evaluator, null, fromKeyboard);
  }

  private XValueHint(@NotNull Project project,
                     @NotNull XDebuggerEditorsProvider editorsProvider,
                     @NotNull Editor editor,
                     @NotNull Point point,
                     @NotNull ValueHintType type,
                     @NotNull ExpressionInfo expressionInfo,
                     @NotNull XDebuggerEvaluator evaluator,
                     @Nullable XDebugSession session,
                     boolean fromKeyboard) {
    super(project, editor, point, type, expressionInfo.getTextRange());

    myEditorsProvider = editorsProvider;
    myEvaluator = evaluator;
    myDebugSession = session;
    myFromKeyboard = fromKeyboard;
    myExpression = XDebuggerEvaluateActionHandler.getExpressionText(expressionInfo, editor.getDocument());
    myValueName = XDebuggerEvaluateActionHandler.getDisplayText(expressionInfo, editor.getDocument());

    // Resolve the file backing the editor. For a language console, only the
    // history viewer maps back to the console's virtual file; otherwise ask
    // the document manager.
    VirtualFile file;
    ConsoleView consoleView = ConsoleViewImpl.CONSOLE_VIEW_IN_EDITOR_VIEW.get(editor);
    if (consoleView instanceof LanguageConsoleView) {
      LanguageConsoleView console = ((LanguageConsoleView)consoleView);
      file = console.getHistoryViewer() == editor ? console.getVirtualFile() : null;
    }
    else {
      file = FileDocumentManager.getInstance().getFile(editor.getDocument());
    }

    myExpressionPosition = file != null ? XDebuggerUtil.getInstance().createPositionByOffset(file, expressionInfo.getTextRange().getStartOffset()) : null;
  }

  @Override
  protected boolean canShowHint() {
    return true;
  }

  @Override
  protected boolean showHint(final JComponent component) {
    boolean result = super.showHint(component);
    if (result) {
      // Enforce the one-hint-per-editor invariant: hide whatever hint was
      // previously attached to this editor, then register ourselves.
      XValueHint prev = getEditor().getUserData(HINT_KEY);
      if (prev != null) {
        prev.hideHint();
      }
      getEditor().putUserData(HINT_KEY, this);
    }
    return result;
  }

  @Override
  protected void onHintHidden() {
    super.onHintHidden();
    // Only clear the editor registration if it still points at us (a newer
    // hint may have already replaced it).
    XValueHint prev = getEditor().getUserData(HINT_KEY);
    if (prev == this) {
      getEditor().putUserData(HINT_KEY, null);
    }
    disposeVisibleHint();
  }

  @Override
  public void hideHint() {
    super.hideHint();
    disposeVisibleHint();
  }

  @Override
  protected void evaluateAndShowHint() {
    // Show an "Evaluating..." placeholder only if the evaluation hasn't
    // produced a result (or error) within 200 ms.
    AtomicBoolean showEvaluating = new AtomicBoolean(true);
    EdtExecutorService.getScheduledExecutorInstance().schedule(() -> {
      // NOTE(review): myCurrentHint appears to come from AbstractValueHint;
      // null here presumably means no hint is currently visible.
      if (myCurrentHint == null && showEvaluating.get()) {
        SimpleColoredComponent component = HintUtil.createInformationComponent();
        component.append(XDebuggerUIConstants.EVALUATING_EXPRESSION_MESSAGE);
        showHint(component);
      }
    }, 200, TimeUnit.MILLISECONDS);

    myEvaluator.evaluate(myExpression, new XEvaluationCallbackBase() {
      @Override
      public void evaluated(@NotNull final XValue result) {
        result.computePresentation(new XValueNodePresentationConfigurator.ConfigurableXValueNodeImpl() {
          private XFullValueEvaluator myFullValueEvaluator;
          // Guards against re-showing the tree when the presentation is
          // recomputed after the first display.
          private boolean myShown = false;

          @Override
          public void applyPresentation(@Nullable Icon icon,
                                        @NotNull XValuePresentation valuePresenter,
                                        boolean hasChildren) {
            // A result arrived: cancel the pending "Evaluating..." hint.
            showEvaluating.set(false);
            if (isHintHidden()) {
              return;
            }

            SimpleColoredText text = new SimpleColoredText();
            XValueNodeImpl.buildText(valuePresenter, text, false);

            if (!hasChildren) {
              // Leaf value: plain informational hint (with optional
              // full-value link).
              JComponent component = createHintComponent(text, valuePresenter, myFullValueEvaluator);
              showHint(component);
            }
            else if (getType() == ValueHintType.MOUSE_CLICK_HINT) {
              // Clicked hint with children: go straight to the tree popup.
              if (!myShown) {
                showTree(result);
              }
            }
            else {
              if (getType() == ValueHintType.MOUSE_OVER_HINT) {
                if (myFromKeyboard) {
                  // Prefix the hint with the shortcut that expands it.
                  text.insert(0, "(" + KeymapUtil.getFirstKeyboardShortcutText("ShowErrorDescription") + ") ",
                              SimpleTextAttributes.GRAYED_ATTRIBUTES);
                }

                // first remove a shortcut created for any previous presentation (like "Collecting data...")
                disposeVisibleHint();
                myDisposable = Disposer.newDisposable();
                ShortcutSet shortcut = ActionManager.getInstance().getAction("ShowErrorDescription").getShortcutSet();
                DumbAwareAction.create(e -> showTree(result))
                  .registerCustomShortcutSet(shortcut, getEditor().getContentComponent(), myDisposable);
              }

              // Expandable hint: clicking the expander opens the tree popup.
              showHint(createExpandableHintComponent(text, () -> showTree(result)));
            }
            myShown = true;
          }

          @Override
          public void setFullValueEvaluator(@NotNull XFullValueEvaluator fullValueEvaluator) {
            myFullValueEvaluator = fullValueEvaluator;
          }

          @Override
          public boolean isObsolete() {
            return isHintHidden();
          }
        }, XValuePlace.TOOLTIP);
      }

      @Override
      public void errorOccurred(@NotNull final String errorMessage) {
        // Cancel the pending "Evaluating..." hint; show the error only for
        // explicit (click) hints, otherwise just hide silently.
        showEvaluating.set(false);
        ApplicationManager.getApplication().invokeLater(() -> {
          if (getType() == ValueHintType.MOUSE_CLICK_HINT) {
            showHint(HintUtil.createErrorLabel(errorMessage));
          }
          else if (myCurrentHint != null) {
            myCurrentHint.hide();
          }
        });
        LOG.debug("Cannot evaluate '" + myExpression + "':" + errorMessage);
      }
    }, myExpressionPosition);
  }

  /**
   * Builds the hint component for a leaf value: the presentation text plus,
   * when available, a clickable link that opens the full-value popup.
   */
  @NotNull
  protected JComponent createHintComponent(@NotNull SimpleColoredText text,
                                           @NotNull XValuePresentation presentation,
                                           @Nullable XFullValueEvaluator evaluator) {
    SimpleColoredComponent component = HintUtil.createInformationComponent();
    text.appendToComponent(component);
    if (evaluator != null) {
      component.append(
        evaluator.getLinkText(),
        XDebuggerTreeNodeHyperlink.TEXT_ATTRIBUTES,
        (Consumer<MouseEvent>)event -> DebuggerUIUtil.showValuePopup(evaluator, event, getProject(), getEditor())
      );
      LinkMouseListenerBase.installSingleTagOn(component);
    }
    return component;
  }

  // Releases the shortcut disposable registered while a hint was visible.
  private void disposeVisibleHint() {
    if (myDisposable != null) {
      Disposer.dispose(myDisposable);
      myDisposable = null;
    }
  }

  // Replaces the current hint with a tree popup rooted at the given value.
  private void showTree(@NotNull XValue value) {
    if (myCurrentHint != null) {
      myCurrentHint.hide();
    }
    // Without a session there are no value markers and no current position.
    XValueMarkers<?,?> valueMarkers = myDebugSession == null ? null : ((XDebugSessionImpl)myDebugSession).getValueMarkers();
    XSourcePosition position = myDebugSession == null ? null : myDebugSession.getCurrentPosition();
    XDebuggerTreeCreator creator = new XDebuggerTreeCreator(getProject(), myEditorsProvider, position, valueMarkers);
    showTreePopup(creator, Pair.create(value, myValueName));
  }

  @Override
  public String toString() {
    return myExpression;
  }
}
/* * Copyright 2014 Guidewire Software, Inc. */ package gw.internal.gosu.parser.expressions; import gw.internal.gosu.parser.*; import gw.lang.parser.MemberAccessKind; import gw.lang.parser.IExpressionRuntime; import gw.lang.parser.expressions.IBeanMethodCallExpression; import gw.lang.reflect.*; import gw.lang.reflect.gs.IGosuClass; import gw.lang.reflect.gs.IGosuMethodInfo; import gw.lang.reflect.java.JavaTypes; /** * An expression representing a bean method call: * <pre> * <i>bean-method-call-expression</i> * &lt;member-access&gt; <b>(</b> [&lt;argument-list&gt;] <b>)</b> * <p/> * <i>member-access</i> * &lt;root-expression&gt;.&lt;member&gt; * &lt;root-expression&gt;*.&lt;member&gt; * &lt;root-expression&gt;[member-name] * <p/> * <i>root-expression</i> * &lt;bean-reference&gt; * &lt;type-literal&gt; * <p/> * <i>member</i> * &lt;member-access&gt; * &lt;identifier&gt; * <p/> * <i>bean-reference</i> * &lt;primary-expression&gt; * <p/> * <i>member-name</i> * &lt;expression&gt; * </pre> * * @see gw.lang.parser.IGosuParser */ public final class BeanMethodCallExpression extends Expression implements IBeanMethodCallExpression, IHasOperatorLineNumber { private Expression _rootExpression; private IType[] _argTypes; private String _accessPath; private Expression[] _args; private IMethodInfo _md; private IFunctionType _funcType; private MemberAccessKind _kind; private int[] _namedArgOrder; private int _iArgPos; /** * Start offset of array list (without leading '.') */ protected int _startOffset; private static final IType[] EMPTY_ARG_TYPES = new IType[0]; private IExpressionRuntime _expressionRuntime; private int _opLineNum; public IFunctionType getFunctionType() { return _funcType; } public void setFunctionType(IFunctionType funcType) { _funcType = funcType; } public Expression getRootExpression() { return _rootExpression; } public void setRootExpression(Expression rootExpression) { _rootExpression = rootExpression; } /** * @return An array of IIntrinsicITyperguments of the 
method call. */ public IType[] getArgTypes() { return _argTypes; } /** * @param argTypes An array of IIntrinsicType for the arguments of the method call. */ public void setArgTypes(IType[] argTypes) { _argTypes = argTypes.length == 0 ? EMPTY_ARG_TYPES : argTypes; } /** * @return A list of Strings representing the member access path. Note the * member access path for the expression Root.foo.bar() is {foo, bar}. */ public String getMemberName() { return _accessPath; } /** * @param accessPath A list of Strings representing the member access path. */ public void setAccessPath(String accessPath) { assert accessPath != null; _accessPath = StringCache.get(accessPath); } public String getAccessPath() { return _accessPath; } public int getStartOffset() { return _startOffset; } public void setExpressionRuntime(IExpressionRuntime expressionRuntime) { _expressionRuntime = expressionRuntime; } @Override public IPropertyInfo getPropertyInfo() { return null; //To change body of implemented methods use File | Settings | File Templates. } public IExpressionRuntime getExpressionRuntime() { return _expressionRuntime; } public void setStartOffset(int startOffset) { _startOffset = startOffset; } /** * @return An array of expressions for corresponding to the arguments in the * expression. */ public Expression[] getArgs() { return _args; } /** * @param args An array of expressions for corresponding to the arguments in * the expression. */ public void setArgs(Expression[] args) { _args = args == null || args.length == 0 ? 
null : args; } public int[] getNamedArgOrder() { return _namedArgOrder; } public void setNamedArgOrder( int[] namedArgOrder ) { _namedArgOrder = namedArgOrder; } public void setMethodDescriptor(IMethodInfo md) { _md = md; if (md != null) { IType type = JavaTypes.IGOSU_OBJECT(); if (_md.getOwnersType() == IGosuClassInternal.Util.getGosuClassFrom(type)) { _md = type.getTypeInfo().getMethod(_md.getDisplayName(), ((FunctionType) ((IGosuMethodInfo) _md).getDfs().getType()).getParameterTypes()); } } } public IMethodInfo getMethodDescriptor() { return _md; } public IMethodInfo getGenericMethodDescriptor() { if (_md instanceof GosuMethodInfo) { ReducedDynamicFunctionSymbol dfs = ((GosuMethodInfo) _md).getDfs(); if (dfs instanceof ReducedParameterizedDynamicFunctionSymbol) { return (IMethodInfo) ((ReducedParameterizedDynamicFunctionSymbol) dfs).getBackingDfs().getMethodOrConstructorInfo(); } } return _md; } /** */ public IType getRootType() { IType rootType = getRootExpression().getType(); rootType = IGosuClass.ProxyUtil.isProxy(rootType) && rootType instanceof IGosuClass ? ((IGosuClass) rootType).getJavaType() : rootType; return rootType; } public MemberAccessKind getMemberAccessKind() { return _kind; } public void setMemberAccessKind( MemberAccessKind kind ) { _kind = kind; } @Override public boolean isNullSafe() { return getMemberAccessKind() == MemberAccessKind.NULL_SAFE || isExpansion(); } public boolean isExpansion() { return _kind == MemberAccessKind.EXPANSION; } /** * Evaluates the bean method call. * * @return The value of the expression. */ public Object evaluate() { if (!isCompileTimeConstant() ) { return super.evaluate(); } throw new CannotExecuteGosuException(); } @Override public String toString() { String strOut = getRootExpression().toString(); if (_accessPath != null) { strOut += "." 
+ _accessPath; } strOut += "("; if (_args != null && _args.length > 0) { strOut += " "; for (int i = 0; i < _args.length; i++) { if (i != 0) { strOut += ", "; } strOut += _args[i].toString(); } strOut += " "; } return strOut += ")"; } public int getArgPosition() { return _iArgPos; } public void setArgPosition(int iArgPos) { _iArgPos = iArgPos; } @Override public int getOperatorLineNumber() { return _opLineNum; } @Override public void setOperatorLineNumber( int operatorLineNumber ) { _opLineNum = operatorLineNumber; } }
/*
 * Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.dynamodbv2.datamodeling;

import static org.junit.Assert.assertEquals;

import java.lang.reflect.Method;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.TreeSet;
import java.util.UUID;

import org.junit.Assert;
import org.junit.Test;

import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.services.dynamodbv2.model.AttributeValue;
import com.amazonaws.services.dynamodbv2.pojos.SubClass;
import com.amazonaws.services.dynamodbv2.pojos.TestClass;
import com.amazonaws.services.dynamodbv2.pojos.UnannotatedSubClass;

/**
 * Exercises the standard model factories under the {@code V2_COMPATIBLE}
 * conversion schema: each test converts a {@link TestClass} getter value to
 * its DynamoDB {@link AttributeValue} representation and asserts the exact
 * wire form (N/S/B/BOOL, the set types NS/SS/BS, and the document types
 * L/M/NULL).
 */
public class StandardModelFactoriesV2CompatibleTest {

    /** Mapper config pinned to the V2-compatible conversion schema. */
    protected static final DynamoDBMapperConfig CONFIG = new DynamoDBMapperConfig.Builder()
        .withTypeConverterFactory(DynamoDBMapperConfig.DEFAULT.getTypeConverterFactory())
        .withConversionSchema(ConversionSchemas.V2_COMPATIBLE)
        .build();

    private static final DynamoDBMapperModelFactory factory =
        StandardModelFactories.of(S3Link.Factory.of(null));

    private static final DynamoDBMapperModelFactory.TableFactory models =
        factory.getTableFactory(CONFIG);

    /**
     * Converts {@code value} through the model of the attribute backing
     * {@code getter} on {@code clazz}.
     *
     * @param clazz  the mapped POJO class
     * @param getter the getter identifying the attribute under test
     * @param value  the POJO-side value to convert
     * @return the DynamoDB representation of {@code value}
     */
    protected <T> AttributeValue convert(Class<T> clazz, Method getter, Object value) {
        final StandardAnnotationMaps.FieldMap<Object> map = StandardAnnotationMaps.of(getter, null);
        return models.getTable(clazz).field(map.attributeName()).convert(value);
    }

    /** Primitive/boxed booleans marshal to N ("0"/"1"); native booleans to BOOL. */
    @Test
    public void testBoolean() {
        assertEquals("1", convert("getBoolean", true).getN());
        assertEquals("0", convert("getBoolean", false).getN());
        assertEquals("1", convert("getBoxedBoolean", true).getN());
        assertEquals("0", convert("getBoxedBoolean", false).getN());
        assertEquals(true, convert("getNativeBoolean", true).getBOOL());
        assertEquals(false, convert("getNativeBoolean", false).getBOOL());
    }

    /** Plain strings pass through; a custom marshaller may replace the value. */
    @Test
    public void testString() {
        assertEquals("abc", convert("getString", "abc").getS());
        assertEquals(RandomUUIDMarshaller.randomUUID,
                     convert("getCustomString", "abc").getS());
    }

    @Test
    public void testUuid() {
        UUID uuid = UUID.randomUUID();
        assertEquals(uuid.toString(), convert("getUuid", uuid).getS());
    }

    /** Dates and calendars marshal to ISO-8601 strings in UTC. */
    @Test
    public void testDate() {
        assertEquals("1970-01-01T00:00:00.000Z", convert("getDate", new Date(0)).getS());

        Calendar c = GregorianCalendar.getInstance();
        c.setTimeInMillis(0);
        assertEquals("1970-01-01T00:00:00.000Z", convert("getCalendar", c).getS());
    }

    /** All numeric types marshal to N; floating point keeps its decimal form. */
    @Test
    public void testNumbers() {
        assertEquals("0", convert("getByte", (byte) 0).getN());
        assertEquals("1", convert("getByte", (byte) 1).getN());
        assertEquals("0", convert("getBoxedByte", (byte) 0).getN());
        assertEquals("1", convert("getBoxedByte", (byte) 1).getN());

        assertEquals("0", convert("getShort", (short) 0).getN());
        assertEquals("1", convert("getShort", (short) 1).getN());
        assertEquals("0", convert("getBoxedShort", (short) 0).getN());
        assertEquals("1", convert("getBoxedShort", (short) 1).getN());

        assertEquals("0", convert("getInt", 0).getN());
        assertEquals("1", convert("getInt", 1).getN());
        assertEquals("0", convert("getBoxedInt", 0).getN());
        assertEquals("1", convert("getBoxedInt", 1).getN());

        // Use uppercase 'L' long-literal suffixes; lowercase 'l' reads as '1'.
        assertEquals("0", convert("getLong", 0L).getN());
        assertEquals("1", convert("getLong", 1L).getN());
        assertEquals("0", convert("getBoxedLong", 0L).getN());
        assertEquals("1", convert("getBoxedLong", 1L).getN());

        assertEquals("0", convert("getBigInt", BigInteger.ZERO).getN());
        assertEquals("1", convert("getBigInt", BigInteger.ONE).getN());

        assertEquals("0.0", convert("getFloat", 0f).getN());
        assertEquals("1.0", convert("getFloat", 1f).getN());
        assertEquals("0.0", convert("getBoxedFloat", 0f).getN());
        assertEquals("1.0", convert("getBoxedFloat", 1f).getN());

        assertEquals("0.0", convert("getDouble", 0d).getN());
        assertEquals("1.0", convert("getDouble", 1d).getN());
        assertEquals("0.0", convert("getBoxedDouble", 0d).getN());
        assertEquals("1.0", convert("getBoxedDouble", 1d).getN());

        assertEquals("0", convert("getBigDecimal", BigDecimal.ZERO).getN());
        assertEquals("1", convert("getBigDecimal", BigDecimal.ONE).getN());
    }

    /** byte[] and ByteBuffer both marshal to B. */
    @Test
    public void testBinary() {
        ByteBuffer value = ByteBuffer.wrap("value".getBytes());
        assertEquals(value.slice(), convert("getByteArray", "value".getBytes()).getB());
        assertEquals(value.slice(), convert("getByteBuffer", value.slice()).getB());
    }

    /** Boolean sets marshal to NS of "0"/"1" in the set's iteration order. */
    @Test
    public void testBooleanSet() {
        assertEquals(Collections.singletonList("1"),
                     convert("getBooleanSet", Collections.singleton(true)).getNS());
        assertEquals(Collections.singletonList("0"),
                     convert("getBooleanSet", Collections.singleton(false)).getNS());
        assertEquals(Arrays.asList("0", "1"),
                     convert("getBooleanSet", new TreeSet<Boolean>() {{
                         add(true);
                         add(false);
                     }}).getNS());
    }

    @Test
    public void testStringSet() {
        assertEquals(Collections.singletonList("a"),
                     convert("getStringSet", Collections.singleton("a")).getSS());
        assertEquals(Collections.singletonList("b"),
                     convert("getStringSet", Collections.singleton("b")).getSS());
        assertEquals(Arrays.asList("a", "b", "c"),
                     convert("getStringSet", new TreeSet<String>() {{
                         add("a");
                         add("b");
                         add("c");
                     }}).getSS());
    }

    /** UUID sets marshal to SS of the UUIDs' string forms, preserving order. */
    @Test
    public void testUuidSet() {
        final UUID one = UUID.randomUUID();
        final UUID two = UUID.randomUUID();
        final UUID three = UUID.randomUUID();

        assertEquals(Collections.singletonList(one.toString()),
                     convert("getUuidSet", Collections.singleton(one)).getSS());
        assertEquals(Collections.singletonList(two.toString()),
                     convert("getUuidSet", Collections.singleton(two)).getSS());
        assertEquals(
            Arrays.asList(one.toString(), two.toString(), three.toString()),
            convert("getUuidSet", new LinkedHashSet<UUID>() {{
                add(one);
                add(two);
                add(three);
            }}).getSS());
    }

    @Test
    public void testDateSet() {
        assertEquals(Collections.singletonList("1970-01-01T00:00:00.000Z"),
                     convert("getDateSet", Collections.singleton(new Date(0)))
                         .getSS());

        Calendar c = GregorianCalendar.getInstance();
        c.setTimeInMillis(0);
        assertEquals(Collections.singletonList("1970-01-01T00:00:00.000Z"),
                     convert("getCalendarSet", Collections.singleton(c))
                         .getSS());
    }

    /** Numeric sets marshal to NS; floating-point elements keep decimal form. */
    @Test
    public void testNumberSet() {
        assertEquals(Collections.singletonList("0"),
                     convert("getByteSet", Collections.singleton((byte) 0)).getNS());
        assertEquals(Collections.singletonList("0"),
                     convert("getShortSet", Collections.singleton((short) 0)).getNS());
        assertEquals(Collections.singletonList("0"),
                     convert("getIntSet", Collections.singleton(0)).getNS());
        assertEquals(Collections.singletonList("0"),
                     convert("getLongSet", Collections.singleton(0L)).getNS());
        assertEquals(Collections.singletonList("0"),
                     convert("getBigIntegerSet", Collections.singleton(BigInteger.ZERO))
                         .getNS());
        assertEquals(Collections.singletonList("0.0"),
                     convert("getFloatSet", Collections.singleton(0f)).getNS());
        assertEquals(Collections.singletonList("0.0"),
                     convert("getDoubleSet", Collections.singleton(0d)).getNS());
        assertEquals(Collections.singletonList("0"),
                     convert("getBigDecimalSet", Collections.singleton(BigDecimal.ZERO))
                         .getNS());
        assertEquals(Arrays.asList("0", "1", "2"),
                     convert("getLongSet", new TreeSet<Number>() {{
                         add(0);
                         add(1);
                         add(2);
                     }}).getNS());
    }

    @Test
    public void testBinarySet() {
        final ByteBuffer test = ByteBuffer.wrap("test".getBytes());
        final ByteBuffer test2 = ByteBuffer.wrap("test2".getBytes());

        assertEquals(Collections.singletonList(test.slice()),
                     convert("getByteArraySet", Collections.singleton("test".getBytes()))
                         .getBS());
        assertEquals(Collections.singletonList(test.slice()),
                     convert("getByteBufferSet", Collections.singleton(test.slice()))
                         .getBS());
        assertEquals(Arrays.asList(test.slice(), test2.slice()),
                     convert("getByteBufferSet", new TreeSet<ByteBuffer>() {{
                         add(test.slice());
                         add(test2.slice());
                     }}).getBS());
    }

    /** Arbitrary objects in a set marshal via toString() into SS. */
    @Test
    public void testObjectSet() {
        Object o = new Object() {
            @Override
            public String toString() {
                return "hello";
            }
        };
        assertEquals(Collections.singletonList("hello"),
                     convert("getObjectSet", Collections.singleton(o)).getSS());
    }

    /** Lists marshal to L; null elements become NULL attribute values. */
    @Test
    public void testList() {
        assertEquals(Arrays.asList(
                         new AttributeValue("a"),
                         new AttributeValue("b"),
                         new AttributeValue("c")),
                     convert("getList", Arrays.asList("a", "b", "c")).getL());
        assertEquals(Arrays.asList(new AttributeValue().withNULL(true)),
                     convert("getList", Collections.<String>singletonList(null)).getL());
    }

    @Test
    public void testSetList() {
        assertEquals(Arrays.asList(
                         new AttributeValue().withSS("a"),
                         new AttributeValue().withSS("b"),
                         new AttributeValue().withSS("c")),
                     convert("getSetList", Arrays.asList(
                         Collections.singleton("a"),
                         Collections.singleton("b"),
                         Collections.singleton("c"))).getL());
    }

    /** Maps marshal to M; null values become NULL attribute values. */
    @Test
    public void testMap() {
        assertEquals(new HashMap<String, AttributeValue>() {{
                         put("a", new AttributeValue("b"));
                         put("c", new AttributeValue("d"));
                         put("e", new AttributeValue("f"));
                     }},
                     convert("getMap", new HashMap<String, String>() {{
                         put("a", "b");
                         put("c", "d");
                         put("e", "f");
                     }}).getM());
        assertEquals(Collections.singletonMap("a", new AttributeValue().withNULL(true)),
                     convert("getMap", Collections.<String, String>singletonMap("a", null)).getM());
    }

    @Test
    public void testSetMap() {
        assertEquals(new HashMap<String, AttributeValue>() {{
                         put("a", new AttributeValue().withSS("a", "b"));
                     }},
                     convert("getSetMap", new HashMap<String, Set<String>>() {{
                         put("a", new TreeSet<String>(Arrays.asList("a", "b")));
                     }}).getM());
        assertEquals(new HashMap<String, AttributeValue>() {{
                         put("a", new AttributeValue().withSS("a"));
                         put("b", new AttributeValue().withNULL(true));
                     }},
                     convert("getSetMap", new HashMap<String, Set<String>>() {{
                         put("a", new TreeSet<String>(Arrays.asList("a")));
                         put("b", null);
                     }}).getM());
    }

    /** Annotated nested objects marshal recursively into an M document. */
    @Test
    public void testObject() {
        assertEquals(new HashMap<String, AttributeValue>() {{
                         put("name", new AttributeValue("name"));
                         put("value", new AttributeValue().withN("123"));
                     }},
                     convert("getObject", new SubClass()).getM());
    }

    /** Converting an unannotated nested object must be rejected. */
    @Test
    public void testUnannotatedObject() throws Exception {
        try {
            convert(UnannotatedSubClass.class,
                    UnannotatedSubClass.class.getMethod("getChild"),
                    new UnannotatedSubClass());
            Assert.fail("Expected DynamoDBMappingException");
        } catch (DynamoDBMappingException e) {
            // expected: unannotated classes cannot be marshalled
        }
    }

    /** S3Link marshals to its JSON string form. */
    @Test
    public void testS3Link() {
        S3ClientCache cache = new S3ClientCache((AWSCredentialsProvider) null);
        S3Link link = new S3Link(cache, "bucket", "key");
        assertEquals("{\"s3\":{"
                     + "\"bucket\":\"bucket\","
                     + "\"key\":\"key\","
                     + "\"region\":null}}",
                     convert("getS3Link", link).getS());
    }

    /**
     * Convenience overload: resolves {@code getter} on {@link TestClass} by
     * name, rethrowing any reflective failure as a RuntimeException.
     */
    private AttributeValue convert(String getter, Object value) {
        try {
            return convert(TestClass.class, TestClass.class.getMethod(getter), value);
        } catch (RuntimeException e) {
            throw e;
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
/* * Copyright 2009-2011 Collaborative Research Centre SFB 632 * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package annis.sqlgen; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Set; import org.apache.commons.lang3.StringUtils; import annis.model.QueryNode; import annis.ql.parser.QueryData; import org.springframework.util.Assert; /** * Abstract base class for a complete SQL statement. * * A SQL statement consists of a mandatory SELECT and FROM clauses and optional * WHERE, GROUP BY, ORDER BY and LIMIT/OFFSET clauses. The individual clauses * are generated using helper classes which are specified by properties. * * @author Viktor Rosenfeld <rosenfel@informatik.hu-berlin.de> * * @param <T> Type into which the JDBC result set is transformed. 
*/ public abstract class AbstractSqlGenerator<T> extends TableAccessStrategyFactory implements SqlGenerator<QueryData, T> { // generators for different SQL statement clauses private WithClauseSqlGenerator<QueryData> withClauseSqlGenerator; private SelectClauseSqlGenerator<QueryData> selectClauseSqlGenerator; private List<FromClauseSqlGenerator<QueryData>> fromClauseSqlGenerators; private List<WhereClauseSqlGenerator<QueryData>> whereClauseSqlGenerators; private GroupByClauseSqlGenerator<QueryData> groupByClauseSqlGenerator; private OrderByClauseSqlGenerator<QueryData> orderByClauseSqlGenerator; private LimitOffsetClauseSqlGenerator<QueryData> limitOffsetClauseSqlGenerator; // controls indentation public final static String TABSTOP = " "; @Override public String toSql(QueryData queryData) { return toSql(queryData, ""); } @Override public String toSql(QueryData queryData, String indent) { Assert.notEmpty(queryData.getAlternatives(), "BUG: no alternatives"); // push alternative down List<QueryNode> alternative = queryData.getAlternatives().get(0); StringBuffer sb = new StringBuffer(); sb.append(indent); sb.append(createSqlForAlternative(queryData, alternative, indent)); appendOrderByClause(sb, queryData, alternative, indent); appendLimitOffsetClause(sb, queryData, alternative, indent); return sb.toString(); } protected String createSqlForAlternative(QueryData queryData, List<QueryNode> alternative, String indent) { StringBuffer sb = new StringBuffer(); appendWithClause(sb, queryData, alternative, indent); appendSelectClause(sb, queryData, alternative, indent); appendFromClause(sb, queryData, alternative, indent); appendWhereClause(sb, queryData, alternative, indent); appendGroupByClause(sb, queryData, alternative, indent); return sb.toString(); } protected String computeIndent(int indentBy) { StringBuilder sb = new StringBuilder(); for (int i = 0; i < indentBy; ++i) { sb.append(TABSTOP); } return sb.toString(); } private void appendWithClause(StringBuffer sb, 
QueryData queryData, List<QueryNode> alternative, String indent) { if (withClauseSqlGenerator != null) { List<String> clauses = withClauseSqlGenerator.withClauses(queryData, alternative, indent + TABSTOP); if (!clauses.isEmpty()) { sb.append(indent).append("WITH\n"); sb.append(StringUtils.join(clauses, ",\n")); sb.append("\n"); } } } private void appendSelectClause(StringBuffer sb, QueryData queryData, List<QueryNode> alternative, String indent) { sb.append("SELECT "); sb.append(selectClauseSqlGenerator.selectClause(queryData, alternative, indent)); sb.append("\n"); } private void appendFromClause(StringBuffer sb, QueryData queryData, List<QueryNode> alternative, String indent) { sb.append(indent); sb.append("FROM"); List<String> fromTables = new ArrayList<String>(); for (FromClauseSqlGenerator<QueryData> generator : fromClauseSqlGenerators) { fromTables.add(generator.fromClause(queryData, alternative, indent)); } sb.append("\n"); sb.append(indent).append(TABSTOP); sb.append(StringUtils.join(fromTables, ",\n" + indent + TABSTOP)); sb.append("\n"); } private void appendWhereClause(StringBuffer sb, QueryData queryData, List<QueryNode> alternative, String indent) { // check if the WHERE clause generators are really used if (whereClauseSqlGenerators == null || whereClauseSqlGenerators.isEmpty()) { return; } // treat each condition as mutable string to remove last AND List<StringBuffer> conditions = new ArrayList<StringBuffer>(); for (WhereClauseSqlGenerator<QueryData> generator : whereClauseSqlGenerators) { Set<String> whereConditions = generator.whereConditions(queryData, alternative, indent); for (String constraint : whereConditions) { conditions.add(new StringBuffer(constraint)); } } // sort conditions, group by accessed table alias Collections.sort(conditions, new Comparator<StringBuffer>() { @Override public int compare(StringBuffer o1, StringBuffer o2) { if (o1 == null && o2 == null) { return 0; } else if (o1 == null && o2 != null) { return -1; } else if (o1 != 
null && o2 == null) { return 1; } else if(o1 != null && o2 != null) { return o1.toString().compareTo(o2.toString()); } throw new IllegalArgumentException("Could not compare " + o1 + " with " + o2); } }); // no conditions in WHERE clause? break out if (conditions.isEmpty()) { return; } // append WHERE clause to query sb.append(indent); sb.append("WHERE"); sb.append("\n"); sb.append(indent).append(TABSTOP); sb.append(StringUtils.join(conditions, " AND\n" + indent + TABSTOP)); sb.append("\n"); } private void appendGroupByClause(StringBuffer sb, QueryData queryData, List<QueryNode> alternative, String indent) { if (groupByClauseSqlGenerator != null) { sb.append(indent); sb.append("GROUP BY "); sb.append(groupByClauseSqlGenerator.groupByAttributes(queryData, alternative)); sb.append("\n"); } } protected void appendOrderByClause(StringBuffer sb, QueryData queryData, List<QueryNode> alternative, String indent) { if (orderByClauseSqlGenerator != null) { sb.append(indent); sb.append("ORDER BY "); sb.append(orderByClauseSqlGenerator.orderByClause(queryData, alternative, indent)); sb.append("\n"); } } protected void appendLimitOffsetClause(StringBuffer sb, QueryData queryData, List<QueryNode> alternative, String indent) { if (limitOffsetClauseSqlGenerator != null) { sb.append(indent); sb.append(limitOffsetClauseSqlGenerator.limitOffsetClause(queryData, alternative, indent)); sb.append("\n"); } } ///// Getter / Setter public List<FromClauseSqlGenerator<QueryData>> getFromClauseSqlGenerators() { return fromClauseSqlGenerators; } public void setFromClauseSqlGenerators( List<FromClauseSqlGenerator<QueryData>> fromClauseSqlGenerators) { this.fromClauseSqlGenerators = fromClauseSqlGenerators; } public List<WhereClauseSqlGenerator<QueryData>> getWhereClauseSqlGenerators() { return whereClauseSqlGenerators; } public void setWhereClauseSqlGenerators( List<WhereClauseSqlGenerator<QueryData>> whereClauseSqlGenerators) { this.whereClauseSqlGenerators = whereClauseSqlGenerators; } public 
GroupByClauseSqlGenerator<QueryData> getGroupByClauseSqlGenerator() { return groupByClauseSqlGenerator; } public void setGroupByClauseSqlGenerator( GroupByClauseSqlGenerator<QueryData> groupByClauseSqlGenerator) { this.groupByClauseSqlGenerator = groupByClauseSqlGenerator; } public WithClauseSqlGenerator<QueryData> getWithClauseSqlGenerator() { return withClauseSqlGenerator; } public void setWithClauseSqlGenerator( WithClauseSqlGenerator<QueryData> withClauseSqlGenerator) { this.withClauseSqlGenerator = withClauseSqlGenerator; } public SelectClauseSqlGenerator<QueryData> getSelectClauseSqlGenerator() { return selectClauseSqlGenerator; } public void setSelectClauseSqlGenerator( SelectClauseSqlGenerator<QueryData> selectClauseSqlGenerator) { this.selectClauseSqlGenerator = selectClauseSqlGenerator; } public OrderByClauseSqlGenerator<QueryData> getOrderByClauseSqlGenerator() { return orderByClauseSqlGenerator; } public void setOrderByClauseSqlGenerator( OrderByClauseSqlGenerator<QueryData> orderByClauseSqlGenerator) { this.orderByClauseSqlGenerator = orderByClauseSqlGenerator; } public LimitOffsetClauseSqlGenerator<QueryData> getLimitOffsetClauseSqlGenerator() { return limitOffsetClauseSqlGenerator; } public void setLimitOffsetClauseSqlGenerator( LimitOffsetClauseSqlGenerator<QueryData> limitOffsetClauseSqlGenerator) { this.limitOffsetClauseSqlGenerator = limitOffsetClauseSqlGenerator; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ranger.patch.cliutil; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Date; import org.apache.ranger.common.DateUtil; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.ranger.audit.provider.MiscUtil; import org.apache.ranger.biz.AssetMgr; import org.apache.ranger.biz.RangerBizUtil; import org.apache.ranger.biz.ServiceDBStore; import org.apache.ranger.biz.XUserMgr; import org.apache.ranger.common.AppConstants; import org.apache.ranger.common.MessageEnums; import org.apache.ranger.common.RESTErrorUtil; import org.apache.ranger.common.RangerConstants; import org.apache.ranger.common.SearchCriteria; import org.apache.ranger.patch.BaseLoader; import org.apache.ranger.plugin.model.RangerPolicy; import org.apache.ranger.plugin.model.RangerPolicy.RangerPolicyItem; import org.apache.ranger.plugin.model.RangerService; import org.apache.ranger.plugin.model.RangerServiceDef; import org.apache.ranger.plugin.model.RangerServiceDef.RangerContextEnricherDef; import 
org.apache.ranger.plugin.store.PList; import org.apache.ranger.plugin.util.SearchFilter; import org.apache.ranger.util.CLIUtil; import org.apache.ranger.util.RestUtil; import org.apache.ranger.view.VXAccessAuditList; import org.apache.ranger.view.VXGroupList; import org.apache.ranger.view.VXMetricContextEnricher; import org.apache.ranger.view.VXMetricAuditDetailsCount; import org.apache.ranger.view.VXMetricServiceCount; import org.apache.ranger.view.VXMetricPolicyCount; import org.apache.ranger.view.VXMetricUserGroupCount; import org.apache.ranger.view.VXUserList; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import com.google.gson.Gson; import com.google.gson.GsonBuilder; @Component public class MetricUtil extends BaseLoader { private static final Logger logger = Logger.getLogger(MetricUtil.class); public static String metricType; @Autowired XUserMgr xUserMgr; @Autowired AssetMgr assetMgr; @Autowired ServiceDBStore svcStore; @Autowired RangerBizUtil xaBizUtil; @Autowired RESTErrorUtil restErrorUtil; public static void main(String[] args) { logger.getRootLogger().setLevel(Level.OFF); logger.info("MetricUtil : main()"); try { MetricUtil loader = (MetricUtil) CLIUtil.getBean(MetricUtil.class); loader.init(); if (args.length != 2) { System.out.println("type: Incorrect Arguments usage : -type policies | audits | usergroup | services | database | contextenrichers | denyconditions"); } else { if (!("-type".equalsIgnoreCase(args[0])) || !("policies".equalsIgnoreCase(args[1]) || "audits".equalsIgnoreCase(args[1]) || "usergroup".equalsIgnoreCase(args[1]) || "services".equalsIgnoreCase(args[1]) || "database".equalsIgnoreCase(args[1]) || "contextenrichers".equalsIgnoreCase(args[1]) || "denyconditions".equalsIgnoreCase(args[1]))) { System.out.println("type: Incorrect Arguments usage : -type policies | audits | usergroup | services | database | contextenrichers | denyconditions"); } else { metricType = args[1]; 
if (logger.isDebugEnabled()) { logger.debug("Metric Type : " + metricType); } } } while (loader.isMoreToProcess()) { loader.load(); } logger.info("Load complete. Exiting!!!"); System.exit(0); } catch (Exception e) { logger.error("Error loading", e); System.exit(1); } } @Override public void init() throws Exception { logger.info("==> MetricUtil.init()"); } @Override public void execLoad() { logger.info("==> MetricUtil.execLoad()"); metricCalculation(metricType); logger.info("<== MetricUtil.execLoad()"); } @Override public void printStats() { } private void metricCalculation(String caseValue) { logger.info("Metric Type : " + caseValue); try { SearchCriteria searchCriteria = new SearchCriteria(); searchCriteria.setStartIndex(0); searchCriteria.setMaxRows(100); searchCriteria.setGetCount(true); searchCriteria.setSortType("asc"); switch (caseValue.toLowerCase()) { case "usergroup": try { VXGroupList vxGroupList = xUserMgr.searchXGroups(searchCriteria); long groupCount = vxGroupList.getTotalCount(); ArrayList<String> userAdminRoleCount = new ArrayList<String>(); userAdminRoleCount.add(RangerConstants.ROLE_SYS_ADMIN); long userSysAdminCount = getUserCountBasedOnUserRole(userAdminRoleCount); ArrayList<String> userAdminAuditorRoleCount = new ArrayList<String>(); userAdminAuditorRoleCount.add(RangerConstants.ROLE_ADMIN_AUDITOR); long userSysAdminAuditorCount = getUserCountBasedOnUserRole(userAdminAuditorRoleCount); ArrayList<String> userRoleListKeyRoleAdmin = new ArrayList<String>(); userRoleListKeyRoleAdmin.add(RangerConstants.ROLE_KEY_ADMIN); long userKeyAdminCount = getUserCountBasedOnUserRole(userRoleListKeyRoleAdmin); ArrayList<String> userRoleListKeyadminAduitorRole = new ArrayList<String>(); userRoleListKeyadminAduitorRole.add(RangerConstants.ROLE_KEY_ADMIN_AUDITOR); long userKeyadminAuditorCount = getUserCountBasedOnUserRole(userRoleListKeyadminAduitorRole); ArrayList<String> userRoleListUser = new ArrayList<String>(); userRoleListUser.add(RangerConstants.ROLE_USER); 
long userRoleCount = getUserCountBasedOnUserRole(userRoleListUser); long userTotalCount = userSysAdminCount + userKeyAdminCount + userRoleCount + userKeyadminAuditorCount + userSysAdminAuditorCount; VXMetricUserGroupCount metricUserGroupCount = new VXMetricUserGroupCount(); metricUserGroupCount.setUserCountOfUserRole(userRoleCount); metricUserGroupCount.setUserCountOfKeyAdminRole(userKeyAdminCount); metricUserGroupCount.setUserCountOfSysAdminRole(userSysAdminCount); metricUserGroupCount.setUserCountOfKeyadminAuditorRole(userKeyadminAuditorCount); metricUserGroupCount.setUserCountOfSysAdminAuditorRole(userSysAdminAuditorCount); metricUserGroupCount.setUserTotalCount(userTotalCount); metricUserGroupCount.setGroupCount(groupCount); Gson gson = new GsonBuilder().create(); final String jsonUserGroupCount = gson.toJson(metricUserGroupCount); System.out.println(jsonUserGroupCount); } catch (Exception e) { logger.error("Error calculating Metric for usergroup : " + e.getMessage()); } break; case "audits": try{ int clientTimeOffsetInMinute = RestUtil.getClientTimeOffset(); String defaultDateFormat="MM/dd/yyyy"; DateFormat formatter = new SimpleDateFormat(defaultDateFormat); VXMetricAuditDetailsCount auditObj = new VXMetricAuditDetailsCount(); DateUtil dateUtilTwoDays = new DateUtil(); Date startDateUtilTwoDays = dateUtilTwoDays.getDateFromNow(-2); Date dStart2 = restErrorUtil.parseDate(formatter.format(startDateUtilTwoDays), "Invalid value for startDate", MessageEnums.INVALID_INPUT_DATA, null, "startDate", defaultDateFormat); Date endDateTwoDays = MiscUtil.getUTCDate(); Date dEnd2 = restErrorUtil.parseDate(formatter.format(endDateTwoDays), "Invalid value for endDate", MessageEnums.INVALID_INPUT_DATA, null, "endDate", defaultDateFormat); dEnd2 = dateUtilTwoDays.getDateFromGivenDate(dEnd2, 0, 23, 59, 59); dEnd2 = dateUtilTwoDays.addTimeOffset(dEnd2, clientTimeOffsetInMinute); VXMetricServiceCount deniedCountObj = getAuditsCount(0,dStart2,dEnd2); 
auditObj.setDenialEventsCountTwoDays(deniedCountObj); VXMetricServiceCount allowedCountObj = getAuditsCount(1,dStart2,dEnd2); auditObj.setAccessEventsCountTwoDays(allowedCountObj); long totalAuditsCountTwoDays = deniedCountObj.getTotalCount() + allowedCountObj.getTotalCount(); auditObj.setSolrIndexCountTwoDays(totalAuditsCountTwoDays); DateUtil dateUtilWeek = new DateUtil(); Date startDateUtilWeek = dateUtilWeek.getDateFromNow(-7); Date dStart7 = restErrorUtil.parseDate(formatter.format(startDateUtilWeek), "Invalid value for startDate", MessageEnums.INVALID_INPUT_DATA, null, "startDate", defaultDateFormat); Date endDateWeek = MiscUtil.getUTCDate(); DateUtil dateUtilweek = new DateUtil(); Date dEnd7 = restErrorUtil.parseDate(formatter.format(endDateWeek), "Invalid value for endDate", MessageEnums.INVALID_INPUT_DATA, null, "endDate", defaultDateFormat); dEnd7 = dateUtilweek.getDateFromGivenDate(dEnd7,0, 23, 59, 59 ); dEnd7 = dateUtilweek.addTimeOffset(dEnd7, clientTimeOffsetInMinute); VXMetricServiceCount deniedCountObjWeek = getAuditsCount(0,dStart7,dEnd7); auditObj.setDenialEventsCountWeek(deniedCountObjWeek); VXMetricServiceCount allowedCountObjWeek = getAuditsCount(1,dStart7,dEnd7); auditObj.setAccessEventsCountWeek(allowedCountObjWeek); long totalAuditsCountWeek = deniedCountObjWeek.getTotalCount() + allowedCountObjWeek.getTotalCount(); auditObj.setSolrIndexCountWeek(totalAuditsCountWeek); Gson gson = new GsonBuilder().create(); final String jsonAudit = gson.toJson(auditObj); System.out.println(jsonAudit); }catch (Exception e) { logger.error("Error calculating Metric for audits : "+e.getMessage()); } break; case "services" : try { SearchFilter serviceFilter = new SearchFilter(); serviceFilter.setMaxRows(200); serviceFilter.setStartIndex(0); serviceFilter.setGetCount(true); serviceFilter.setSortBy("serviceId"); serviceFilter.setSortType("asc"); VXMetricServiceCount vXMetricServiceCount = new VXMetricServiceCount(); PList<RangerService> paginatedSvcs = 
svcStore.getPaginatedServices(serviceFilter); long totalServiceCount = paginatedSvcs.getTotalCount(); List<RangerService> rangerServiceList = paginatedSvcs.getList(); Map<String, Long> services = new HashMap<String, Long>(); for (Object rangerService : rangerServiceList) { RangerService RangerServiceObj = (RangerService) rangerService; String serviceName = RangerServiceObj.getType(); if (!(services.containsKey(serviceName))) { serviceFilter.setParam("serviceType", serviceName); PList<RangerService> paginatedSvcscount = svcStore.getPaginatedServices(serviceFilter); services.put(serviceName, paginatedSvcscount.getTotalCount()); } } vXMetricServiceCount.setServiceBasedCountList(services); vXMetricServiceCount.setTotalCount(totalServiceCount); Gson gson = new GsonBuilder().create(); final String jsonServices = gson.toJson(vXMetricServiceCount); System.out.println(jsonServices); } catch (Exception e) { logger.error("Error calculating Metric for services : " + e.getMessage()); } break; case "policies" : try { SearchFilter policyFilter = new SearchFilter(); policyFilter.setMaxRows(200); policyFilter.setStartIndex(0); policyFilter.setGetCount(true); policyFilter.setSortBy("serviceId"); policyFilter.setSortType("asc"); VXMetricPolicyCount vXMetricPolicyCount = new VXMetricPolicyCount(); PList<RangerPolicy> paginatedSvcsList = svcStore.getPaginatedPolicies(policyFilter); vXMetricPolicyCount.setTotalCount(paginatedSvcsList.getTotalCount()); Map<String, VXMetricServiceCount> servicesWithPolicy = new HashMap<String, VXMetricServiceCount>(); for (int k = 2; k >= 0; k--) { String serviceType = String.valueOf(k); VXMetricServiceCount vXMetricServiceCount = getVXMetricServiceCount(serviceType); if (k == 2) { servicesWithPolicy.put("rowFilteringPolicies", vXMetricServiceCount); } else if (k == 1) { servicesWithPolicy.put("maskingPolicies", vXMetricServiceCount); } else if (k == 0) { servicesWithPolicy.put("resourcePolicy", vXMetricServiceCount);} } boolean tagFlag = false; if 
(tagFlag == false) { policyFilter.setParam("serviceType", "tag"); PList<RangerPolicy> policiestype = svcStore.getPaginatedPolicies(policyFilter); Map<String, Long> tagMap = new HashMap<String, Long>(); long tagCount = policiestype.getTotalCount(); tagMap.put("tag", tagCount); VXMetricServiceCount vXMetricServiceCount = new VXMetricServiceCount(); vXMetricServiceCount.setServiceBasedCountList(tagMap); vXMetricServiceCount.setTotalCount(tagCount); servicesWithPolicy.put("tagBasedPolicies", vXMetricServiceCount); tagFlag = true; } vXMetricPolicyCount.setPolicyCountList(servicesWithPolicy); Gson gson = new GsonBuilder().create(); final String jsonPolicies = gson.toJson(vXMetricPolicyCount); System.out.println(jsonPolicies); } catch (Exception e) { logger.error("Error calculating Metric for policies : " + e.getMessage()); } break; case "database" : try { int dbFlavor = RangerBizUtil.getDBFlavor(); String dbFlavourType = "Unknow "; if (dbFlavor == AppConstants.DB_FLAVOR_MYSQL) { dbFlavourType = "MYSQL "; } else if (dbFlavor == AppConstants.DB_FLAVOR_ORACLE) { dbFlavourType = "ORACLE "; } else if (dbFlavor == AppConstants.DB_FLAVOR_POSTGRES) { dbFlavourType = "POSTGRES "; } else if (dbFlavor == AppConstants.DB_FLAVOR_SQLANYWHERE) { dbFlavourType = "SQLANYWHERE "; } else if (dbFlavor == AppConstants.DB_FLAVOR_SQLSERVER) { dbFlavourType = "SQLSERVER "; } String dbDetail = dbFlavourType + xaBizUtil.getDBVersion(); Gson gson = new GsonBuilder().create(); final String jsonDBDetail = gson.toJson(dbDetail); System.out.println(jsonDBDetail); } catch (Exception e) { logger.error("Error calculating Metric for database : " + e.getMessage()); } break; case "contextenrichers": try { SearchFilter filter = new SearchFilter(); filter.setStartIndex(0); VXMetricContextEnricher serviceWithContextEnrichers = new VXMetricContextEnricher(); PList<RangerServiceDef> paginatedSvcDefs = svcStore.getPaginatedServiceDefs(filter); List<RangerServiceDef> repoTypeList = paginatedSvcDefs.getList(); if 
(repoTypeList != null) { for (RangerServiceDef repoType : repoTypeList) { RangerServiceDef rangerServiceDefObj = (RangerServiceDef) repoType; String name = rangerServiceDefObj.getName(); List<RangerContextEnricherDef> contextEnrichers = rangerServiceDefObj.getContextEnrichers(); if (contextEnrichers != null && !contextEnrichers.isEmpty()) { serviceWithContextEnrichers.setServiceName(name); serviceWithContextEnrichers.setTotalCount(contextEnrichers.size()); } } } Gson gson = new GsonBuilder().create(); final String jsonContextEnrichers = gson.toJson(serviceWithContextEnrichers); System.out.println(jsonContextEnrichers); } catch (Exception e) { logger.error("Error calculating Metric for contextenrichers : " + e.getMessage()); } break; case "denyconditions": try { SearchFilter policyFilter1 = new SearchFilter(); policyFilter1.setMaxRows(200); policyFilter1.setStartIndex(0); policyFilter1.setGetCount(true); policyFilter1.setSortBy("serviceId"); policyFilter1.setSortType("asc"); int denyCount = 0; Map<String, Integer> denyconditionsonMap = new HashMap<String, Integer>(); PList<RangerServiceDef> paginatedSvcDefs = svcStore.getPaginatedServiceDefs(policyFilter1); if (paginatedSvcDefs != null) { List<RangerServiceDef> rangerServiceDefs = paginatedSvcDefs.getList(); if (rangerServiceDefs != null && !rangerServiceDefs.isEmpty()) { for (RangerServiceDef rangerServiceDef : rangerServiceDefs) { if (rangerServiceDef != null) { String serviceDef = rangerServiceDef.getName(); if (!StringUtils.isEmpty(serviceDef)) { policyFilter1.setParam("serviceType", serviceDef); PList<RangerPolicy> policiesList = svcStore.getPaginatedPolicies(policyFilter1); if (policiesList != null && policiesList.getListSize() > 0) { int policyListCount = policiesList.getListSize(); if (policyListCount > 0 && policiesList.getList() != null) { List<RangerPolicy> policies = policiesList.getList(); for (RangerPolicy policy : policies) { if (policy != null) { List<RangerPolicyItem> policyItem = 
policy.getDenyPolicyItems();
// Fold this policy's deny-items into the running count for the current service-def.
// NOTE(review): when the map already has an entry for serviceDef, its previous total is
// added back in — presumably intentional accumulation across filter pages; verify.
if (policyItem != null && !policyItem.isEmpty()) {
    if (denyconditionsonMap.get(serviceDef) != null) {
        denyCount = denyconditionsonMap.get(serviceDef) + denyCount + policyItem.size();
    } else {
        denyCount = denyCount + policyItem.size();
    }
}
// Deny-exceptions are counted into the same per-service-def total.
List<RangerPolicyItem> policyItemExclude = policy.getDenyExceptions();
if (policyItemExclude != null && !policyItemExclude.isEmpty()) {
    if (denyconditionsonMap.get(serviceDef) != null) {
        denyCount = denyconditionsonMap.get(serviceDef) + denyCount + policyItemExclude.size();
    } else {
        denyCount = denyCount + policyItemExclude.size();
    }
}
}
}
}
}
// Clear the per-service-def filter param before the next service-def iteration.
policyFilter1.removeParam("serviceType");
}
// Record the accumulated deny-condition count and reset for the next service-def.
denyconditionsonMap.put(serviceDef, denyCount);
denyCount = 0;
}
}
}
}
// Emit the deny-condition metric as JSON on stdout.
Gson gson = new GsonBuilder().create();
String jsonContextDenyCondtionOn = gson.toJson(denyconditionsonMap);
System.out.println(jsonContextDenyCondtionOn);
} catch (Exception e) {
    logger.error("Error calculating Metric for denyconditions : " + e.getMessage());
}
break;
default:
    // Unknown -type argument: print usage and fall through to method exit.
    System.out.println("type: Incorrect Arguments usage : -type policies | audits | usergroup | services | database | contextenrichers | denyconditions");
    logger.info("Please enter the valid arguments for Metric Calculation");
    break;
}
} catch(Exception e) {
    logger.error("Error calculating Metric : "+e.getMessage());
}
}

/**
 * Builds a per-service-type policy count for one policy type.
 *
 * @param serviceType numeric policy type as a string ("0" resource, "1" masking,
 *                    "2" row-filtering — per the caller's loop; confirm against
 *                    RangerPolicy constants)
 * @return counts keyed by service type; the total excludes tag-service policies
 * @throws Exception propagated from the service store
 */
private VXMetricServiceCount getVXMetricServiceCount(String serviceType) throws Exception {
    SearchFilter policyFilter1 = new SearchFilter();
    policyFilter1.setMaxRows(200);
    policyFilter1.setStartIndex(0);
    policyFilter1.setGetCount(true);
    policyFilter1.setSortBy("serviceId");
    policyFilter1.setSortType("asc");
    policyFilter1.setParam("policyType", serviceType);
    PList<RangerPolicy> policies = svcStore.getPaginatedPolicies(policyFilter1);
    PList<RangerService> paginatedSvcsSevice = svcStore.getPaginatedServices(policyFilter1);
    List<RangerService> rangerServiceList = paginatedSvcsSevice.getList();
    Map<String, Long> servicesforPolicyType = new HashMap<String, Long>();
    long tagCount = 0;
    for (Object rangerService : rangerServiceList) {
        RangerService rangerServiceObj = (RangerService) rangerService;
        String serviceName = rangerServiceObj.getType();
        // Each service type is queried only once.
        if (!(servicesforPolicyType.containsKey(serviceName))) {
            policyFilter1.setParam("serviceType", serviceName);
            PList<RangerPolicy> policiestype = svcStore.getPaginatedPolicies(policyFilter1);
            long count = policiestype.getTotalCount();
            if (count != 0) {
                // Tag-service policies are tracked separately and subtracted from the total below.
                if (!"tag".equalsIgnoreCase(serviceName)) {
                    servicesforPolicyType.put(serviceName, count);
                } else {
                    tagCount = count;
                }
            }
        }
    }
    VXMetricServiceCount vXMetricServiceCount = new VXMetricServiceCount();
    vXMetricServiceCount.setServiceBasedCountList(servicesforPolicyType);
    long totalCountOfPolicyType = policies.getTotalCount() - tagCount;
    vXMetricServiceCount.setTotalCount(totalCountOfPolicyType);
    return vXMetricServiceCount;
}

/**
 * Counts access-audit records per service repository type within a date window.
 *
 * @param accessResult audit access result to filter on (e.g. allowed/denied code)
 * @param startDate    inclusive window start
 * @param endDate      inclusive window end — TODO confirm inclusivity against assetMgr
 * @return per-repo-type audit counts plus the overall total
 * @throws Exception propagated from the service store / asset manager
 */
private VXMetricServiceCount getAuditsCount(int accessResult, Date startDate,
        Date endDate) throws Exception {
    long totalCountOfAudits = 0;
    SearchFilter filter = new SearchFilter();
    filter.setStartIndex(0);
    Map<String, Long> servicesRepoType = new HashMap<String, Long>();
    VXMetricServiceCount vXMetricServiceCount = new VXMetricServiceCount();
    PList<RangerServiceDef> paginatedSvcDefs = svcStore.getPaginatedServiceDefs(filter);
    Iterable<RangerServiceDef> repoTypeGet = paginatedSvcDefs.getList();
    for (Object repo : repoTypeGet) {
        RangerServiceDef rangerServiceDefObj = (RangerServiceDef) repo;
        long id = rangerServiceDefObj.getId();
        String serviceRepoName = rangerServiceDefObj.getName();
        SearchCriteria searchCriteriaWithType = new SearchCriteria();
        searchCriteriaWithType.getParamList().put("repoType", id);
        searchCriteriaWithType.getParamList().put("accessResult", accessResult);
        searchCriteriaWithType.addParam("startDate", startDate);
        searchCriteriaWithType.addParam("endDate", endDate);
        VXAccessAuditList vXAccessAuditListwithType = assetMgr.getAccessLogs(searchCriteriaWithType);
        long toltalCountOfRepo = vXAccessAuditListwithType.getTotalCount();
        // Only repos with at least one matching audit record appear in the result map.
        if (toltalCountOfRepo != 0) {
            servicesRepoType.put(serviceRepoName, toltalCountOfRepo);
            totalCountOfAudits += toltalCountOfRepo;
        }
    }
    vXMetricServiceCount.setServiceBasedCountList(servicesRepoType);
    vXMetricServiceCount.setTotalCount(totalCountOfAudits);
    return vXMetricServiceCount;
}

/**
 * Counts users holding any of the given roles.
 *
 * @param userRoleList raw list of role names passed through to the user search
 * @return total matching user count
 */
private Long getUserCountBasedOnUserRole(@SuppressWarnings("rawtypes") List userRoleList) {
    SearchCriteria searchCriteria = new SearchCriteria();
    searchCriteria.setStartIndex(0);
    searchCriteria.setMaxRows(100);
    searchCriteria.setGetCount(true);
    searchCriteria.setSortType("asc");
    searchCriteria.addParam("userRoleList", userRoleList);
    VXUserList VXUserListKeyAdmin = xUserMgr.searchXUsers(searchCriteria);
    long userCount = VXUserListKeyAdmin.getTotalCount();
    return userCount;
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.exec.physical.impl.project; import org.apache.drill.shaded.guava.com.google.common.base.Preconditions; import org.apache.drill.common.expression.LogicalExpression; import org.apache.drill.common.types.TypeProtos; import org.apache.drill.common.types.TypeProtos.MajorType; import org.apache.drill.common.types.TypeProtos.MinorType; import org.apache.drill.exec.expr.TypeHelper; import org.apache.drill.exec.physical.impl.project.OutputWidthExpression.VarLenReadExpr; import org.apache.drill.exec.record.RecordBatch; import org.apache.drill.exec.record.RecordBatchMemoryManager; import org.apache.drill.exec.record.RecordBatchSizer; import org.apache.drill.exec.record.TypedFieldId; import org.apache.drill.exec.util.record.RecordBatchStats; import org.apache.drill.exec.util.record.RecordBatchStats.RecordBatchIOType; import org.apache.drill.exec.vector.FixedWidthVector; import org.apache.drill.exec.vector.NullableVector; import org.apache.drill.exec.vector.ValueVector; import org.apache.drill.exec.physical.impl.project.OutputWidthExpression.FixedLenExpr; import org.apache.drill.exec.vector.VariableWidthVector; import org.apache.drill.exec.vector.complex.BaseRepeatedValueVector; import 
org.apache.drill.exec.vector.complex.RepeatedValueVector; import java.util.HashMap; import java.util.Map; /** * * ProjectMemoryManager(PMM) is used to estimate the size of rows produced by ProjectRecordBatch. * The PMM works as follows: * * Setup phase: As and when ProjectRecordBatch creates or transfers a field, it registers the field with PMM. * If the field is a variable width field, PMM records the expression that produces the variable * width field. The expression is a tree of LogicalExpressions. The PMM walks this tree of LogicalExpressions * to produce a tree of OutputWidthExpressions. The widths of Fixed width fields are just accumulated into a single * total. Note: The PMM, currently, cannot handle new complex fields, it just uses a hard-coded estimate for such fields. * * * Execution phase: Just before a batch is processed by Project, the PMM walks the tree of OutputWidthExpressions * and converts them to FixedWidthExpressions. It uses the RecordBatchSizer and the function annotations to do this conversion. * See OutputWidthVisitor for details. 
*/ public class ProjectMemoryManager extends RecordBatchMemoryManager { static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ProjectMemoryManager.class); public RecordBatch getIncomingBatch() { return incomingBatch; } RecordBatch incomingBatch = null; ProjectRecordBatch outgoingBatch = null; int rowWidth = 0; Map<String, ColumnWidthInfo> outputColumnSizes; // Number of variable width columns in the batch int variableWidthColumnCount = 0; // Number of fixed width columns in the batch int fixedWidthColumnCount = 0; // Number of complex columns in the batch int complexColumnsCount = 0; // Holds sum of all fixed width column widths int totalFixedWidthColumnWidth = 0; // Holds sum of all complex column widths // Currently, this is just a guess int totalComplexColumnWidth = 0; enum WidthType { FIXED, VARIABLE } enum OutputColumnType { TRANSFER, NEW } class ColumnWidthInfo { OutputWidthExpression outputExpression; int width; WidthType widthType; OutputColumnType outputColumnType; ValueVector outputVV; // for transfers, this is the transfer src ColumnWidthInfo(OutputWidthExpression outputWidthExpression, OutputColumnType outputColumnType, WidthType widthType, int fieldWidth, ValueVector outputVV) { this.outputExpression = outputWidthExpression; this.width = fieldWidth; this.outputColumnType = outputColumnType; this.widthType = widthType; this.outputVV = outputVV; } public OutputWidthExpression getOutputExpression() { return outputExpression; } public OutputColumnType getOutputColumnType() { return outputColumnType; } boolean isFixedWidth() { return widthType == WidthType.FIXED; } public int getWidth() { return width; } } void ShouldNotReachHere() { throw new IllegalStateException(); } private void setIncomingBatch(RecordBatch recordBatch) { incomingBatch = recordBatch; } private void setOutgoingBatch(ProjectRecordBatch outgoingBatch) { this.outgoingBatch = outgoingBatch; } public ProjectMemoryManager(int configuredOutputSize) { 
super(configuredOutputSize); outputColumnSizes = new HashMap<>(); } public boolean isComplex(MajorType majorType) { MinorType minorType = majorType.getMinorType(); return minorType == MinorType.MAP || minorType == MinorType.UNION || minorType == MinorType.LIST; } boolean isFixedWidth(TypedFieldId fieldId) { ValueVector vv = getOutgoingValueVector(fieldId); return isFixedWidth(vv); } public ValueVector getOutgoingValueVector(TypedFieldId fieldId) { Class<?> clazz = fieldId.getIntermediateClass(); int[] fieldIds = fieldId.getFieldIds(); return outgoingBatch.getValueAccessorById(clazz, fieldIds).getValueVector(); } static boolean isFixedWidth(ValueVector vv) { return (vv instanceof FixedWidthVector); } static int getNetWidthOfFixedWidthType(ValueVector vv) { assert isFixedWidth(vv); return ((FixedWidthVector)vv).getValueWidth(); } public static int getDataWidthOfFixedWidthType(TypeProtos.MajorType majorType) { MinorType minorType = majorType.getMinorType(); final boolean isVariableWidth = (minorType == MinorType.VARCHAR || minorType == MinorType.VAR16CHAR || minorType == MinorType.VARBINARY); if (isVariableWidth) { throw new IllegalArgumentException("getWidthOfFixedWidthType() cannot handle variable width types"); } if (minorType == MinorType.NULL) { return 0; } return TypeHelper.getSize(majorType); } void addTransferField(ValueVector vvIn, String inputColumnName, String outputColumnName) { addField(vvIn, null, OutputColumnType.TRANSFER, inputColumnName, outputColumnName); } void addNewField(ValueVector vvOut, LogicalExpression logicalExpression) { addField(vvOut, logicalExpression, OutputColumnType.NEW, null, vvOut.getField().getName()); } void addField(ValueVector vv, LogicalExpression logicalExpression, OutputColumnType outputColumnType, String inputColumnName, String outputColumnName) { if(isFixedWidth(vv)) { addFixedWidthField(vv); } else { addVariableWidthField(vv, logicalExpression, outputColumnType, inputColumnName, outputColumnName); } } private void 
addVariableWidthField(ValueVector vv, LogicalExpression logicalExpression, OutputColumnType outputColumnType, String inputColumnName, String outputColumnName) { variableWidthColumnCount++; ColumnWidthInfo columnWidthInfo; logger.trace("addVariableWidthField(): vv {} totalCount: {} outputColumnType: {}", printVV(vv), variableWidthColumnCount, outputColumnType); //Variable width transfers if(outputColumnType == OutputColumnType.TRANSFER) { VarLenReadExpr readExpr = new VarLenReadExpr(inputColumnName); columnWidthInfo = new ColumnWidthInfo(readExpr, outputColumnType, WidthType.VARIABLE, -1, vv); //fieldWidth has to be obtained from the RecordBatchSizer } else if (isComplex(vv.getField().getType())) { addComplexField(vv); return; } else { // Walk the tree of LogicalExpressions to get a tree of OutputWidthExpressions OutputWidthVisitorState state = new OutputWidthVisitorState(this); OutputWidthExpression outputWidthExpression = logicalExpression.accept(new OutputWidthVisitor(), state); columnWidthInfo = new ColumnWidthInfo(outputWidthExpression, outputColumnType, WidthType.VARIABLE, -1, vv); //fieldWidth has to be obtained from the OutputWidthExpression } ColumnWidthInfo existingInfo = outputColumnSizes.put(outputColumnName, columnWidthInfo); Preconditions.checkState(existingInfo == null); } public static String printVV(ValueVector vv) { String str = "null"; if (vv != null) { str = vv.getField().getName() + " " + vv.getField().getType(); } return str; } void addComplexField(ValueVector vv) { //Complex types are not yet supported. 
Just use a guess for the size assert vv == null || isComplex(vv.getField().getType()); complexColumnsCount++; // just a guess totalComplexColumnWidth += OutputSizeEstimateConstants.COMPLEX_FIELD_ESTIMATE; logger.trace("addComplexField(): vv {} totalCount: {} totalComplexColumnWidth: {}", printVV(vv), complexColumnsCount, totalComplexColumnWidth); } void addFixedWidthField(ValueVector vv) { assert isFixedWidth(vv); fixedWidthColumnCount++; int fixedFieldWidth = getNetWidthOfFixedWidthType(vv); totalFixedWidthColumnWidth += fixedFieldWidth; logger.trace("addFixedWidthField(): vv {} totalCount: {} totalComplexColumnWidth: {}", printVV(vv), fixedWidthColumnCount, totalFixedWidthColumnWidth); } public void init(RecordBatch incomingBatch, ProjectRecordBatch outgoingBatch) { setIncomingBatch(incomingBatch); setOutgoingBatch(outgoingBatch); reset(); RecordBatchStats.logRecordBatchStats(outgoingBatch.getRecordBatchStatsContext(), "configuredOutputSize: %d", getOutputBatchSize()); } private void reset() { rowWidth = 0; totalFixedWidthColumnWidth = 0; totalComplexColumnWidth = 0; fixedWidthColumnCount = 0; complexColumnsCount = 0; } @Override public void update() { long updateStartTime = System.currentTimeMillis(); RecordBatchSizer batchSizer = new RecordBatchSizer(incomingBatch); long batchSizerEndTime = System.currentTimeMillis(); setRecordBatchSizer(batchSizer); rowWidth = 0; int totalVariableColumnWidth = 0; for (String outputColumnName : outputColumnSizes.keySet()) { ColumnWidthInfo columnWidthInfo = outputColumnSizes.get(outputColumnName); int width = -1; if (columnWidthInfo.isFixedWidth()) { // fixed width columns are accumulated in totalFixedWidthColumnWidth ShouldNotReachHere(); } else { //Walk the tree of OutputWidthExpressions to get a FixedLenExpr //As the tree is walked, the RecordBatchSizer and function annotations //are looked-up to come up with the final FixedLenExpr OutputWidthExpression savedWidthExpr = columnWidthInfo.getOutputExpression(); 
OutputWidthVisitorState state = new OutputWidthVisitorState(this); OutputWidthExpression reducedExpr = savedWidthExpr.accept(new OutputWidthVisitor(), state); width = ((FixedLenExpr)reducedExpr).getDataWidth(); Preconditions.checkState(width >= 0); int metadataWidth = getMetadataWidth(columnWidthInfo.outputVV); logger.trace("update(): fieldName {} width: {} metadataWidth: {}", columnWidthInfo.outputVV.getField().getName(), width, metadataWidth); width += metadataWidth; } totalVariableColumnWidth += width; } rowWidth += totalFixedWidthColumnWidth; rowWidth += totalComplexColumnWidth; rowWidth += totalVariableColumnWidth; int outPutRowCount; if (rowWidth != 0) { //if rowWidth is not zero, set the output row count in the sizer setOutputRowCount(getOutputBatchSize(), rowWidth); // if more rows can be allowed than the incoming row count, then set the // output row count to the incoming row count. outPutRowCount = Math.min(getOutputRowCount(), batchSizer.rowCount()); } else { // if rowWidth == 0 then the memory manager does // not have sufficient information to size the batch // let the entire batch pass through. 
// If incoming rc == 0, all RB Sizer look-ups will have // 0 width and so total width can be 0 outPutRowCount = incomingBatch.getRecordCount(); } setOutputRowCount(outPutRowCount); long updateEndTime = System.currentTimeMillis(); logger.trace("update() : Output RC {}, BatchSizer RC {}, incoming RC {}, width {}, total fixed width {}" + ", total variable width {}, total complex width {}, batchSizer time {} ms, update time {} ms" + ", manager {}, incoming {}",outPutRowCount, batchSizer.rowCount(), incomingBatch.getRecordCount(), rowWidth, totalFixedWidthColumnWidth, totalVariableColumnWidth, totalComplexColumnWidth, (batchSizerEndTime - updateStartTime),(updateEndTime - updateStartTime), this, incomingBatch); RecordBatchStats.logRecordBatchStats(RecordBatchIOType.INPUT, getRecordBatchSizer(), outgoingBatch.getRecordBatchStatsContext()); updateIncomingStats(); } public static int getMetadataWidth(ValueVector vv) { int width = 0; if (vv instanceof NullableVector) { width += ((NullableVector)vv).getBitsVector().getPayloadByteCount(1); } if (vv instanceof VariableWidthVector) { width += ((VariableWidthVector)vv).getOffsetVector().getPayloadByteCount(1); } if (vv instanceof BaseRepeatedValueVector) { width += ((BaseRepeatedValueVector)vv).getOffsetVector().getPayloadByteCount(1); width += (getMetadataWidth(((BaseRepeatedValueVector)vv).getDataVector()) * RepeatedValueVector.DEFAULT_REPEAT_PER_RECORD); } return width; } }
package net.minecraft.util; public class LongHashMap<V> { private transient Entry<V>[] hashArray = new Entry[4096]; /** the number of elements in the hash array */ private transient int numHashElements; private int mask; /** * the maximum amount of elements in the hash (probably 3/4 the size due to meh hashing function) */ private int capacity = 3072; /** * percent of the hasharray that can be used without hash colliding probably */ private final float percentUseable = 0.75F; /** count of times elements have been added/removed */ private transient volatile int modCount; public LongHashMap() { this.mask = this.hashArray.length - 1; } /** * returns the hashed key given the original key */ private static int getHashedKey(long originalKey) { return hash((int)(originalKey ^ originalKey >>> 32)); } /** * the hash function */ private static int hash(int integer) { integer = integer ^ integer >>> 20 ^ integer >>> 12; return integer ^ integer >>> 7 ^ integer >>> 4; } /** * gets the index in the hash given the array length and the hashed key */ private static int getHashIndex(int p_76158_0_, int p_76158_1_) { return p_76158_0_ & p_76158_1_; } public int getNumHashElements() { return this.numHashElements; } /** * get the value from the map given the key */ public V getValueByKey(long p_76164_1_) { int i = getHashedKey(p_76164_1_); for (Entry<V> entry = this.hashArray[getHashIndex(i, this.mask)]; entry != null; entry = entry.nextEntry) { if (entry.key == p_76164_1_) { return entry.value; } } return (V)null; } public boolean containsItem(long p_76161_1_) { return this.getEntry(p_76161_1_) != null; } final Entry<V> getEntry(long p_76160_1_) { int i = getHashedKey(p_76160_1_); for (Entry<V> entry = this.hashArray[getHashIndex(i, this.mask)]; entry != null; entry = entry.nextEntry) { if (entry.key == p_76160_1_) { return entry; } } return null; } /** * Add a key-value pair. 
*/ public void add(long p_76163_1_, V p_76163_3_) { int i = getHashedKey(p_76163_1_); int j = getHashIndex(i, this.mask); for (Entry<V> entry = this.hashArray[j]; entry != null; entry = entry.nextEntry) { if (entry.key == p_76163_1_) { entry.value = p_76163_3_; return; } } ++this.modCount; this.createKey(i, p_76163_1_, p_76163_3_, j); } /** * resizes the table */ private void resizeTable(int p_76153_1_) { Entry<V>[] entry = this.hashArray; int i = entry.length; if (i == 1073741824) { this.capacity = Integer.MAX_VALUE; } else { Entry<V>[] entry1 = new Entry[p_76153_1_]; this.copyHashTableTo(entry1); this.hashArray = entry1; this.mask = this.hashArray.length - 1; this.capacity = (int)((float)p_76153_1_ * this.percentUseable); } } /** * copies the hash table to the specified array */ private void copyHashTableTo(Entry<V>[] p_76154_1_) { Entry<V>[] entry = this.hashArray; int i = p_76154_1_.length; for (int j = 0; j < entry.length; ++j) { Entry<V> entry1 = entry[j]; if (entry1 != null) { entry[j] = null; while (true) { Entry<V> entry2 = entry1.nextEntry; int k = getHashIndex(entry1.hash, i - 1); entry1.nextEntry = p_76154_1_[k]; p_76154_1_[k] = entry1; entry1 = entry2; if (entry2 == null) { break; } } } } } /** * calls the removeKey method and returns removed object */ public V remove(long p_76159_1_) { Entry<V> entry = this.removeKey(p_76159_1_); return (V)(entry == null ? 
null : entry.value); } final Entry<V> removeKey(long p_76152_1_) { int i = getHashedKey(p_76152_1_); int j = getHashIndex(i, this.mask); Entry<V> entry = this.hashArray[j]; Entry<V> entry1; Entry<V> entry2; for (entry1 = entry; entry1 != null; entry1 = entry2) { entry2 = entry1.nextEntry; if (entry1.key == p_76152_1_) { ++this.modCount; --this.numHashElements; if (entry == entry1) { this.hashArray[j] = entry2; } else { entry.nextEntry = entry2; } return entry1; } entry = entry1; } return entry1; } /** * creates the key in the hash table */ private void createKey(int p_76156_1_, long p_76156_2_, V p_76156_4_, int p_76156_5_) { Entry<V> entry = this.hashArray[p_76156_5_]; this.hashArray[p_76156_5_] = new Entry(p_76156_1_, p_76156_2_, p_76156_4_, entry); if (this.numHashElements++ >= this.capacity) { this.resizeTable(2 * this.hashArray.length); } } static class Entry<V> { final long key; V value; Entry<V> nextEntry; final int hash; Entry(int p_i1553_1_, long p_i1553_2_, V p_i1553_4_, Entry<V> p_i1553_5_) { this.value = p_i1553_4_; this.nextEntry = p_i1553_5_; this.key = p_i1553_2_; this.hash = p_i1553_1_; } public final long getKey() { return this.key; } public final V getValue() { return this.value; } public final boolean equals(Object p_equals_1_) { if (!(p_equals_1_ instanceof Entry)) { return false; } else { Entry<V> entry = (Entry)p_equals_1_; Object object = Long.valueOf(this.getKey()); Object object1 = Long.valueOf(entry.getKey()); if (object == object1 || object != null && object.equals(object1)) { Object object2 = this.getValue(); Object object3 = entry.getValue(); if (object2 == object3 || object2 != null && object2.equals(object3)) { return true; } } return false; } } public final int hashCode() { return LongHashMap.getHashedKey(this.key); } public final String toString() { return this.getKey() + "=" + this.getValue(); } } }
// Copyright 2013 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gwtorm.schema.sql;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeNoException;

import com.google.gwtorm.data.Address;
import com.google.gwtorm.data.Person;
import com.google.gwtorm.data.PhoneBookDb;
import com.google.gwtorm.data.PhoneBookDb2;
import com.google.gwtorm.jdbc.Database;
import com.google.gwtorm.jdbc.JdbcExecutor;
import com.google.gwtorm.jdbc.JdbcSchema;
import com.google.gwtorm.jdbc.SimpleDataSource;
import com.google.gwtorm.server.OrmException;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Collections;
import java.util.Properties;
import java.util.Set;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

/**
 * Integration tests for {@link DialectOracle} against a local Oracle XE instance.
 * The whole class is skipped (via {@code assumeNoException}) when the Oracle JDBC
 * driver is not on the classpath.
 */
public class DialectOracleSQLTest extends SqlDialectTest {
  private static final String ORACLE_DRIVER = "oracle.jdbc.driver.OracleDriver";

  @Before
  public void setUp() throws Exception {
    try {
      Class.forName(ORACLE_DRIVER);
    } catch (Exception e) {
      // No Oracle driver available: mark the tests as skipped, not failed.
      assumeNoException(e);
    }

    final String sid = "xe"; // Oracle instance name
    final String user = "gwtorm"; // Oracle schema=user name=database
    final String pass = "gwtorm";
    db = DriverManager.getConnection("jdbc:oracle:thin:@localhost:1521:" + sid, user, pass);
    executor = new JdbcExecutor(db);
    dialect = new DialectOracle().refine(db);

    final Properties p = new Properties();
    p.setProperty("driver", ORACLE_DRIVER);
    p.setProperty("url", db.getMetaData().getURL());
    p.setProperty("user", user);
    p.setProperty("password", pass);
    phoneBook = new Database<>(new SimpleDataSource(p), PhoneBookDb.class);
    phoneBook2 = new Database<>(new SimpleDataSource(p), PhoneBookDb2.class);
  }

  @After
  public void tearDown() {
    if (executor == null) {
      return;
    }

    // Database content must be flushed because
    // tests assume that the database is empty
    drop("SEQUENCE address_id");
    drop("SEQUENCE cnt");
    drop("TABLE addresses");
    drop("TABLE foo");
    drop("TABLE bar");
    drop("TABLE people");

    // executor is known non-null here (early return above), so close unconditionally.
    executor.close();
    executor = null;

    if (db != null) {
      try {
        db.close();
      } catch (SQLException e) {
        throw new RuntimeException("Cannot close database", e);
      }
    }
    db = null;
  }

  /** Best-effort DROP; the object may legitimately not exist, so errors are ignored. */
  private void drop(String drop) {
    try {
      execute("DROP " + drop);
    } catch (OrmException e) {
      // intentionally ignored: cleanup is best-effort
    }
  }

  private void execute(final String sql) throws OrmException {
    executor.execute(sql);
  }

  @Test
  public void testListSequences() throws OrmException, SQLException {
    assertTrue(dialect.listSequences(db).isEmpty());
    execute("CREATE SEQUENCE cnt");
    execute("CREATE TABLE foo (cnt INT)");

    Set<String> s = dialect.listSequences(db);
    assertEquals(1, s.size());
    assertTrue(s.contains("cnt"));
    assertFalse(s.contains("foo"));
  }

  @Test
  public void testListTables() throws OrmException, SQLException {
    assertTrue(dialect.listTables(db).isEmpty());
    execute("CREATE SEQUENCE cnt");
    execute("CREATE TABLE foo (cnt INT)");

    Set<String> s = dialect.listTables(db);
    assertEquals(1, s.size());
    assertFalse(s.contains("cnt"));
    assertTrue(s.contains("foo"));
  }

  @Test
  public void testListIndexes() throws OrmException, SQLException {
    assertTrue(dialect.listTables(db).isEmpty());
    execute("CREATE SEQUENCE cnt");
    execute("CREATE TABLE foo (cnt INT, bar INT, baz INT)");
    execute("CREATE UNIQUE INDEX FOO_PRIMARY_IND ON foo(cnt)");
    execute("CREATE INDEX FOO_SECOND_IND ON foo(bar, baz)");

    Set<String> s = dialect.listIndexes(db, "foo");
    assertEquals(2, s.size());
    assertTrue(s.contains("foo_primary_ind"));
    assertTrue(s.contains("foo_second_ind"));

    dialect.dropIndex(executor, "foo", "foo_primary_ind");
    dialect.dropIndex(executor, "foo", "foo_second_ind");
    assertEquals(Collections.emptySet(), dialect.listIndexes(db, "foo"));
  }

  @Test
  public void testUpgradeSchema() throws SQLException, OrmException {
    final PhoneBookDb p = phoneBook.open();
    try {
      p.updateSchema(executor);

      // Mutate the schema away from the model so updateSchema has work to do.
      execute("CREATE SEQUENCE cnt");
      execute("CREATE TABLE foo (cnt INT)");
      execute("ALTER TABLE people ADD fake_name VARCHAR(20)");
      execute("ALTER TABLE people DROP COLUMN registered");
      execute("DROP TABLE addresses");
      execute("DROP SEQUENCE address_id");

      Set<String> sequences, tables;
      p.updateSchema(executor);
      sequences = dialect.listSequences(db);
      tables = dialect.listTables(db);
      // updateSchema recreates model objects but leaves unrelated ones alone.
      assertTrue(sequences.contains("cnt"));
      assertTrue(tables.contains("foo"));
      assertTrue(sequences.contains("address_id"));
      assertTrue(tables.contains("addresses"));

      p.pruneSchema(executor);
      sequences = dialect.listSequences(db);
      tables = dialect.listTables(db);
      // pruneSchema removes objects not present in the model.
      assertFalse(sequences.contains("cnt"));
      assertFalse(tables.contains("foo"));

      final Person.Key pk = new Person.Key("Bob");
      final Person bob = new Person(pk, p.nextAddressId());
      p.people().insert(Collections.singleton(bob));

      final Address addr = new Address(new Address.Key(pk, "home"), "some place");
      p.addresses().insert(Collections.singleton(addr));
    } finally {
      p.close();
    }

    final PhoneBookDb2 p2 = phoneBook2.open();
    try {
      ((JdbcSchema) p2).renameField(executor, "people", "registered", "isRegistered");
    } finally {
      p2.close();
    }
  }

  @Test
  public void testRenameTable() throws SQLException, OrmException {
    assertTrue(dialect.listTables(db).isEmpty());
    execute("CREATE TABLE foo (cnt INT)");

    Set<String> s = dialect.listTables(db);
    assertEquals(1, s.size());
    assertTrue(s.contains("foo"));

    final PhoneBookDb p = phoneBook.open();
    try {
      ((JdbcSchema) p).renameTable(executor, "foo", "bar");
    } finally {
      p.close();
    }

    s = dialect.listTables(db);
    assertTrue(s.contains("bar"));
    // BUG FIX: previously asserted on the typo "for"; the renamed-away table is "foo".
    assertFalse(s.contains("foo"));
  }
}
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. /** * DescribeSnapshotsSetItemResponseType.java * * This file was auto-generated from WSDL * by the Apache Axis2 version: 1.5.1 Built on : Oct 19, 2009 (10:59:34 EDT) */ package com.amazon.ec2; /** * DescribeSnapshotsSetItemResponseType bean class */ public class DescribeSnapshotsSetItemResponseType implements org.apache.axis2.databinding.ADBBean{ /* This type was generated from the piece of schema that had name = DescribeSnapshotsSetItemResponseType Namespace URI = http://ec2.amazonaws.com/doc/2009-10-31/ Namespace Prefix = ns1 */ private static java.lang.String generatePrefix(java.lang.String namespace) { if(namespace.equals("http://ec2.amazonaws.com/doc/2009-10-31/")){ return "ns1"; } return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix(); } /** * field for SnapshotId */ protected java.lang.String localSnapshotId ; /** * Auto generated getter method * @return java.lang.String */ public java.lang.String getSnapshotId(){ return localSnapshotId; } /** * Auto generated setter method * @param param SnapshotId */ public void setSnapshotId(java.lang.String param){ this.localSnapshotId=param; } /** * field for VolumeId */ protected java.lang.String localVolumeId ; /** * Auto 
generated getter method * @return java.lang.String */ public java.lang.String getVolumeId(){ return localVolumeId; } /** * Auto generated setter method * @param param VolumeId */ public void setVolumeId(java.lang.String param){ this.localVolumeId=param; } /** * field for Status */ protected java.lang.String localStatus ; /** * Auto generated getter method * @return java.lang.String */ public java.lang.String getStatus(){ return localStatus; } /** * Auto generated setter method * @param param Status */ public void setStatus(java.lang.String param){ this.localStatus=param; } /** * field for StartTime */ protected java.util.Calendar localStartTime ; /** * Auto generated getter method * @return java.util.Calendar */ public java.util.Calendar getStartTime(){ return localStartTime; } /** * Auto generated setter method * @param param StartTime */ public void setStartTime(java.util.Calendar param){ this.localStartTime=param; } /** * field for Progress */ protected java.lang.String localProgress ; /** * Auto generated getter method * @return java.lang.String */ public java.lang.String getProgress(){ return localProgress; } /** * Auto generated setter method * @param param Progress */ public void setProgress(java.lang.String param){ this.localProgress=param; } /** * field for OwnerId */ protected java.lang.String localOwnerId ; /** * Auto generated getter method * @return java.lang.String */ public java.lang.String getOwnerId(){ return localOwnerId; } /** * Auto generated setter method * @param param OwnerId */ public void setOwnerId(java.lang.String param){ this.localOwnerId=param; } /** * field for VolumeSize */ protected java.lang.String localVolumeSize ; /** * Auto generated getter method * @return java.lang.String */ public java.lang.String getVolumeSize(){ return localVolumeSize; } /** * Auto generated setter method * @param param VolumeSize */ public void setVolumeSize(java.lang.String param){ this.localVolumeSize=param; } /** * field for Description */ protected 
java.lang.String localDescription ; /* This tracker boolean wil be used to detect whether the user called the set method * for this attribute. It will be used to determine whether to include this field * in the serialized XML */ protected boolean localDescriptionTracker = false ; /** * Auto generated getter method * @return java.lang.String */ public java.lang.String getDescription(){ return localDescription; } /** * Auto generated setter method * @param param Description */ public void setDescription(java.lang.String param){ if (param != null){ //update the setting tracker localDescriptionTracker = true; } else { localDescriptionTracker = false; } this.localDescription=param; } /** * field for OwnerAlias */ protected java.lang.String localOwnerAlias ; /* This tracker boolean wil be used to detect whether the user called the set method * for this attribute. It will be used to determine whether to include this field * in the serialized XML */ protected boolean localOwnerAliasTracker = false ; /** * Auto generated getter method * @return java.lang.String */ public java.lang.String getOwnerAlias(){ return localOwnerAlias; } /** * Auto generated setter method * @param param OwnerAlias */ public void setOwnerAlias(java.lang.String param){ if (param != null){ //update the setting tracker localOwnerAliasTracker = true; } else { localOwnerAliasTracker = false; } this.localOwnerAlias=param; } /** * field for TagSet */ protected com.amazon.ec2.ResourceTagSetType localTagSet ; /* This tracker boolean wil be used to detect whether the user called the set method * for this attribute. 
It will be used to determine whether to include this field * in the serialized XML */ protected boolean localTagSetTracker = false ; /** * Auto generated getter method * @return com.amazon.ec2.ResourceTagSetType */ public com.amazon.ec2.ResourceTagSetType getTagSet(){ return localTagSet; } /** * Auto generated setter method * @param param TagSet */ public void setTagSet(com.amazon.ec2.ResourceTagSetType param){ if (param != null){ //update the setting tracker localTagSetTracker = true; } else { localTagSetTracker = false; } this.localTagSet=param; } /** * isReaderMTOMAware * @return true if the reader supports MTOM */ public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) { boolean isReaderMTOMAware = false; try{ isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE)); }catch(java.lang.IllegalArgumentException e){ isReaderMTOMAware = false; } return isReaderMTOMAware; } /** * * @param parentQName * @param factory * @return org.apache.axiom.om.OMElement */ public org.apache.axiom.om.OMElement getOMElement ( final javax.xml.namespace.QName parentQName, final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{ org.apache.axiom.om.OMDataSource dataSource = new org.apache.axis2.databinding.ADBDataSource(this,parentQName){ public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { DescribeSnapshotsSetItemResponseType.this.serialize(parentQName,factory,xmlWriter); } }; return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl( parentQName,factory,dataSource); } public void serialize(final javax.xml.namespace.QName parentQName, final org.apache.axiom.om.OMFactory factory, org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{ 
serialize(parentQName,factory,xmlWriter,false); } public void serialize(final javax.xml.namespace.QName parentQName, final org.apache.axiom.om.OMFactory factory, org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter, boolean serializeType) throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{ java.lang.String prefix = null; java.lang.String namespace = null; prefix = parentQName.getPrefix(); namespace = parentQName.getNamespaceURI(); if ((namespace != null) && (namespace.trim().length() > 0)) { java.lang.String writerPrefix = xmlWriter.getPrefix(namespace); if (writerPrefix != null) { xmlWriter.writeStartElement(namespace, parentQName.getLocalPart()); } else { if (prefix == null) { prefix = generatePrefix(namespace); } xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace); xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } } else { xmlWriter.writeStartElement(parentQName.getLocalPart()); } if (serializeType){ java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://ec2.amazonaws.com/doc/2009-10-31/"); if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)){ writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type", namespacePrefix+":DescribeSnapshotsSetItemResponseType", xmlWriter); } else { writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type", "DescribeSnapshotsSetItemResponseType", xmlWriter); } } namespace = "http://ec2.amazonaws.com/doc/2009-10-31/"; if (! 
namespace.equals("")) { prefix = xmlWriter.getPrefix(namespace); if (prefix == null) { prefix = generatePrefix(namespace); xmlWriter.writeStartElement(prefix,"snapshotId", namespace); xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } else { xmlWriter.writeStartElement(namespace,"snapshotId"); } } else { xmlWriter.writeStartElement("snapshotId"); } if (localSnapshotId==null){ // write the nil attribute throw new org.apache.axis2.databinding.ADBException("snapshotId cannot be null!!"); }else{ xmlWriter.writeCharacters(localSnapshotId); } xmlWriter.writeEndElement(); namespace = "http://ec2.amazonaws.com/doc/2009-10-31/"; if (! namespace.equals("")) { prefix = xmlWriter.getPrefix(namespace); if (prefix == null) { prefix = generatePrefix(namespace); xmlWriter.writeStartElement(prefix,"volumeId", namespace); xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } else { xmlWriter.writeStartElement(namespace,"volumeId"); } } else { xmlWriter.writeStartElement("volumeId"); } if (localVolumeId==null){ // write the nil attribute throw new org.apache.axis2.databinding.ADBException("volumeId cannot be null!!"); }else{ xmlWriter.writeCharacters(localVolumeId); } xmlWriter.writeEndElement(); namespace = "http://ec2.amazonaws.com/doc/2009-10-31/"; if (! namespace.equals("")) { prefix = xmlWriter.getPrefix(namespace); if (prefix == null) { prefix = generatePrefix(namespace); xmlWriter.writeStartElement(prefix,"status", namespace); xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } else { xmlWriter.writeStartElement(namespace,"status"); } } else { xmlWriter.writeStartElement("status"); } if (localStatus==null){ // write the nil attribute throw new org.apache.axis2.databinding.ADBException("status cannot be null!!"); }else{ xmlWriter.writeCharacters(localStatus); } xmlWriter.writeEndElement(); namespace = "http://ec2.amazonaws.com/doc/2009-10-31/"; if (! 
namespace.equals("")) { prefix = xmlWriter.getPrefix(namespace); if (prefix == null) { prefix = generatePrefix(namespace); xmlWriter.writeStartElement(prefix,"startTime", namespace); xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } else { xmlWriter.writeStartElement(namespace,"startTime"); } } else { xmlWriter.writeStartElement("startTime"); } if (localStartTime==null){ // write the nil attribute throw new org.apache.axis2.databinding.ADBException("startTime cannot be null!!"); }else{ xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localStartTime)); } xmlWriter.writeEndElement(); namespace = "http://ec2.amazonaws.com/doc/2009-10-31/"; if (! namespace.equals("")) { prefix = xmlWriter.getPrefix(namespace); if (prefix == null) { prefix = generatePrefix(namespace); xmlWriter.writeStartElement(prefix,"progress", namespace); xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } else { xmlWriter.writeStartElement(namespace,"progress"); } } else { xmlWriter.writeStartElement("progress"); } if (localProgress==null){ // write the nil attribute throw new org.apache.axis2.databinding.ADBException("progress cannot be null!!"); }else{ xmlWriter.writeCharacters(localProgress); } xmlWriter.writeEndElement(); namespace = "http://ec2.amazonaws.com/doc/2009-10-31/"; if (! 
namespace.equals("")) { prefix = xmlWriter.getPrefix(namespace); if (prefix == null) { prefix = generatePrefix(namespace); xmlWriter.writeStartElement(prefix,"ownerId", namespace); xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } else { xmlWriter.writeStartElement(namespace,"ownerId"); } } else { xmlWriter.writeStartElement("ownerId"); } if (localOwnerId==null){ // write the nil attribute throw new org.apache.axis2.databinding.ADBException("ownerId cannot be null!!"); }else{ xmlWriter.writeCharacters(localOwnerId); } xmlWriter.writeEndElement(); namespace = "http://ec2.amazonaws.com/doc/2009-10-31/"; if (! namespace.equals("")) { prefix = xmlWriter.getPrefix(namespace); if (prefix == null) { prefix = generatePrefix(namespace); xmlWriter.writeStartElement(prefix,"volumeSize", namespace); xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } else { xmlWriter.writeStartElement(namespace,"volumeSize"); } } else { xmlWriter.writeStartElement("volumeSize"); } if (localVolumeSize==null){ // write the nil attribute throw new org.apache.axis2.databinding.ADBException("volumeSize cannot be null!!"); }else{ xmlWriter.writeCharacters(localVolumeSize); } xmlWriter.writeEndElement(); if (localDescriptionTracker){ namespace = "http://ec2.amazonaws.com/doc/2009-10-31/"; if (! 
namespace.equals("")) { prefix = xmlWriter.getPrefix(namespace); if (prefix == null) { prefix = generatePrefix(namespace); xmlWriter.writeStartElement(prefix,"description", namespace); xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } else { xmlWriter.writeStartElement(namespace,"description"); } } else { xmlWriter.writeStartElement("description"); } if (localDescription==null){ // write the nil attribute throw new org.apache.axis2.databinding.ADBException("description cannot be null!!"); }else{ xmlWriter.writeCharacters(localDescription); } xmlWriter.writeEndElement(); } if (localOwnerAliasTracker){ namespace = "http://ec2.amazonaws.com/doc/2009-10-31/"; if (! namespace.equals("")) { prefix = xmlWriter.getPrefix(namespace); if (prefix == null) { prefix = generatePrefix(namespace); xmlWriter.writeStartElement(prefix,"ownerAlias", namespace); xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } else { xmlWriter.writeStartElement(namespace,"ownerAlias"); } } else { xmlWriter.writeStartElement("ownerAlias"); } if (localOwnerAlias==null){ // write the nil attribute throw new org.apache.axis2.databinding.ADBException("ownerAlias cannot be null!!"); }else{ xmlWriter.writeCharacters(localOwnerAlias); } xmlWriter.writeEndElement(); } if (localTagSetTracker){ if (localTagSet==null){ throw new org.apache.axis2.databinding.ADBException("tagSet cannot be null!!"); } localTagSet.serialize(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","tagSet"), factory,xmlWriter); } xmlWriter.writeEndElement(); } /** * Util method to write an attribute with the ns prefix */ private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName, java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{ if (xmlWriter.getPrefix(namespace) == null) { xmlWriter.writeNamespace(prefix, namespace); 
xmlWriter.setPrefix(prefix, namespace); } xmlWriter.writeAttribute(namespace,attName,attValue); } /** * Util method to write an attribute without the ns prefix */ private void writeAttribute(java.lang.String namespace,java.lang.String attName, java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{ if (namespace.equals("")) { xmlWriter.writeAttribute(attName,attValue); } else { registerPrefix(xmlWriter, namespace); xmlWriter.writeAttribute(namespace,attName,attValue); } } /** * Util method to write an attribute without the ns prefix */ private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName, javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { java.lang.String attributeNamespace = qname.getNamespaceURI(); java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace); if (attributePrefix == null) { attributePrefix = registerPrefix(xmlWriter, attributeNamespace); } java.lang.String attributeValue; if (attributePrefix.trim().length() > 0) { attributeValue = attributePrefix + ":" + qname.getLocalPart(); } else { attributeValue = qname.getLocalPart(); } if (namespace.equals("")) { xmlWriter.writeAttribute(attName, attributeValue); } else { registerPrefix(xmlWriter, namespace); xmlWriter.writeAttribute(namespace, attName, attributeValue); } } /** * method to handle Qnames */ private void writeQName(javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { java.lang.String namespaceURI = qname.getNamespaceURI(); if (namespaceURI != null) { java.lang.String prefix = xmlWriter.getPrefix(namespaceURI); if (prefix == null) { prefix = generatePrefix(namespaceURI); xmlWriter.writeNamespace(prefix, namespaceURI); xmlWriter.setPrefix(prefix,namespaceURI); } if (prefix.trim().length() > 0){ xmlWriter.writeCharacters(prefix + ":" + 
org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname)); } else { // i.e this is the default namespace xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname)); } } else { xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname)); } } private void writeQNames(javax.xml.namespace.QName[] qnames, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { if (qnames != null) { // we have to store this data until last moment since it is not possible to write any // namespace data after writing the charactor data java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer(); java.lang.String namespaceURI = null; java.lang.String prefix = null; for (int i = 0; i < qnames.length; i++) { if (i > 0) { stringToWrite.append(" "); } namespaceURI = qnames[i].getNamespaceURI(); if (namespaceURI != null) { prefix = xmlWriter.getPrefix(namespaceURI); if ((prefix == null) || (prefix.length() == 0)) { prefix = generatePrefix(namespaceURI); xmlWriter.writeNamespace(prefix, namespaceURI); xmlWriter.setPrefix(prefix,namespaceURI); } if (prefix.trim().length() > 0){ stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i])); } else { stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i])); } } else { stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i])); } } xmlWriter.writeCharacters(stringToWrite.toString()); } } /** * Register a namespace prefix */ private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException { java.lang.String prefix = xmlWriter.getPrefix(namespace); if (prefix == null) { prefix = generatePrefix(namespace); while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) { prefix = 
org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix(); } xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } return prefix; } /** * databinding method to get an XML representation of this object * */ public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName) throws org.apache.axis2.databinding.ADBException{ java.util.ArrayList elementList = new java.util.ArrayList(); java.util.ArrayList attribList = new java.util.ArrayList(); elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/", "snapshotId")); if (localSnapshotId != null){ elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localSnapshotId)); } else { throw new org.apache.axis2.databinding.ADBException("snapshotId cannot be null!!"); } elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/", "volumeId")); if (localVolumeId != null){ elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localVolumeId)); } else { throw new org.apache.axis2.databinding.ADBException("volumeId cannot be null!!"); } elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/", "status")); if (localStatus != null){ elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localStatus)); } else { throw new org.apache.axis2.databinding.ADBException("status cannot be null!!"); } elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/", "startTime")); if (localStartTime != null){ elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localStartTime)); } else { throw new org.apache.axis2.databinding.ADBException("startTime cannot be null!!"); } elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/", "progress")); if (localProgress != null){ 
elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localProgress)); } else { throw new org.apache.axis2.databinding.ADBException("progress cannot be null!!"); } elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/", "ownerId")); if (localOwnerId != null){ elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localOwnerId)); } else { throw new org.apache.axis2.databinding.ADBException("ownerId cannot be null!!"); } elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/", "volumeSize")); if (localVolumeSize != null){ elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localVolumeSize)); } else { throw new org.apache.axis2.databinding.ADBException("volumeSize cannot be null!!"); } if (localDescriptionTracker){ elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/", "description")); if (localDescription != null){ elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localDescription)); } else { throw new org.apache.axis2.databinding.ADBException("description cannot be null!!"); } } if (localOwnerAliasTracker){ elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/", "ownerAlias")); if (localOwnerAlias != null){ elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localOwnerAlias)); } else { throw new org.apache.axis2.databinding.ADBException("ownerAlias cannot be null!!"); } } if (localTagSetTracker){ elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/", "tagSet")); if (localTagSet==null){ throw new org.apache.axis2.databinding.ADBException("tagSet cannot be null!!"); } elementList.add(localTagSet); } return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray()); } /** * Factory class that keeps 
the parse method */ public static class Factory{ /** * static method to create the object * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element * Postcondition: If this object is an element, the reader is positioned at its end element * If this object is a complex type, the reader is positioned at the end element of its outer element */ public static DescribeSnapshotsSetItemResponseType parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{ DescribeSnapshotsSetItemResponseType object = new DescribeSnapshotsSetItemResponseType(); int event; java.lang.String nillableValue = null; java.lang.String prefix =""; java.lang.String namespaceuri =""; try { while (!reader.isStartElement() && !reader.isEndElement()) reader.next(); if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","type")!=null){ java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance", "type"); if (fullTypeName!=null){ java.lang.String nsPrefix = null; if (fullTypeName.indexOf(":") > -1){ nsPrefix = fullTypeName.substring(0,fullTypeName.indexOf(":")); } nsPrefix = nsPrefix==null?"":nsPrefix; java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":")+1); if (!"DescribeSnapshotsSetItemResponseType".equals(type)){ //find namespace for the prefix java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix); return (DescribeSnapshotsSetItemResponseType)com.amazon.ec2.ExtensionMapper.getTypeObject( nsUri,type,reader); } } } // Note all attributes that were handled. Used to differ normal attributes // from anyAttributes. 
java.util.Vector handledAttributes = new java.util.Vector(); reader.next(); while (!reader.isStartElement() && !reader.isEndElement()) reader.next(); if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","snapshotId").equals(reader.getName())){ java.lang.String content = reader.getElementText(); object.setSnapshotId( org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content)); reader.next(); } // End of if for expected property start element else{ // A start element we are not expecting indicates an invalid parameter was passed throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName()); } while (!reader.isStartElement() && !reader.isEndElement()) reader.next(); if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","volumeId").equals(reader.getName())){ java.lang.String content = reader.getElementText(); object.setVolumeId( org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content)); reader.next(); } // End of if for expected property start element else{ // A start element we are not expecting indicates an invalid parameter was passed throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName()); } while (!reader.isStartElement() && !reader.isEndElement()) reader.next(); if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","status").equals(reader.getName())){ java.lang.String content = reader.getElementText(); object.setStatus( org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content)); reader.next(); } // End of if for expected property start element else{ // A start element we are not expecting indicates an invalid parameter was passed throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName()); } while (!reader.isStartElement() && 
!reader.isEndElement()) reader.next(); if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","startTime").equals(reader.getName())){ java.lang.String content = reader.getElementText(); object.setStartTime( org.apache.axis2.databinding.utils.ConverterUtil.convertToDateTime(content)); reader.next(); } // End of if for expected property start element else{ // A start element we are not expecting indicates an invalid parameter was passed throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName()); } while (!reader.isStartElement() && !reader.isEndElement()) reader.next(); if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","progress").equals(reader.getName())){ java.lang.String content = reader.getElementText(); object.setProgress( org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content)); reader.next(); } // End of if for expected property start element else{ // A start element we are not expecting indicates an invalid parameter was passed throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName()); } while (!reader.isStartElement() && !reader.isEndElement()) reader.next(); if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","ownerId").equals(reader.getName())){ java.lang.String content = reader.getElementText(); object.setOwnerId( org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content)); reader.next(); } // End of if for expected property start element else{ // A start element we are not expecting indicates an invalid parameter was passed throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName()); } while (!reader.isStartElement() && !reader.isEndElement()) reader.next(); if (reader.isStartElement() && new 
javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","volumeSize").equals(reader.getName())){ java.lang.String content = reader.getElementText(); object.setVolumeSize( org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content)); reader.next(); } // End of if for expected property start element else{ // A start element we are not expecting indicates an invalid parameter was passed throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName()); } while (!reader.isStartElement() && !reader.isEndElement()) reader.next(); if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","description").equals(reader.getName())){ java.lang.String content = reader.getElementText(); object.setDescription( org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content)); reader.next(); } // End of if for expected property start element else { } while (!reader.isStartElement() && !reader.isEndElement()) reader.next(); if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","ownerAlias").equals(reader.getName())){ java.lang.String content = reader.getElementText(); object.setOwnerAlias( org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content)); reader.next(); } // End of if for expected property start element else { } while (!reader.isStartElement() && !reader.isEndElement()) reader.next(); if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","tagSet").equals(reader.getName())){ object.setTagSet(com.amazon.ec2.ResourceTagSetType.Factory.parse(reader)); reader.next(); } // End of if for expected property start element else { } while (!reader.isStartElement() && !reader.isEndElement()) reader.next(); if (reader.isStartElement()) // A start element we are not expecting indicates a trailing invalid property throw new 
org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName()); } catch (javax.xml.stream.XMLStreamException e) { throw new java.lang.Exception(e); } return object; } }//end of factory class }
/* * Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/
package java.util.regex;

import java.util.HashMap;
import java.util.Locale;

/**
 * Unicode binary properties and POSIX character classes used by {@link Pattern}
 * for {@code \p{...}} constructs. Each constant implements {@link #is(int)} to
 * test whether a code point has the property.
 *
 * <p>Several predicates test a set of {@link Character} general categories at
 * once with a bit-mask trick: a mask with one bit per category constant is
 * shifted right by {@code Character.getType(ch)} and the low bit is inspected.
 * This works because all general-category constants are &lt; 32.
 */
enum UnicodeProp {

    ALPHABETIC {
        public boolean is(int ch) {
            return Character.isAlphabetic(ch);
        }
    },

    LETTER {
        public boolean is(int ch) {
            return Character.isLetter(ch);
        }
    },

    IDEOGRAPHIC {
        public boolean is(int ch) {
            return Character.isIdeographic(ch);
        }
    },

    LOWERCASE {
        public boolean is(int ch) {
            return Character.isLowerCase(ch);
        }
    },

    UPPERCASE {
        public boolean is(int ch) {
            return Character.isUpperCase(ch);
        }
    },

    TITLECASE {
        public boolean is(int ch) {
            return Character.isTitleCase(ch);
        }
    },

    WHITE_SPACE {
        // \p{Whitespace}: the three separator categories, the ASCII控制
        // whitespace range U+0009..U+000D, and NEL (U+0085).
        public boolean is(int ch) {
            return ((((1 << Character.SPACE_SEPARATOR) |
                      (1 << Character.LINE_SEPARATOR) |
                      (1 << Character.PARAGRAPH_SEPARATOR)) >> Character.getType(ch)) & 1)
                   != 0 || (ch >= 0x9 && ch <= 0xd) || (ch == 0x85);
        }
    },

    CONTROL {
        // \p{gc=Control}
        public boolean is(int ch) {
            return Character.getType(ch) == Character.CONTROL;
        }
    },

    PUNCTUATION {
        // \p{gc=Punctuation}: union of all seven punctuation categories.
        public boolean is(int ch) {
            return ((((1 << Character.CONNECTOR_PUNCTUATION) |
                      (1 << Character.DASH_PUNCTUATION) |
                      (1 << Character.START_PUNCTUATION) |
                      (1 << Character.END_PUNCTUATION) |
                      (1 << Character.OTHER_PUNCTUATION) |
                      (1 << Character.INITIAL_QUOTE_PUNCTUATION) |
                      (1 << Character.FINAL_QUOTE_PUNCTUATION)) >> Character.getType(ch)) & 1)
                   != 0;
        }
    },

    HEX_DIGIT {
        // \p{gc=Decimal_Number}
        // \p{Hex_Digit} -> PropList.txt: Hex_Digit
        // Covers ASCII 0-9/A-F/a-f and the fullwidth forms U+FF10..FF19,
        // U+FF21..FF26, U+FF41..FF46.
        public boolean is(int ch) {
            return DIGIT.is(ch) ||
                   (ch >= 0x0030 && ch <= 0x0039) ||
                   (ch >= 0x0041 && ch <= 0x0046) ||
                   (ch >= 0x0061 && ch <= 0x0066) ||
                   (ch >= 0xFF10 && ch <= 0xFF19) ||
                   (ch >= 0xFF21 && ch <= 0xFF26) ||
                   (ch >= 0xFF41 && ch <= 0xFF46);
        }
    },

    ASSIGNED {
        public boolean is(int ch) {
            return Character.getType(ch) != Character.UNASSIGNED;
        }
    },

    NONCHARACTER_CODE_POINT {
        // PropList.txt:Noncharacter_Code_Point
        // The last two code points of every plane (xxFFFE/xxFFFF) plus the
        // contiguous range U+FDD0..U+FDEF.
        public boolean is(int ch) {
            return (ch & 0xfffe) == 0xfffe || (ch >= 0xfdd0 && ch <= 0xfdef);
        }
    },

    DIGIT {
        // \p{gc=Decimal_Number}
        public boolean is(int ch) {
            return Character.isDigit(ch);
        }
    },

    ALNUM {
        // \p{alpha}
        // \p{digit}
        public boolean is(int ch) {
            return ALPHABETIC.is(ch) || DIGIT.is(ch);
        }
    },

    BLANK {
        // \p{Whitespace} --
        // [\N{LF} \N{VT} \N{FF} \N{CR} \N{NEL} -> 0xa, 0xb, 0xc, 0xd, 0x85
        //  \p{gc=Line_Separator}
        //  \p{gc=Paragraph_Separator}]
        public boolean is(int ch) {
            return Character.getType(ch) == Character.SPACE_SEPARATOR ||
                   ch == 0x9; // \N{HT}
        }
    },

    GRAPH {
        // [^
        //  \p{space}
        //  \p{gc=Control}
        //  \p{gc=Surrogate}
        //  \p{gc=Unassigned}]
        public boolean is(int ch) {
            return ((((1 << Character.SPACE_SEPARATOR) |
                      (1 << Character.LINE_SEPARATOR) |
                      (1 << Character.PARAGRAPH_SEPARATOR) |
                      (1 << Character.CONTROL) |
                      (1 << Character.SURROGATE) |
                      (1 << Character.UNASSIGNED)) >> Character.getType(ch)) & 1)
                   == 0;
        }
    },

    PRINT {
        // \p{graph}
        // \p{blank}
        // -- \p{cntrl}
        public boolean is(int ch) {
            return (GRAPH.is(ch) || BLANK.is(ch)) && !CONTROL.is(ch);
        }
    },

    WORD {
        // \p{alpha}
        // \p{gc=Mark}
        // \p{digit}
        // \p{gc=Connector_Punctuation}
        public boolean is(int ch) {
            return ALPHABETIC.is(ch) ||
                   ((((1 << Character.NON_SPACING_MARK) |
                      (1 << Character.ENCLOSING_MARK) |
                      (1 << Character.COMBINING_SPACING_MARK) |
                      (1 << Character.DECIMAL_DIGIT_NUMBER) |
                      (1 << Character.CONNECTOR_PUNCTUATION)) >> Character.getType(ch)) & 1)
                   != 0;
        }
    };

    // Maps POSIX class names (e.g. "ALPHA") to the enum constant names above.
    private final static HashMap<String, String> posix = new HashMap<>();
    // Maps Unicode property name aliases (e.g. "WHITESPACE") to constant names.
    private final static HashMap<String, String> aliases = new HashMap<>();
    static {
        posix.put("ALPHA", "ALPHABETIC");
        posix.put("LOWER", "LOWERCASE");
        posix.put("UPPER", "UPPERCASE");
        posix.put("SPACE", "WHITE_SPACE");
        posix.put("PUNCT", "PUNCTUATION");
        posix.put("XDIGIT","HEX_DIGIT");
        posix.put("ALNUM", "ALNUM");
        posix.put("CNTRL", "CONTROL");
        posix.put("DIGIT", "DIGIT");
        posix.put("BLANK", "BLANK");
        posix.put("GRAPH", "GRAPH");
        posix.put("PRINT", "PRINT");

        aliases.put("WHITESPACE", "WHITE_SPACE");
        aliases.put("HEXDIGIT","HEX_DIGIT");
        aliases.put("NONCHARACTERCODEPOINT", "NONCHARACTER_CODE_POINT");
    }

    /**
     * Looks up a property by its (case-insensitive) Unicode name or alias.
     * Returns {@code null} if the name is unknown.
     */
    public static UnicodeProp forName(String propName) {
        propName = propName.toUpperCase(Locale.ENGLISH);
        String alias = aliases.get(propName);
        if (alias != null)
            propName = alias;
        try {
            return valueOf (propName);
        } catch (IllegalArgumentException x) {} // unknown name -> null below
        return null;
    }

    /**
     * Looks up a property by its (case-insensitive) POSIX class name,
     * e.g. {@code "alpha"} or {@code "xdigit"}. Returns {@code null} if the
     * name is not a recognized POSIX class.
     */
    public static UnicodeProp forPOSIXName(String propName) {
        propName = posix.get(propName.toUpperCase(Locale.ENGLISH));
        if (propName == null)
            return null;
        return valueOf (propName);
    }

    /** Tests whether code point {@code ch} has this property. */
    public abstract boolean is(int ch);
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.builder.endpoint.dsl;

import javax.annotation.Generated;
import org.apache.camel.builder.EndpointConsumerBuilder;
import org.apache.camel.builder.EndpointProducerBuilder;
import org.apache.camel.builder.endpoint.AbstractEndpointBuilder;

/**
 * Produce data to AWS Kinesis Firehose streams using AWS SDK version 2.x.
 *
 * Generated by camel build tools - do NOT edit this file!
 */
@Generated("org.apache.camel.maven.packaging.EndpointDslMojo")
public interface KinesisFirehose2EndpointBuilderFactory {

    /**
     * Builder for endpoint for the AWS 2 Kinesis Firehose component.
     *
     * Each setter stores its value via doSetProperty and returns this builder
     * so calls can be chained fluently. String overloads defer type conversion
     * to Camel's property binding at endpoint creation time.
     */
    public interface KinesisFirehose2EndpointBuilder
            extends
                EndpointProducerBuilder {
        default AdvancedKinesisFirehose2EndpointBuilder advanced() {
            return (AdvancedKinesisFirehose2EndpointBuilder) this;
        }
        /**
         * Amazon Kinesis Firehose client to use for all requests for this
         * endpoint.
         *
         * The option is a:
         * <code>software.amazon.awssdk.services.firehose.FirehoseClient</code>
         * type.
         *
         * Group: producer
         */
        default KinesisFirehose2EndpointBuilder amazonKinesisFirehoseClient(
                Object amazonKinesisFirehoseClient) {
            doSetProperty("amazonKinesisFirehoseClient", amazonKinesisFirehoseClient);
            return this;
        }
        /**
         * Amazon Kinesis Firehose client to use for all requests for this
         * endpoint.
         *
         * The option will be converted to a
         * <code>software.amazon.awssdk.services.firehose.FirehoseClient</code>
         * type.
         *
         * Group: producer
         */
        default KinesisFirehose2EndpointBuilder amazonKinesisFirehoseClient(
                String amazonKinesisFirehoseClient) {
            doSetProperty("amazonKinesisFirehoseClient", amazonKinesisFirehoseClient);
            return this;
        }
        /**
         * This option will set the CBOR_ENABLED property during the execution.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: true
         * Group: common
         */
        default KinesisFirehose2EndpointBuilder cborEnabled(boolean cborEnabled) {
            doSetProperty("cborEnabled", cborEnabled);
            return this;
        }
        /**
         * This option will set the CBOR_ENABLED property during the execution.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: true
         * Group: common
         */
        default KinesisFirehose2EndpointBuilder cborEnabled(String cborEnabled) {
            doSetProperty("cborEnabled", cborEnabled);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         */
        default KinesisFirehose2EndpointBuilder lazyStartProducer(
                boolean lazyStartProducer) {
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         */
        default KinesisFirehose2EndpointBuilder lazyStartProducer(
                String lazyStartProducer) {
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * The operation to do in case the user don't want to send only a
         * record.
         *
         * The option is a:
         * <code>org.apache.camel.component.aws2.firehose.KinesisFirehose2Operations</code> type.
         *
         * Group: producer
         */
        default KinesisFirehose2EndpointBuilder operation(
                KinesisFirehose2Operations operation) {
            doSetProperty("operation", operation);
            return this;
        }
        /**
         * The operation to do in case the user don't want to send only a
         * record.
         *
         * The option will be converted to a
         * <code>org.apache.camel.component.aws2.firehose.KinesisFirehose2Operations</code> type.
         *
         * Group: producer
         */
        default KinesisFirehose2EndpointBuilder operation(String operation) {
            doSetProperty("operation", operation);
            return this;
        }
        /**
         * To define a proxy host when instantiating the Kinesis Firehose
         * client.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: producer
         */
        default KinesisFirehose2EndpointBuilder proxyHost(String proxyHost) {
            doSetProperty("proxyHost", proxyHost);
            return this;
        }
        /**
         * To define a proxy port when instantiating the Kinesis Firehose
         * client.
         *
         * The option is a: <code>java.lang.Integer</code> type.
         *
         * Group: producer
         */
        default KinesisFirehose2EndpointBuilder proxyPort(Integer proxyPort) {
            doSetProperty("proxyPort", proxyPort);
            return this;
        }
        /**
         * To define a proxy port when instantiating the Kinesis Firehose
         * client.
         *
         * The option will be converted to a <code>java.lang.Integer</code>
         * type.
         *
         * Group: producer
         */
        default KinesisFirehose2EndpointBuilder proxyPort(String proxyPort) {
            doSetProperty("proxyPort", proxyPort);
            return this;
        }
        /**
         * To define a proxy protocol when instantiating the Kinesis Firehose
         * client.
         *
         * The option is a: <code>software.amazon.awssdk.core.Protocol</code>
         * type.
         *
         * Default: HTTPS
         * Group: producer
         */
        default KinesisFirehose2EndpointBuilder proxyProtocol(
                Protocol proxyProtocol) {
            doSetProperty("proxyProtocol", proxyProtocol);
            return this;
        }
        /**
         * To define a proxy protocol when instantiating the Kinesis Firehose
         * client.
         *
         * The option will be converted to a
         * <code>software.amazon.awssdk.core.Protocol</code> type.
         *
         * Default: HTTPS
         * Group: producer
         */
        default KinesisFirehose2EndpointBuilder proxyProtocol(
                String proxyProtocol) {
            doSetProperty("proxyProtocol", proxyProtocol);
            return this;
        }
        /**
         * The region in which Kinesis Firehose client needs to work. When using
         * this parameter, the configuration will expect the lowercase name of
         * the region (for example ap-east-1) You'll need to use the name
         * Region.EU_WEST_1.id().
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: producer
         */
        default KinesisFirehose2EndpointBuilder region(String region) {
            doSetProperty("region", region);
            return this;
        }
        /**
         * If we want to trust all certificates in case of overriding the
         * endpoint.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         */
        default KinesisFirehose2EndpointBuilder trustAllCertificates(
                boolean trustAllCertificates) {
            doSetProperty("trustAllCertificates", trustAllCertificates);
            return this;
        }
        /**
         * If we want to trust all certificates in case of overriding the
         * endpoint.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         */
        default KinesisFirehose2EndpointBuilder trustAllCertificates(
                String trustAllCertificates) {
            doSetProperty("trustAllCertificates", trustAllCertificates);
            return this;
        }
        /**
         * Amazon AWS Access Key.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: security
         */
        default KinesisFirehose2EndpointBuilder accessKey(String accessKey) {
            doSetProperty("accessKey", accessKey);
            return this;
        }
        /**
         * Amazon AWS Secret Key.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: security
         */
        default KinesisFirehose2EndpointBuilder secretKey(String secretKey) {
            doSetProperty("secretKey", secretKey);
            return this;
        }
    }

    /**
     * Advanced builder for endpoint for the AWS 2 Kinesis Firehose component.
     */
    public interface AdvancedKinesisFirehose2EndpointBuilder
            extends
                EndpointProducerBuilder {
        default KinesisFirehose2EndpointBuilder basic() {
            return (KinesisFirehose2EndpointBuilder) this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or Camel
         * is allowed to use asynchronous processing (if supported).
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AdvancedKinesisFirehose2EndpointBuilder synchronous(
                boolean synchronous) {
            doSetProperty("synchronous", synchronous);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or Camel
         * is allowed to use asynchronous processing (if supported).
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AdvancedKinesisFirehose2EndpointBuilder synchronous(
                String synchronous) {
            doSetProperty("synchronous", synchronous);
            return this;
        }
    }

    /**
     * Proxy enum for
     * <code>org.apache.camel.component.aws2.firehose.KinesisFirehose2Operations</code> enum.
     *
     * Mirrors the component enum so the DSL compiles without a dependency on
     * the component artifact; values are matched by name at binding time.
     */
    enum KinesisFirehose2Operations {
        sendBatchRecord,
        createDeliveryStream,
        deleteDeliveryStream,
        describeDeliveryStream,
        updateDestination;
    }

    /**
     * Proxy enum for <code>software.amazon.awssdk.core.Protocol</code> enum.
     */
    enum Protocol {
        HTTP,
        HTTPS;
    }

    public interface KinesisFirehose2Builders {
        /**
         * AWS 2 Kinesis Firehose (camel-aws2-kinesis)
         * Produce data to AWS Kinesis Firehose streams using AWS SDK version
         * 2.x.
         *
         * Category: cloud,messaging
         * Since: 3.2
         * Maven coordinates: org.apache.camel:camel-aws2-kinesis
         *
         * Syntax: <code>aws2-kinesis-firehose:streamName</code>
         *
         * Path parameter: streamName (required)
         * Name of the stream
         *
         * @param path streamName
         */
        default KinesisFirehose2EndpointBuilder aws2KinesisFirehose(String path) {
            return KinesisFirehose2EndpointBuilderFactory.endpointBuilder("aws2-kinesis-firehose", path);
        }
        /**
         * AWS 2 Kinesis Firehose (camel-aws2-kinesis)
         * Produce data to AWS Kinesis Firehose streams using AWS SDK version
         * 2.x.
         *
         * Category: cloud,messaging
         * Since: 3.2
         * Maven coordinates: org.apache.camel:camel-aws2-kinesis
         *
         * Syntax: <code>aws2-kinesis-firehose:streamName</code>
         *
         * Path parameter: streamName (required)
         * Name of the stream
         *
         * @param componentName to use a custom component name for the endpoint
         * instead of the default name
         * @param path streamName
         */
        default KinesisFirehose2EndpointBuilder aws2KinesisFirehose(
                String componentName,
                String path) {
            return KinesisFirehose2EndpointBuilderFactory.endpointBuilder(componentName, path);
        }
    }

    // Creates the concrete builder: a local class implementing both the basic
    // and advanced builder interfaces over AbstractEndpointBuilder.
    static KinesisFirehose2EndpointBuilder endpointBuilder(
            String componentName,
            String path) {
        class KinesisFirehose2EndpointBuilderImpl extends AbstractEndpointBuilder implements KinesisFirehose2EndpointBuilder, AdvancedKinesisFirehose2EndpointBuilder {
            public KinesisFirehose2EndpointBuilderImpl(String path) {
                super(componentName, path);
            }
        }
        return new KinesisFirehose2EndpointBuilderImpl(path);
    }
}
/* Copyright 2016 Google Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.api.codegen.config;

import com.google.api.codegen.CollectionConfigProto;
import com.google.api.codegen.CollectionOneofProto;
import com.google.api.codegen.ConfigProto;
import com.google.api.codegen.FixedResourceNameValueProto;
import com.google.api.codegen.InterfaceConfigProto;
import com.google.api.codegen.LanguageSettingsProto;
import com.google.api.codegen.LicenseHeaderProto;
import com.google.api.codegen.ReleaseLevel;
import com.google.api.codegen.ResourceNameTreatment;
import com.google.api.tools.framework.model.Diag;
import com.google.api.tools.framework.model.DiagCollector;
import com.google.api.tools.framework.model.Field;
import com.google.api.tools.framework.model.Interface;
import com.google.api.tools.framework.model.Model;
import com.google.api.tools.framework.model.ProtoFile;
import com.google.api.tools.framework.model.SimpleLocation;
import com.google.api.tools.framework.model.SymbolTable;
import com.google.auto.value.AutoValue;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.io.CharStreams;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.LinkedHashMap;
import javax.annotation.Nullable;

/**
 * ApiConfig represents the code-gen config for an API library contained in the {api}_gapic.yaml
 * configuration file.
 *
 * <p>Error-handling convention throughout this class: helpers report problems to the model's
 * {@link DiagCollector} and return {@code null} rather than throwing; callers check for null.
 */
@AutoValue
public abstract class ApiConfig {
  abstract ImmutableMap<String, InterfaceConfig> getInterfaceConfigMap();

  /** Returns the package name. */
  public abstract String getPackageName();

  /** Returns the location of the domain layer, if any. */
  public abstract String getDomainLayerLocation();

  /** Returns the release level of the API, if any. */
  public abstract ReleaseLevel getReleaseLevel();

  /** Returns the resource name messages configuration. If none was specified, returns null. */
  @Nullable
  public abstract ResourceNameMessageConfigs getResourceNameMessageConfigs();

  /** Returns the lines from the configured copyright file. */
  public abstract ImmutableList<String> getCopyrightLines();

  /** Returns the lines from the configured license file. */
  public abstract ImmutableList<String> getLicenseLines();

  /** Returns a map from entity names to resource name configs. */
  public abstract ImmutableMap<String, ResourceNameConfig> getResourceNameConfigs();

  /**
   * Returns a map from fully qualified field names to FieldConfigs for all fields that have a
   * resource name type specified. This is the default field config for each field, and should be
   * used when not in the context of a particular method or flattening configuration.
   */
  public abstract ImmutableMap<String, FieldConfig> getDefaultResourceNameFieldConfigMap();

  /**
   * Creates an instance of ApiConfig based on ConfigProto, linking up API interface configurations
   * with specified interfaces in interfaceConfigMap. On errors, null will be returned, and
   * diagnostics are reported to the model.
   */
  @Nullable
  public static ApiConfig createApiConfig(Model model, ConfigProto configProto) {
    // Get the proto file containing the first interface listed in the config proto, and use it as
    // the assigned file for generated resource names, and to get the default message namespace
    ProtoFile file =
        model.getSymbolTable().lookupInterface(configProto.getInterfaces(0).getName()).getFile();
    String defaultPackage = file.getProto().getPackage();

    ResourceNameMessageConfigs messageConfigs =
        ResourceNameMessageConfigs.createMessageResourceTypesConfig(
            model, configProto, defaultPackage);

    ImmutableMap<String, ResourceNameConfig> resourceNameConfigs =
        createResourceNameConfigs(model.getDiagCollector(), configProto, file);

    ImmutableMap<String, InterfaceConfig> interfaceConfigMap =
        createInterfaceConfigMap(
            model.getDiagCollector(),
            configProto,
            messageConfigs,
            resourceNameConfigs,
            model.getSymbolTable());

    // Fall back to default (empty) settings when the target language has no entry.
    LanguageSettingsProto settings =
        configProto.getLanguageSettings().get(configProto.getLanguage());
    if (settings == null) {
      settings = LanguageSettingsProto.getDefaultInstance();
    }

    ImmutableList<String> copyrightLines = null;
    ImmutableList<String> licenseLines = null;
    try {
      copyrightLines = loadCopyrightLines(model.getDiagCollector(), configProto.getLicenseHeader());
      licenseLines = loadLicenseLines(model.getDiagCollector(), configProto.getLicenseHeader());
    } catch (Exception e) {
      // I/O failures reading the license resources are fatal: report, then rethrow.
      model
          .getDiagCollector()
          .addDiag(Diag.error(SimpleLocation.TOPLEVEL, "Exception: %s", e.getMessage()));
      e.printStackTrace(System.err);
      throw new RuntimeException(e);
    }

    // Null from any helper means diagnostics were already reported; abort construction.
    if (interfaceConfigMap == null || copyrightLines == null || licenseLines == null) {
      return null;
    } else {
      return new AutoValue_ApiConfig(
          interfaceConfigMap,
          settings.getPackageName(),
          settings.getDomainLayerLocation(),
          settings.getReleaseLevel(),
          messageConfigs,
          copyrightLines,
          licenseLines,
          resourceNameConfigs,
          createResponseFieldConfigMap(messageConfigs, resourceNameConfigs));
    }
  }

  /** Creates an ApiConfig with no content. Exposed for testing. */
  @VisibleForTesting
  public static ApiConfig createDummyApiConfig() {
    return createDummyApiConfig(ImmutableMap.<String, InterfaceConfig>of(), "", "", null);
  }

  /** Creates an ApiConfig with fixed content. Exposed for testing. */
  @VisibleForTesting
  public static ApiConfig createDummyApiConfig(
      ImmutableMap<String, InterfaceConfig> interfaceConfigMap,
      String packageName,
      String domainLayerLocation,
      ResourceNameMessageConfigs messageConfigs) {
    return new AutoValue_ApiConfig(
        interfaceConfigMap,
        packageName,
        domainLayerLocation,
        ReleaseLevel.UNSET_RELEASE_LEVEL,
        messageConfigs,
        ImmutableList.<String>of(),
        ImmutableList.<String>of(),
        ImmutableMap.<String, ResourceNameConfig>of(),
        createResponseFieldConfigMap(
            messageConfigs, ImmutableMap.<String, ResourceNameConfig>of()));
  }

  // Builds per-interface configs keyed by fully qualified interface name.
  // Unknown/unreachable interfaces are reported and skipped; returns null if
  // any error was collected.
  private static ImmutableMap<String, InterfaceConfig> createInterfaceConfigMap(
      DiagCollector diagCollector,
      ConfigProto configProto,
      ResourceNameMessageConfigs messageConfigs,
      ImmutableMap<String, ResourceNameConfig> resourceNameConfigs,
      SymbolTable symbolTable) {
    ImmutableMap.Builder<String, InterfaceConfig> interfaceConfigMap =
        ImmutableMap.<String, InterfaceConfig>builder();
    for (InterfaceConfigProto interfaceConfigProto : configProto.getInterfacesList()) {
      Interface iface = symbolTable.lookupInterface(interfaceConfigProto.getName());
      if (iface == null || !iface.isReachable()) {
        diagCollector.addDiag(
            Diag.error(
                SimpleLocation.TOPLEVEL,
                "interface not found: %s",
                interfaceConfigProto.getName()));
        continue;
      }
      InterfaceConfig interfaceConfig =
          InterfaceConfig.createInterfaceConfig(
              diagCollector,
              configProto.getLanguage(),
              interfaceConfigProto,
              iface,
              messageConfigs,
              resourceNameConfigs);
      if (interfaceConfig == null) {
        continue;
      }
      interfaceConfigMap.put(interfaceConfigProto.getName(), interfaceConfig);
    }

    if (diagCollector.getErrorCount() > 0) {
      return null;
    } else {
      return interfaceConfigMap.build();
    }
  }

  // Reads the copyright file named by the license header config; null (with a
  // reported diag) when the config or file name is missing.
  private static ImmutableList<String> loadCopyrightLines(
      DiagCollector diagCollector, LicenseHeaderProto licenseHeaderProto) throws IOException {
    if (licenseHeaderProto == null) {
      diagCollector.addDiag(Diag.error(SimpleLocation.TOPLEVEL, "license_header missing"));
      return null;
    }
    if (Strings.isNullOrEmpty(licenseHeaderProto.getCopyrightFile())) {
      diagCollector.addDiag(
          Diag.error(SimpleLocation.TOPLEVEL, "license_header.copyright_file missing"));
      return null;
    }

    return getResourceLines(licenseHeaderProto.getCopyrightFile());
  }

  // Reads the license file named by the license header config; null (with a
  // reported diag) when the config or file name is missing.
  private static ImmutableList<String> loadLicenseLines(
      DiagCollector diagCollector, LicenseHeaderProto licenseHeaderProto) throws IOException {
    if (licenseHeaderProto == null) {
      diagCollector.addDiag(Diag.error(SimpleLocation.TOPLEVEL, "license_header missing"));
      return null;
    }
    if (Strings.isNullOrEmpty(licenseHeaderProto.getLicenseFile())) {
      diagCollector.addDiag(
          Diag.error(SimpleLocation.TOPLEVEL, "license_header.license_file missing"));
      return null;
    }

    return getResourceLines(licenseHeaderProto.getLicenseFile());
  }

  // Loads a classpath resource (relative to ConfigProto) as UTF-8 lines.
  // NOTE(review): getResourceAsStream returns null for a missing resource,
  // which would surface here as an NPE rather than a diag — confirm intended.
  private static ImmutableList<String> getResourceLines(String resourceFileName)
      throws IOException {
    InputStream fileStream = ConfigProto.class.getResourceAsStream(resourceFileName);
    InputStreamReader fileReader = new InputStreamReader(fileStream, Charsets.UTF_8);
    return ImmutableList.copyOf(CharStreams.readLines(fileReader));
  }

  // Merges single, oneof, and fixed resource name configs into one map keyed
  // by entity name.
  // NOTE(review): createSingleResourceNameConfigs can return null on collected
  // errors, which would NPE in putAll below — verify error paths upstream.
  private static ImmutableMap<String, ResourceNameConfig> createResourceNameConfigs(
      DiagCollector diagCollector, ConfigProto configProto, ProtoFile file) {
    ImmutableMap<String, SingleResourceNameConfig> singleResourceNameConfigs =
        createSingleResourceNameConfigs(diagCollector, configProto, file);
    ImmutableMap<String, FixedResourceNameConfig> fixedResourceNameConfigs =
        createFixedResourceNameConfigs(
            diagCollector, configProto.getFixedResourceNameValuesList(), file);
    ImmutableMap<String, ResourceNameOneofConfig> resourceNameOneofConfigs =
        createResourceNameOneofConfigs(
            diagCollector,
            configProto.getCollectionOneofsList(),
            singleResourceNameConfigs,
            fixedResourceNameConfigs,
            file);

    ImmutableMap.Builder<String, ResourceNameConfig> resourceCollectionMap = ImmutableMap.builder();
    resourceCollectionMap.putAll(singleResourceNameConfigs);
    resourceCollectionMap.putAll(resourceNameOneofConfigs);
    resourceCollectionMap.putAll(fixedResourceNameConfigs);
    return resourceCollectionMap.build();
  }

  // Gathers single resource name configs from the top-level collections list
  // and from every interface's collections list; null if errors were reported.
  private static ImmutableMap<String, SingleResourceNameConfig> createSingleResourceNameConfigs(
      DiagCollector diagCollector, ConfigProto configProto, ProtoFile file) {
    LinkedHashMap<String, SingleResourceNameConfig> singleResourceNameConfigsMap =
        new LinkedHashMap<>();
    for (CollectionConfigProto collectionConfigProto : configProto.getCollectionsList()) {
      createSingleResourceNameConfig(
          diagCollector, collectionConfigProto, singleResourceNameConfigsMap, file);
    }
    for (InterfaceConfigProto interfaceConfigProto : configProto.getInterfacesList()) {
      for (CollectionConfigProto collectionConfigProto :
          interfaceConfigProto.getCollectionsList()) {
        createSingleResourceNameConfig(
            diagCollector, collectionConfigProto, singleResourceNameConfigsMap, file);
      }
    }

    if (diagCollector.getErrorCount() > 0) {
      return null;
    } else {
      return ImmutableMap.copyOf(singleResourceNameConfigsMap);
    }
  }

  // Adds one single-resource-name config to the accumulator map. Duplicate
  // entity names are allowed only when their name patterns agree; otherwise a
  // diag error is reported.
  private static void createSingleResourceNameConfig(
      DiagCollector diagCollector,
      CollectionConfigProto collectionConfigProto,
      LinkedHashMap<String, SingleResourceNameConfig> singleResourceNameConfigsMap,
      ProtoFile file) {
    SingleResourceNameConfig singleResourceNameConfig =
        SingleResourceNameConfig.createSingleResourceName(
            diagCollector, collectionConfigProto, file);
    if (singleResourceNameConfig == null) {
      return;
    }
    if (singleResourceNameConfigsMap.containsKey(singleResourceNameConfig.getEntityName())) {
      SingleResourceNameConfig otherConfig =
          singleResourceNameConfigsMap.get(singleResourceNameConfig.getEntityName());
      if (!singleResourceNameConfig.getNamePattern().equals(otherConfig.getNamePattern())) {
        diagCollector.addDiag(
            Diag.error(
                SimpleLocation.TOPLEVEL,
                "Inconsistent collection configs across interfaces. Entity name: "
                    + singleResourceNameConfig.getEntityName()));
      }
    } else {
      singleResourceNameConfigsMap.put(
          singleResourceNameConfig.getEntityName(), singleResourceNameConfig);
    }
  }

  // Builds fixed resource name configs keyed by entity name; invalid entries
  // are skipped (their errors are reported by the factory).
  private static ImmutableMap<String, FixedResourceNameConfig> createFixedResourceNameConfigs(
      DiagCollector diagCollector,
      Iterable<FixedResourceNameValueProto> fixedConfigProtos,
      ProtoFile file) {
    ImmutableMap.Builder<String, FixedResourceNameConfig> fixedConfigBuilder =
        ImmutableMap.builder();
    for (FixedResourceNameValueProto fixedConfigProto : fixedConfigProtos) {
      FixedResourceNameConfig fixedConfig =
          FixedResourceNameConfig.createFixedResourceNameConfig(
              diagCollector, fixedConfigProto, file);
      if (fixedConfig == null) {
        continue;
      }
      fixedConfigBuilder.put(fixedConfig.getEntityName(), fixedConfig);
    }
    return fixedConfigBuilder.build();
  }

  // Builds oneof resource name configs keyed by entity name; invalid entries
  // are skipped (their errors are reported by the factory).
  private static ImmutableMap<String, ResourceNameOneofConfig> createResourceNameOneofConfigs(
      DiagCollector diagCollector,
      Iterable<CollectionOneofProto> oneofConfigProtos,
      ImmutableMap<String, SingleResourceNameConfig> singleResourceNameConfigs,
      ImmutableMap<String, FixedResourceNameConfig> fixedResourceNameConfigs,
      ProtoFile file) {
    ImmutableMap.Builder<String, ResourceNameOneofConfig> oneofConfigBuilder =
        ImmutableMap.builder();
    for (CollectionOneofProto oneofProto : oneofConfigProtos) {
      ResourceNameOneofConfig oneofConfig =
          ResourceNameOneofConfig.createResourceNameOneof(
              diagCollector, oneofProto, singleResourceNameConfigs, fixedResourceNameConfigs, file);
      if (oneofConfig == null) {
        continue;
      }
      oneofConfigBuilder.put(oneofConfig.getEntityName(), oneofConfig);
    }
    return oneofConfigBuilder.build();
  }

  // Builds the default FieldConfig (STATIC_TYPES treatment) for every field
  // that has a resource name association, keyed by the field's full name.
  private static ImmutableMap<String, FieldConfig> createResponseFieldConfigMap(
      ResourceNameMessageConfigs messageConfig,
      ImmutableMap<String, ResourceNameConfig> resourceNameConfigs) {

    ImmutableMap.Builder<String, FieldConfig> builder = ImmutableMap.builder();
    if (messageConfig == null) {
      return builder.build();
    }
    for (Field field : messageConfig.getFieldsWithResourceNamesByMessage().values()) {
      builder.put(
          field.getFullName(),
          FieldConfig.createMessageFieldConfig(
              messageConfig, resourceNameConfigs, field, ResourceNameTreatment.STATIC_TYPES));
    }
    return builder.build();
  }

  /** Returns the InterfaceConfig for the given API interface. */
  public InterfaceConfig getInterfaceConfig(Interface iface) {
    return getInterfaceConfigMap().get(iface.getFullName());
  }

  /** Returns only the single (non-oneof, non-fixed) resource name configs. */
  public Iterable<SingleResourceNameConfig> getSingleResourceNameConfigs() {
    return Iterables.filter(getResourceNameConfigs().values(), SingleResourceNameConfig.class);
  }

  /**
   * Returns a SingleResourceNameConfig object for the given entity name. If the entityName
   * corresponds to a ResourceNameOneofConfig which contains at least one SingleResourceNameConfig,
   * then the first of those SingleResourceNameConfigs is returned. If the entityName is neither a
   * SingleResourceNameConfig or ResourceNameOneofConfig containing a SingleResourceNameConfig, then
   * returns null.
   */
  public SingleResourceNameConfig getSingleResourceNameConfig(String entityName) {
    ResourceNameConfig resourceNameConfig = getResourceNameConfigs().get(entityName);
    if (resourceNameConfig != null && resourceNameConfig instanceof SingleResourceNameConfig) {
      return (SingleResourceNameConfig) resourceNameConfig;
    }
    if (resourceNameConfig != null && resourceNameConfig instanceof ResourceNameOneofConfig) {
      ResourceNameOneofConfig oneofConfig = (ResourceNameOneofConfig) resourceNameConfig;
      if (Iterables.size(oneofConfig.getSingleResourceNameConfigs()) > 0) {
        return Iterables.get(oneofConfig.getSingleResourceNameConfigs(), 0);
      }
    }
    return null;
  }
}
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
 * Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.datapipeline.model;

import java.io.Serializable;
import java.util.Objects;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * <p>
 * Contains the parameters for SetStatus.
 * </p>
 */
public class SetStatusRequest extends AmazonWebServiceRequest implements
        Serializable, Cloneable {

    /** The ID of the pipeline that contains the objects. */
    private String pipelineId;

    /**
     * The IDs of the objects. The corresponding objects can be either physical
     * or components, but not a mix of both types.
     */
    private com.amazonaws.internal.SdkInternalList<String> objectIds;

    /**
     * The status to be set on all the objects specified in
     * <code>objectIds</code>. For components, use <code>PAUSE</code> or
     * <code>RESUME</code>. For instances, use <code>TRY_CANCEL</code>,
     * <code>RERUN</code>, or <code>MARK_FINISHED</code>.
     */
    private String status;

    /**
     * Sets the ID of the pipeline that contains the objects.
     *
     * @param pipelineId
     *        The ID of the pipeline that contains the objects.
     */
    public void setPipelineId(String pipelineId) {
        this.pipelineId = pipelineId;
    }

    /**
     * Returns the ID of the pipeline that contains the objects.
     *
     * @return The ID of the pipeline that contains the objects.
     */
    public String getPipelineId() {
        return this.pipelineId;
    }

    /**
     * Fluent variant of {@link #setPipelineId(String)}.
     *
     * @param pipelineId
     *        The ID of the pipeline that contains the objects.
     * @return This request, to allow method-call chaining.
     */
    public SetStatusRequest withPipelineId(String pipelineId) {
        setPipelineId(pipelineId);
        return this;
    }

    /**
     * Returns the IDs of the objects. The corresponding objects can be either
     * physical or components, but not a mix of both types.
     * <p>
     * The backing list is created lazily on first access, so this never
     * returns {@code null} (unless {@link #setObjectIds} was passed
     * {@code null} and this accessor then re-creates an empty list).
     *
     * @return The IDs of the objects.
     */
    public java.util.List<String> getObjectIds() {
        if (objectIds == null) {
            objectIds = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return objectIds;
    }

    /**
     * Replaces the IDs of the objects with a copy of the given collection.
     * Passing {@code null} clears the list entirely.
     *
     * @param objectIds
     *        The IDs of the objects, or {@code null} to clear.
     */
    public void setObjectIds(java.util.Collection<String> objectIds) {
        this.objectIds = (objectIds == null)
                ? null
                : new com.amazonaws.internal.SdkInternalList<String>(objectIds);
    }

    /**
     * Appends the given IDs to the existing list (if any).
     * <p>
     * <b>NOTE:</b> use {@link #setObjectIds(java.util.Collection)} or
     * {@link #withObjectIds(java.util.Collection)} instead if you want to
     * override the existing values.
     *
     * @param objectIds
     *        The IDs of the objects to append.
     * @return This request, to allow method-call chaining.
     */
    public SetStatusRequest withObjectIds(String... objectIds) {
        if (this.objectIds == null) {
            // Presize to avoid growth while appending below.
            setObjectIds(new com.amazonaws.internal.SdkInternalList<String>(
                    objectIds.length));
        }
        for (String id : objectIds) {
            this.objectIds.add(id);
        }
        return this;
    }

    /**
     * Replaces the IDs of the objects; fluent variant of
     * {@link #setObjectIds(java.util.Collection)}.
     *
     * @param objectIds
     *        The IDs of the objects.
     * @return This request, to allow method-call chaining.
     */
    public SetStatusRequest withObjectIds(java.util.Collection<String> objectIds) {
        setObjectIds(objectIds);
        return this;
    }

    /**
     * Sets the status to apply to all objects in <code>objectIds</code>. For
     * components, use <code>PAUSE</code> or <code>RESUME</code>. For
     * instances, use <code>TRY_CANCEL</code>, <code>RERUN</code>, or
     * <code>MARK_FINISHED</code>.
     *
     * @param status
     *        The status to be set.
     */
    public void setStatus(String status) {
        this.status = status;
    }

    /**
     * Returns the status to apply to all objects in <code>objectIds</code>.
     *
     * @return The status to be set.
     */
    public String getStatus() {
        return this.status;
    }

    /**
     * Fluent variant of {@link #setStatus(String)}.
     *
     * @param status
     *        The status to be set.
     * @return This request, to allow method-call chaining.
     */
    public SetStatusRequest withStatus(String status) {
        setStatus(status);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getPipelineId() != null) {
            sb.append("PipelineId: ").append(getPipelineId()).append(',');
        }
        if (getObjectIds() != null) {
            sb.append("ObjectIds: ").append(getObjectIds()).append(',');
        }
        if (getStatus() != null) {
            sb.append("Status: ").append(getStatus());
        }
        return sb.append('}').toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof SetStatusRequest)) {
            return false;
        }
        SetStatusRequest other = (SetStatusRequest) obj;
        // Getters are used deliberately: getObjectIds() lazily materializes an
        // empty list, matching the historical comparison semantics.
        return Objects.equals(getPipelineId(), other.getPipelineId())
                && Objects.equals(getObjectIds(), other.getObjectIds())
                && Objects.equals(getStatus(), other.getStatus());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (seeded with 1) as
        // the hand-rolled field chain, so hash values are unchanged.
        return Objects.hash(getPipelineId(), getObjectIds(), getStatus());
    }

    @Override
    public SetStatusRequest clone() {
        return (SetStatusRequest) super.clone();
    }
}
/* * Copyright 2009 ZXing authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.zxing.integration.android; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import android.app.Activity; import android.app.AlertDialog; import android.content.ActivityNotFoundException; import android.content.DialogInterface; import android.content.Intent; import android.content.pm.PackageManager; import android.content.pm.ResolveInfo; import android.net.Uri; import android.os.Bundle; import android.util.Log; /** * <p>A utility class which helps ease integration with Barcode Scanner via {@link Intent}s. This is a simple * way to invoke barcode scanning and receive the result, without any need to integrate, modify, or learn the * project's source code.</p> * * <h2>Initiating a barcode scan</h2> * * <p>To integrate, create an instance of {@code IntentIntegrator} and call {@link #initiateScan()} and wait * for the result in your app.</p> * * <p>It does require that the Barcode Scanner (or work-alike) application is installed. The * {@link #initiateScan()} method will prompt the user to download the application, if needed.</p> * * <p>There are a few steps to using this integration. 
First, your {@link Activity} must implement * the method {@link Activity#onActivityResult(int, int, Intent)} and include a line of code like this:</p> * * <pre>{@code * public void onActivityResult(int requestCode, int resultCode, Intent intent) { * IntentResult scanResult = IntentIntegrator.parseActivityResult(requestCode, resultCode, intent); * if (scanResult != null) { * // handle scan result * } * // else continue with any other code you need in the method * ... * } * }</pre> * * <p>This is where you will handle a scan result.</p> * * <p>Second, just call this in response to a user action somewhere to begin the scan process:</p> * * <pre>{@code * IntentIntegrator integrator = new IntentIntegrator(yourActivity); * integrator.initiateScan(); * }</pre> * * <p>Note that {@link #initiateScan()} returns an {@link AlertDialog} which is non-null if the * user was prompted to download the application. This lets the calling app potentially manage the dialog. * In particular, ideally, the app dismisses the dialog if it's still active in its {@link Activity#onPause()} * method.</p> * * <p>You can use {@link #setTitle(String)} to customize the title of this download prompt dialog (or, use * {@link #setTitleByID(int)} to set the title by string resource ID.) Likewise, the prompt message, and * yes/no button labels can be changed.</p> * * <p>Finally, you can use {@link #addExtra(String, Object)} to add more parameters to the Intent used * to invoke the scanner. This can be used to set additional options not directly exposed by this * simplified API.</p> * * <p>By default, this will only allow applications that are known to respond to this intent correctly * do so. The apps that are allowed to response can be set with {@link #setTargetApplications(Collection)}. 
* For example, set to {@link #TARGET_BARCODE_SCANNER_ONLY} to only target the Barcode Scanner app itself.</p> * * <h2>Sharing text via barcode</h2> * * <p>To share text, encoded as a QR Code on-screen, similarly, see {@link #shareText(CharSequence)}.</p> * * <p>Some code, particularly download integration, was contributed from the Anobiit application.</p> * * <h2>Enabling experimental barcode formats</h2> * * <p>Some formats are not enabled by default even when scanning with {@link #ALL_CODE_TYPES}, such as * {@link com.google.zxing.BarcodeFormat#PDF_417}. Use {@link #initiateScan(java.util.Collection)} with * a collection containing the names of formats to scan for explicitly, like "PDF_417", to use such * formats.</p> * * @author Sean Owen * @author Fred Lin * @author Isaac Potoczny-Jones * @author Brad Drehmer * @author gcstang */ public class IntentIntegrator { public static final int REQUEST_CODE = 0x0000c0de; // Only use bottom 16 bits private static final String TAG = IntentIntegrator.class.getSimpleName(); public static final String DEFAULT_TITLE = "Install Barcode Scanner?"; public static final String DEFAULT_MESSAGE = "This application requires Barcode Scanner. 
Would you like to install it?"; public static final String DEFAULT_YES = "Yes"; public static final String DEFAULT_NO = "No"; private static final String BS_PACKAGE = "com.google.zxing.client.android"; private static final String BSPLUS_PACKAGE = "com.srowen.bs.android"; // supported barcode formats public static final Collection<String> PRODUCT_CODE_TYPES = list("UPC_A", "UPC_E", "EAN_8", "EAN_13", "RSS_14"); public static final Collection<String> ONE_D_CODE_TYPES = list("UPC_A", "UPC_E", "EAN_8", "EAN_13", "CODE_39", "CODE_93", "CODE_128", "ITF", "RSS_14", "RSS_EXPANDED"); public static final Collection<String> QR_CODE_TYPES = Collections.singleton("QR_CODE"); public static final Collection<String> DATA_MATRIX_TYPES = Collections.singleton("DATA_MATRIX"); public static final Collection<String> ALL_CODE_TYPES = null; public static final Collection<String> TARGET_BARCODE_SCANNER_ONLY = Collections.singleton(BS_PACKAGE); public static final Collection<String> TARGET_ALL_KNOWN = list( BS_PACKAGE, // Barcode Scanner BSPLUS_PACKAGE, // Barcode Scanner+ BSPLUS_PACKAGE + ".simple" // Barcode Scanner+ Simple // What else supports this intent? 
); private final Activity activity; private String title; private String message; private String buttonYes; private String buttonNo; private Collection<String> targetApplications; private final Map<String,Object> moreExtras; public IntentIntegrator(Activity activity) { this.activity = activity; title = DEFAULT_TITLE; message = DEFAULT_MESSAGE; buttonYes = DEFAULT_YES; buttonNo = DEFAULT_NO; targetApplications = TARGET_ALL_KNOWN; moreExtras = new HashMap<String,Object>(3); } public String getTitle() { return title; } public void setTitle(String title) { this.title = title; } public void setTitleByID(int titleID) { title = activity.getString(titleID); } public String getMessage() { return message; } public void setMessage(String message) { this.message = message; } public void setMessageByID(int messageID) { message = activity.getString(messageID); } public String getButtonYes() { return buttonYes; } public void setButtonYes(String buttonYes) { this.buttonYes = buttonYes; } public void setButtonYesByID(int buttonYesID) { buttonYes = activity.getString(buttonYesID); } public String getButtonNo() { return buttonNo; } public void setButtonNo(String buttonNo) { this.buttonNo = buttonNo; } public void setButtonNoByID(int buttonNoID) { buttonNo = activity.getString(buttonNoID); } public Collection<String> getTargetApplications() { return targetApplications; } public void setTargetApplications(Collection<String> targetApplications) { this.targetApplications = targetApplications; } public void setSingleTargetApplication(String targetApplication) { this.targetApplications = Collections.singleton(targetApplication); } public Map<String,?> getMoreExtras() { return moreExtras; } public void addExtra(String key, Object value) { moreExtras.put(key, value); } /** * Initiates a scan for all known barcode types. 
*/ public AlertDialog initiateScan() { return initiateScan(ALL_CODE_TYPES); } /** * Initiates a scan only for a certain set of barcode types, given as strings corresponding * to their names in ZXing's {@code BarcodeFormat} class like "UPC_A". You can supply constants * like {@link #PRODUCT_CODE_TYPES} for example. * * @return the {@link AlertDialog} that was shown to the user prompting them to download the app * if a prompt was needed, or null otherwise */ public AlertDialog initiateScan(Collection<String> desiredBarcodeFormats) { Intent intentScan = new Intent(BS_PACKAGE + ".SCAN"); intentScan.addCategory(Intent.CATEGORY_DEFAULT); // check which types of codes to scan for if (desiredBarcodeFormats != null) { // set the desired barcode types StringBuilder joinedByComma = new StringBuilder(); for (String format : desiredBarcodeFormats) { if (joinedByComma.length() > 0) { joinedByComma.append(','); } joinedByComma.append(format); } intentScan.putExtra("SCAN_FORMATS", joinedByComma.toString()); } String targetAppPackage = findTargetAppPackage(intentScan); if (targetAppPackage == null) { return showDownloadDialog(); } intentScan.setPackage(targetAppPackage); intentScan.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP); intentScan.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET); attachMoreExtras(intentScan); startActivityForResult(intentScan, REQUEST_CODE); return null; } /** * Start an activity.<br> * This method is defined to allow different methods of activity starting for * newer versions of Android and for compatibility library. * * @param intent Intent to start. 
* @param code Request code for the activity * @see android.app.Activity#startActivityForResult(Intent, int) * @see android.app.Fragment#startActivityForResult(Intent, int) */ protected void startActivityForResult(Intent intent, int code) { activity.startActivityForResult(intent, code); } private String findTargetAppPackage(Intent intent) { PackageManager pm = activity.getPackageManager(); List<ResolveInfo> availableApps = pm.queryIntentActivities(intent, PackageManager.MATCH_DEFAULT_ONLY); if (availableApps != null) { for (ResolveInfo availableApp : availableApps) { String packageName = availableApp.activityInfo.packageName; if (targetApplications.contains(packageName)) { return packageName; } } } return null; } private AlertDialog showDownloadDialog() { AlertDialog.Builder downloadDialog = new AlertDialog.Builder(activity); downloadDialog.setTitle(title); downloadDialog.setMessage(message); downloadDialog.setNegativeButton(buttonYes, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialogInterface, int i) { Uri uri = Uri.parse("market://details?id=" + BS_PACKAGE); Intent intent = new Intent(Intent.ACTION_VIEW, uri); try { activity.startActivity(intent); } catch (ActivityNotFoundException anfe) { // Hmm, market is not installed Log.w(TAG, "Android Market is not installed; cannot install Barcode Scanner"); } } }); downloadDialog.setPositiveButton(buttonNo, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialogInterface, int i) {} }); return downloadDialog.show(); } /** * <p>Call this from your {@link Activity}'s * {@link Activity#onActivityResult(int, int, Intent)} method.</p> * * @return null if the event handled here was not related to this class, or * else an {@link IntentResult} containing the result of the scan. If the user cancelled scanning, * the fields will be null. 
*/ public static IntentResult parseActivityResult(int requestCode, int resultCode, Intent intent) { if (requestCode == REQUEST_CODE) { if (resultCode == Activity.RESULT_OK) { String contents = intent.getStringExtra("SCAN_RESULT"); String formatName = intent.getStringExtra("SCAN_RESULT_FORMAT"); byte[] rawBytes = intent.getByteArrayExtra("SCAN_RESULT_BYTES"); int intentOrientation = intent.getIntExtra("SCAN_RESULT_ORIENTATION", Integer.MIN_VALUE); Integer orientation = intentOrientation == Integer.MIN_VALUE ? null : intentOrientation; String errorCorrectionLevel = intent.getStringExtra("SCAN_RESULT_ERROR_CORRECTION_LEVEL"); return new IntentResult(contents, formatName, rawBytes, orientation, errorCorrectionLevel); } return new IntentResult(); } return null; } /** * Defaults to type "TEXT_TYPE". * @see #shareText(CharSequence, CharSequence) */ public AlertDialog shareText(CharSequence text) { return shareText(text, "TEXT_TYPE"); } /** * Shares the given text by encoding it as a barcode, such that another user can * scan the text off the screen of the device. * * @param text the text string to encode as a barcode * @param type type of data to encode. See {@code com.google.zxing.client.android.Contents.Type} constants. 
* @return the {@link AlertDialog} that was shown to the user prompting them to download the app * if a prompt was needed, or null otherwise */ public AlertDialog shareText(CharSequence text, CharSequence type) { Intent intent = new Intent(); intent.addCategory(Intent.CATEGORY_DEFAULT); intent.setAction(BS_PACKAGE + ".ENCODE"); intent.putExtra("ENCODE_TYPE", type); intent.putExtra("ENCODE_DATA", text); String targetAppPackage = findTargetAppPackage(intent); if (targetAppPackage == null) { return showDownloadDialog(); } intent.setPackage(targetAppPackage); intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP); intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET); attachMoreExtras(intent); activity.startActivity(intent); return null; } private static Collection<String> list(String... values) { return Collections.unmodifiableCollection(Arrays.asList(values)); } private void attachMoreExtras(Intent intent) { for (Map.Entry<String,Object> entry : moreExtras.entrySet()) { String key = entry.getKey(); Object value = entry.getValue(); // Kind of hacky if (value instanceof Integer) { intent.putExtra(key, (Integer) value); } else if (value instanceof Long) { intent.putExtra(key, (Long) value); } else if (value instanceof Boolean) { intent.putExtra(key, (Boolean) value); } else if (value instanceof Double) { intent.putExtra(key, (Double) value); } else if (value instanceof Float) { intent.putExtra(key, (Float) value); } else if (value instanceof Bundle) { intent.putExtra(key, (Bundle) value); } else { intent.putExtra(key, value.toString()); } } } }
/** * Copyright (C) [2013] [The FURTHeR Project] * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.utah.further.fqe.ds.api.domain; import static edu.utah.further.core.api.collections.CollectionUtil.newMap; import static edu.utah.further.core.api.text.ToStringCustomStyles.SHORT_WITH_SPACES_STYLE; import static edu.utah.further.core.api.time.TimeUtil.getDateAsTime; import static org.slf4j.LoggerFactory.getLogger; import java.sql.Timestamp; import java.util.Collection; import java.util.Date; import java.util.List; import java.util.Map; import java.util.SortedSet; import java.util.UUID; import javax.xml.bind.annotation.XmlTransient; import org.apache.commons.lang.builder.EqualsBuilder; import org.apache.commons.lang.builder.HashCodeBuilder; import org.apache.commons.lang.builder.ToStringBuilder; import org.slf4j.Logger; import edu.utah.further.core.api.collections.CollectionUtil; import edu.utah.further.core.api.constant.Constants; import edu.utah.further.core.api.context.DefaultImplementation; import edu.utah.further.core.api.exception.BusinessRuleException; import edu.utah.further.core.api.lang.Final; import edu.utah.further.core.api.time.TimeService; import edu.utah.further.core.query.domain.SearchQuery; import edu.utah.further.fqe.ds.api.domain.plan.Plan; import edu.utah.further.fqe.ds.api.service.results.ResultType; import edu.utah.further.fqe.ds.api.to.plan.PlanToImpl; import edu.utah.further.fqe.ds.api.util.FqeDsApiResourceLocator; /** * A base class of {@link 
QueryContext} implementations. Using an internal * {@link QueryHandler} enum implementation rather than a full-blown friend pattern. If in * the future sub-classes need to to extend specific reusable handlers, we may * re-implement handlers using the friend pattern. * <p> * -----------------------------------------------------------------------------------<br> * (c) 2008-2013 FURTHeR Project, AVP Health Sciences IT Office, University of Utah<br> * Contact: {@code <further@utah.edu>}<br> * Biomedical Informatics, 26 South 2000 East<br> * Room 5775 HSEB, Salt Lake City, UT 84112<br> * Day Phone: 1-801-581-4080<br> * ----------------------------------------------------------------------------------- * * @author Oren E. Livne {@code <oren.livne@utah.edu>} * @version Mar 19, 2009 */ @DefaultImplementation(QueryContext.class) @XmlTransient public abstract class AbstractQueryContext implements QueryContext { // ========================= CONSTANTS ================================= /** * @serial Serializable version identifier. */ private static final long serialVersionUID = 1L; /** * A logger that helps identify this class' printouts. */ private static final Logger log = getLogger(AbstractQueryContext.class); // ========================= FIELDS ==================================== /** * Executes private methods; implemented by sub-classes. */ @XmlTransient private final PrivateSection privateSectionExecutor; // ========================= CONSTRUCTORS ============================== /** * Default constructor, for JAXB-related implementations. No fields are set. */ protected AbstractQueryContext() { super(); this.privateSectionExecutor = getPrivateSectionExecutor(); } // /** // * A copy-constructor. 
// * // * @param other // * other object to deep-copy fields from // */ // protected AbstractQueryContext(final QueryContext other) // { // this(); // copyFrom(other); // } // ========================= IMPLEMENTATION: Object ==================== /** * @see java.lang.Object#equals(java.lang.Object) */ @Override public final boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final QueryContext that = (QueryContext) obj; return new EqualsBuilder().append(this.getId(), that.getId()) // .append(this.getExecutionId(), that.getExecutionId()) // see QueryJob.equals() .append(this.getState(), that.getState()) .isEquals(); } /** * @see java.lang.Object#hashCode() */ @Override public int hashCode() { return new HashCodeBuilder().append(getId()).append(getState()).toHashCode(); } /** * @see java.lang.Object#toString() */ @Override public final String toString() { final ToStringBuilder builder = new ToStringBuilder(this, SHORT_WITH_SPACES_STYLE) .append("id", getId()) .append("executionId", getExecutionId()) .append("originId", getOriginId()) .append("dataSourceId", getDataSourceId()) .append("state", getState()) .append("queryType", getQueryType()); if (getParent() != null) { builder.append("parent ID", getParent().getId()).append("DS ID", getDataSourceId()); } builder.append("numRecords", getNumRecords()); return builder.toString(); } // ========================= IMPLEMENTATION: CopyableFrom ============== /** * @param other * @return * @see edu.utah.further.core.api.lang.CopyableFrom#copyFrom(java.lang.Object) */ @Override public AbstractQueryContext copyFrom(final QueryContext other) { if (other == null) { return this; } setOriginId(other.getOriginId()); setExecutionId(other.getExecutionId()); setDataSourceId(other.getDataSourceId()); setIdentityResolutionType(other.getIdentityResolutionType()); setTargetNamespaceId(other.getTargetNamespaceId()); 
setQueryType(other.getQueryType()); setMinRespondingDataSources(other.getMinRespondingDataSources()); setMaxRespondingDataSources(other.getMaxRespondingDataSources()); setQueries(other.getQueries()); setStaleDateTime(other.getStaleDateTime()); setQueueDate(other.getQueueDate()); // If the other result view is empty, set ours to null to eliminate empty // resultView XML element copyResultViews(other); // Soft-copy plan setPlan(other.getPlan()); return this; } // ========================= IMPLEMENTATION: QueryContext ============== /** * Return the first query. * * @return the query */ @Override public final SearchQuery getQuery() { return getQueries().isEmpty() ? null : getQueries().get(0); } /** * Return a search query with a specified QID (unique SQ identifier within this QC). * * @param qid * search query ID within this QC * @return the query */ @Override public final SearchQuery getQueryByQid(final Long qid) { if (qid == null) { return null; } // Slow implementation; may use a Map in the future if query list becomes large for (final SearchQuery query : getQueries()) { if (qid.equals(query.getId())) { return query; } } return null; } /** * Set a new single query on this object. This means that the list of queries will now * consist of one element. * * @param query * the query to set */ @Override public final void setQuery(final SearchQuery query) { getQueries().clear(); addQuery(query); } /** * Add a query to this object. * * @param query * the searvh query to to add */ @Override public final void addQuery(final SearchQuery query) { getQueries().add(newSearchQuery(query)); } /** * @see edu.utah.further.fqe.ds.api.domain.QueryContext#setQueries(java.util.Collection) * @param searchQueries */ @Override public void setQueries(final Collection<? 
extends SearchQuery> searchQueries) { getQueries().clear(); addQueries(searchQueries); } /** * @param queries * @see edu.utah.further.fqe.ds.api.domain.QueryContext#addQueries(Collection) */ @Override public final void addQueries(final Collection<? extends SearchQuery> queries) { for (final SearchQuery child : queries) { addQuery(child); } } /** * @return */ @Override public final int getNumQueries() { return getQueries().size(); } /** * Returns a sorted set of permissible actions based upon the current state. Delegates * to the current state handler. * <p> * Note: do not override this method! It is non-final to allow dynamic proxying of * persistent entities (e.g. in Hibernate). * * @return sorted set of permissible actions from the current state * @see edu.utah.further.core.api.state.StateMachine#getActions() */ @Override @Final public final SortedSet<QueryAction> getActions() { return getHandler().getActions(this); } /** * @see edu.utah.further.fqe.ds.api.domain.QueryActor#fail() */ @Override @Final public final void fail() { getHandler().fail(this); } /** * @see edu.utah.further.fqe.ds.api.domain.QueryActor#finish() */ @Override @Final public final void finish() { getHandler().finish(this); } /** * @see edu.utah.further.fqe.ds.api.domain.QueryActor#queue() */ @Override @Final public final void queue() { getHandler().queue(this); } /** * @return * @see edu.utah.further.core.api.state.Switch#isStarted() */ @Override @Final public final boolean isStarted() { return getHandler().isStarted(this); } /** * @return * @see edu.utah.further.core.api.state.Switch#isStopped() */ @Override @Final public boolean isStopped() { return getHandler().isStopped(this); } /** * @return * @see edu.utah.further.fqe.ds.api.domain.QueryContext#isFailed() */ @Override @Final public final boolean isFailed() { return getHandler().isFailed(this); } /** * @return * @see edu.utah.further.fqe.ds.api.domain.QueryContext#isInFinalState() */ @Override @Final public final boolean isInFinalState() 
{ return getActions().isEmpty(); } /** * @see edu.utah.further.core.api.state.Switch#start() */ @Override @Final public final void start() { getHandler().start(this); } /** * @see edu.utah.further.core.api.state.Switch#stop() */ @Override @Final public final void stop() { getHandler().stop(this); } /** * @return * @see edu.utah.further.fqe.ds.api.domain.QueryContext#getQueueTime() */ @Override public final Long getQueueTime() { return getDateAsTime(getQueueDate()); } /** * @return * @see edu.utah.further.fqe.ds.api.to.QueryContextTo#getStartTime() */ @Override public final Long getStartTime() { return getDateAsTime(getStartDate()); } /** * @return * @see edu.utah.further.fqe.ds.api.domain.QueryContext#getEndTime() */ @Override public final Long getEndTime() { return getDateAsTime(getEndDate()); } /** * @return * @see edu.utah.further.fqe.ds.api.domain.HasNumRecords#getNumRecords() */ @Override public final long getNumRecords() { final ResultContext rsmd = getResultContext(); return (rsmd == null) ? Constants.INVALID_VALUE_LONG : rsmd.getNumRecords(); } /** * @param numRecords * @see edu.utah.further.fqe.ds.api.domain.HasNumRecords#setNumRecords(long) */ @Override public final void setNumRecords(final long numRecords) { final ResultContext rsmd = getResultContext(); if (rsmd == null) { setResultContext(newResultContext()); } getResultContext().setNumRecords(numRecords); } /** * Return the plan property. * * @return the plan * @see edu.utah.further.fqe.ds.api.domain.QueryContext#getPlan() */ @Override public Plan getPlan() { throw new UnsupportedOperationException( "getPlan(): Must override in selected sub-classes that support this features"); } /** * Set a new value for the plan property. No defensive copy is made at the moment. 
* * @param plan * the plan to set * @see edu.utah.further.fqe.ds.api.domain.QueryContext#setPlan(PlanToImpl) */ @Override public void setPlan(final Plan plan) { throw new UnsupportedOperationException( "setPlan(): Must override in selected sub-classes that support this features"); } // ========================= STATE HANDLERS ============================ /** * Set the query collection on this object. * * @param queries * the search queries to set */ protected abstract void setQueries(final List<? extends SearchQuery> queries); /** * @param type * @param intersectionIndex * @return */ protected ResultContextKey newKey(final ResultType type) { return new ResultContextKeyImpl(type); } /** * Default query context state handlers. Each handler corresponds to a state and * implements its state behavior within an enumerated constant inner class. * Sub-classes can override this behaviour by supplying their implementation of * {@link QueryContext#getHandler()}. * <p> * ----------------------------------------------------------------------------------- * <br> * (c) 2008-2013 FURTHeR Project, Health Sciences IT, University of Utah<br> * Contact: {@code <further@utah.edu>}<br> * Biomedical Informatics, 26 South 2000 East<br> * Room 5775 HSEB, Salt Lake City, UT 84112<br> * Day Phone: 1-801-581-4080<br> * ----------------------------------------------------------------------------------- * * @author Oren E. Livne {@code <oren.livne@utah.edu>} * @version May 28, 2009 */ private enum DefaultQueryHandler implements QueryHandler { // ========================= ENUMERATED CONSTANTS ====================== /** * The query's context is created and transient. */ SUBMITTED(QueryState.SUBMITTED, false, false) { /** * Persist query to query database and add it to the execution queue. 
* * @param target * @see edu.utah.further.fqe.ds.api.domain.AbstractQueryContext.DefaultQueryHandler#queue(edu.utah.further.fqe.ds.api.domain.AbstractQueryContext) */ @Override public void queue(final AbstractQueryContext target) { target.updateState(QueryState.QUEUED); if (target.getStaleDateTime() == null) { target.setStaleDateTime(FqeDsApiResourceLocator .getInstance() .getStaleDateTimeFactory() .getStaleDateTime()); } } /** * Return the list of actions that can be performed from the current state. * <p> * Meant to be overridden by handler sub-classes determine specific state * behavior. * * @param target * The target state machine that this handler executes actions for * @see edu.utah.further.fqe.ds.api.domain.AbstractQueryContext.DefaultQueryHandler#getActions(edu.utah.further.fqe.ds.api.domain.QueryContext) */ @Override public SortedSet<QueryAction> getActions(final QueryContext target) { return CollectionUtil.toSortedSet(QueryAction.QUEUE); } }, /** * The query's context has been persisted to the database. The query is queued for * processing by the FQE. 
*/ QUEUED(QueryState.QUEUED, false, false) { /** * @param target * @see edu.utah.further.fqe.ds.api.domain.AbstractQueryContext.DefaultQueryHandler#enterState(edu.utah.further.fqe.ds.api.domain.QueryContext) */ @Override public void enterState(final QueryContext target) { final AbstractQueryContext aqc = (AbstractQueryContext) target; aqc.privateSectionExecutor.setQueueDate(aqc, now()); } /** * @param target * @see edu.utah.further.fqe.ds.api.domain.AbstractQueryContext.DefaultQueryHandler#start(edu.utah.further.fqe.ds.api.domain.AbstractQueryContext) */ @Override public void start(final AbstractQueryContext target) { target.updateState(QueryState.EXECUTING); } /** * @param target * @see edu.utah.further.fqe.ds.api.domain.AbstractQueryContext.DefaultQueryHandler#fail(edu.utah.further.fqe.ds.api.domain.AbstractQueryContext) */ @Override public void fail(final AbstractQueryContext target) { target.updateState(QueryState.FAILED); } /** * Return the list of actions that can be performed from the current state. * <p> * Meant to be overridden by handler sub-classes determine specific state * behavior. * * @param target * The target state machine that this handler executes actions for * @see edu.utah.further.fqe.ds.api.domain.AbstractQueryContext.DefaultQueryHandler#getActions(edu.utah.further.fqe.ds.api.domain.QueryContext) */ @Override public SortedSet<QueryAction> getActions(final QueryContext target) { return CollectionUtil.toSortedSet(QueryAction.START, QueryAction.FAIL); } }, /** * The query is currently running. */ EXECUTING(QueryState.EXECUTING, true, false) { /** * Update parent state. TODO: replace by the observer pattern by having the * parent listen to child context changes. For the time being. the following * is simpler but a little less maintainable. 
* * @param target * @see edu.utah.further.fqe.ds.api.domain.AbstractQueryContext.DefaultQueryHandler#enterState(edu.utah.further.fqe.ds.api.domain.QueryContext) */ @Override public void enterState(final QueryContext target) { final AbstractQueryContext aqc = (AbstractQueryContext) target; if (aqc.getStartDate() == null) { aqc.privateSectionExecutor.setStartDate(aqc, now()); } aqc.privateSectionExecutor.setEndDate(aqc, null); } /** * @param target * @see edu.utah.further.fqe.ds.api.domain.AbstractQueryContext.DefaultQueryHandler#stop(edu.utah.further.fqe.ds.api.domain.AbstractQueryContext) */ @Override public void stop(final AbstractQueryContext target) { target.updateState(QueryState.STOPPED); } /** * @param target * @see edu.utah.further.fqe.ds.api.domain.AbstractQueryContext.DefaultQueryHandler#finish(edu.utah.further.fqe.ds.api.domain.AbstractQueryContext) */ @Override public void finish(final AbstractQueryContext target) { target.updateState(QueryState.COMPLETED); } /** * @param target * @see edu.utah.further.fqe.ds.api.domain.AbstractQueryContext.DefaultQueryHandler#fail(edu.utah.further.fqe.ds.api.domain.AbstractQueryContext) */ @Override public void fail(final AbstractQueryContext target) { target.updateState(QueryState.FAILED); } /** * Return the list of actions that can be performed from the current state. * <p> * Meant to be overridden by handler sub-classes determine specific state * behavior. * * @param target * The target state machine that this handler executes actions for * @see edu.utah.further.fqe.ds.api.domain.AbstractQueryContext.DefaultQueryHandler#getActions(edu.utah.further.fqe.ds.api.domain.QueryContext) */ @Override public SortedSet<QueryAction> getActions(final QueryContext target) { return CollectionUtil.toSortedSet(QueryAction.STOP, QueryAction.FINISH, QueryAction.FAIL); } }, // /** // * A sub-state of {@link #EXECUTING} for the execution phase of translating the // * logical query to physical queries. 
In principle, this needs to be a separate // class // * and may have different values for each data source. TODO: move to a separate // state // * class and make it data-source-dependent. // */ // TRANSLATING_QUERY, // // /** // * A sub-state of {@link #EXECUTING} for the execution phase of translating the // * physical result sets to logical result set. In principle, this needs to be a // * separate class and may have different values for each data source. TODO: move // to // a // * separate state class and make it data-source-dependent. // */ // TRANSLATING_RESULT, /** * Query execution is stopped. This is a recoverable state. */ STOPPED(QueryState.STOPPED, false, false) { /** * @param target * @see edu.utah.further.fqe.ds.api.domain.AbstractQueryContext.DefaultQueryHandler#enterState(edu.utah.further.fqe.ds.api.domain.QueryContext) */ @Override public void enterState(final QueryContext target) { final AbstractQueryContext aqc = (AbstractQueryContext) target; aqc.privateSectionExecutor.setEndDate(aqc, now()); } /** * @param target * @see edu.utah.further.fqe.ds.api.domain.AbstractQueryContext.DefaultQueryHandler#start(edu.utah.further.fqe.ds.api.domain.AbstractQueryContext) */ @Override public void start(final AbstractQueryContext target) { target.updateState(QueryState.EXECUTING); } /** * @param target * @see edu.utah.further.fqe.ds.api.domain.AbstractQueryContext.DefaultQueryHandler#fail(edu.utah.further.fqe.ds.api.domain.AbstractQueryContext) */ @Override public void fail(final AbstractQueryContext target) { target.updateState(QueryState.FAILED); } /** * Return the list of actions that can be performed from the current state. * <p> * Meant to be overridden by handler sub-classes determine specific state * behavior. 
			 *
			 * @param target
			 *            The target state machine that this handler executes actions for
			 * @see edu.utah.further.fqe.ds.api.domain.AbstractQueryContext.DefaultQueryHandler#getActions(edu.utah.further.fqe.ds.api.domain.QueryContext)
			 */
			@Override
			public SortedSet<QueryAction> getActions(final QueryContext target)
			{
				// A stopped query is recoverable: it may be restarted or failed
				return CollectionUtil.toSortedSet(QueryAction.FAIL, QueryAction.START);
			}
		},

		/**
		 * Query execution is complete. This is an unrecoverable state.
		 */
		COMPLETED(QueryState.COMPLETED, false, false)
		{
			/**
			 * Record the completion time (end date) upon entering this terminal state.
			 *
			 * @param target
			 * @see edu.utah.further.fqe.ds.api.domain.AbstractQueryContext.DefaultQueryHandler#enterState(edu.utah.further.fqe.ds.api.domain.QueryContext)
			 */
			@Override
			public void enterState(final QueryContext target)
			{
				final AbstractQueryContext aqc = (AbstractQueryContext) target;
				aqc.privateSectionExecutor.setEndDate(aqc, now());
			}

			/**
			 * Return the list of actions that can be performed from the current state.
			 * <p>
			 * Meant to be overridden by handler sub-classes to determine specific state
			 * behavior.
			 *
			 * @param target
			 *            The target state machine that this handler executes actions for
			 * @see edu.utah.further.fqe.ds.api.domain.AbstractQueryContext.DefaultQueryHandler#getActions(edu.utah.further.fqe.ds.api.domain.QueryContext)
			 */
			@Override
			public SortedSet<QueryAction> getActions(final QueryContext target)
			{
				// Terminal state: no further actions are allowed
				return CollectionUtil.<QueryAction> emptySortedSet();
			}
		},

		/**
		 * Query execution failed. This is an unrecoverable state.
		 */
		FAILED(QueryState.FAILED, false, true)
		{
			/**
			 * Record the failure time (end date) upon entering this terminal state.
			 *
			 * @param target
			 * @see edu.utah.further.fqe.ds.api.domain.AbstractQueryContext.DefaultQueryHandler#enterState(edu.utah.further.fqe.ds.api.domain.QueryContext)
			 */
			@Override
			public void enterState(final QueryContext target)
			{
				final AbstractQueryContext aqc = (AbstractQueryContext) target;
				aqc.privateSectionExecutor.setEndDate(aqc, now());
			}

			/**
			 * Return the list of actions that can be performed from the current state.
* <p> * Meant to be overridden by handler sub-classes determine specific state * behavior. * * @param target * The target state machine that this handler executes actions for * @see edu.utah.further.fqe.ds.api.domain.AbstractQueryContext.DefaultQueryHandler#getActions(edu.utah.further.fqe.ds.api.domain.QueryContext) */ @Override public SortedSet<QueryAction> getActions(final QueryContext target) { return CollectionUtil.<QueryAction> emptySortedSet(); } }; // ========================= CONSTANTS ================================= /** * A cached map of states to their handlers. */ private static final Map<QueryState, DefaultQueryHandler> handlers = newMap(); static { for (final DefaultQueryHandler handler : values()) { handlers.put(handler.getId(), handler); } } // ========================= FIELDS ==================================== /** * The state that this handler handles. */ private final QueryState state; /** * Is query execution started in this state. */ private final boolean started; /** * Is the query in a failed state. */ private final boolean failed; // ========================= CONSTRUCTORS ============================== /** * @param state * @param started * @param failed */ private DefaultQueryHandler(final QueryState state, final boolean started, final boolean failed) { this.state = state; this.started = started; this.failed = failed; } // ========================= IMPLEMENTATION: HasIdentifier<QueryState> = /** * @return * @see edu.utah.further.core.api.discrete.HasIdentifier#getId() */ @Override public final QueryState getId() { return state; } // ========================= IMPLEMENTATION: StateHandler ============== /** * Perform operations upon entering this handler's state. * <p> * Meant to be overridden by handler sub-classes determine specific state * behavior. 
* * @param target * The target state machine that this handler executes actions for * @see edu.utah.further.core.api.state.StateHandler#enterState(edu.utah.further.core.api.state.StateMachine) */ @Override public void enterState(final QueryContext target) { // Method stub } /** * Perform operations upon exiting this handler's state. * <p> * Meant to be overridden by handler sub-classes determine specific state * behavior. * * @param target * The target state machine that this handler executes actions for * @see edu.utah.further.core.api.state.StateHandler#exitState(edu.utah.further.core.api.state.StateMachine) */ @Override public void exitState(final QueryContext target) { // Method stub } /** * Return the list of actions that can be performed from the current state. * <p> * Meant to be overridden by handler sub-classes determine specific state * behavior. * * @param target * The target state machine that this handler executes actions for * @throws UnsupportedOperationException * as the default behavior * @see edu.utah.further.core.api.state.Handler#getActions(edu.utah.further.core.api.state.StateMachine) */ @Override public SortedSet<QueryAction> getActions(final QueryContext target) { throw new UnsupportedOperationException("Action list for state " + this + " is not yet supported"); } // ========================= CONTEXTUAL ACTOR METHODS ================== /** * Persist query to query database and add it to the execution queue. * <p> * Meant to be overridden by handler sub-classes determine specific state * behavior. * * @param target * The target state machine that this handler executes actions for */ public void queue(final AbstractQueryContext target) { // Method stub throwBusinessRuleException(getId(), "queue()"); } /** * Abnormally terminate query execution. * <p> * Meant to be overridden by handler sub-classes determine specific state * behavior. 
* * @param target * The target state machine that this handler executes actions for */ public void fail(final AbstractQueryContext target) { // Method stub throwBusinessRuleException(getId(), "fail()"); } /** * Normally Complete query execution. * <p> * Meant to be overridden by handler sub-classes determine specific state * behavior. * * @param target * The target state machine that this handler executes actions for */ public void finish(final AbstractQueryContext target) { // Method stub throwBusinessRuleException(getId(), "finish()"); } /** * Is query execution started. * * @param target * The target state machine that this handler executes actions for * @return * @see edu.utah.further.fqe.ds.api.domain.QueryState#isStarted(edu.utah.further.fqe.ds.api.domain.QueryContext) */ @Override public final boolean isStarted(final AbstractQueryContext target) { return started; } /** * Is query execution stopped. * * @param target * The target state machine that this handler executes actions for * @return * @see edu.utah.further.core.api.state.ContextualSwitch#isStopped(java.lang.Object) */ @Override public final boolean isStopped(final AbstractQueryContext target) { return !isStarted(target); } /** * Is query in a failed state. * * @param target * The target query state machine that this handler executes actions * for * @return <code>true</code> if and only if the query is in failed state */ public final boolean isFailed(final AbstractQueryContext target) { return failed; } /** * Start the query. * <p> * Meant to be overridden by handler sub-classes determine specific state * behavior. * * @param target * The target state machine that this handler executes actions for * @see edu.utah.further.core.api.state.ContextualSwitch#start(java.lang.Object) */ @Override public void start(final AbstractQueryContext target) { // Method stub throwBusinessRuleException(getId(), "start()"); } /** * Stop the query. 
		 * <p>
		 * Meant to be overridden by handler sub-classes to determine specific state
		 * behavior.
		 *
		 * @param target
		 *            The target state machine that this handler executes actions for
		 * @see edu.utah.further.core.api.state.ContextualSwitch#stop(java.lang.Object)
		 */
		@Override
		public void stop(final AbstractQueryContext target)
		{
			// Default behavior: stopping is not a legal action in this state
			throwBusinessRuleException(getId(), "stop()");
		}

		// ========================= METHODS ===================================

		/**
		 * Return the handler of a state.
		 *
		 * @param state
		 *            query state
		 * @return corresponding handler
		 */
		public static DefaultQueryHandler valueOf(final QueryState state)
		{
			// O(1) lookup in the statically-initialized state-to-handler map
			return handlers.get(state);
		}

		// ========================= PRIVATE METHODS ===========================

		/**
		 * Throw a {@link BusinessRuleException} on an unsupported action.
		 *
		 * @param state
		 *            current state
		 * @param action
		 *            action method name
		 */
		private static void throwBusinessRuleException(final QueryState state,
				final String action)
		{
			throw new BusinessRuleException("Can't " + action + " from state " + state);
		}
	}

	// ========================= PRIVATE METHODS ===========================

	/**
	 * Generate a random child ID.
	 * <p>
	 * TODO: could be replaced by a builder in the future.
	 *
	 * @param instance
	 *            instance to set child ID on
	 */
	protected static final void generateExecutionId(final AbstractQueryContext instance)
	{
		// Random UUIDs are effectively collision-free; no uniqueness check needed
		instance.setExecutionId(UUID.randomUUID().toString());
	}

	/**
	 * Return a deep copy of a search query suitable for insertion into the
	 * {@link #getQueries()} collection.
	 *
	 * @param query
	 *            original search query
	 * @return search query copy
	 */
	abstract protected SearchQuery newSearchQuery(final SearchQuery query);

	/**
	 * Return an empty instance (using no-argument constructor) of the implementation type
	 * of the result set field.
	 *
	 * @return a new, empty {@link ResultContext} implementation instance
	 */
	abstract protected ResultContext newResultContext();

	/**
	 * Return the implementation of the private method section of this class.
* * @return the implementation of the private method section of this class */ abstract protected PrivateSection getPrivateSectionExecutor(); /** * Only to be used in constructors/copy instance methods. Set a new value for the * executionId property. * * @param executionId * the executionId to set */ abstract protected void setExecutionId(final String executionId); /** * Set a new value for the queueDate property. Should be invoked by state classes * only. * * @param queueDate * the queueDate to set */ abstract protected void setQueueDate(final Date queueDate); /** * @param other * @return */ protected final void copyResultViews(final QueryContext other) { final Map<ResultType, ResultContext> otherResultViews = other.getResultViews(); clearResultViews(); if (otherResultViews != null) { for (final Map.Entry<ResultType, ResultContext> entry : otherResultViews .entrySet()) { addResultView(entry.getKey(), entry.getValue()); } } } /** * This section contains methods that should be called only within this class. It is * implemented by sub-classes by passing in an anonymous inner class that implements * {@link AbstractQueryContext#privateSectionExecutor} to all of their constructors. * <p> * Caution: since this interface has to have at least protected visibility, it can be * called by classes in by sub-classes as well.<br/> * DO NOT CALL METHODS IN THIS INTERFACE IN SUB-CLASSES! <br/> * One exception is copy constructors where a class' state has to be initialized and * managed fields accordingly set, which can only be done correctly via the * {@link #setState(AbstractQueryContext, QueryState)} method. */ /* protected */public// a sad OSGI requirement so that implementations that lie in // other bundles don't encounter ClassNotFoundException: PrivateSection static interface PrivateSection { /** * Set the state field of this object to a new value. 
		 * It is used internally by
		 * <code>updateState()</code>, which should only be called as a call-back by
		 * {@link DefaultQueryHandler} instances.
		 *
		 * @param target
		 *            the encapsulating object, for call-back.
		 * @param newState
		 *            new state value
		 */
		void setState(final AbstractQueryContext target, final QueryState newState);

		/**
		 * Set a new value for the queue date.
		 *
		 * @param target
		 *            the encapsulating object, for call-back
		 * @param queueDate
		 *            the queueDate to set
		 */
		void setQueueDate(final AbstractQueryContext target, Timestamp queueDate);

		/**
		 * Set a new value for the start date.
		 *
		 * @param target
		 *            the encapsulating object, for call-back
		 * @param startDate
		 *            the startDate to set
		 */
		void setStartDate(final AbstractQueryContext target, Timestamp startDate);

		/**
		 * Set a new value for the end date.
		 *
		 * @param target
		 *            the encapsulating object, for call-back
		 * @param endDate
		 *            the endDate to set
		 */
		void setEndDate(final AbstractQueryContext target, Timestamp endDate);
	}

	/**
	 * Update the state of this object. Exits the old state (if any), sets the new state
	 * via the private section, then enters the new state.
	 *
	 * @param newState
	 *            new state value
	 */
	private void updateState(final QueryState newState)
	{
		final QueryState oldState = getState();
		if (log.isDebugEnabled())
		{
			log.debug(this + " updating state: old " + oldState + " new " + newState);
		}

		// Exit previous state (if exists)
		if (oldState != null)
		{
			getHandler().exitState(this);
		}

		// Set and enter new state
		privateSectionExecutor.setState(this, newState);
		getHandler().enterState(this);
	}

	/**
	 * Return the handler of the current state.
	 *
	 * @return current state's handler instance
	 */
	private DefaultQueryHandler getHandler()
	{
		return DefaultQueryHandler.valueOf(getState());
	}

	/**
	 * Return the current time as a {@link Timestamp}, obtained from {@link TimeService}
	 * (presumably so the clock can be controlled centrally -- TODO confirm).
	 *
	 * @return current time stamp
	 */
	private static Timestamp now()
	{
		return new Timestamp(TimeService.getMillis());
	}

	/**
	 * Compute the stale date of this query context upon its creation. Returns
	 * <code>null</code> if no resource locator is set up in the current context.
	 *
	 * @return stale date of this query context
	 */
	protected static Date computeStaleDateTime()
	{
		final FqeDsApiResourceLocator instance = FqeDsApiResourceLocator.getInstance();
		return (instance == null) ? null : instance
				.getStaleDateTimeFactory()
				.getStaleDateTime();
	}
}
/* * Copyright (C) 2015 Giuseppe Cardone <ippatsuman@gmail.com> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package gcardone.junidecode; /** * Character map for Unicode characters with codepoint U+30xx. * @author Giuseppe Cardone * @version 0.1 */ class X30 { public static final String[] map = new String[]{ " ", // 0x00 ", ", // 0x01 ". ", // 0x02 "\"", // 0x03 "[JIS]", // 0x04 "\"", // 0x05 "/", // 0x06 "0", // 0x07 "<", // 0x08 "> ", // 0x09 "<<", // 0x0a ">> ", // 0x0b "[", // 0x0c "] ", // 0x0d "{", // 0x0e "} ", // 0x0f "[(", // 0x10 ")] ", // 0x11 "@", // 0x12 "X ", // 0x13 "[", // 0x14 "] ", // 0x15 "[[", // 0x16 "]] ", // 0x17 "((", // 0x18 ")) ", // 0x19 "[[", // 0x1a "]] ", // 0x1b "~ ", // 0x1c "``", // 0x1d "\'\'", // 0x1e ",,", // 0x1f "@", // 0x20 "1", // 0x21 "2", // 0x22 "3", // 0x23 "4", // 0x24 "5", // 0x25 "6", // 0x26 "7", // 0x27 "8", // 0x28 "9", // 0x29 "", // 0x2a "", // 0x2b "", // 0x2c "", // 0x2d "", // 0x2e "", // 0x2f "~", // 0x30 "+", // 0x31 "+", // 0x32 "+", // 0x33 "+", // 0x34 "", // 0x35 "@", // 0x36 " // ", // 0x37 "+10+", // 0x38 "+20+", // 0x39 "+30+", // 0x3a "[?]", // 0x3b "[?]", // 0x3c "[?]", // 0x3d "", // 0x3e "", // 0x3f "[?]", // 0x40 "a", // 0x41 "a", // 0x42 "i", // 0x43 "i", // 0x44 "u", // 0x45 "u", // 0x46 "e", // 0x47 "e", // 0x48 "o", // 0x49 "o", // 0x4a "ka", // 0x4b "ga", // 0x4c "ki", // 0x4d "gi", // 0x4e "ku", // 0x4f "gu", // 0x50 "ke", // 0x51 "ge", // 0x52 "ko", // 0x53 "go", // 0x54 "sa", // 0x55 "za", // 0x56 
"si", // 0x57 "zi", // 0x58 "su", // 0x59 "zu", // 0x5a "se", // 0x5b "ze", // 0x5c "so", // 0x5d "zo", // 0x5e "ta", // 0x5f "da", // 0x60 "ti", // 0x61 "di", // 0x62 "tu", // 0x63 "tu", // 0x64 "du", // 0x65 "te", // 0x66 "de", // 0x67 "to", // 0x68 "do", // 0x69 "na", // 0x6a "ni", // 0x6b "nu", // 0x6c "ne", // 0x6d "no", // 0x6e "ha", // 0x6f "ba", // 0x70 "pa", // 0x71 "hi", // 0x72 "bi", // 0x73 "pi", // 0x74 "hu", // 0x75 "bu", // 0x76 "pu", // 0x77 "he", // 0x78 "be", // 0x79 "pe", // 0x7a "ho", // 0x7b "bo", // 0x7c "po", // 0x7d "ma", // 0x7e "mi", // 0x7f "mu", // 0x80 "me", // 0x81 "mo", // 0x82 "ya", // 0x83 "ya", // 0x84 "yu", // 0x85 "yu", // 0x86 "yo", // 0x87 "yo", // 0x88 "ra", // 0x89 "ri", // 0x8a "ru", // 0x8b "re", // 0x8c "ro", // 0x8d "wa", // 0x8e "wa", // 0x8f "wi", // 0x90 "we", // 0x91 "wo", // 0x92 "n", // 0x93 "vu", // 0x94 "[?]", // 0x95 "[?]", // 0x96 "[?]", // 0x97 "[?]", // 0x98 "", // 0x99 "", // 0x9a "", // 0x9b "", // 0x9c "\"", // 0x9d "\"", // 0x9e "[?]", // 0x9f "[?]", // 0xa0 "a", // 0xa1 "a", // 0xa2 "i", // 0xa3 "i", // 0xa4 "u", // 0xa5 "u", // 0xa6 "e", // 0xa7 "e", // 0xa8 "o", // 0xa9 "o", // 0xaa "ka", // 0xab "ga", // 0xac "ki", // 0xad "gi", // 0xae "ku", // 0xaf "gu", // 0xb0 "ke", // 0xb1 "ge", // 0xb2 "ko", // 0xb3 "go", // 0xb4 "sa", // 0xb5 "za", // 0xb6 "si", // 0xb7 "zi", // 0xb8 "su", // 0xb9 "zu", // 0xba "se", // 0xbb "ze", // 0xbc "so", // 0xbd "zo", // 0xbe "ta", // 0xbf "da", // 0xc0 "ti", // 0xc1 "di", // 0xc2 "tu", // 0xc3 "tu", // 0xc4 "du", // 0xc5 "te", // 0xc6 "de", // 0xc7 "to", // 0xc8 "do", // 0xc9 "na", // 0xca "ni", // 0xcb "nu", // 0xcc "ne", // 0xcd "no", // 0xce "ha", // 0xcf "ba", // 0xd0 "pa", // 0xd1 "hi", // 0xd2 "bi", // 0xd3 "pi", // 0xd4 "hu", // 0xd5 "bu", // 0xd6 "pu", // 0xd7 "he", // 0xd8 "be", // 0xd9 "pe", // 0xda "ho", // 0xdb "bo", // 0xdc "po", // 0xdd "ma", // 0xde "mi", // 0xdf "mu", // 0xe0 "me", // 0xe1 "mo", // 0xe2 "ya", // 0xe3 "ya", // 0xe4 "yu", // 0xe5 "yu", // 
0xe6 "yo", // 0xe7 "yo", // 0xe8 "ra", // 0xe9 "ri", // 0xea "ru", // 0xeb "re", // 0xec "ro", // 0xed "wa", // 0xee "wa", // 0xef "wi", // 0xf0 "we", // 0xf1 "wo", // 0xf2 "n", // 0xf3 "vu", // 0xf4 "ka", // 0xf5 "ke", // 0xf6 "va", // 0xf7 "vi", // 0xf8 "ve", // 0xf9 "vo", // 0xfa "", // 0xfb "", // 0xfc "\"", // 0xfd "\"" // 0xfe }; }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.io; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocalFileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.SequenceFile.CompressionType; import org.apache.hadoop.io.compress.CompressionCodec; import org.apache.hadoop.io.compress.CompressionInputStream; import org.apache.hadoop.io.compress.CompressionOutputStream; import org.apache.hadoop.io.compress.Compressor; import org.apache.hadoop.io.compress.Decompressor; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Progressable; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import static org.junit.Assert.*; import static org.mockito.Mockito.*; public class TestMapFile { private static final Logger LOG = LoggerFactory.getLogger(TestMapFile.class); private static final Path TEST_DIR = 
new Path(GenericTestUtils.getTempPath( TestMapFile.class.getSimpleName())); private static Configuration conf = new Configuration(); @Before public void setup() throws Exception { LocalFileSystem fs = FileSystem.getLocal(conf); if (fs.exists(TEST_DIR) && !fs.delete(TEST_DIR, true)) { Assert.fail("Can't clean up test root dir"); } fs.mkdirs(TEST_DIR); } private static final Progressable defaultProgressable = new Progressable() { @Override public void progress() { } }; private static final CompressionCodec defaultCodec = new CompressionCodec() { @Override public CompressionOutputStream createOutputStream(OutputStream out) throws IOException { return mock(CompressionOutputStream.class); } @Override public CompressionOutputStream createOutputStream(OutputStream out, Compressor compressor) throws IOException { return mock(CompressionOutputStream.class); } @Override public Class<? extends Compressor> getCompressorType() { return null; } @Override public Compressor createCompressor() { return null; } @Override public CompressionInputStream createInputStream(InputStream in) throws IOException { return null; } @Override public CompressionInputStream createInputStream(InputStream in, Decompressor decompressor) throws IOException { return null; } @Override public Class<? extends Decompressor> getDecompressorType() { return null; } @Override public Decompressor createDecompressor() { return null; } @Override public String getDefaultExtension() { return null; } }; private MapFile.Writer createWriter(String fileName, Class<? extends WritableComparable<?>> keyClass, Class<? extends Writable> valueClass) throws IOException { Path dirName = new Path(TEST_DIR, fileName); MapFile.Writer.setIndexInterval(conf, 4); return new MapFile.Writer(conf, dirName, MapFile.Writer.keyClass(keyClass), MapFile.Writer.valueClass(valueClass)); } private MapFile.Reader createReader(String fileName, Class<? 
extends WritableComparable<?>> keyClass) throws IOException { Path dirName = new Path(TEST_DIR, fileName); return new MapFile.Reader(dirName, conf, MapFile.Reader.comparator(new WritableComparator(keyClass))); } /** * test {@code MapFile.Reader.getClosest()} method * */ @Test public void testGetClosestOnCurrentApi() throws Exception { final String TEST_PREFIX = "testGetClosestOnCurrentApi.mapfile"; MapFile.Writer writer = null; MapFile.Reader reader = null; try { writer = createWriter(TEST_PREFIX, Text.class, Text.class); int FIRST_KEY = 1; // Test keys: 11,21,31,...,91 for (int i = FIRST_KEY; i < 100; i += 10) { Text t = new Text(Integer.toString(i)); writer.append(t, t); } writer.close(); reader = createReader(TEST_PREFIX, Text.class); Text key = new Text("55"); Text value = new Text(); // Test get closest with step forward Text closest = (Text) reader.getClosest(key, value); assertEquals(new Text("61"), closest); // Test get closest with step back closest = (Text) reader.getClosest(key, value, true); assertEquals(new Text("51"), closest); // Test get closest when we pass explicit key final Text explicitKey = new Text("21"); closest = (Text) reader.getClosest(explicitKey, value); assertEquals(new Text("21"), explicitKey); // Test what happens at boundaries. Assert if searching a key that is // less than first key in the mapfile, that the first key is returned. key = new Text("00"); closest = (Text) reader.getClosest(key, value); assertEquals(FIRST_KEY, Integer.parseInt(closest.toString())); // Assert that null is returned if key is > last entry in mapfile. 
// NOTE(review): this chunk begins inside the tail of an earlier getClosest
// boundary test; the statements below probe keys past the last entry.
      key = new Text("92");
      closest = (Text) reader.getClosest(key, value);
      // Searching forward past the last key must yield null.
      assertNull("Not null key in testGetClosestWithNewCode", closest);
      // If we were looking for the key before, we should get the last key
      closest = (Text) reader.getClosest(key, value, true);
      assertEquals(new Text("91"), closest);
    } finally {
      IOUtils.cleanupWithLogger(LOG, writer, reader);
    }
  }

  /**
   * test {@code MapFile.Reader.midKey() } method
   */
  @Test
  public void testMidKeyOnCurrentApi() throws Exception {
    // Write a mapfile of simple data: keys are
    final String TEST_PREFIX = "testMidKeyOnCurrentApi.mapfile";
    MapFile.Writer writer = null;
    MapFile.Reader reader = null;
    try {
      writer = createWriter(TEST_PREFIX, IntWritable.class, IntWritable.class);
      // 0,1,....9
      int SIZE = 10;
      for (int i = 0; i < SIZE; i++)
        writer.append(new IntWritable(i), new IntWritable(i));
      writer.close();

      reader = createReader(TEST_PREFIX, IntWritable.class);
      // the middle key of 0..9 is (SIZE - 1) / 2 == 4
      assertEquals(new IntWritable((SIZE - 1) / 2), reader.midKey());
    } finally {
      IOUtils.cleanupWithLogger(LOG, writer, reader);
    }
  }

  /**
   * test {@code MapFile.rename()} method
   */
  @Test
  public void testRename() {
    final String NEW_FILE_NAME = "test-new.mapfile";
    final String OLD_FILE_NAME = "test-old.mapfile";
    MapFile.Writer writer = null;
    try {
      FileSystem fs = FileSystem.getLocal(conf);
      writer = createWriter(OLD_FILE_NAME, IntWritable.class, IntWritable.class);
      writer.close();
      MapFile.rename(fs, new Path(TEST_DIR, OLD_FILE_NAME).toString(),
          new Path(TEST_DIR, NEW_FILE_NAME).toString());
      MapFile.delete(fs, new Path(TEST_DIR, NEW_FILE_NAME).toString());
    } catch (IOException ex) {
      fail("testRename error " + ex);
    } finally {
      IOUtils.cleanupWithLogger(LOG, writer);
    }
  }

  /**
   * test {@code MapFile.rename()}
   * method with throwing {@code IOException}
   */
  @Test
  public void testRenameWithException() {
    final String ERROR_MESSAGE = "Can't rename file";
    final String NEW_FILE_NAME = "test-new.mapfile";
    final String OLD_FILE_NAME = "test-old.mapfile";
    MapFile.Writer writer = null;
    try {
      FileSystem fs = FileSystem.getLocal(conf);
      FileSystem spyFs = spy(fs);
      writer = createWriter(OLD_FILE_NAME, IntWritable.class, IntWritable.class);
      writer.close();
      Path oldDir = new Path(TEST_DIR, OLD_FILE_NAME);
      Path newDir = new Path(TEST_DIR, NEW_FILE_NAME);
      // Stub the spied FileSystem so the underlying rename throws.
      when(spyFs.rename(oldDir, newDir)).thenThrow(
          new IOException(ERROR_MESSAGE));

      MapFile.rename(spyFs, oldDir.toString(), newDir.toString());
      fail("testRenameWithException no exception error !!!");
    } catch (IOException ex) {
      assertEquals("testRenameWithException invalid IOExceptionMessage !!!",
          ex.getMessage(), ERROR_MESSAGE);
    } finally {
      IOUtils.cleanupWithLogger(LOG, writer);
    }
  }

  @Test
  public void testRenameWithFalse() {
    final String ERROR_MESSAGE = "Could not rename";
    final String NEW_FILE_NAME = "test-new.mapfile";
    final String OLD_FILE_NAME = "test-old.mapfile";
    MapFile.Writer writer = null;
    try {
      FileSystem fs = FileSystem.getLocal(conf);
      FileSystem spyFs = spy(fs);
      writer = createWriter(OLD_FILE_NAME, IntWritable.class, IntWritable.class);
      writer.close();
      Path oldDir = new Path(TEST_DIR, OLD_FILE_NAME);
      Path newDir = new Path(TEST_DIR, NEW_FILE_NAME);
      // Simulate FileSystem.rename() reporting failure without throwing;
      // MapFile.rename() must then wrap it in its own IOException.
      when(spyFs.rename(oldDir, newDir)).thenReturn(false);

      MapFile.rename(spyFs, oldDir.toString(), newDir.toString());
      fail("testRenameWithException no exception error !!!");
    } catch (IOException ex) {
      assertTrue("testRenameWithFalse invalid IOExceptionMessage error !!!",
          ex.getMessage().startsWith(ERROR_MESSAGE));
    } finally {
      IOUtils.cleanupWithLogger(LOG, writer);
    }
  }

  /**
   * test throwing {@code IOException} in {@code MapFile.Writer} constructor
   */
  @Test
  public void testWriteWithFailDirCreation() {
    String ERROR_MESSAGE = "Mkdirs failed to create directory";
    Path dirName = new Path(TEST_DIR, "fail.mapfile");
    MapFile.Writer writer = null;
    try {
      FileSystem fs = FileSystem.getLocal(conf);
      FileSystem spyFs = spy(fs);
      Path pathSpy = spy(dirName);
      // Route the path's FileSystem lookup to a spy whose mkdirs() fails.
      when(pathSpy.getFileSystem(conf)).thenReturn(spyFs);
      when(spyFs.mkdirs(dirName)).thenReturn(false);

      writer = new MapFile.Writer(conf, pathSpy,
          MapFile.Writer.keyClass(IntWritable.class),
          MapFile.Writer.valueClass(Text.class));
      fail("testWriteWithFailDirCreation error !!!");
    } catch (IOException ex) {
      assertTrue("testWriteWithFailDirCreation ex error !!!",
          ex.getMessage().startsWith(ERROR_MESSAGE));
    } finally {
      IOUtils.cleanupWithLogger(LOG, writer);
    }
  }

  /**
   * test {@code MapFile.Reader.finalKey()} method
   */
  @Test
  public void testOnFinalKey() {
    final String TEST_METHOD_KEY = "testOnFinalKey.mapfile";
    int SIZE = 10;
    MapFile.Writer writer = null;
    MapFile.Reader reader = null;
    try {
      writer = createWriter(TEST_METHOD_KEY, IntWritable.class,
          IntWritable.class);
      for (int i = 0; i < SIZE; i++)
        writer.append(new IntWritable(i), new IntWritable(i));
      writer.close();

      reader = createReader(TEST_METHOD_KEY, IntWritable.class);
      // finalKey() fills the passed-in key with the last key in the file (9).
      IntWritable expectedKey = new IntWritable(0);
      reader.finalKey(expectedKey);
      assertEquals("testOnFinalKey not same !!!", expectedKey,
          new IntWritable(9));
    } catch (IOException ex) {
      fail("testOnFinalKey error !!!");
    } finally {
      IOUtils.cleanupWithLogger(LOG, writer, reader);
    }
  }

  /**
   * test {@code MapFile.Writer} constructor with key, value
   * and validate it with {@code keyClass(), valueClass()} methods
   */
  @Test
  public void testKeyValueClasses() {
    Class<? extends WritableComparable<?>> keyClass = IntWritable.class;
    Class<?> valueClass = Text.class;
    try {
      createWriter("testKeyValueClasses.mapfile", IntWritable.class, Text.class)
          .close();
      assertNotNull("writer key class null error !!!",
          MapFile.Writer.keyClass(keyClass));
      assertNotNull("writer value class null error !!!",
          MapFile.Writer.valueClass(valueClass));
    } catch (IOException ex) {
      fail(ex.getMessage());
    }
  }

  /**
   * test {@code MapFile.Reader.getClosest() } with wrong class key
   */
  @Test
  public void testReaderGetClosest() throws Exception {
    final String TEST_METHOD_KEY = "testReaderWithWrongKeyClass.mapfile";
    MapFile.Writer writer = null;
    MapFile.Reader reader = null;
    try {
      writer = createWriter(TEST_METHOD_KEY, IntWritable.class, Text.class);
      for (int i = 0; i < 10; i++)
        writer.append(new IntWritable(i), new Text("value" + i));
      writer.close();

      // Reader opened with Text keys while the data was written with
      // IntWritable keys, so getClosest must fail with an IOException.
      reader = createReader(TEST_METHOD_KEY, Text.class);
      reader.getClosest(new Text("2"), new Text(""));
      fail("no excepted exception in testReaderWithWrongKeyClass !!!");
    } catch (IOException ex) {
      /* Should be thrown to pass the test */
    } finally {
      IOUtils.cleanupWithLogger(LOG, writer, reader);
    }
  }

  /**
   * test {@code MapFile.Writer.append() } with wrong value class
   */
  @Test
  public void testReaderWithWrongValueClass() {
    final String TEST_METHOD_KEY = "testReaderWithWrongValueClass.mapfile";
    MapFile.Writer writer = null;
    try {
      writer = createWriter(TEST_METHOD_KEY, IntWritable.class, Text.class);
      // Writer configured for Text values; appending IntWritable must throw.
      writer.append(new IntWritable(0), new IntWritable(0));
      fail("no excepted exception in testReaderWithWrongKeyClass !!!");
    } catch (IOException ex) {
      /* Should be thrown to pass the test */
    } finally {
      IOUtils.cleanupWithLogger(LOG, writer);
    }
  }

  /**
   * test {@code MapFile.Reader.next(key, value)} for iteration.
   */
  @Test
  public void testReaderKeyIteration() {
    final String TEST_METHOD_KEY = "testReaderKeyIteration.mapfile";
    int SIZE = 10;
    int ITERATIONS = 5;
    MapFile.Writer writer = null;
    MapFile.Reader reader = null;
    try {
      writer = createWriter(TEST_METHOD_KEY, IntWritable.class, Text.class);
      int start = 0;
      for (int i = 0; i < SIZE; i++)
        writer.append(new IntWritable(i), new Text("Value:" + i));
      writer.close();

      reader = createReader(TEST_METHOD_KEY, IntWritable.class);
      // test iteration: drain the reader ITERATIONS times, resetting between
      Writable startValue = new Text("Value:" + start);
      int i = 0;
      while (i++ < ITERATIONS) {
        IntWritable key = new IntWritable(start);
        Writable value = startValue;
        while (reader.next(key, value)) {
          assertNotNull(key);
          assertNotNull(value);
        }
        reader.reset();
      }
      // seek succeeds for an existing key, fails past the last one
      assertTrue("reader seek error !!!",
          reader.seek(new IntWritable(SIZE / 2)));
      assertFalse("reader seek error !!!",
          reader.seek(new IntWritable(SIZE * 2)));
    } catch (IOException ex) {
      fail("reader seek error !!!");
    } finally {
      IOUtils.cleanupWithLogger(LOG, writer, reader);
    }
  }

  /**
   * test {@code MapFile.fix} method
   */
  @Test
  public void testFix() {
    final String INDEX_LESS_MAP_FILE = "testFix.mapfile";
    int PAIR_SIZE = 20;
    MapFile.Writer writer = null;
    try {
      FileSystem fs = FileSystem.getLocal(conf);
      Path dir = new Path(TEST_DIR, INDEX_LESS_MAP_FILE);
      writer = createWriter(INDEX_LESS_MAP_FILE, IntWritable.class, Text.class);
      for (int i = 0; i < PAIR_SIZE; i++)
        writer.append(new IntWritable(0), new Text("value"));
      writer.close();

      // Remove the index so MapFile.fix() has something to rebuild.
      File indexFile = new File(".", "." + INDEX_LESS_MAP_FILE + "/index");
      boolean isDeleted = false;
      if (indexFile.exists())
        isDeleted = indexFile.delete();

      if (isDeleted)
        assertTrue("testFix error !!!",
            MapFile.fix(fs, dir, IntWritable.class, Text.class, true, conf)
                == PAIR_SIZE);
    } catch (Exception ex) {
      fail("testFix error !!!");
    } finally {
      IOUtils.cleanupWithLogger(LOG, writer);
    }
  }

  /**
   * test {@link MapFile#fix(FileSystem, Path, Class<? extends Writable>,
   * Class<? extends Writable>, boolean, Configuration)}
   * method in case of BLOCK compression
   */
  @Test
  public void testFixBlockCompress() throws Exception {
    final String indexLessMapFile = "testFixBlockCompress.mapfile";
    final int compressBlocksize = 100;
    final int indexInterval = 4;
    final int noBlocks = 4;
    final String value = "value-";
    // sized so the data spans roughly noBlocks compression blocks
    final int size = noBlocks * compressBlocksize / (4 + value.length());

    conf.setInt("io.seqfile.compress.blocksize", compressBlocksize);
    MapFile.Writer.setIndexInterval(conf, indexInterval);
    FileSystem fs = FileSystem.getLocal(conf);
    Path dir = new Path(TEST_DIR, indexLessMapFile);
    MapFile.Writer writer = null;
    MapFile.Reader reader = null;
    try {
      writer =
          new MapFile.Writer(conf, dir,
              MapFile.Writer.keyClass(IntWritable.class),
              MapFile.Writer.valueClass(Text.class),
              MapFile.Writer.compression(CompressionType.BLOCK));
      for (int i = 0; i < size; i++) {
        writer.append(new IntWritable(i), new Text(value + i));
      }
      writer.close();
      // Set the index aside so fix() must rebuild it from the data file.
      Path index = new Path(dir, MapFile.INDEX_FILE_NAME);
      fs.rename(index, index.suffix(".orig"));

      assertEquals("No of valid MapFile entries wrong", size,
          MapFile.fix(fs, dir, IntWritable.class, Text.class, false, conf));
      reader = new MapFile.Reader(dir, conf);
      IntWritable key;
      Text val = new Text();
      int notFound = 0;
      // Every written key must be retrievable through the rebuilt index.
      for (int i = 0; i < size; i++) {
        key = new IntWritable(i);
        if (null == reader.get(key, val)) {
          notFound++;
        }
      }
      assertEquals("With MapFile.fix-ed index, could not get entries # ",
          0, notFound);
    } finally {
      IOUtils.cleanupWithLogger(null, writer, reader);
      if (fs.exists(dir)) {
        fs.delete(dir, true);
      }
    }
  }

  /**
   * test all available constructor for {@code MapFile.Writer}
   */
  @Test
  @SuppressWarnings("deprecation")
  public void testDeprecatedConstructors() {
    String path = new Path(TEST_DIR, "writes.mapfile").toString();
    MapFile.Writer writer = null;
    MapFile.Reader reader = null;
    try {
      FileSystem fs = FileSystem.getLocal(conf);
      writer = new MapFile.Writer(conf, fs, path,
          IntWritable.class, Text.class, CompressionType.RECORD);
      assertNotNull(writer);
      writer.close();
      writer = new MapFile.Writer(conf, fs, path, IntWritable.class,
          Text.class, CompressionType.RECORD, defaultProgressable);
      assertNotNull(writer);
      writer.close();
      writer = new MapFile.Writer(conf, fs, path, IntWritable.class,
          Text.class, CompressionType.RECORD, defaultCodec, defaultProgressable);
      assertNotNull(writer);
      writer.close();
      writer = new MapFile.Writer(conf, fs, path,
          WritableComparator.get(Text.class), Text.class);
      assertNotNull(writer);
      writer.close();
      writer = new MapFile.Writer(conf, fs, path,
          WritableComparator.get(Text.class), Text.class,
          SequenceFile.CompressionType.RECORD);
      assertNotNull(writer);
      writer.close();
      writer = new MapFile.Writer(conf, fs, path,
          WritableComparator.get(Text.class), Text.class,
          CompressionType.RECORD, defaultProgressable);
      assertNotNull(writer);
      writer.close();
      reader = new MapFile.Reader(fs, path,
          WritableComparator.get(IntWritable.class), conf);
      assertNotNull(reader);
      assertNotNull("reader key is null !!!", reader.getKeyClass());
      assertNotNull("reader value in null", reader.getValueClass());
    } catch (IOException e) {
      fail(e.getMessage());
    } finally {
      IOUtils.cleanupWithLogger(LOG, writer, reader);
    }
  }

  /**
   * test {@code MapFile.Writer} constructor
   * with IllegalArgumentException
   */
  @Test
  public void testKeyLessWriterCreation() {
    MapFile.Writer writer = null;
    try {
      writer = new MapFile.Writer(conf, TEST_DIR);
      fail("fail in testKeyLessWriterCreation !!!");
    } catch (IllegalArgumentException ex) {
      // expected: no key/value class options supplied
    } catch (Exception e) {
      fail("fail in testKeyLessWriterCreation. Other ex !!!");
    } finally {
      IOUtils.cleanupWithLogger(LOG, writer);
    }
  }

  /**
   * test {@code MapFile.Writer} constructor with IOException
   */
  @Test
  public void testPathExplosionWriterCreation() {
    Path path = new Path(TEST_DIR, "testPathExplosionWriterCreation.mapfile");
    String TEST_ERROR_MESSAGE = "Mkdirs failed to create directory "
        + path.getName();
    MapFile.Writer writer = null;
    try {
      FileSystem fsSpy = spy(FileSystem.get(conf));
      Path pathSpy = spy(path);
      // Force mkdirs() on the spied FileSystem to explode with a known message.
      when(fsSpy.mkdirs(path)).thenThrow(new IOException(TEST_ERROR_MESSAGE));
      when(pathSpy.getFileSystem(conf)).thenReturn(fsSpy);

      writer = new MapFile.Writer(conf, pathSpy,
          MapFile.Writer.keyClass(IntWritable.class),
          MapFile.Writer.valueClass(IntWritable.class));
      fail("fail in testPathExplosionWriterCreation !!!");
    } catch (IOException ex) {
      assertEquals("testPathExplosionWriterCreation ex message error !!!",
          ex.getMessage(), TEST_ERROR_MESSAGE);
    } catch (Exception e) {
      fail("fail in testPathExplosionWriterCreation. Other ex !!!");
    } finally {
      IOUtils.cleanupWithLogger(LOG, writer);
    }
  }

  /**
   * test {@code MapFile.Writer.append} method with desc order
   */
  @Test
  public void testDescOrderWithThrowExceptionWriterAppend() {
    MapFile.Writer writer = null;
    try {
      writer = createWriter(".mapfile", IntWritable.class, Text.class);
      writer.append(new IntWritable(2), new Text("value: " + 1));
      writer.append(new IntWritable(2), new Text("value: " + 2));
      writer.append(new IntWritable(2), new Text("value: " + 4));
      // Appending a key smaller than the previous one must throw.
      writer.append(new IntWritable(1), new Text("value: " + 3));
      fail("testDescOrderWithThrowExceptionWriterAppend not expected exception error !!!");
    } catch (IOException ex) {
      // expected: keys must be appended in non-decreasing order
    } catch (Exception e) {
      fail("testDescOrderWithThrowExceptionWriterAppend other ex throw !!!");
    } finally {
      IOUtils.cleanupWithLogger(LOG, writer);
    }
  }

  @Test
  public void testMainMethodMapFile() {
    String inFile = "mainMethodMapFile.mapfile";
    String path = new Path(TEST_DIR, inFile).toString();
    String[] args = { path, path };
    MapFile.Writer writer = null;
    try {
      writer = createWriter(inFile, IntWritable.class, Text.class);
      writer.append(new IntWritable(1), new Text("test_text1"));
      writer.append(new IntWritable(2), new Text("test_text2"));
      writer.close();
      // Exercise the command-line entry point against the written file.
      MapFile.main(args);
    } catch (Exception ex) {
      fail("testMainMethodMapFile error !!!");
    } finally {
      IOUtils.cleanupWithLogger(null, writer);
    }
  }

  /**
   * Test getClosest feature.
   *
   * @throws Exception
   */
  @Test
  @SuppressWarnings("deprecation")
  public void testGetClosest() throws Exception {
    // Write a mapfile of simple data: keys are
    Path dirName = new Path(TEST_DIR, "testGetClosest.mapfile");
    FileSystem fs = FileSystem.getLocal(conf);
    Path qualifiedDirName = fs.makeQualified(dirName);
    // Make an index entry for every third insertion.
    MapFile.Writer.setIndexInterval(conf, 3);
    MapFile.Writer writer = null;
    MapFile.Reader reader = null;
    try {
      writer = new MapFile.Writer(conf, fs, qualifiedDirName.toString(),
          Text.class, Text.class);
      // Assert that the index interval is 3
      assertEquals(3, writer.getIndexInterval());
      // Add entries up to 100 in intervals of ten.
      final int FIRST_KEY = 10;
      for (int i = FIRST_KEY; i < 100; i += 10) {
        String iStr = Integer.toString(i);
        Text t = new Text("00".substring(iStr.length()) + iStr);
        writer.append(t, t);
      }
      writer.close();
      // Now do getClosest on created mapfile.
      reader = new MapFile.Reader(qualifiedDirName, conf);
      Text key = new Text("55");
      Text value = new Text();
      Text closest = (Text) reader.getClosest(key, value);
      // Assert that closest after 55 is 60
      assertEquals(new Text("60"), closest);
      // Get closest that falls before the passed key: 50
      closest = (Text) reader.getClosest(key, value, true);
      assertEquals(new Text("50"), closest);
      // Test get closest when we pass explicit key
      final Text TWENTY = new Text("20");
      closest = (Text) reader.getClosest(TWENTY, value);
      assertEquals(TWENTY, closest);
      closest = (Text) reader.getClosest(TWENTY, value, true);
      assertEquals(TWENTY, closest);
      // Test what happens at boundaries. Assert if searching a key that is
      // less than first key in the mapfile, that the first key is returned.
      key = new Text("00");
      closest = (Text) reader.getClosest(key, value);
      assertEquals(FIRST_KEY, Integer.parseInt(closest.toString()));
      // If we're looking for the first key before, and we pass in a key before
      // the first key in the file, we should get null
      closest = (Text) reader.getClosest(key, value, true);
      assertNull(closest);
      // Assert that null is returned if key is > last entry in mapfile.
      key = new Text("99");
      closest = (Text) reader.getClosest(key, value);
      assertNull(closest);
      // If we were looking for the key before, we should get the last key
      closest = (Text) reader.getClosest(key, value, true);
      assertEquals(new Text("90"), closest);
    } finally {
      IOUtils.cleanupWithLogger(LOG, writer, reader);
    }
  }

  @Test
  @SuppressWarnings("deprecation")
  public void testMidKey() throws Exception {
    // Write a mapfile of simple data: keys are
    Path dirName = new Path(TEST_DIR, "testMidKey.mapfile");
    FileSystem fs = FileSystem.getLocal(conf);
    Path qualifiedDirName = fs.makeQualified(dirName);

    MapFile.Writer writer = null;
    MapFile.Reader reader = null;
    try {
      writer = new MapFile.Writer(conf, fs, qualifiedDirName.toString(),
          IntWritable.class, IntWritable.class);
      writer.append(new IntWritable(1), new IntWritable(1));
      writer.close();
      // midKey of a single-entry file is that entry
      reader = new MapFile.Reader(qualifiedDirName, conf);
      assertEquals(new IntWritable(1), reader.midKey());
    } finally {
      IOUtils.cleanupWithLogger(LOG, writer, reader);
    }
  }

  @Test
  @SuppressWarnings("deprecation")
  public void testMidKeyEmpty() throws Exception {
    // Write a mapfile of simple data: keys are
    Path dirName = new Path(TEST_DIR, "testMidKeyEmpty.mapfile");
    FileSystem fs = FileSystem.getLocal(conf);
    Path qualifiedDirName = fs.makeQualified(dirName);

    MapFile.Writer writer = new MapFile.Writer(conf, fs,
        qualifiedDirName.toString(), IntWritable.class, IntWritable.class);
    writer.close();
    // midKey of an empty file is null
    MapFile.Reader reader = new MapFile.Reader(qualifiedDirName, conf);
    try {
      assertEquals(null, reader.midKey());
    } finally {
      reader.close();
    }
  }

  @Test
  public void testMerge() throws Exception {
    final String TEST_METHOD_KEY = "testMerge.mapfile";
    int SIZE = 10;
    int ITERATIONS = 5;
    Path[] in = new Path[5];
    List<Integer> expected = new ArrayList<Integer>();
    // Write 5 overlapping input files: file j holds keys j .. j+SIZE-1.
    for (int j = 0; j < 5; j++) {
      try (MapFile.Writer writer = createWriter(TEST_METHOD_KEY + "." + j,
          IntWritable.class, Text.class)) {
        in[j] = new Path(TEST_DIR, TEST_METHOD_KEY + "." + j);
        for (int i = 0; i < SIZE; i++) {
          expected.add(i + j);
          writer.append(new IntWritable(i + j), new Text("Value:" + (i + j)));
        }
      }
    }

    // Sort expected values
    Collections.sort(expected);

    // Merge all 5 files
    MapFile.Merger merger = new MapFile.Merger(conf);
    merger.merge(in, true, new Path(TEST_DIR, TEST_METHOD_KEY));

    try (MapFile.Reader reader = createReader(TEST_METHOD_KEY,
        IntWritable.class)) {
      int start = 0;
      // test iteration: merged output must be sorted and match expected
      Text startValue = new Text("Value:" + start);
      int i = 0;
      while (i++ < ITERATIONS) {
        Iterator<Integer> expectedIterator = expected.iterator();
        IntWritable key = new IntWritable(start);
        Text value = startValue;
        IntWritable prev = new IntWritable(start);
        while (reader.next(key, value)) {
          assertTrue("Next key should be always equal or more",
              prev.get() <= key.get());
          assertEquals(expectedIterator.next().intValue(), key.get());
          prev.set(key.get());
        }
        reader.reset();
      }
    }

    // inputs should be deleted
    for (int j = 0; j < in.length; j++) {
      Path path = in[j];
      assertFalse("inputs should be deleted",
          path.getFileSystem(conf).exists(path));
    }
  }
}
/*
 * JBoss, Home of Professional Open Source.
 * Copyright 2015 Red Hat, Inc., and individual contributors
 * as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.wildfly.common.cpu;

import static java.security.AccessController.doPrivileged;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.Locale;

/**
 * A class which exposes any available cache line information for the current CPU.
 *
 * @author <a href="mailto:david.lloyd@redhat.com">David M. Lloyd</a>
 */
public final class CacheInfo {
    // Populated once in the static initializer below; an empty array (never
    // null) when no cache information could be discovered for this platform.
    private static final CacheLevelInfo[] cacheLevels;

    /**
     * Get the number of CPU cache level entries. If no cache information could be gathered, 0 is returned.
     *
     * @return the number of CPU cache levels, or 0 if unknown
     */
    public static int getLevelEntryCount() {
        return cacheLevels.length;
    }

    /**
     * Get the CPU cache level information for a cache level. The {@code index} argument must be greater than or equal
     * to zero and less than the number of levels returned by {@link #getLevelEntryCount()}.
     *
     * @param index the cache level index
     * @return the CPU cache level information
     */
    public static CacheLevelInfo getCacheLevelInfo(int index) {
        return cacheLevels[index];
    }

    /**
     * Get the smallest known data cache line size. If no cache line sizes are known, 0 is returned. Note that smaller
     * cache lines may exist if one or more cache line sizes are unknown.
     *
     * @return the smallest cache line size, or 0 if unknown
     */
    public static int getSmallestDataCacheLineSize() {
        int minSize = Integer.MAX_VALUE;
        for (CacheLevelInfo cacheLevel : cacheLevels) {
            if (cacheLevel.getCacheType().isData()) {
                final int cacheLineSize = cacheLevel.getCacheLineSize();
                // 0 means "unknown" for a level, so it never wins the minimum
                if (cacheLineSize != 0 && cacheLineSize < minSize) {
                    minSize = cacheLineSize;
                }
            }
        }
        return minSize == Integer.MAX_VALUE ? 0 : minSize;
    }

    /**
     * Get the smallest known instruction cache line size. If no cache line sizes are known, 0 is returned. Note that smaller
     * cache lines may exist if one or more cache line sizes are unknown.
     *
     * @return the smallest cache line size, or 0 if unknown
     */
    public static int getSmallestInstructionCacheLineSize() {
        int minSize = Integer.MAX_VALUE;
        for (CacheLevelInfo cacheLevel : cacheLevels) {
            if (cacheLevel.getCacheType().isInstruction()) {
                final int cacheLineSize = cacheLevel.getCacheLineSize();
                // 0 means "unknown" for a level, so it never wins the minimum
                if (cacheLineSize != 0 && cacheLineSize < minSize) {
                    minSize = cacheLineSize;
                }
            }
        }
        return minSize == Integer.MAX_VALUE ? 0 : minSize;
    }

    static {
        // Probing runs in a privileged block: it reads system properties,
        // the /sys filesystem (Linux), or spawns sysctl (macOS).
        cacheLevels = doPrivileged((PrivilegedAction<CacheLevelInfo[]>) () -> {
            try {
                // NOTE(review): despite the name, this reads "os.name" (the
                // OS name), not "os.arch" — the branches below test OS names.
                String osArch = System.getProperty("os.name", "unknown").toLowerCase(Locale.US);
                if (osArch.contains("linux")) {
                    // try to read /sys fs
                    final File cpu0 = new File("/sys/devices/system/cpu/cpu0/cache");
                    if (cpu0.exists()) {
                        // great!
                        final File[] files = cpu0.listFiles();
                        if (files != null) {
                            // Each "indexN" subdirectory describes one cache level/type.
                            ArrayList<File> indexes = new ArrayList<File>();
                            for (File file : files) {
                                if (file.getName().startsWith("index")) {
                                    indexes.add(file);
                                }
                            }
                            final CacheLevelInfo[] levelInfoArray = new CacheLevelInfo[indexes.size()];
                            for (int i = 0; i < indexes.size(); i++) {
                                File file = indexes.get(i);
                                // parse* helpers return 0 / "" on any failure
                                int index = parseIntFile(new File(file, "level"));
                                final CacheType type;
                                switch (parseStringFile(new File(file, "type"))) {
                                    case "Data": type = CacheType.DATA; break;
                                    case "Instruction": type = CacheType.INSTRUCTION; break;
                                    case "Unified": type = CacheType.UNIFIED; break;
                                    default: type = CacheType.UNKNOWN; break;
                                }
                                int size = parseIntKBFile(new File(file, "size"));
                                int lineSize = parseIntFile(new File(file, "coherency_line_size"));
                                levelInfoArray[i] = new CacheLevelInfo(index, type, size, lineSize);
                            }
                            return levelInfoArray;
                        }
                    }
                } else if (osArch.contains("mac os x")) {
                    // cache line size
                    final int lineSize = safeParseInt(parseProcessOutput("/usr/sbin/sysctl", "-n", "hw.cachelinesize"));
                    if (lineSize != 0) {
                        // cache sizes (sysctl reports bytes; converted to KiB below)
                        final int l1d = safeParseInt(parseProcessOutput("/usr/sbin/sysctl", "-n", "hw.l1dcachesize"));
                        final int l1i = safeParseInt(parseProcessOutput("/usr/sbin/sysctl", "-n", "hw.l1icachesize"));
                        final int l2 = safeParseInt(parseProcessOutput("/usr/sbin/sysctl", "-n", "hw.l2cachesize"));
                        final int l3 = safeParseInt(parseProcessOutput("/usr/sbin/sysctl", "-n", "hw.l3cachesize"));
                        ArrayList<CacheLevelInfo> list = new ArrayList<CacheLevelInfo>();
                        if (l1d != 0) {
                            list.add(new CacheLevelInfo(1, CacheType.DATA, l1d / 1024, lineSize));
                        }
                        if (l1i != 0) {
                            list.add(new CacheLevelInfo(1, CacheType.INSTRUCTION, l1i / 1024, lineSize));
                        }
                        if (l2 != 0) {
                            list.add(new CacheLevelInfo(2, CacheType.UNIFIED, l2 / 1024, lineSize));
                        }
                        if (l3 != 0) {
                            list.add(new CacheLevelInfo(3, CacheType.UNIFIED, l3 / 1024, lineSize));
                        }
                        if (list.size() > 0) {
                            return list.toArray(new CacheLevelInfo[list.size()]);
                        }
                    }
                } else if (osArch.contains("windows")) {
                    // TODO: use the wmic utility to get cache line info
                }
            } catch (Throwable ignored) {
                // best-effort probing: any failure falls through to "unknown"
            }
            // all has failed
            return new CacheLevelInfo[0];
        });
    }

    /** Parse a file containing a single integer; returns 0 on any failure. */
    static int parseIntFile(final File file) {
        return safeParseInt(parseStringFile(file));
    }

    /** Parse an integer string; returns 0 instead of throwing on bad input (including null). */
    static int safeParseInt(final String string) {
        try {
            return Integer.parseInt(string);
        } catch (Throwable ignored) {
            return 0;
        }
    }

    /**
     * Parse a size file with an optional K/M/G suffix into KiB; returns 0 on any failure.
     * An unsuffixed value is returned as-is (assumed to already be in KiB).
     */
    static int parseIntKBFile(final File file) {
        try {
            final String s = parseStringFile(file);
            if (s.endsWith("K")) {
                return Integer.parseInt(s.substring(0, s.length() - 1));
            } else if (s.endsWith("M")) {
                return Integer.parseInt(s.substring(0, s.length() - 1)) * 1024;
            } else if (s.endsWith("G")) {
                return Integer.parseInt(s.substring(0, s.length() - 1)) * 1024 * 1024;
            } else {
                return Integer.parseInt(s);
            }
        } catch (Throwable ignored) {
            return 0;
        }
    }

    /** Read a file's whole contents as trimmed UTF-8 text; returns "" on any failure. */
    static String parseStringFile(final File file) {
        try (FileInputStream is = new FileInputStream(file)) {
            return parseStringStream(is);
        } catch (Throwable ignored) {
            return "";
        }
    }

    /** Drain a stream as trimmed UTF-8 text; returns "" on any failure. */
    static String parseStringStream(final InputStream is) {
        try (Reader r = new InputStreamReader(is, StandardCharsets.UTF_8)) {
            StringBuilder b = new StringBuilder();
            char[] cb = new char[64];
            int res;
            while ((res = r.read(cb)) != -1) {
                b.append(cb, 0, res);
            }
            return b.toString().trim();
        } catch (Throwable ignored) {
            return "";
        }
    }

    /**
     * Run a command and return its trimmed standard output. Standard error is
     * drained on a separate thread to keep the child from blocking. Returns
     * "" if the process cannot be started, or null if this thread is
     * interrupted while waiting for it to exit (the interrupt flag is restored).
     */
    static String parseProcessOutput(final String... args) {
        final ProcessBuilder processBuilder = new ProcessBuilder(args);
        try {
            final Process process = processBuilder.start();
            // The child gets no input; close stdin immediately.
            process.getOutputStream().close();
            final InputStream errorStream = process.getErrorStream();
            final Thread errorThread = new Thread(null, new StreamConsumer(errorStream), "Process thread", 32768L);
            errorThread.start();
            final String result;
            try (final InputStream inputStream = process.getInputStream()) {
                result = parseStringStream(inputStream);
            }
            boolean intr = false;
            try {
                process.waitFor();
            } catch (InterruptedException e) {
                intr = true;
                // NOTE(review): callers feed this through safeParseInt, which
                // tolerates null and yields 0.
                return null;
            } finally {
                try {
                    errorThread.join();
                } catch (InterruptedException e) {
                    intr = true;
                } finally {
                    if (intr) {
                        Thread.currentThread().interrupt();
                    }
                }
            }
            return result;
        } catch (IOException e) {
            return "";
        }
    }

    /** Silently drains and closes a stream; used to consume a child process's stderr. */
    static class StreamConsumer implements Runnable {
        private final InputStream stream;

        StreamConsumer(final InputStream stream) {
            this.stream = stream;
        }

        public void run() {
            byte[] buffer = new byte[128];
            try {
                while (stream.read(buffer) != -1);
            } catch (IOException ignored) {
            } finally {
                try {
                    stream.close();
                } catch (IOException ignored) {
                }
            }
        }
    }

    /** Prints the detected cache levels to standard output; handy for manual diagnostics. */
    public static void main(String[] args) {
        System.out.println("Detected cache info:");
        for (CacheLevelInfo levelInfo : cacheLevels) {
            System.out.printf("Level %d cache: type %s, size %d KiB, cache line is %d bytes%n",
                Integer.valueOf(levelInfo.getCacheLevel()),
                levelInfo.getCacheType(),
                Integer.valueOf(levelInfo.getCacheLevelSizeKB()),
                Integer.valueOf(levelInfo.getCacheLineSize())
            );
        }
    }
}
// Generated from /home/simon/PhD/src/jim/jim/src/uk/ac/open/crc/jim/parser/java17/Java.g4 by ANTLR 4.1 package uk.ac.open.crc.jim.parser.java17; import org.antlr.v4.runtime.Lexer; import org.antlr.v4.runtime.CharStream; import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.TokenStream; import org.antlr.v4.runtime.*; import org.antlr.v4.runtime.atn.*; import org.antlr.v4.runtime.dfa.DFA; import org.antlr.v4.runtime.misc.*; @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) public class JavaLexer extends Lexer { protected static final DFA[] _decisionToDFA; protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); public static final int ABSTRACT=1, ASSERT=2, BOOLEAN=3, BREAK=4, BYTE=5, CASE=6, CATCH=7, CHAR=8, CLASS=9, CONST=10, CONTINUE=11, DEFAULT=12, DO=13, DOUBLE=14, ELSE=15, ENUM=16, EXTENDS=17, FINAL=18, FINALLY=19, FLOAT=20, FOR=21, IF=22, GOTO=23, IMPLEMENTS=24, IMPORT=25, INSTANCEOF=26, INT=27, INTERFACE=28, LONG=29, NATIVE=30, NEW=31, PACKAGE=32, PRIVATE=33, PROTECTED=34, PUBLIC=35, RETURN=36, SHORT=37, STATIC=38, STRICTFP=39, SUPER=40, SWITCH=41, SYNCHRONIZED=42, THIS=43, THROW=44, THROWS=45, TRANSIENT=46, TRY=47, VOID=48, VOLATILE=49, WHILE=50, IntegerLiteral=51, FloatingPointLiteral=52, BooleanLiteral=53, CharacterLiteral=54, StringLiteral=55, NullLiteral=56, LPAREN=57, RPAREN=58, LBRACE=59, RBRACE=60, LBRACK=61, RBRACK=62, SEMI=63, COMMA=64, DOT=65, ASSIGN=66, GT=67, LT=68, BANG=69, TILDE=70, QUESTION=71, COLON=72, EQUAL=73, LE=74, GE=75, NOTEQUAL=76, AND=77, OR=78, INC=79, DEC=80, ADD=81, SUB=82, MUL=83, DIV=84, BITAND=85, BITOR=86, CARET=87, MOD=88, ADD_ASSIGN=89, SUB_ASSIGN=90, MUL_ASSIGN=91, DIV_ASSIGN=92, AND_ASSIGN=93, OR_ASSIGN=94, XOR_ASSIGN=95, MOD_ASSIGN=96, LSHIFT_ASSIGN=97, RSHIFT_ASSIGN=98, URSHIFT_ASSIGN=99, Identifier=100, AT=101, ELLIPSIS=102, WS=103, COMMENT=104, LINE_COMMENT=105; public static String[] modeNames = { "DEFAULT_MODE" }; public static final String[] 
tokenNames = { "<INVALID>", "'abstract'", "'assert'", "'boolean'", "'break'", "'byte'", "'case'", "'catch'", "'char'", "'class'", "'const'", "'continue'", "'default'", "'do'", "'double'", "'else'", "'enum'", "'extends'", "'final'", "'finally'", "'float'", "'for'", "'if'", "'goto'", "'implements'", "'import'", "'instanceof'", "'int'", "'interface'", "'long'", "'native'", "'new'", "'package'", "'private'", "'protected'", "'public'", "'return'", "'short'", "'static'", "'strictfp'", "'super'", "'switch'", "'synchronized'", "'this'", "'throw'", "'throws'", "'transient'", "'try'", "'void'", "'volatile'", "'while'", "IntegerLiteral", "FloatingPointLiteral", "BooleanLiteral", "CharacterLiteral", "StringLiteral", "'null'", "'('", "')'", "'{'", "'}'", "'['", "']'", "';'", "','", "'.'", "'='", "'>'", "'<'", "'!'", "'~'", "'?'", "':'", "'=='", "'<='", "'>='", "'!='", "'&&'", "'||'", "'++'", "'--'", "'+'", "'-'", "'*'", "'/'", "'&'", "'|'", "'^'", "'%'", "'+='", "'-='", "'*='", "'/='", "'&='", "'|='", "'^='", "'%='", "'<<='", "'>>='", "'>>>='", "Identifier", "'@'", "'...'", "WS", "COMMENT", "LINE_COMMENT" }; public static final String[] ruleNames = { "ABSTRACT", "ASSERT", "BOOLEAN", "BREAK", "BYTE", "CASE", "CATCH", "CHAR", "CLASS", "CONST", "CONTINUE", "DEFAULT", "DO", "DOUBLE", "ELSE", "ENUM", "EXTENDS", "FINAL", "FINALLY", "FLOAT", "FOR", "IF", "GOTO", "IMPLEMENTS", "IMPORT", "INSTANCEOF", "INT", "INTERFACE", "LONG", "NATIVE", "NEW", "PACKAGE", "PRIVATE", "PROTECTED", "PUBLIC", "RETURN", "SHORT", "STATIC", "STRICTFP", "SUPER", "SWITCH", "SYNCHRONIZED", "THIS", "THROW", "THROWS", "TRANSIENT", "TRY", "VOID", "VOLATILE", "WHILE", "IntegerLiteral", "DecimalIntegerLiteral", "HexIntegerLiteral", "OctalIntegerLiteral", "BinaryIntegerLiteral", "IntegerTypeSuffix", "DecimalNumeral", "Digits", "Digit", "NonZeroDigit", "DigitOrUnderscore", "Underscores", "HexNumeral", "HexDigits", "HexDigit", "HexDigitOrUnderscore", "OctalNumeral", "OctalDigits", "OctalDigit", "OctalDigitOrUnderscore", 
"BinaryNumeral", "BinaryDigits", "BinaryDigit", "BinaryDigitOrUnderscore", "FloatingPointLiteral", "DecimalFloatingPointLiteral", "ExponentPart", "ExponentIndicator", "SignedInteger", "Sign", "FloatTypeSuffix", "HexadecimalFloatingPointLiteral", "HexSignificand", "BinaryExponent", "BinaryExponentIndicator", "BooleanLiteral", "CharacterLiteral", "SingleCharacter", "StringLiteral", "StringCharacters", "StringCharacter", "EscapeSequence", "OctalEscape", "UnicodeEscape", "ZeroToThree", "NullLiteral", "LPAREN", "RPAREN", "LBRACE", "RBRACE", "LBRACK", "RBRACK", "SEMI", "COMMA", "DOT", "ASSIGN", "GT", "LT", "BANG", "TILDE", "QUESTION", "COLON", "EQUAL", "LE", "GE", "NOTEQUAL", "AND", "OR", "INC", "DEC", "ADD", "SUB", "MUL", "DIV", "BITAND", "BITOR", "CARET", "MOD", "ADD_ASSIGN", "SUB_ASSIGN", "MUL_ASSIGN", "DIV_ASSIGN", "AND_ASSIGN", "OR_ASSIGN", "XOR_ASSIGN", "MOD_ASSIGN", "LSHIFT_ASSIGN", "RSHIFT_ASSIGN", "URSHIFT_ASSIGN", "Identifier", "JavaLetter", "JavaLetterOrDigit", "AT", "ELLIPSIS", "WS", "COMMENT", "LINE_COMMENT" }; public JavaLexer(CharStream input) { super(input); _interp = new LexerATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache); } @Override public String getGrammarFileName() { return "Java.g4"; } @Override public String[] getTokenNames() { return tokenNames; } @Override public String[] getRuleNames() { return ruleNames; } @Override public String[] getModeNames() { return modeNames; } @Override public ATN getATN() { return _ATN; } @Override public void action(RuleContext _localctx, int ruleIndex, int actionIndex) { switch (ruleIndex) { case 144: WS_action((RuleContext)_localctx, actionIndex); break; case 145: COMMENT_action((RuleContext)_localctx, actionIndex); break; case 146: LINE_COMMENT_action((RuleContext)_localctx, actionIndex); break; } } private void LINE_COMMENT_action(RuleContext _localctx, int actionIndex) { switch (actionIndex) { case 2: skip(); break; } } private void WS_action(RuleContext _localctx, int actionIndex) { switch 
(actionIndex) { case 0: skip(); break; } } private void COMMENT_action(RuleContext _localctx, int actionIndex) { switch (actionIndex) { case 1: skip(); break; } } @Override public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { switch (ruleIndex) { case 140: return JavaLetter_sempred((RuleContext)_localctx, predIndex); case 141: return JavaLetterOrDigit_sempred((RuleContext)_localctx, predIndex); } return true; } private boolean JavaLetterOrDigit_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 2: return Character.isJavaIdentifierPart(_input.LA(-1)); case 3: return Character.isJavaIdentifierPart(Character.toCodePoint((char)_input.LA(-2), (char)_input.LA(-1))); } return true; } private boolean JavaLetter_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 0: return Character.isJavaIdentifierStart(_input.LA(-1)); case 1: return Character.isJavaIdentifierStart(Character.toCodePoint((char)_input.LA(-2), (char)_input.LA(-1))); } return true; } public static final String _serializedATN = "\3\uacf5\uee8c\u4f5d\u8b0d\u4a45\u78bd\u1b2f\u3378\2k\u042e\b\1\4\2\t"+ "\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+ "\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ "\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t \4!"+ "\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4"+ ",\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64\t"+ "\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\4;\t;\4<\t<\4=\t="+ "\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4I"+ "\tI\4J\tJ\4K\tK\4L\tL\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT"+ "\4U\tU\4V\tV\4W\tW\4X\tX\4Y\tY\4Z\tZ\4[\t[\4\\\t\\\4]\t]\4^\t^\4_\t_\4"+ "`\t`\4a\ta\4b\tb\4c\tc\4d\td\4e\te\4f\tf\4g\tg\4h\th\4i\ti\4j\tj\4k\t"+ 
"k\4l\tl\4m\tm\4n\tn\4o\to\4p\tp\4q\tq\4r\tr\4s\ts\4t\tt\4u\tu\4v\tv\4"+ "w\tw\4x\tx\4y\ty\4z\tz\4{\t{\4|\t|\4}\t}\4~\t~\4\177\t\177\4\u0080\t\u0080"+ "\4\u0081\t\u0081\4\u0082\t\u0082\4\u0083\t\u0083\4\u0084\t\u0084\4\u0085"+ "\t\u0085\4\u0086\t\u0086\4\u0087\t\u0087\4\u0088\t\u0088\4\u0089\t\u0089"+ "\4\u008a\t\u008a\4\u008b\t\u008b\4\u008c\t\u008c\4\u008d\t\u008d\4\u008e"+ "\t\u008e\4\u008f\t\u008f\4\u0090\t\u0090\4\u0091\t\u0091\4\u0092\t\u0092"+ "\4\u0093\t\u0093\4\u0094\t\u0094\3\2\3\2\3\2\3\2\3\2\3\2\3\2\3\2\3\2\3"+ "\3\3\3\3\3\3\3\3\3\3\3\3\3\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\5\3\5\3\5"+ "\3\5\3\5\3\5\3\6\3\6\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\b\3\b\3\b\3\b\3"+ "\b\3\b\3\t\3\t\3\t\3\t\3\t\3\n\3\n\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\13"+ "\3\13\3\13\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\r\3\r\3\r\3\r\3\r\3\r"+ "\3\r\3\r\3\16\3\16\3\16\3\17\3\17\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3"+ "\20\3\20\3\20\3\21\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\22\3\22\3"+ "\22\3\22\3\23\3\23\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3\24\3\24\3\24\3"+ "\24\3\24\3\25\3\25\3\25\3\25\3\25\3\25\3\26\3\26\3\26\3\26\3\27\3\27\3"+ "\27\3\30\3\30\3\30\3\30\3\30\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3"+ "\31\3\31\3\31\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\33\3\33\3\33\3\33\3"+ "\33\3\33\3\33\3\33\3\33\3\33\3\33\3\34\3\34\3\34\3\34\3\35\3\35\3\35\3"+ "\35\3\35\3\35\3\35\3\35\3\35\3\35\3\36\3\36\3\36\3\36\3\36\3\37\3\37\3"+ "\37\3\37\3\37\3\37\3\37\3 \3 \3 \3 \3!\3!\3!\3!\3!\3!\3!\3!\3\"\3\"\3"+ "\"\3\"\3\"\3\"\3\"\3\"\3#\3#\3#\3#\3#\3#\3#\3#\3#\3#\3$\3$\3$\3$\3$\3"+ "$\3$\3%\3%\3%\3%\3%\3%\3%\3&\3&\3&\3&\3&\3&\3\'\3\'\3\'\3\'\3\'\3\'\3"+ "\'\3(\3(\3(\3(\3(\3(\3(\3(\3(\3)\3)\3)\3)\3)\3)\3*\3*\3*\3*\3*\3*\3*\3"+ "+\3+\3+\3+\3+\3+\3+\3+\3+\3+\3+\3+\3+\3,\3,\3,\3,\3,\3-\3-\3-\3-\3-\3"+ "-\3.\3.\3.\3.\3.\3.\3.\3/\3/\3/\3/\3/\3/\3/\3/\3/\3/\3\60\3\60\3\60\3"+ "\60\3\61\3\61\3\61\3\61\3\61\3\62\3\62\3\62\3\62\3\62\3\62\3\62\3\62\3"+ 
"\62\3\63\3\63\3\63\3\63\3\63\3\63\3\64\3\64\3\64\3\64\5\64\u0281\n\64"+ "\3\65\3\65\5\65\u0285\n\65\3\66\3\66\5\66\u0289\n\66\3\67\3\67\5\67\u028d"+ "\n\67\38\38\58\u0291\n8\39\39\3:\3:\3:\5:\u0298\n:\3:\3:\3:\5:\u029d\n"+ ":\5:\u029f\n:\3;\3;\7;\u02a3\n;\f;\16;\u02a6\13;\3;\5;\u02a9\n;\3<\3<"+ "\5<\u02ad\n<\3=\3=\3>\3>\5>\u02b3\n>\3?\6?\u02b6\n?\r?\16?\u02b7\3@\3"+ "@\3@\3@\3A\3A\7A\u02c0\nA\fA\16A\u02c3\13A\3A\5A\u02c6\nA\3B\3B\3C\3C"+ "\5C\u02cc\nC\3D\3D\5D\u02d0\nD\3D\3D\3E\3E\7E\u02d6\nE\fE\16E\u02d9\13"+ "E\3E\5E\u02dc\nE\3F\3F\3G\3G\5G\u02e2\nG\3H\3H\3H\3H\3I\3I\7I\u02ea\n"+ "I\fI\16I\u02ed\13I\3I\5I\u02f0\nI\3J\3J\3K\3K\5K\u02f6\nK\3L\3L\5L\u02fa"+ "\nL\3M\3M\3M\5M\u02ff\nM\3M\5M\u0302\nM\3M\5M\u0305\nM\3M\3M\3M\5M\u030a"+ "\nM\3M\5M\u030d\nM\3M\3M\3M\5M\u0312\nM\3M\3M\3M\5M\u0317\nM\3N\3N\3N"+ "\3O\3O\3P\5P\u031f\nP\3P\3P\3Q\3Q\3R\3R\3S\3S\3S\5S\u032a\nS\3T\3T\5T"+ "\u032e\nT\3T\3T\3T\5T\u0333\nT\3T\3T\5T\u0337\nT\3U\3U\3U\3V\3V\3W\3W"+ "\3W\3W\3W\3W\3W\3W\3W\5W\u0347\nW\3X\3X\3X\3X\3X\3X\3X\3X\5X\u0351\nX"+ "\3Y\3Y\3Z\3Z\5Z\u0357\nZ\3Z\3Z\3[\6[\u035c\n[\r[\16[\u035d\3\\\3\\\5\\"+ "\u0362\n\\\3]\3]\3]\3]\5]\u0368\n]\3^\3^\3^\3^\3^\3^\3^\3^\3^\3^\3^\5"+ "^\u0375\n^\3_\3_\3_\3_\3_\3_\3_\3`\3`\3a\3a\3a\3a\3a\3b\3b\3c\3c\3d\3"+ "d\3e\3e\3f\3f\3g\3g\3h\3h\3i\3i\3j\3j\3k\3k\3l\3l\3m\3m\3n\3n\3o\3o\3"+ "p\3p\3q\3q\3r\3r\3r\3s\3s\3s\3t\3t\3t\3u\3u\3u\3v\3v\3v\3w\3w\3w\3x\3"+ "x\3x\3y\3y\3y\3z\3z\3{\3{\3|\3|\3}\3}\3~\3~\3\177\3\177\3\u0080\3\u0080"+ "\3\u0081\3\u0081\3\u0082\3\u0082\3\u0082\3\u0083\3\u0083\3\u0083\3\u0084"+ "\3\u0084\3\u0084\3\u0085\3\u0085\3\u0085\3\u0086\3\u0086\3\u0086\3\u0087"+ "\3\u0087\3\u0087\3\u0088\3\u0088\3\u0088\3\u0089\3\u0089\3\u0089\3\u008a"+ "\3\u008a\3\u008a\3\u008a\3\u008b\3\u008b\3\u008b\3\u008b\3\u008c\3\u008c"+ "\3\u008c\3\u008c\3\u008c\3\u008d\3\u008d\7\u008d\u03f4\n\u008d\f\u008d"+ "\16\u008d\u03f7\13\u008d\3\u008e\3\u008e\3\u008e\3\u008e\3\u008e\3\u008e"+ 
"\5\u008e\u03ff\n\u008e\3\u008f\3\u008f\3\u008f\3\u008f\3\u008f\3\u008f"+ "\5\u008f\u0407\n\u008f\3\u0090\3\u0090\3\u0091\3\u0091\3\u0091\3\u0091"+ "\3\u0092\6\u0092\u0410\n\u0092\r\u0092\16\u0092\u0411\3\u0092\3\u0092"+ "\3\u0093\3\u0093\3\u0093\3\u0093\7\u0093\u041a\n\u0093\f\u0093\16\u0093"+ "\u041d\13\u0093\3\u0093\3\u0093\3\u0093\3\u0093\3\u0093\3\u0094\3\u0094"+ "\3\u0094\3\u0094\7\u0094\u0428\n\u0094\f\u0094\16\u0094\u042b\13\u0094"+ "\3\u0094\3\u0094\3\u041b\u0095\3\3\1\5\4\1\7\5\1\t\6\1\13\7\1\r\b\1\17"+ "\t\1\21\n\1\23\13\1\25\f\1\27\r\1\31\16\1\33\17\1\35\20\1\37\21\1!\22"+ "\1#\23\1%\24\1\'\25\1)\26\1+\27\1-\30\1/\31\1\61\32\1\63\33\1\65\34\1"+ "\67\35\19\36\1;\37\1= \1?!\1A\"\1C#\1E$\1G%\1I&\1K\'\1M(\1O)\1Q*\1S+\1"+ "U,\1W-\1Y.\1[/\1]\60\1_\61\1a\62\1c\63\1e\64\1g\65\1i\2\1k\2\1m\2\1o\2"+ "\1q\2\1s\2\1u\2\1w\2\1y\2\1{\2\1}\2\1\177\2\1\u0081\2\1\u0083\2\1\u0085"+ "\2\1\u0087\2\1\u0089\2\1\u008b\2\1\u008d\2\1\u008f\2\1\u0091\2\1\u0093"+ "\2\1\u0095\2\1\u0097\66\1\u0099\2\1\u009b\2\1\u009d\2\1\u009f\2\1\u00a1"+ "\2\1\u00a3\2\1\u00a5\2\1\u00a7\2\1\u00a9\2\1\u00ab\2\1\u00ad\67\1\u00af"+ "8\1\u00b1\2\1\u00b39\1\u00b5\2\1\u00b7\2\1\u00b9\2\1\u00bb\2\1\u00bd\2"+ "\1\u00bf\2\1\u00c1:\1\u00c3;\1\u00c5<\1\u00c7=\1\u00c9>\1\u00cb?\1\u00cd"+ "@\1\u00cfA\1\u00d1B\1\u00d3C\1\u00d5D\1\u00d7E\1\u00d9F\1\u00dbG\1\u00dd"+ "H\1\u00dfI\1\u00e1J\1\u00e3K\1\u00e5L\1\u00e7M\1\u00e9N\1\u00ebO\1\u00ed"+ "P\1\u00efQ\1\u00f1R\1\u00f3S\1\u00f5T\1\u00f7U\1\u00f9V\1\u00fbW\1\u00fd"+ "X\1\u00ffY\1\u0101Z\1\u0103[\1\u0105\\\1\u0107]\1\u0109^\1\u010b_\1\u010d"+ "`\1\u010fa\1\u0111b\1\u0113c\1\u0115d\1\u0117e\1\u0119f\1\u011b\2\1\u011d"+ "\2\1\u011fg\1\u0121h\1\u0123i\2\u0125j\3\u0127k\4\3\2\30\4\2NNnn\3\2\63"+ ";\4\2ZZzz\5\2\62;CHch\3\2\629\4\2DDdd\3\2\62\63\4\2GGgg\4\2--//\6\2FF"+ "HHffhh\4\2RRrr\4\2))^^\4\2$$^^\n\2$$))^^ddhhppttvv\3\2\62\65\6\2&&C\\"+ "aac|\4\2\2\u0101\ud802\udc01\3\2\ud802\udc01\3\2\udc02\ue001\7\2&&\62"+ 
";C\\aac|\5\2\13\f\16\17\"\"\4\2\f\f\17\17\u043c\2\3\3\2\2\2\2\5\3\2\2"+ "\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21"+ "\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2"+ "\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3"+ "\2\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2\2\2/\3\2\2\2\2\61\3\2\2\2\2\63\3"+ "\2\2\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3\2\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3"+ "\2\2\2\2A\3\2\2\2\2C\3\2\2\2\2E\3\2\2\2\2G\3\2\2\2\2I\3\2\2\2\2K\3\2\2"+ "\2\2M\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2S\3\2\2\2\2U\3\2\2\2\2W\3\2\2\2\2"+ "Y\3\2\2\2\2[\3\2\2\2\2]\3\2\2\2\2_\3\2\2\2\2a\3\2\2\2\2c\3\2\2\2\2e\3"+ "\2\2\2\2g\3\2\2\2\2\u0097\3\2\2\2\2\u00ad\3\2\2\2\2\u00af\3\2\2\2\2\u00b3"+ "\3\2\2\2\2\u00c1\3\2\2\2\2\u00c3\3\2\2\2\2\u00c5\3\2\2\2\2\u00c7\3\2\2"+ "\2\2\u00c9\3\2\2\2\2\u00cb\3\2\2\2\2\u00cd\3\2\2\2\2\u00cf\3\2\2\2\2\u00d1"+ "\3\2\2\2\2\u00d3\3\2\2\2\2\u00d5\3\2\2\2\2\u00d7\3\2\2\2\2\u00d9\3\2\2"+ "\2\2\u00db\3\2\2\2\2\u00dd\3\2\2\2\2\u00df\3\2\2\2\2\u00e1\3\2\2\2\2\u00e3"+ "\3\2\2\2\2\u00e5\3\2\2\2\2\u00e7\3\2\2\2\2\u00e9\3\2\2\2\2\u00eb\3\2\2"+ "\2\2\u00ed\3\2\2\2\2\u00ef\3\2\2\2\2\u00f1\3\2\2\2\2\u00f3\3\2\2\2\2\u00f5"+ "\3\2\2\2\2\u00f7\3\2\2\2\2\u00f9\3\2\2\2\2\u00fb\3\2\2\2\2\u00fd\3\2\2"+ "\2\2\u00ff\3\2\2\2\2\u0101\3\2\2\2\2\u0103\3\2\2\2\2\u0105\3\2\2\2\2\u0107"+ "\3\2\2\2\2\u0109\3\2\2\2\2\u010b\3\2\2\2\2\u010d\3\2\2\2\2\u010f\3\2\2"+ "\2\2\u0111\3\2\2\2\2\u0113\3\2\2\2\2\u0115\3\2\2\2\2\u0117\3\2\2\2\2\u0119"+ "\3\2\2\2\2\u011f\3\2\2\2\2\u0121\3\2\2\2\2\u0123\3\2\2\2\2\u0125\3\2\2"+ "\2\2\u0127\3\2\2\2\3\u0129\3\2\2\2\5\u0132\3\2\2\2\7\u0139\3\2\2\2\t\u0141"+ "\3\2\2\2\13\u0147\3\2\2\2\r\u014c\3\2\2\2\17\u0151\3\2\2\2\21\u0157\3"+ "\2\2\2\23\u015c\3\2\2\2\25\u0162\3\2\2\2\27\u0168\3\2\2\2\31\u0171\3\2"+ "\2\2\33\u0179\3\2\2\2\35\u017c\3\2\2\2\37\u0183\3\2\2\2!\u0188\3\2\2\2"+ "#\u018d\3\2\2\2%\u0195\3\2\2\2\'\u019b\3\2\2\2)\u01a3\3\2\2\2+\u01a9\3"+ 
"\2\2\2-\u01ad\3\2\2\2/\u01b0\3\2\2\2\61\u01b5\3\2\2\2\63\u01c0\3\2\2\2"+ "\65\u01c7\3\2\2\2\67\u01d2\3\2\2\29\u01d6\3\2\2\2;\u01e0\3\2\2\2=\u01e5"+ "\3\2\2\2?\u01ec\3\2\2\2A\u01f0\3\2\2\2C\u01f8\3\2\2\2E\u0200\3\2\2\2G"+ "\u020a\3\2\2\2I\u0211\3\2\2\2K\u0218\3\2\2\2M\u021e\3\2\2\2O\u0225\3\2"+ "\2\2Q\u022e\3\2\2\2S\u0234\3\2\2\2U\u023b\3\2\2\2W\u0248\3\2\2\2Y\u024d"+ "\3\2\2\2[\u0253\3\2\2\2]\u025a\3\2\2\2_\u0264\3\2\2\2a\u0268\3\2\2\2c"+ "\u026d\3\2\2\2e\u0276\3\2\2\2g\u0280\3\2\2\2i\u0282\3\2\2\2k\u0286\3\2"+ "\2\2m\u028a\3\2\2\2o\u028e\3\2\2\2q\u0292\3\2\2\2s\u029e\3\2\2\2u\u02a0"+ "\3\2\2\2w\u02ac\3\2\2\2y\u02ae\3\2\2\2{\u02b2\3\2\2\2}\u02b5\3\2\2\2\177"+ "\u02b9\3\2\2\2\u0081\u02bd\3\2\2\2\u0083\u02c7\3\2\2\2\u0085\u02cb\3\2"+ "\2\2\u0087\u02cd\3\2\2\2\u0089\u02d3\3\2\2\2\u008b\u02dd\3\2\2\2\u008d"+ "\u02e1\3\2\2\2\u008f\u02e3\3\2\2\2\u0091\u02e7\3\2\2\2\u0093\u02f1\3\2"+ "\2\2\u0095\u02f5\3\2\2\2\u0097\u02f9\3\2\2\2\u0099\u0316\3\2\2\2\u009b"+ "\u0318\3\2\2\2\u009d\u031b\3\2\2\2\u009f\u031e\3\2\2\2\u00a1\u0322\3\2"+ "\2\2\u00a3\u0324\3\2\2\2\u00a5\u0326\3\2\2\2\u00a7\u0336\3\2\2\2\u00a9"+ "\u0338\3\2\2\2\u00ab\u033b\3\2\2\2\u00ad\u0346\3\2\2\2\u00af\u0350\3\2"+ "\2\2\u00b1\u0352\3\2\2\2\u00b3\u0354\3\2\2\2\u00b5\u035b\3\2\2\2\u00b7"+ "\u0361\3\2\2\2\u00b9\u0367\3\2\2\2\u00bb\u0374\3\2\2\2\u00bd\u0376\3\2"+ "\2\2\u00bf\u037d\3\2\2\2\u00c1\u037f\3\2\2\2\u00c3\u0384\3\2\2\2\u00c5"+ "\u0386\3\2\2\2\u00c7\u0388\3\2\2\2\u00c9\u038a\3\2\2\2\u00cb\u038c\3\2"+ "\2\2\u00cd\u038e\3\2\2\2\u00cf\u0390\3\2\2\2\u00d1\u0392\3\2\2\2\u00d3"+ "\u0394\3\2\2\2\u00d5\u0396\3\2\2\2\u00d7\u0398\3\2\2\2\u00d9\u039a\3\2"+ "\2\2\u00db\u039c\3\2\2\2\u00dd\u039e\3\2\2\2\u00df\u03a0\3\2\2\2\u00e1"+ "\u03a2\3\2\2\2\u00e3\u03a4\3\2\2\2\u00e5\u03a7\3\2\2\2\u00e7\u03aa\3\2"+ "\2\2\u00e9\u03ad\3\2\2\2\u00eb\u03b0\3\2\2\2\u00ed\u03b3\3\2\2\2\u00ef"+ "\u03b6\3\2\2\2\u00f1\u03b9\3\2\2\2\u00f3\u03bc\3\2\2\2\u00f5\u03be\3\2"+ 
"\2\2\u00f7\u03c0\3\2\2\2\u00f9\u03c2\3\2\2\2\u00fb\u03c4\3\2\2\2\u00fd"+ "\u03c6\3\2\2\2\u00ff\u03c8\3\2\2\2\u0101\u03ca\3\2\2\2\u0103\u03cc\3\2"+ "\2\2\u0105\u03cf\3\2\2\2\u0107\u03d2\3\2\2\2\u0109\u03d5\3\2\2\2\u010b"+ "\u03d8\3\2\2\2\u010d\u03db\3\2\2\2\u010f\u03de\3\2\2\2\u0111\u03e1\3\2"+ "\2\2\u0113\u03e4\3\2\2\2\u0115\u03e8\3\2\2\2\u0117\u03ec\3\2\2\2\u0119"+ "\u03f1\3\2\2\2\u011b\u03fe\3\2\2\2\u011d\u0406\3\2\2\2\u011f\u0408\3\2"+ "\2\2\u0121\u040a\3\2\2\2\u0123\u040f\3\2\2\2\u0125\u0415\3\2\2\2\u0127"+ "\u0423\3\2\2\2\u0129\u012a\7c\2\2\u012a\u012b\7d\2\2\u012b\u012c\7u\2"+ "\2\u012c\u012d\7v\2\2\u012d\u012e\7t\2\2\u012e\u012f\7c\2\2\u012f\u0130"+ "\7e\2\2\u0130\u0131\7v\2\2\u0131\4\3\2\2\2\u0132\u0133\7c\2\2\u0133\u0134"+ "\7u\2\2\u0134\u0135\7u\2\2\u0135\u0136\7g\2\2\u0136\u0137\7t\2\2\u0137"+ "\u0138\7v\2\2\u0138\6\3\2\2\2\u0139\u013a\7d\2\2\u013a\u013b\7q\2\2\u013b"+ "\u013c\7q\2\2\u013c\u013d\7n\2\2\u013d\u013e\7g\2\2\u013e\u013f\7c\2\2"+ "\u013f\u0140\7p\2\2\u0140\b\3\2\2\2\u0141\u0142\7d\2\2\u0142\u0143\7t"+ "\2\2\u0143\u0144\7g\2\2\u0144\u0145\7c\2\2\u0145\u0146\7m\2\2\u0146\n"+ "\3\2\2\2\u0147\u0148\7d\2\2\u0148\u0149\7{\2\2\u0149\u014a\7v\2\2\u014a"+ "\u014b\7g\2\2\u014b\f\3\2\2\2\u014c\u014d\7e\2\2\u014d\u014e\7c\2\2\u014e"+ "\u014f\7u\2\2\u014f\u0150\7g\2\2\u0150\16\3\2\2\2\u0151\u0152\7e\2\2\u0152"+ "\u0153\7c\2\2\u0153\u0154\7v\2\2\u0154\u0155\7e\2\2\u0155\u0156\7j\2\2"+ "\u0156\20\3\2\2\2\u0157\u0158\7e\2\2\u0158\u0159\7j\2\2\u0159\u015a\7"+ "c\2\2\u015a\u015b\7t\2\2\u015b\22\3\2\2\2\u015c\u015d\7e\2\2\u015d\u015e"+ "\7n\2\2\u015e\u015f\7c\2\2\u015f\u0160\7u\2\2\u0160\u0161\7u\2\2\u0161"+ "\24\3\2\2\2\u0162\u0163\7e\2\2\u0163\u0164\7q\2\2\u0164\u0165\7p\2\2\u0165"+ "\u0166\7u\2\2\u0166\u0167\7v\2\2\u0167\26\3\2\2\2\u0168\u0169\7e\2\2\u0169"+ "\u016a\7q\2\2\u016a\u016b\7p\2\2\u016b\u016c\7v\2\2\u016c\u016d\7k\2\2"+ "\u016d\u016e\7p\2\2\u016e\u016f\7w\2\2\u016f\u0170\7g\2\2\u0170\30\3\2"+ 
"\2\2\u0171\u0172\7f\2\2\u0172\u0173\7g\2\2\u0173\u0174\7h\2\2\u0174\u0175"+ "\7c\2\2\u0175\u0176\7w\2\2\u0176\u0177\7n\2\2\u0177\u0178\7v\2\2\u0178"+ "\32\3\2\2\2\u0179\u017a\7f\2\2\u017a\u017b\7q\2\2\u017b\34\3\2\2\2\u017c"+ "\u017d\7f\2\2\u017d\u017e\7q\2\2\u017e\u017f\7w\2\2\u017f\u0180\7d\2\2"+ "\u0180\u0181\7n\2\2\u0181\u0182\7g\2\2\u0182\36\3\2\2\2\u0183\u0184\7"+ "g\2\2\u0184\u0185\7n\2\2\u0185\u0186\7u\2\2\u0186\u0187\7g\2\2\u0187 "+ "\3\2\2\2\u0188\u0189\7g\2\2\u0189\u018a\7p\2\2\u018a\u018b\7w\2\2\u018b"+ "\u018c\7o\2\2\u018c\"\3\2\2\2\u018d\u018e\7g\2\2\u018e\u018f\7z\2\2\u018f"+ "\u0190\7v\2\2\u0190\u0191\7g\2\2\u0191\u0192\7p\2\2\u0192\u0193\7f\2\2"+ "\u0193\u0194\7u\2\2\u0194$\3\2\2\2\u0195\u0196\7h\2\2\u0196\u0197\7k\2"+ "\2\u0197\u0198\7p\2\2\u0198\u0199\7c\2\2\u0199\u019a\7n\2\2\u019a&\3\2"+ "\2\2\u019b\u019c\7h\2\2\u019c\u019d\7k\2\2\u019d\u019e\7p\2\2\u019e\u019f"+ "\7c\2\2\u019f\u01a0\7n\2\2\u01a0\u01a1\7n\2\2\u01a1\u01a2\7{\2\2\u01a2"+ "(\3\2\2\2\u01a3\u01a4\7h\2\2\u01a4\u01a5\7n\2\2\u01a5\u01a6\7q\2\2\u01a6"+ "\u01a7\7c\2\2\u01a7\u01a8\7v\2\2\u01a8*\3\2\2\2\u01a9\u01aa\7h\2\2\u01aa"+ "\u01ab\7q\2\2\u01ab\u01ac\7t\2\2\u01ac,\3\2\2\2\u01ad\u01ae\7k\2\2\u01ae"+ "\u01af\7h\2\2\u01af.\3\2\2\2\u01b0\u01b1\7i\2\2\u01b1\u01b2\7q\2\2\u01b2"+ "\u01b3\7v\2\2\u01b3\u01b4\7q\2\2\u01b4\60\3\2\2\2\u01b5\u01b6\7k\2\2\u01b6"+ "\u01b7\7o\2\2\u01b7\u01b8\7r\2\2\u01b8\u01b9\7n\2\2\u01b9\u01ba\7g\2\2"+ "\u01ba\u01bb\7o\2\2\u01bb\u01bc\7g\2\2\u01bc\u01bd\7p\2\2\u01bd\u01be"+ "\7v\2\2\u01be\u01bf\7u\2\2\u01bf\62\3\2\2\2\u01c0\u01c1\7k\2\2\u01c1\u01c2"+ "\7o\2\2\u01c2\u01c3\7r\2\2\u01c3\u01c4\7q\2\2\u01c4\u01c5\7t\2\2\u01c5"+ "\u01c6\7v\2\2\u01c6\64\3\2\2\2\u01c7\u01c8\7k\2\2\u01c8\u01c9\7p\2\2\u01c9"+ "\u01ca\7u\2\2\u01ca\u01cb\7v\2\2\u01cb\u01cc\7c\2\2\u01cc\u01cd\7p\2\2"+ "\u01cd\u01ce\7e\2\2\u01ce\u01cf\7g\2\2\u01cf\u01d0\7q\2\2\u01d0\u01d1"+ "\7h\2\2\u01d1\66\3\2\2\2\u01d2\u01d3\7k\2\2\u01d3\u01d4\7p\2\2\u01d4\u01d5"+ 
"\7v\2\2\u01d58\3\2\2\2\u01d6\u01d7\7k\2\2\u01d7\u01d8\7p\2\2\u01d8\u01d9"+ "\7v\2\2\u01d9\u01da\7g\2\2\u01da\u01db\7t\2\2\u01db\u01dc\7h\2\2\u01dc"+ "\u01dd\7c\2\2\u01dd\u01de\7e\2\2\u01de\u01df\7g\2\2\u01df:\3\2\2\2\u01e0"+ "\u01e1\7n\2\2\u01e1\u01e2\7q\2\2\u01e2\u01e3\7p\2\2\u01e3\u01e4\7i\2\2"+ "\u01e4<\3\2\2\2\u01e5\u01e6\7p\2\2\u01e6\u01e7\7c\2\2\u01e7\u01e8\7v\2"+ "\2\u01e8\u01e9\7k\2\2\u01e9\u01ea\7x\2\2\u01ea\u01eb\7g\2\2\u01eb>\3\2"+ "\2\2\u01ec\u01ed\7p\2\2\u01ed\u01ee\7g\2\2\u01ee\u01ef\7y\2\2\u01ef@\3"+ "\2\2\2\u01f0\u01f1\7r\2\2\u01f1\u01f2\7c\2\2\u01f2\u01f3\7e\2\2\u01f3"+ "\u01f4\7m\2\2\u01f4\u01f5\7c\2\2\u01f5\u01f6\7i\2\2\u01f6\u01f7\7g\2\2"+ "\u01f7B\3\2\2\2\u01f8\u01f9\7r\2\2\u01f9\u01fa\7t\2\2\u01fa\u01fb\7k\2"+ "\2\u01fb\u01fc\7x\2\2\u01fc\u01fd\7c\2\2\u01fd\u01fe\7v\2\2\u01fe\u01ff"+ "\7g\2\2\u01ffD\3\2\2\2\u0200\u0201\7r\2\2\u0201\u0202\7t\2\2\u0202\u0203"+ "\7q\2\2\u0203\u0204\7v\2\2\u0204\u0205\7g\2\2\u0205\u0206\7e\2\2\u0206"+ "\u0207\7v\2\2\u0207\u0208\7g\2\2\u0208\u0209\7f\2\2\u0209F\3\2\2\2\u020a"+ "\u020b\7r\2\2\u020b\u020c\7w\2\2\u020c\u020d\7d\2\2\u020d\u020e\7n\2\2"+ "\u020e\u020f\7k\2\2\u020f\u0210\7e\2\2\u0210H\3\2\2\2\u0211\u0212\7t\2"+ "\2\u0212\u0213\7g\2\2\u0213\u0214\7v\2\2\u0214\u0215\7w\2\2\u0215\u0216"+ "\7t\2\2\u0216\u0217\7p\2\2\u0217J\3\2\2\2\u0218\u0219\7u\2\2\u0219\u021a"+ "\7j\2\2\u021a\u021b\7q\2\2\u021b\u021c\7t\2\2\u021c\u021d\7v\2\2\u021d"+ "L\3\2\2\2\u021e\u021f\7u\2\2\u021f\u0220\7v\2\2\u0220\u0221\7c\2\2\u0221"+ "\u0222\7v\2\2\u0222\u0223\7k\2\2\u0223\u0224\7e\2\2\u0224N\3\2\2\2\u0225"+ "\u0226\7u\2\2\u0226\u0227\7v\2\2\u0227\u0228\7t\2\2\u0228\u0229\7k\2\2"+ "\u0229\u022a\7e\2\2\u022a\u022b\7v\2\2\u022b\u022c\7h\2\2\u022c\u022d"+ "\7r\2\2\u022dP\3\2\2\2\u022e\u022f\7u\2\2\u022f\u0230\7w\2\2\u0230\u0231"+ "\7r\2\2\u0231\u0232\7g\2\2\u0232\u0233\7t\2\2\u0233R\3\2\2\2\u0234\u0235"+ "\7u\2\2\u0235\u0236\7y\2\2\u0236\u0237\7k\2\2\u0237\u0238\7v\2\2\u0238"+ 
"\u0239\7e\2\2\u0239\u023a\7j\2\2\u023aT\3\2\2\2\u023b\u023c\7u\2\2\u023c"+ "\u023d\7{\2\2\u023d\u023e\7p\2\2\u023e\u023f\7e\2\2\u023f\u0240\7j\2\2"+ "\u0240\u0241\7t\2\2\u0241\u0242\7q\2\2\u0242\u0243\7p\2\2\u0243\u0244"+ "\7k\2\2\u0244\u0245\7|\2\2\u0245\u0246\7g\2\2\u0246\u0247\7f\2\2\u0247"+ "V\3\2\2\2\u0248\u0249\7v\2\2\u0249\u024a\7j\2\2\u024a\u024b\7k\2\2\u024b"+ "\u024c\7u\2\2\u024cX\3\2\2\2\u024d\u024e\7v\2\2\u024e\u024f\7j\2\2\u024f"+ "\u0250\7t\2\2\u0250\u0251\7q\2\2\u0251\u0252\7y\2\2\u0252Z\3\2\2\2\u0253"+ "\u0254\7v\2\2\u0254\u0255\7j\2\2\u0255\u0256\7t\2\2\u0256\u0257\7q\2\2"+ "\u0257\u0258\7y\2\2\u0258\u0259\7u\2\2\u0259\\\3\2\2\2\u025a\u025b\7v"+ "\2\2\u025b\u025c\7t\2\2\u025c\u025d\7c\2\2\u025d\u025e\7p\2\2\u025e\u025f"+ "\7u\2\2\u025f\u0260\7k\2\2\u0260\u0261\7g\2\2\u0261\u0262\7p\2\2\u0262"+ "\u0263\7v\2\2\u0263^\3\2\2\2\u0264\u0265\7v\2\2\u0265\u0266\7t\2\2\u0266"+ "\u0267\7{\2\2\u0267`\3\2\2\2\u0268\u0269\7x\2\2\u0269\u026a\7q\2\2\u026a"+ "\u026b\7k\2\2\u026b\u026c\7f\2\2\u026cb\3\2\2\2\u026d\u026e\7x\2\2\u026e"+ "\u026f\7q\2\2\u026f\u0270\7n\2\2\u0270\u0271\7c\2\2\u0271\u0272\7v\2\2"+ "\u0272\u0273\7k\2\2\u0273\u0274\7n\2\2\u0274\u0275\7g\2\2\u0275d\3\2\2"+ "\2\u0276\u0277\7y\2\2\u0277\u0278\7j\2\2\u0278\u0279\7k\2\2\u0279\u027a"+ "\7n\2\2\u027a\u027b\7g\2\2\u027bf\3\2\2\2\u027c\u0281\5i\65\2\u027d\u0281"+ "\5k\66\2\u027e\u0281\5m\67\2\u027f\u0281\5o8\2\u0280\u027c\3\2\2\2\u0280"+ "\u027d\3\2\2\2\u0280\u027e\3\2\2\2\u0280\u027f\3\2\2\2\u0281h\3\2\2\2"+ "\u0282\u0284\5s:\2\u0283\u0285\5q9\2\u0284\u0283\3\2\2\2\u0284\u0285\3"+ "\2\2\2\u0285j\3\2\2\2\u0286\u0288\5\177@\2\u0287\u0289\5q9\2\u0288\u0287"+ "\3\2\2\2\u0288\u0289\3\2\2\2\u0289l\3\2\2\2\u028a\u028c\5\u0087D\2\u028b"+ "\u028d\5q9\2\u028c\u028b\3\2\2\2\u028c\u028d\3\2\2\2\u028dn\3\2\2\2\u028e"+ "\u0290\5\u008fH\2\u028f\u0291\5q9\2\u0290\u028f\3\2\2\2\u0290\u0291\3"+ "\2\2\2\u0291p\3\2\2\2\u0292\u0293\t\2\2\2\u0293r\3\2\2\2\u0294\u029f\7"+ 
"\62\2\2\u0295\u029c\5y=\2\u0296\u0298\5u;\2\u0297\u0296\3\2\2\2\u0297"+ "\u0298\3\2\2\2\u0298\u029d\3\2\2\2\u0299\u029a\5}?\2\u029a\u029b\5u;\2"+ "\u029b\u029d\3\2\2\2\u029c\u0297\3\2\2\2\u029c\u0299\3\2\2\2\u029d\u029f"+ "\3\2\2\2\u029e\u0294\3\2\2\2\u029e\u0295\3\2\2\2\u029ft\3\2\2\2\u02a0"+ "\u02a8\5w<\2\u02a1\u02a3\5{>\2\u02a2\u02a1\3\2\2\2\u02a3\u02a6\3\2\2\2"+ "\u02a4\u02a2\3\2\2\2\u02a4\u02a5\3\2\2\2\u02a5\u02a7\3\2\2\2\u02a6\u02a4"+ "\3\2\2\2\u02a7\u02a9\5w<\2\u02a8\u02a4\3\2\2\2\u02a8\u02a9\3\2\2\2\u02a9"+ "v\3\2\2\2\u02aa\u02ad\7\62\2\2\u02ab\u02ad\5y=\2\u02ac\u02aa\3\2\2\2\u02ac"+ "\u02ab\3\2\2\2\u02adx\3\2\2\2\u02ae\u02af\t\3\2\2\u02afz\3\2\2\2\u02b0"+ "\u02b3\5w<\2\u02b1\u02b3\7a\2\2\u02b2\u02b0\3\2\2\2\u02b2\u02b1\3\2\2"+ "\2\u02b3|\3\2\2\2\u02b4\u02b6\7a\2\2\u02b5\u02b4\3\2\2\2\u02b6\u02b7\3"+ "\2\2\2\u02b7\u02b5\3\2\2\2\u02b7\u02b8\3\2\2\2\u02b8~\3\2\2\2\u02b9\u02ba"+ "\7\62\2\2\u02ba\u02bb\t\4\2\2\u02bb\u02bc\5\u0081A\2\u02bc\u0080\3\2\2"+ "\2\u02bd\u02c5\5\u0083B\2\u02be\u02c0\5\u0085C\2\u02bf\u02be\3\2\2\2\u02c0"+ "\u02c3\3\2\2\2\u02c1\u02bf\3\2\2\2\u02c1\u02c2\3\2\2\2\u02c2\u02c4\3\2"+ "\2\2\u02c3\u02c1\3\2\2\2\u02c4\u02c6\5\u0083B\2\u02c5\u02c1\3\2\2\2\u02c5"+ "\u02c6\3\2\2\2\u02c6\u0082\3\2\2\2\u02c7\u02c8\t\5\2\2\u02c8\u0084\3\2"+ "\2\2\u02c9\u02cc\5\u0083B\2\u02ca\u02cc\7a\2\2\u02cb\u02c9\3\2\2\2\u02cb"+ "\u02ca\3\2\2\2\u02cc\u0086\3\2\2\2\u02cd\u02cf\7\62\2\2\u02ce\u02d0\5"+ "}?\2\u02cf\u02ce\3\2\2\2\u02cf\u02d0\3\2\2\2\u02d0\u02d1\3\2\2\2\u02d1"+ "\u02d2\5\u0089E\2\u02d2\u0088\3\2\2\2\u02d3\u02db\5\u008bF\2\u02d4\u02d6"+ "\5\u008dG\2\u02d5\u02d4\3\2\2\2\u02d6\u02d9\3\2\2\2\u02d7\u02d5\3\2\2"+ "\2\u02d7\u02d8\3\2\2\2\u02d8\u02da\3\2\2\2\u02d9\u02d7\3\2\2\2\u02da\u02dc"+ "\5\u008bF\2\u02db\u02d7\3\2\2\2\u02db\u02dc\3\2\2\2\u02dc\u008a\3\2\2"+ "\2\u02dd\u02de\t\6\2\2\u02de\u008c\3\2\2\2\u02df\u02e2\5\u008bF\2\u02e0"+ "\u02e2\7a\2\2\u02e1\u02df\3\2\2\2\u02e1\u02e0\3\2\2\2\u02e2\u008e\3\2"+ 
"\2\2\u02e3\u02e4\7\62\2\2\u02e4\u02e5\t\7\2\2\u02e5\u02e6\5\u0091I\2\u02e6"+ "\u0090\3\2\2\2\u02e7\u02ef\5\u0093J\2\u02e8\u02ea\5\u0095K\2\u02e9\u02e8"+ "\3\2\2\2\u02ea\u02ed\3\2\2\2\u02eb\u02e9\3\2\2\2\u02eb\u02ec\3\2\2\2\u02ec"+ "\u02ee\3\2\2\2\u02ed\u02eb\3\2\2\2\u02ee\u02f0\5\u0093J\2\u02ef\u02eb"+ "\3\2\2\2\u02ef\u02f0\3\2\2\2\u02f0\u0092\3\2\2\2\u02f1\u02f2\t\b\2\2\u02f2"+ "\u0094\3\2\2\2\u02f3\u02f6\5\u0093J\2\u02f4\u02f6\7a\2\2\u02f5\u02f3\3"+ "\2\2\2\u02f5\u02f4\3\2\2\2\u02f6\u0096\3\2\2\2\u02f7\u02fa\5\u0099M\2"+ "\u02f8\u02fa\5\u00a5S\2\u02f9\u02f7\3\2\2\2\u02f9\u02f8\3\2\2\2\u02fa"+ "\u0098\3\2\2\2\u02fb\u02fc\5u;\2\u02fc\u02fe\7\60\2\2\u02fd\u02ff\5u;"+ "\2\u02fe\u02fd\3\2\2\2\u02fe\u02ff\3\2\2\2\u02ff\u0301\3\2\2\2\u0300\u0302"+ "\5\u009bN\2\u0301\u0300\3\2\2\2\u0301\u0302\3\2\2\2\u0302\u0304\3\2\2"+ "\2\u0303\u0305\5\u00a3R\2\u0304\u0303\3\2\2\2\u0304\u0305\3\2\2\2\u0305"+ "\u0317\3\2\2\2\u0306\u0307\7\60\2\2\u0307\u0309\5u;\2\u0308\u030a\5\u009b"+ "N\2\u0309\u0308\3\2\2\2\u0309\u030a\3\2\2\2\u030a\u030c\3\2\2\2\u030b"+ "\u030d\5\u00a3R\2\u030c\u030b\3\2\2\2\u030c\u030d\3\2\2\2\u030d\u0317"+ "\3\2\2\2\u030e\u030f\5u;\2\u030f\u0311\5\u009bN\2\u0310\u0312\5\u00a3"+ "R\2\u0311\u0310\3\2\2\2\u0311\u0312\3\2\2\2\u0312\u0317\3\2\2\2\u0313"+ "\u0314\5u;\2\u0314\u0315\5\u00a3R\2\u0315\u0317\3\2\2\2\u0316\u02fb\3"+ "\2\2\2\u0316\u0306\3\2\2\2\u0316\u030e\3\2\2\2\u0316\u0313\3\2\2\2\u0317"+ "\u009a\3\2\2\2\u0318\u0319\5\u009dO\2\u0319\u031a\5\u009fP\2\u031a\u009c"+ "\3\2\2\2\u031b\u031c\t\t\2\2\u031c\u009e\3\2\2\2\u031d\u031f\5\u00a1Q"+ "\2\u031e\u031d\3\2\2\2\u031e\u031f\3\2\2\2\u031f\u0320\3\2\2\2\u0320\u0321"+ "\5u;\2\u0321\u00a0\3\2\2\2\u0322\u0323\t\n\2\2\u0323\u00a2\3\2\2\2\u0324"+ "\u0325\t\13\2\2\u0325\u00a4\3\2\2\2\u0326\u0327\5\u00a7T\2\u0327\u0329"+ "\5\u00a9U\2\u0328\u032a\5\u00a3R\2\u0329\u0328\3\2\2\2\u0329\u032a\3\2"+ "\2\2\u032a\u00a6\3\2\2\2\u032b\u032d\5\177@\2\u032c\u032e\7\60\2\2\u032d"+ 
"\u032c\3\2\2\2\u032d\u032e\3\2\2\2\u032e\u0337\3\2\2\2\u032f\u0330\7\62"+ "\2\2\u0330\u0332\t\4\2\2\u0331\u0333\5\u0081A\2\u0332\u0331\3\2\2\2\u0332"+ "\u0333\3\2\2\2\u0333\u0334\3\2\2\2\u0334\u0335\7\60\2\2\u0335\u0337\5"+ "\u0081A\2\u0336\u032b\3\2\2\2\u0336\u032f\3\2\2\2\u0337\u00a8\3\2\2\2"+ "\u0338\u0339\5\u00abV\2\u0339\u033a\5\u009fP\2\u033a\u00aa\3\2\2\2\u033b"+ "\u033c\t\f\2\2\u033c\u00ac\3\2\2\2\u033d\u033e\7v\2\2\u033e\u033f\7t\2"+ "\2\u033f\u0340\7w\2\2\u0340\u0347\7g\2\2\u0341\u0342\7h\2\2\u0342\u0343"+ "\7c\2\2\u0343\u0344\7n\2\2\u0344\u0345\7u\2\2\u0345\u0347\7g\2\2\u0346"+ "\u033d\3\2\2\2\u0346\u0341\3\2\2\2\u0347\u00ae\3\2\2\2\u0348\u0349\7)"+ "\2\2\u0349\u034a\5\u00b1Y\2\u034a\u034b\7)\2\2\u034b\u0351\3\2\2\2\u034c"+ "\u034d\7)\2\2\u034d\u034e\5\u00b9]\2\u034e\u034f\7)\2\2\u034f\u0351\3"+ "\2\2\2\u0350\u0348\3\2\2\2\u0350\u034c\3\2\2\2\u0351\u00b0\3\2\2\2\u0352"+ "\u0353\n\r\2\2\u0353\u00b2\3\2\2\2\u0354\u0356\7$\2\2\u0355\u0357\5\u00b5"+ "[\2\u0356\u0355\3\2\2\2\u0356\u0357\3\2\2\2\u0357\u0358\3\2\2\2\u0358"+ "\u0359\7$\2\2\u0359\u00b4\3\2\2\2\u035a\u035c\5\u00b7\\\2\u035b\u035a"+ "\3\2\2\2\u035c\u035d\3\2\2\2\u035d\u035b\3\2\2\2\u035d\u035e\3\2\2\2\u035e"+ "\u00b6\3\2\2\2\u035f\u0362\n\16\2\2\u0360\u0362\5\u00b9]\2\u0361\u035f"+ "\3\2\2\2\u0361\u0360\3\2\2\2\u0362\u00b8\3\2\2\2\u0363\u0364\7^\2\2\u0364"+ "\u0368\t\17\2\2\u0365\u0368\5\u00bb^\2\u0366\u0368\5\u00bd_\2\u0367\u0363"+ "\3\2\2\2\u0367\u0365\3\2\2\2\u0367\u0366\3\2\2\2\u0368\u00ba\3\2\2\2\u0369"+ "\u036a\7^\2\2\u036a\u0375\5\u008bF\2\u036b\u036c\7^\2\2\u036c\u036d\5"+ "\u008bF\2\u036d\u036e\5\u008bF\2\u036e\u0375\3\2\2\2\u036f\u0370\7^\2"+ "\2\u0370\u0371\5\u00bf`\2\u0371\u0372\5\u008bF\2\u0372\u0373\5\u008bF"+ "\2\u0373\u0375\3\2\2\2\u0374\u0369\3\2\2\2\u0374\u036b\3\2\2\2\u0374\u036f"+ "\3\2\2\2\u0375\u00bc\3\2\2\2\u0376\u0377\7^\2\2\u0377\u0378\7w\2\2\u0378"+ "\u0379\5\u0083B\2\u0379\u037a\5\u0083B\2\u037a\u037b\5\u0083B\2\u037b"+ 
"\u037c\5\u0083B\2\u037c\u00be\3\2\2\2\u037d\u037e\t\20\2\2\u037e\u00c0"+ "\3\2\2\2\u037f\u0380\7p\2\2\u0380\u0381\7w\2\2\u0381\u0382\7n\2\2\u0382"+ "\u0383\7n\2\2\u0383\u00c2\3\2\2\2\u0384\u0385\7*\2\2\u0385\u00c4\3\2\2"+ "\2\u0386\u0387\7+\2\2\u0387\u00c6\3\2\2\2\u0388\u0389\7}\2\2\u0389\u00c8"+ "\3\2\2\2\u038a\u038b\7\177\2\2\u038b\u00ca\3\2\2\2\u038c\u038d\7]\2\2"+ "\u038d\u00cc\3\2\2\2\u038e\u038f\7_\2\2\u038f\u00ce\3\2\2\2\u0390\u0391"+ "\7=\2\2\u0391\u00d0\3\2\2\2\u0392\u0393\7.\2\2\u0393\u00d2\3\2\2\2\u0394"+ "\u0395\7\60\2\2\u0395\u00d4\3\2\2\2\u0396\u0397\7?\2\2\u0397\u00d6\3\2"+ "\2\2\u0398\u0399\7@\2\2\u0399\u00d8\3\2\2\2\u039a\u039b\7>\2\2\u039b\u00da"+ "\3\2\2\2\u039c\u039d\7#\2\2\u039d\u00dc\3\2\2\2\u039e\u039f\7\u0080\2"+ "\2\u039f\u00de\3\2\2\2\u03a0\u03a1\7A\2\2\u03a1\u00e0\3\2\2\2\u03a2\u03a3"+ "\7<\2\2\u03a3\u00e2\3\2\2\2\u03a4\u03a5\7?\2\2\u03a5\u03a6\7?\2\2\u03a6"+ "\u00e4\3\2\2\2\u03a7\u03a8\7>\2\2\u03a8\u03a9\7?\2\2\u03a9\u00e6\3\2\2"+ "\2\u03aa\u03ab\7@\2\2\u03ab\u03ac\7?\2\2\u03ac\u00e8\3\2\2\2\u03ad\u03ae"+ "\7#\2\2\u03ae\u03af\7?\2\2\u03af\u00ea\3\2\2\2\u03b0\u03b1\7(\2\2\u03b1"+ "\u03b2\7(\2\2\u03b2\u00ec\3\2\2\2\u03b3\u03b4\7~\2\2\u03b4\u03b5\7~\2"+ "\2\u03b5\u00ee\3\2\2\2\u03b6\u03b7\7-\2\2\u03b7\u03b8\7-\2\2\u03b8\u00f0"+ "\3\2\2\2\u03b9\u03ba\7/\2\2\u03ba\u03bb\7/\2\2\u03bb\u00f2\3\2\2\2\u03bc"+ "\u03bd\7-\2\2\u03bd\u00f4\3\2\2\2\u03be\u03bf\7/\2\2\u03bf\u00f6\3\2\2"+ "\2\u03c0\u03c1\7,\2\2\u03c1\u00f8\3\2\2\2\u03c2\u03c3\7\61\2\2\u03c3\u00fa"+ "\3\2\2\2\u03c4\u03c5\7(\2\2\u03c5\u00fc\3\2\2\2\u03c6\u03c7\7~\2\2\u03c7"+ "\u00fe\3\2\2\2\u03c8\u03c9\7`\2\2\u03c9\u0100\3\2\2\2\u03ca\u03cb\7\'"+ "\2\2\u03cb\u0102\3\2\2\2\u03cc\u03cd\7-\2\2\u03cd\u03ce\7?\2\2\u03ce\u0104"+ "\3\2\2\2\u03cf\u03d0\7/\2\2\u03d0\u03d1\7?\2\2\u03d1\u0106\3\2\2\2\u03d2"+ "\u03d3\7,\2\2\u03d3\u03d4\7?\2\2\u03d4\u0108\3\2\2\2\u03d5\u03d6\7\61"+ "\2\2\u03d6\u03d7\7?\2\2\u03d7\u010a\3\2\2\2\u03d8\u03d9\7(\2\2\u03d9\u03da"+ 
"\7?\2\2\u03da\u010c\3\2\2\2\u03db\u03dc\7~\2\2\u03dc\u03dd\7?\2\2\u03dd"+ "\u010e\3\2\2\2\u03de\u03df\7`\2\2\u03df\u03e0\7?\2\2\u03e0\u0110\3\2\2"+ "\2\u03e1\u03e2\7\'\2\2\u03e2\u03e3\7?\2\2\u03e3\u0112\3\2\2\2\u03e4\u03e5"+ "\7>\2\2\u03e5\u03e6\7>\2\2\u03e6\u03e7\7?\2\2\u03e7\u0114\3\2\2\2\u03e8"+ "\u03e9\7@\2\2\u03e9\u03ea\7@\2\2\u03ea\u03eb\7?\2\2\u03eb\u0116\3\2\2"+ "\2\u03ec\u03ed\7@\2\2\u03ed\u03ee\7@\2\2\u03ee\u03ef\7@\2\2\u03ef\u03f0"+ "\7?\2\2\u03f0\u0118\3\2\2\2\u03f1\u03f5\5\u011b\u008e\2\u03f2\u03f4\5"+ "\u011d\u008f\2\u03f3\u03f2\3\2\2\2\u03f4\u03f7\3\2\2\2\u03f5\u03f3\3\2"+ "\2\2\u03f5\u03f6\3\2\2\2\u03f6\u011a\3\2\2\2\u03f7\u03f5\3\2\2\2\u03f8"+ "\u03ff\t\21\2\2\u03f9\u03fa\n\22\2\2\u03fa\u03ff\6\u008e\2\2\u03fb\u03fc"+ "\t\23\2\2\u03fc\u03fd\t\24\2\2\u03fd\u03ff\6\u008e\3\2\u03fe\u03f8\3\2"+ "\2\2\u03fe\u03f9\3\2\2\2\u03fe\u03fb\3\2\2\2\u03ff\u011c\3\2\2\2\u0400"+ "\u0407\t\25\2\2\u0401\u0402\n\22\2\2\u0402\u0407\6\u008f\4\2\u0403\u0404"+ "\t\23\2\2\u0404\u0405\t\24\2\2\u0405\u0407\6\u008f\5\2\u0406\u0400\3\2"+ "\2\2\u0406\u0401\3\2\2\2\u0406\u0403\3\2\2\2\u0407\u011e\3\2\2\2\u0408"+ "\u0409\7B\2\2\u0409\u0120\3\2\2\2\u040a\u040b\7\60\2\2\u040b\u040c\7\60"+ "\2\2\u040c\u040d\7\60\2\2\u040d\u0122\3\2\2\2\u040e\u0410\t\26\2\2\u040f"+ "\u040e\3\2\2\2\u0410\u0411\3\2\2\2\u0411\u040f\3\2\2\2\u0411\u0412\3\2"+ "\2\2\u0412\u0413\3\2\2\2\u0413\u0414\b\u0092\2\2\u0414\u0124\3\2\2\2\u0415"+ "\u0416\7\61\2\2\u0416\u0417\7,\2\2\u0417\u041b\3\2\2\2\u0418\u041a\13"+ "\2\2\2\u0419\u0418\3\2\2\2\u041a\u041d\3\2\2\2\u041b\u041c\3\2\2\2\u041b"+ "\u0419\3\2\2\2\u041c\u041e\3\2\2\2\u041d\u041b\3\2\2\2\u041e\u041f\7,"+ "\2\2\u041f\u0420\7\61\2\2\u0420\u0421\3\2\2\2\u0421\u0422\b\u0093\3\2"+ "\u0422\u0126\3\2\2\2\u0423\u0424\7\61\2\2\u0424\u0425\7\61\2\2\u0425\u0429"+ "\3\2\2\2\u0426\u0428\n\27\2\2\u0427\u0426\3\2\2\2\u0428\u042b\3\2\2\2"+ "\u0429\u0427\3\2\2\2\u0429\u042a\3\2\2\2\u042a\u042c\3\2\2\2\u042b\u0429"+ 
"\3\2\2\2\u042c\u042d\b\u0094\4\2\u042d\u0128\3\2\2\2\64\2\u0280\u0284"+ "\u0288\u028c\u0290\u0297\u029c\u029e\u02a4\u02a8\u02ac\u02b2\u02b7\u02c1"+ "\u02c5\u02cb\u02cf\u02d7\u02db\u02e1\u02eb\u02ef\u02f5\u02f9\u02fe\u0301"+ "\u0304\u0309\u030c\u0311\u0316\u031e\u0329\u032d\u0332\u0336\u0346\u0350"+ "\u0356\u035d\u0361\u0367\u0374\u03f5\u03fe\u0406\u0411\u041b\u0429"; public static final ATN _ATN = ATNSimulator.deserialize(_serializedATN.toCharArray()); static { _decisionToDFA = new DFA[_ATN.getNumberOfDecisions()]; for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) { _decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i); } } }
/* * Copyright (C) 2010 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package okhttp3.internal.http; import java.io.IOException; import java.net.CookieManager; import java.net.HttpCookie; import java.net.HttpURLConnection; import java.net.InetAddress; import java.net.URI; import java.net.URLConnection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import okhttp3.HttpUrl; import okhttp3.OkHttpClient; import okhttp3.OkUrlFactory; import okhttp3.JavaNetCookieJar; import okhttp3.mockwebserver.MockResponse; import okhttp3.mockwebserver.MockWebServer; import okhttp3.mockwebserver.RecordedRequest; import org.junit.Test; import static java.net.CookiePolicy.ACCEPT_ORIGINAL_SERVER; import static okhttp3.TestUtil.defaultClient; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; /** Android's CookiesTest. 
*/
public class CookiesTest {
  // Client under test. Rebuilt per test with whatever CookieJar that case needs;
  // not final because each test swaps in its own cookie-jar-configured copy.
  private OkHttpClient client = defaultClient();

  /**
   * A Netscape-style Set-Cookie header (lowercase attributes, "expires" date,
   * no Version) is parsed into the java.net cookie store as a version-0 cookie.
   */
  @Test public void testNetscapeResponse() throws Exception {
    CookieManager cookieManager = new CookieManager(null, ACCEPT_ORIGINAL_SERVER);
    client = client.newBuilder()
        .cookieJar(new JavaNetCookieJar(cookieManager))
        .build();
    MockWebServer server = new MockWebServer();
    server.start();

    HttpUrl urlWithIpAddress = urlWithIpAddress(server, "/path/foo");
    server.enqueue(new MockResponse().addHeader("Set-Cookie: a=android; "
        + "expires=Fri, 31-Dec-9999 23:59:59 GMT; "
        + "path=/path; "
        + "domain=" + urlWithIpAddress.host() + "; "
        + "secure"));
    get(urlWithIpAddress);

    List<HttpCookie> cookies = cookieManager.getCookieStore().getCookies();
    assertEquals(1, cookies.size());
    HttpCookie cookie = cookies.get(0);
    assertEquals("a", cookie.getName());
    assertEquals("android", cookie.getValue());
    assertNull(cookie.getComment());
    assertNull(cookie.getCommentURL());
    assertEquals(false, cookie.getDiscard());
    // The far-future "expires" date is translated into a (very large) max-age.
    assertTrue(cookie.getMaxAge() > 100000000000L);
    assertEquals("/path", cookie.getPath());
    assertEquals(true, cookie.getSecure());
    assertEquals(0, cookie.getVersion());

    server.shutdown(); // was leaked: release the listening socket
  }

  /**
   * An RFC 2109 Set-Cookie header (capitalized attributes, Max-Age, Version=1)
   * is parsed into the cookie store with its attributes preserved.
   */
  @Test public void testRfc2109Response() throws Exception {
    CookieManager cookieManager = new CookieManager(null, ACCEPT_ORIGINAL_SERVER);
    client = client.newBuilder()
        .cookieJar(new JavaNetCookieJar(cookieManager))
        .build();
    MockWebServer server = new MockWebServer();
    server.start();

    HttpUrl urlWithIpAddress = urlWithIpAddress(server, "/path/foo");
    server.enqueue(new MockResponse().addHeader("Set-Cookie: a=android; "
        + "Comment=this cookie is delicious; "
        + "Domain=" + urlWithIpAddress.host() + "; "
        + "Max-Age=60; "
        + "Path=/path; "
        + "Secure; "
        + "Version=1"));
    get(urlWithIpAddress);

    List<HttpCookie> cookies = cookieManager.getCookieStore().getCookies();
    assertEquals(1, cookies.size());
    HttpCookie cookie = cookies.get(0);
    assertEquals("a", cookie.getName());
    assertEquals("android", cookie.getValue());
    assertNull(cookie.getCommentURL());
    assertEquals(false, cookie.getDiscard());
    assertEquals(60.0, cookie.getMaxAge(), 1.0); // Converting to a fixed date can cause rounding!
    assertEquals("/path", cookie.getPath());
    assertEquals(true, cookie.getSecure());

    server.shutdown(); // was leaked: release the listening socket
  }

  /**
   * Quoted attribute values ("android", "/path", "1") have their surrounding
   * quotes stripped when the cookie is parsed.
   */
  @Test public void testQuotedAttributeValues() throws Exception {
    CookieManager cookieManager = new CookieManager(null, ACCEPT_ORIGINAL_SERVER);
    client = client.newBuilder()
        .cookieJar(new JavaNetCookieJar(cookieManager))
        .build();
    MockWebServer server = new MockWebServer();
    server.start();

    HttpUrl urlWithIpAddress = urlWithIpAddress(server, "/path/foo");
    server.enqueue(new MockResponse().addHeader("Set-Cookie: a=\"android\"; "
        + "Comment=\"this cookie is delicious\"; "
        + "CommentURL=\"http://google.com/\"; "
        + "Discard; "
        + "Domain=" + urlWithIpAddress.host() + "; "
        + "Max-Age=60; "
        + "Path=\"/path\"; "
        + "Port=\"80,443," + server.getPort() + "\"; "
        + "Secure; "
        + "Version=\"1\""));
    get(urlWithIpAddress);

    List<HttpCookie> cookies = cookieManager.getCookieStore().getCookies();
    assertEquals(1, cookies.size());
    HttpCookie cookie = cookies.get(0);
    assertEquals("a", cookie.getName());
    assertEquals("android", cookie.getValue()); // quotes stripped
    assertEquals(60.0, cookie.getMaxAge(), 1.0); // Converting to a fixed date can cause rounding!
    assertEquals("/path", cookie.getPath());
    assertEquals(true, cookie.getSecure());

    server.shutdown(); // was leaked: release the listening socket
  }

  /**
   * Cookies seeded directly into the store are attached to outgoing requests,
   * joined with "; " in insertion order.
   */
  @Test public void testSendingCookiesFromStore() throws Exception {
    MockWebServer server = new MockWebServer();
    server.enqueue(new MockResponse());
    server.start();
    CookieManager cookieManager = new CookieManager(null, ACCEPT_ORIGINAL_SERVER);

    HttpCookie cookieA = new HttpCookie("a", "android");
    cookieA.setDomain(server.getHostName());
    cookieA.setPath("/");
    cookieManager.getCookieStore().add(server.url("/").uri(), cookieA);
    HttpCookie cookieB = new HttpCookie("b", "banana");
    cookieB.setDomain(server.getHostName());
    cookieB.setPath("/");
    cookieManager.getCookieStore().add(server.url("/").uri(), cookieB);

    client = client.newBuilder()
        .cookieJar(new JavaNetCookieJar(cookieManager))
        .build();
    get(server.url("/"));

    RecordedRequest request = server.takeRequest();
    assertEquals("a=android; b=banana", request.getHeader("Cookie"));

    server.shutdown(); // was leaked: release the listening socket
  }

  /**
   * A cookie scoped (by domain/port) to the redirecting server must be sent to
   * that server only — the redirect target must receive no Cookie headers.
   */
  @Test public void testRedirectsDoNotIncludeTooManyCookies() throws Exception {
    MockWebServer redirectTarget = new MockWebServer();
    redirectTarget.enqueue(new MockResponse().setBody("A"));
    redirectTarget.start();
    MockWebServer redirectSource = new MockWebServer();
    redirectSource.enqueue(new MockResponse()
        .setResponseCode(HttpURLConnection.HTTP_MOVED_TEMP)
        .addHeader("Location: " + redirectTarget.url("/")));
    redirectSource.start();

    CookieManager cookieManager = new CookieManager(null, ACCEPT_ORIGINAL_SERVER);
    HttpCookie cookie = new HttpCookie("c", "cookie");
    cookie.setDomain(redirectSource.getHostName());
    cookie.setPath("/");
    // Restrict the cookie to the source server's port so it cannot legally
    // match the (differently-ported) redirect target.
    String portList = Integer.toString(redirectSource.getPort());
    cookie.setPortlist(portList);
    cookieManager.getCookieStore().add(redirectSource.url("/").uri(), cookie);
    client = client.newBuilder()
        .cookieJar(new JavaNetCookieJar(cookieManager))
        .build();
    get(redirectSource.url("/"));

    RecordedRequest request = redirectSource.takeRequest();
    assertEquals("c=cookie", request.getHeader("Cookie"));
    for (String header : redirectTarget.takeRequest().getHeaders().names()) {
      if (header.startsWith("Cookie")) {
        fail(header);
      }
    }

    // Were leaked: release both listening sockets.
    redirectSource.shutdown();
    redirectTarget.shutdown();
  }

  /**
   * Header names returned by a CookieHandler are matched case-insensitively:
   * "COOKIE" and "cooKIE2" entries are merged into a single "Cookie" header,
   * and no "Cookie2" header is emitted.
   */
  @Test public void testCookiesSentIgnoresCase() throws Exception {
    client = client.newBuilder()
        .cookieJar(new JavaNetCookieJar(new CookieManager() {
          @Override public Map<String, List<String>> get(URI uri,
              Map<String, List<String>> requestHeaders) throws IOException {
            Map<String, List<String>> result = new HashMap<>();
            result.put("COOKIE", Collections.singletonList("Bar=bar"));
            result.put("cooKIE2", Collections.singletonList("Baz=baz"));
            return result;
          }
        }))
        .build();
    MockWebServer server = new MockWebServer();
    server.enqueue(new MockResponse());
    server.start();

    get(server.url("/"));

    RecordedRequest request = server.takeRequest();
    assertEquals("Bar=bar; Baz=baz", request.getHeader("Cookie"));
    assertNull(request.getHeader("Cookie2"));
    assertNull(request.getHeader("Quux"));

    server.shutdown(); // was leaked: release the listening socket
  }

  /**
   * Returns the server's URL for {@code path}, with the hostname replaced by
   * its resolved IP address — used to exercise domain-matching against IPs.
   */
  private HttpUrl urlWithIpAddress(MockWebServer server, String path) throws Exception {
    return server.url(path)
        .newBuilder()
        .host(InetAddress.getByName(server.getHostName()).getHostAddress())
        .build();
  }

  /**
   * Fetches {@code url} through {@link #client} (via the legacy
   * {@link OkUrlFactory} bridge, which these tests exercise on purpose),
   * consumes/closes the response body, and returns the response headers.
   */
  private Map<String, List<String>> get(HttpUrl url) throws Exception {
    URLConnection connection = new OkUrlFactory(client).open(url.url());
    Map<String, List<String>> headers = connection.getHeaderFields();
    connection.getInputStream().close();
    return headers;
  }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

package org.elasticsearch.xpack.ml.inference.persistence;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.client.Client;
import org.elasticsearch.core.CheckedFunction;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig;
import org.elasticsearch.xpack.core.ml.inference.persistence.InferenceIndexConstants;
import org.elasticsearch.xpack.core.ml.job.messages.Messages;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;

import java.io.IOException;
import java.io.InputStream;
import java.util.concurrent.ExecutorService;
import java.util.function.Consumer;

import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN;
import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;

/**
 * Searches for and emits {@link TrainedModelDefinitionDoc}s in
 * order based on the {@code doc_num}.
 *
 * This is a one-use class: it has internal state ({@code numDocsWritten}) to
 * track progress and cannot be used again to load another model.
 *
 * Defaults to searching in {@link InferenceIndexConstants#INDEX_PATTERN}
 * if a different index is not set.
 */
public class ChunkedTrainedModelRestorer {

    private static final Logger logger = LogManager.getLogger(ChunkedTrainedModelRestorer.class);

    // Hard upper bound on the per-search page size.
    private static final int MAX_NUM_DEFINITION_DOCS = 20;

    private final Client client;
    private final NamedXContentRegistry xContentRegistry;
    // Runs both the initial search and every follow-up search-after request,
    // keeping the restore work off the calling thread.
    private final ExecutorService executorService;
    private final String modelId;
    private String index = InferenceIndexConstants.INDEX_PATTERN;
    private int searchSize = 10;
    // Count of definition docs already handed to the consumer. Also seeds the
    // lastNum sentinel in doSearch — see the comment there.
    private int numDocsWritten = 0;

    public ChunkedTrainedModelRestorer(String modelId,
                                       Client client,
                                       ExecutorService executorService,
                                       NamedXContentRegistry xContentRegistry) {
        this.client = client;
        this.executorService = executorService;
        this.xContentRegistry = xContentRegistry;
        this.modelId = modelId;
    }

    /**
     * Sets the search page size.
     *
     * @param searchSize must be in the range (0, {@code MAX_NUM_DEFINITION_DOCS}]
     * @throws IllegalArgumentException if out of range
     */
    public void setSearchSize(int searchSize) {
        if (searchSize > MAX_NUM_DEFINITION_DOCS) {
            throw new IllegalArgumentException("search size [" + searchSize +
                "] cannot be bigger than [" + MAX_NUM_DEFINITION_DOCS + "]");
        }
        if (searchSize <= 0) {
            throw new IllegalArgumentException("search size [" + searchSize + "] must be greater than 0");
        }
        this.searchSize = searchSize;
    }

    /** Overrides the default index pattern to search in. */
    public void setSearchIndex(String indexNameOrPattern) {
        this.index = indexNameOrPattern;
    }

    /** @return the number of definition docs handed to the consumer so far */
    public int getNumDocsWritten() {
        return numDocsWritten;
    }

    /**
     * Return the model definitions one at a time on the {@code modelConsumer}.
     * Either {@code errorConsumer} or {@code successConsumer} will be called
     * when the process is finished.
     *
     * The {@code modelConsumer} has the opportunity to cancel loading by
     * returning false in which case the {@code successConsumer} is called
     * with the parameter Boolean.FALSE.
     *
     * The docs are returned in order based on {@link TrainedModelDefinitionDoc#getDocNum()}
     * there is no error checking for duplicate or missing docs the consumer should handle
     * those errors.
     *
     * Depending on the search size multiple searches may be made.
     *
     * @param modelConsumer Consumes model definition docs
     * @param successConsumer Called when all docs have been returned or the loading is cancelled
     * @param errorConsumer In the event of an error
     */
    public void restoreModelDefinition(CheckedFunction<TrainedModelDefinitionDoc, Boolean, IOException> modelConsumer,
                                       Consumer<Boolean> successConsumer,
                                       Consumer<Exception> errorConsumer) {
        logger.debug("[{}] restoring model", modelId);
        SearchRequest searchRequest = buildSearch(client, modelId, index, searchSize);
        // Hop to the executor so the (potentially long) restore never runs on the caller.
        executorService.execute(() -> doSearch(searchRequest, modelConsumer, successConsumer, errorConsumer));
    }

    // One page of the paginated restore. Recurses (via the executor) with a
    // search_after cursor until fewer than searchSize hits come back or the
    // total-hits count is reached.
    private void doSearch(SearchRequest searchRequest,
                          CheckedFunction<TrainedModelDefinitionDoc, Boolean, IOException> modelConsumer,
                          Consumer<Boolean> successConsumer,
                          Consumer<Exception> errorConsumer) {

        executeAsyncWithOrigin(client, ML_ORIGIN, SearchAction.INSTANCE, searchRequest, ActionListener.wrap(
            searchResponse -> {
                // An empty page means the model definition is missing entirely
                // (the first search) or disappeared mid-restore.
                if (searchResponse.getHits().getHits().length == 0) {
                    errorConsumer.accept(new ResourceNotFoundException(
                        Messages.getMessage(Messages.MODEL_DEFINITION_NOT_FOUND, modelId)));
                    return;
                }

                // Set lastNum to a non-zero to prevent an infinite loop of
                // search after requests in the absolute worse case where
                // it has all gone wrong.
                // Docs are numbered 0..N. we must have seen at least
                // this many docs so far.
                int lastNum = numDocsWritten - 1;

                for (SearchHit hit : searchResponse.getHits().getHits()) {
                    try {
                        TrainedModelDefinitionDoc doc =
                            parseModelDefinitionDocLenientlyFromSource(hit.getSourceRef(), modelId, xContentRegistry);
                        lastNum = doc.getDocNum();

                        boolean continueSearching = modelConsumer.apply(doc);
                        if (continueSearching == false) {
                            // signal the search has finished early
                            successConsumer.accept(Boolean.FALSE);
                            return;
                        }

                    } catch (IOException e) {
                        logger.error(new ParameterizedMessage("[{}] error writing model definition", modelId), e);
                        errorConsumer.accept(e);
                        return;
                    }
                }

                numDocsWritten += searchResponse.getHits().getHits().length;

                // Finished when this page was short, or we have consumed every hit.
                boolean endOfSearch = searchResponse.getHits().getHits().length < searchSize ||
                    searchResponse.getHits().getTotalHits().value == numDocsWritten;

                if (endOfSearch) {
                    successConsumer.accept(Boolean.TRUE);
                } else {
                    // search again with after
                    // The search_after cursor pairs with the (index DESC, doc_num ASC)
                    // sort built in buildSearchBuilder.
                    SearchHit lastHit = searchResponse.getHits().getAt(searchResponse.getHits().getHits().length - 1);
                    SearchRequestBuilder searchRequestBuilder = buildSearchBuilder(client, modelId, index, searchSize);
                    searchRequestBuilder.searchAfter(new Object[]{lastHit.getIndex(), lastNum});
                    executorService.execute(() ->
                        doSearch(searchRequestBuilder.request(), modelConsumer, successConsumer, errorConsumer));
                }
            },
            e -> {
                // Normalize index-not-found etc. into the same "definition not found" error.
                if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) {
                    errorConsumer.accept(new ResourceNotFoundException(
                        Messages.getMessage(Messages.MODEL_DEFINITION_NOT_FOUND, modelId)));
                } else {
                    errorConsumer.accept(e);
                }
            }
        ));
    }

    // Filters to this model's definition docs and sorts so chunks come back in order.
    private static SearchRequestBuilder buildSearchBuilder(Client client, String modelId, String index, int searchSize) {
        return client.prepareSearch(index)
            .setQuery(QueryBuilders.constantScoreQuery(QueryBuilders
                .boolQuery()
                .filter(QueryBuilders.termQuery(TrainedModelConfig.MODEL_ID.getPreferredName(), modelId))
                .filter(QueryBuilders.termQuery(InferenceIndexConstants.DOC_TYPE.getPreferredName(),
                    TrainedModelDefinitionDoc.NAME))))
            .setSize(searchSize)
            .setTrackTotalHits(true)
            // First find the latest index
            .addSort("_index", SortOrder.DESC)
            // Then, sort by doc_num
            .addSort(SortBuilders.fieldSort(TrainedModelDefinitionDoc.DOC_NUM.getPreferredName())
                .order(SortOrder.ASC)
                .unmappedType("long"));
    }

    /** Builds the first-page search request for a model's definition docs. */
    public static SearchRequest buildSearch(Client client, String modelId, String index, int searchSize) {
        return buildSearchBuilder(client, modelId, index, searchSize).request();
    }

    /**
     * Parses a definition doc from raw source, tolerating unknown fields
     * (lenient parsing). Logs and rethrows on parse failure.
     *
     * @throws IOException if the source cannot be parsed
     */
    public static TrainedModelDefinitionDoc parseModelDefinitionDocLenientlyFromSource(BytesReference source,
                                                                                       String modelId,
                                                                                       NamedXContentRegistry xContentRegistry)
        throws IOException {

        try (InputStream stream = source.streamInput();
             XContentParser parser = XContentFactory.xContent(XContentType.JSON)
                 .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, stream)) {
            return TrainedModelDefinitionDoc.fromXContent(parser, true).build();
        } catch (IOException e) {
            logger.error(new ParameterizedMessage("[{}] failed to parse model definition", modelId), e);
            throw e;
        }
    }
}
/* * Copyright 2014 Goldman Sachs. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.gs.collections.impl.lazy.primitive; import java.util.NoSuchElementException; import com.gs.collections.api.BooleanIterable; import com.gs.collections.api.iterator.BooleanIterator; import com.gs.collections.impl.bag.mutable.primitive.BooleanHashBag; import com.gs.collections.impl.block.factory.primitive.BooleanPredicates; import com.gs.collections.impl.list.mutable.FastList; import com.gs.collections.impl.list.mutable.primitive.BooleanArrayList; import com.gs.collections.impl.set.mutable.primitive.BooleanHashSet; import com.gs.collections.impl.test.Verify; import org.junit.Assert; import org.junit.Test; /** * JUnit test for {@link ReverseBooleanIterable}. 
*/ public class ReverseBooleanIterableTest { @Test public void isEmpty() { BooleanIterable iterable = BooleanArrayList.newListWith(false, false, true).asReversed(); Verify.assertEmpty(new BooleanArrayList().asReversed()); Verify.assertNotEmpty(iterable); } @Test public void contains() { BooleanIterable iterable = BooleanArrayList.newListWith(false, false).asReversed(); Assert.assertTrue(iterable.contains(false)); Assert.assertFalse(iterable.contains(true)); } @Test public void containsAll() { BooleanIterable iterable = BooleanArrayList.newListWith(true, false, true).asReversed(); Assert.assertTrue(iterable.containsAll(true)); Assert.assertTrue(iterable.containsAll(true, false)); Assert.assertFalse(BooleanArrayList.newListWith(false, false).asReversed().containsAll(true)); Assert.assertFalse(BooleanArrayList.newListWith(false, false).asReversed().containsAll(BooleanArrayList.newListWith(true, false))); Assert.assertTrue(BooleanArrayList.newListWith(false, false, true).asReversed().containsAll(BooleanArrayList.newListWith(true, false))); } @Test public void iterator() { BooleanIterable iterable = BooleanArrayList.newListWith(false, false, true).asReversed(); BooleanIterator iterator = iterable.booleanIterator(); Assert.assertTrue(iterator.hasNext()); Assert.assertTrue(iterator.next()); Assert.assertTrue(iterator.hasNext()); Assert.assertFalse(iterator.next()); Assert.assertTrue(iterator.hasNext()); Assert.assertFalse(iterator.next()); } @Test(expected = NoSuchElementException.class) public void iterator_throws() { BooleanIterable iterable = BooleanArrayList.newListWith(false, false, true).asReversed(); BooleanIterator iterator = iterable.booleanIterator(); while (iterator.hasNext()) { iterator.next(); } iterator.next(); } @Test public void forEach() { BooleanIterable iterable = BooleanArrayList.newListWith(false, false, true).asReversed(); boolean[] result = {true}; iterable.forEach(each -> result[0] &= each); Assert.assertFalse(result[0]); } @Test public void size() 
{ BooleanIterable iterable = BooleanArrayList.newListWith(false, false, true).asReversed(); Verify.assertSize(0, new BooleanArrayList().asReversed()); Verify.assertSize(3, iterable); } @Test public void empty() { BooleanIterable iterable = BooleanArrayList.newListWith(false, false, true).asReversed(); Assert.assertTrue(iterable.notEmpty()); Verify.assertNotEmpty(iterable); } @Test public void count() { Assert.assertEquals(2L, BooleanArrayList.newListWith(false, false, true).asReversed().count(BooleanPredicates.equal(false))); } @Test public void anySatisfy() { Assert.assertTrue(BooleanArrayList.newListWith(true, false).asReversed().anySatisfy(BooleanPredicates.equal(false))); Assert.assertFalse(BooleanArrayList.newListWith(true).asReversed().anySatisfy(BooleanPredicates.equal(false))); } @Test public void allSatisfy() { Assert.assertFalse(BooleanArrayList.newListWith(true, false).asReversed().allSatisfy(BooleanPredicates.equal(false))); Assert.assertTrue(BooleanArrayList.newListWith(false, false).asReversed().allSatisfy(BooleanPredicates.equal(false))); } @Test public void noneSatisfy() { Assert.assertFalse(BooleanArrayList.newListWith(true, false).asReversed().noneSatisfy(BooleanPredicates.equal(false))); Assert.assertTrue(BooleanArrayList.newListWith(false, false).asReversed().noneSatisfy(BooleanPredicates.equal(true))); } @Test public void select() { BooleanIterable iterable = BooleanArrayList.newListWith(false, false, true).asReversed(); Verify.assertSize(2, iterable.select(BooleanPredicates.equal(false))); Verify.assertSize(1, iterable.select(BooleanPredicates.equal(true))); } @Test public void reject() { BooleanIterable iterable = BooleanArrayList.newListWith(false, false, true).asReversed(); Verify.assertSize(1, iterable.reject(BooleanPredicates.equal(false))); Verify.assertSize(2, iterable.reject(BooleanPredicates.equal(true))); } @Test public void detectIfNone() { BooleanIterable iterable = BooleanArrayList.newListWith(false, false).asReversed(); 
Assert.assertFalse(iterable.detectIfNone(BooleanPredicates.equal(false), true)); Assert.assertTrue(iterable.detectIfNone(BooleanPredicates.equal(true), true)); } @Test public void collect() { BooleanIterable iterable = BooleanArrayList.newListWith(false, false, true).asReversed(); Verify.assertIterablesEqual(FastList.newListWith(false, true, true), iterable.collect(parameter -> !parameter)); } @Test public void toArray() { BooleanIterable iterable = BooleanArrayList.newListWith(false, false, true).asReversed(); Assert.assertTrue(iterable.toArray()[0]); Assert.assertFalse(iterable.toArray()[1]); Assert.assertFalse(iterable.toArray()[2]); } @Test public void testToString() { BooleanIterable iterable = BooleanArrayList.newListWith(false, false, true).asReversed(); Assert.assertEquals("[true, false, false]", iterable.toString()); Assert.assertEquals("[]", new BooleanArrayList().asReversed().toString()); } @Test public void makeString() { BooleanIterable iterable = BooleanArrayList.newListWith(false, false, true).asReversed(); Assert.assertEquals("true, false, false", iterable.makeString()); Assert.assertEquals("true", BooleanArrayList.newListWith(true).makeString("/")); Assert.assertEquals("true/false/false", iterable.makeString("/")); Assert.assertEquals(iterable.toString(), iterable.makeString("[", ", ", "]")); Assert.assertEquals("", new BooleanArrayList().asReversed().makeString()); } @Test public void appendString() { BooleanIterable iterable = BooleanArrayList.newListWith(false, false, true).asReversed(); StringBuilder appendable = new StringBuilder(); new BooleanArrayList().asReversed().appendString(appendable); Assert.assertEquals("", appendable.toString()); StringBuilder appendable2 = new StringBuilder(); iterable.appendString(appendable2); Assert.assertEquals("true, false, false", appendable2.toString()); StringBuilder appendable3 = new StringBuilder(); iterable.appendString(appendable3, "/"); Assert.assertEquals("true/false/false", appendable3.toString()); 
StringBuilder appendable4 = new StringBuilder(); iterable.appendString(appendable4, "[", ", ", "]"); Assert.assertEquals(iterable.toString(), appendable4.toString()); } @Test public void toList() { Assert.assertEquals(BooleanArrayList.newListWith(false, true), BooleanArrayList.newListWith(true, false).asReversed().toList()); } @Test public void toSet() { Assert.assertEquals(BooleanHashSet.newSetWith(true, false), BooleanArrayList.newListWith(true, false).asReversed().toSet()); } @Test public void toBag() { Assert.assertEquals(BooleanHashBag.newBagWith(true, false), BooleanArrayList.newListWith(true, false).asReversed().toBag()); } @Test public void asLazy() { Assert.assertEquals(BooleanArrayList.newListWith(false, true), BooleanArrayList.newListWith(true, false).asReversed().asLazy().toList()); } }
/*
 * Copyright (c) 2007, 2016, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 */
/*
 * Copyright 2000-2002,2004 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.sun.org.apache.xerces.internal.impl.xs;

/**
 * Collection of symbols used to parse a Schema Grammar.
 *
 * <p>All values here are compile-time string literals. The JLS (section 3.10.5)
 * guarantees that string literals are interned at class load, so the historical
 * explicit {@code .intern()} calls were redundant and have been removed: each
 * field still refers to the canonical interned instance, so identity
 * ({@code ==}) comparisons against other interned strings continue to hold,
 * and the fields are now true compile-time constants.
 *
 * @xerces.internal
 *
 * @author jeffrey rodriguez
 */
public final class SchemaSymbols {

    // Strings that are not added to the schema symbol table, because they
    // are not symbols in the schema document.
    // The validator can choose to add them by itself.

    // The following strings (xsi:*, xsd) will be added into the
    // symbol table that comes with the parser.

    // xsi attributes: in validator
    public static final String URI_XSI                        = "http://www.w3.org/2001/XMLSchema-instance";
    public static final String XSI_SCHEMALOCATION             = "schemaLocation";
    public static final String XSI_NONAMESPACESCHEMALOCATION  = "noNamespaceSchemaLocation";
    public static final String XSI_TYPE                       = "type";
    public static final String XSI_NIL                        = "nil";

    // schema namespace
    public static final String URI_SCHEMAFORSCHEMA            = "http://www.w3.org/2001/XMLSchema";

    // all possible schema element names
    public static final String ELT_ALL                        = "all";
    public static final String ELT_ANNOTATION                 = "annotation";
    public static final String ELT_ANY                        = "any";
    public static final String ELT_ANYATTRIBUTE               = "anyAttribute";
    public static final String ELT_APPINFO                    = "appinfo";
    public static final String ELT_ATTRIBUTE                  = "attribute";
    public static final String ELT_ATTRIBUTEGROUP             = "attributeGroup";
    public static final String ELT_CHOICE                     = "choice";
    public static final String ELT_COMPLEXCONTENT             = "complexContent";
    public static final String ELT_COMPLEXTYPE                = "complexType";
    public static final String ELT_DOCUMENTATION              = "documentation";
    public static final String ELT_ELEMENT                    = "element";
    public static final String ELT_ENUMERATION                = "enumeration";
    public static final String ELT_EXTENSION                  = "extension";
    public static final String ELT_FIELD                      = "field";
    public static final String ELT_FRACTIONDIGITS             = "fractionDigits";
    public static final String ELT_GROUP                      = "group";
    public static final String ELT_IMPORT                     = "import";
    public static final String ELT_INCLUDE                    = "include";
    public static final String ELT_KEY                        = "key";
    public static final String ELT_KEYREF                     = "keyref";
    public static final String ELT_LENGTH                     = "length";
    public static final String ELT_LIST                       = "list";
    public static final String ELT_MAXEXCLUSIVE               = "maxExclusive";
    public static final String ELT_MAXINCLUSIVE               = "maxInclusive";
    public static final String ELT_MAXLENGTH                  = "maxLength";
    public static final String ELT_MINEXCLUSIVE               = "minExclusive";
    public static final String ELT_MININCLUSIVE               = "minInclusive";
    public static final String ELT_MINLENGTH                  = "minLength";
    public static final String ELT_NOTATION                   = "notation";
    public static final String ELT_PATTERN                    = "pattern";
    public static final String ELT_REDEFINE                   = "redefine";
    public static final String ELT_RESTRICTION                = "restriction";
    public static final String ELT_SCHEMA                     = "schema";
    public static final String ELT_SELECTOR                   = "selector";
    public static final String ELT_SEQUENCE                   = "sequence";
    public static final String ELT_SIMPLECONTENT              = "simpleContent";
    public static final String ELT_SIMPLETYPE                 = "simpleType";
    public static final String ELT_TOTALDIGITS                = "totalDigits";
    public static final String ELT_UNION                      = "union";
    public static final String ELT_UNIQUE                     = "unique";
    public static final String ELT_WHITESPACE                 = "whiteSpace";

    // all possible schema attribute names (and xml:lang defined on <schema> and <documentation>)
    public static final String ATT_ABSTRACT                   = "abstract";
    public static final String ATT_ATTRIBUTEFORMDEFAULT       = "attributeFormDefault";
    public static final String ATT_BASE                       = "base";
    public static final String ATT_BLOCK                      = "block";
    public static final String ATT_BLOCKDEFAULT               = "blockDefault";
    public static final String ATT_DEFAULT                    = "default";
    public static final String ATT_ELEMENTFORMDEFAULT         = "elementFormDefault";
    public static final String ATT_FINAL                      = "final";
    public static final String ATT_FINALDEFAULT               = "finalDefault";
    public static final String ATT_FIXED                      = "fixed";
    public static final String ATT_FORM                       = "form";
    public static final String ATT_ID                         = "id";
    public static final String ATT_ITEMTYPE                   = "itemType";
    public static final String ATT_MAXOCCURS                  = "maxOccurs";
    public static final String ATT_MEMBERTYPES                = "memberTypes";
    public static final String ATT_MINOCCURS                  = "minOccurs";
    public static final String ATT_MIXED                      = "mixed";
    public static final String ATT_NAME                       = "name";
    public static final String ATT_NAMESPACE                  = "namespace";
    public static final String ATT_NILLABLE                   = "nillable";
    public static final String ATT_PROCESSCONTENTS            = "processContents";
    public static final String ATT_REF                        = "ref";
    public static final String ATT_REFER                      = "refer";
    public static final String ATT_SCHEMALOCATION             = "schemaLocation";
    public static final String ATT_SOURCE                     = "source";
    public static final String ATT_SUBSTITUTIONGROUP          = "substitutionGroup";
    public static final String ATT_SYSTEM                     = "system";
    public static final String ATT_PUBLIC                     = "public";
    public static final String ATT_TARGETNAMESPACE            = "targetNamespace";
    public static final String ATT_TYPE                       = "type";
    public static final String ATT_USE                        = "use";
    public static final String ATT_VALUE                      = "value";
    public static final String ATT_VERSION                    = "version";
    public static final String ATT_XML_LANG                   = "xml:lang";
    public static final String ATT_XPATH                      = "xpath";

    // all possible schema attribute values
    public static final String ATTVAL_TWOPOUNDANY             = "##any";
    public static final String ATTVAL_TWOPOUNDLOCAL           = "##local";
    public static final String ATTVAL_TWOPOUNDOTHER           = "##other";
    public static final String ATTVAL_TWOPOUNDTARGETNS        = "##targetNamespace";
    public static final String ATTVAL_POUNDALL                = "#all";
    public static final String ATTVAL_FALSE_0                 = "0";
    public static final String ATTVAL_TRUE_1                  = "1";
    public static final String ATTVAL_ANYSIMPLETYPE           = "anySimpleType";
    public static final String ATTVAL_ANYTYPE                 = "anyType";
    public static final String ATTVAL_ANYURI                  = "anyURI";
    public static final String ATTVAL_BASE64BINARY            = "base64Binary";
    public static final String ATTVAL_BOOLEAN                 = "boolean";
    public static final String ATTVAL_BYTE                    = "byte";
    public static final String ATTVAL_COLLAPSE                = "collapse";
    public static final String ATTVAL_DATE                    = "date";
    public static final String ATTVAL_DATETIME                = "dateTime";
    public static final String ATTVAL_DAY                     = "gDay";
    public static final String ATTVAL_DECIMAL                 = "decimal";
    public static final String ATTVAL_DOUBLE                  = "double";
    public static final String ATTVAL_DURATION                = "duration";
    public static final String ATTVAL_ENTITY                  = "ENTITY";
    public static final String ATTVAL_ENTITIES                = "ENTITIES";
    public static final String ATTVAL_EXTENSION               = "extension";
    public static final String ATTVAL_FALSE                   = "false";
    public static final String ATTVAL_FLOAT                   = "float";
    public static final String ATTVAL_HEXBINARY               = "hexBinary";
    public static final String ATTVAL_ID                      = "ID";
    public static final String ATTVAL_IDREF                   = "IDREF";
    public static final String ATTVAL_IDREFS                  = "IDREFS";
    public static final String ATTVAL_INT                     = "int";
    public static final String ATTVAL_INTEGER                 = "integer";
    public static final String ATTVAL_LANGUAGE                = "language";
    public static final String ATTVAL_LAX                     = "lax";
    public static final String ATTVAL_LIST                    = "list";
    public static final String ATTVAL_LONG                    = "long";
    public static final String ATTVAL_NAME                    = "Name";
    public static final String ATTVAL_NEGATIVEINTEGER         = "negativeInteger";
    public static final String ATTVAL_MONTH                   = "gMonth";
    public static final String ATTVAL_MONTHDAY                = "gMonthDay";
    public static final String ATTVAL_NCNAME                  = "NCName";
    public static final String ATTVAL_NMTOKEN                 = "NMTOKEN";
    public static final String ATTVAL_NMTOKENS                = "NMTOKENS";
    public static final String ATTVAL_NONNEGATIVEINTEGER      = "nonNegativeInteger";
    public static final String ATTVAL_NONPOSITIVEINTEGER      = "nonPositiveInteger";
    public static final String ATTVAL_NORMALIZEDSTRING        = "normalizedString";
    public static final String ATTVAL_NOTATION                = "NOTATION";
    public static final String ATTVAL_OPTIONAL                = "optional";
    public static final String ATTVAL_POSITIVEINTEGER         = "positiveInteger";
    public static final String ATTVAL_PRESERVE                = "preserve";
    public static final String ATTVAL_PROHIBITED              = "prohibited";
    public static final String ATTVAL_QNAME                   = "QName";
    public static final String ATTVAL_QUALIFIED               = "qualified";
    public static final String ATTVAL_REPLACE                 = "replace";
    public static final String ATTVAL_REQUIRED                = "required";
    public static final String ATTVAL_RESTRICTION             = "restriction";
    public static final String ATTVAL_SHORT                   = "short";
    public static final String ATTVAL_SKIP                    = "skip";
    public static final String ATTVAL_STRICT                  = "strict";
    public static final String ATTVAL_STRING                  = "string";
    public static final String ATTVAL_SUBSTITUTION            = "substitution";
    public static final String ATTVAL_TIME                    = "time";
    public static final String ATTVAL_TOKEN                   = "token";
    public static final String ATTVAL_TRUE                    = "true";
    public static final String ATTVAL_UNBOUNDED               = "unbounded";
    public static final String ATTVAL_UNION                   = "union";
    public static final String ATTVAL_UNQUALIFIED             = "unqualified";
    public static final String ATTVAL_UNSIGNEDBYTE            = "unsignedByte";
    public static final String ATTVAL_UNSIGNEDINT             = "unsignedInt";
    public static final String ATTVAL_UNSIGNEDLONG            = "unsignedLong";
    public static final String ATTVAL_UNSIGNEDSHORT           = "unsignedShort";
    public static final String ATTVAL_YEAR                    = "gYear";
    public static final String ATTVAL_YEARMONTH               = "gYearMonth";

    // form qualified/unqualified
    public static final short FORM_UNQUALIFIED = 0;
    public static final short FORM_QUALIFIED   = 1;

    // attribute use
    public static final short USE_OPTIONAL   = 0;
    public static final short USE_REQUIRED   = 1;
    public static final short USE_PROHIBITED = 2;

    // maxOccurs = "unbounded"
    public static final int OCCURRENCE_UNBOUNDED = -1;

}
/*
 * Copyright (C) 2014-2022 Philip Helger (www.helger.com)
 * philip[at]helger[dot]com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.helger.html.hc.html.metadata;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;

import org.junit.Rule;
import org.junit.Test;

import com.helger.commons.state.EChange;
import com.helger.commons.url.SimpleURL;
import com.helger.html.hc.html.HC_Target;
import com.helger.html.hc.html.script.HCScriptFile;
import com.helger.html.hc.html.script.HCScriptInline;
import com.helger.html.hc.mock.HCTestRuleOptimized;
import com.helger.html.hc.render.HCRenderer;
import com.helger.html.js.UnparsedJSCodeProvider;

/**
 * Test class for class {@link HCHead}.
 * <p>
 * NOTE(review): the tests below build up state on a single {@link HCHead}
 * instance and assert the exact serialized XHTML after each mutation, so the
 * statement order within each test method is significant and must not be
 * changed.
 *
 * @author Philip Helger
 */
public final class HCHeadTest
{
  // Resets/initializes the HC environment around each test
  @Rule
  public final HCTestRuleOptimized m_aRule = new HCTestRuleOptimized ();

  @Test
  public void testBasic ()
  {
    final HCHead aHead = new HCHead ();

    // A fresh head has no profile/title/base and no meta/link/CSS/JS entries
    assertNull (aHead.getProfile ());
    assertNull (aHead.getPageTitle ());
    assertNull (aHead.getBaseHref ());
    assertNull (aHead.getBaseTarget ());
    assertTrue (aHead.metaElements ().isEmpty ());
    assertTrue (aHead.links ().isEmpty ());
    assertTrue (aHead.getAllCSSNodes ().isEmpty ());
    assertTrue (aHead.getAllJSNodes ().isEmpty ());
    assertEquals ("", aHead.getPlainText ());

    // Setters are fluent (return "this") and the serialized form reflects
    // each newly set attribute/child
    assertSame (aHead, aHead.setProfile ("any"));
    assertEquals ("any", aHead.getProfile ());
    assertEquals ("<head xmlns=\"http://www.w3.org/1999/xhtml\" profile=\"any\"></head>", HCRenderer.getAsHTMLString (aHead));

    assertSame (aHead, aHead.setPageTitle ("Title"));
    assertEquals ("Title", aHead.getPageTitle ());
    assertEquals ("<head xmlns=\"http://www.w3.org/1999/xhtml\" profile=\"any\"><title>Title</title></head>", HCRenderer.getAsHTMLString (aHead));

    assertSame (aHead, aHead.setBaseHref (new SimpleURL ("/")));
    assertEquals ("/", aHead.getBaseHref ().getAsStringWithEncodedParameters ());
    assertEquals ("<head xmlns=\"http://www.w3.org/1999/xhtml\" profile=\"any\"><title>Title</title><base href=\"/\" /></head>", HCRenderer.getAsHTMLString (aHead));

    assertSame (aHead, aHead.setBaseTarget (HC_Target.BLANK));
    assertEquals (HC_Target.BLANK, aHead.getBaseTarget ());
    assertEquals ("<head xmlns=\"http://www.w3.org/1999/xhtml\" profile=\"any\"><title>Title</title><base href=\"/\" target=\"_blank\" /></head>", HCRenderer.getAsHTMLString (aHead));

    assertNotNull (aHead.toString ());
  }

  @Test
  public void testMetaElements ()
  {
    final HCHead aHead = new HCHead ();
    assertTrue (aHead.metaElements ().isEmpty ());

    // Add a "name" meta element
    assertTrue (aHead.metaElements ().add (new HCMeta ().setName ("foo").setContent ("bar")));
    assertFalse (aHead.metaElements ().isEmpty ());
    assertEquals (1, aHead.metaElements ().size ());
    assertEquals ("<head xmlns=\"http://www.w3.org/1999/xhtml\">" + "<meta name=\"foo\" content=\"bar\" />" + "</head>",
                  HCRenderer.getAsHTMLString (aHead));

    // Add an "http-equiv" meta element - rendered after the first one
    assertTrue (aHead.metaElements ().add (new HCMeta ().setHttpEquiv ("goo").setContent ("car")));
    assertEquals (2, aHead.metaElements ().size ());
    assertEquals ("<head xmlns=\"http://www.w3.org/1999/xhtml\">" +
                  "<meta name=\"foo\" content=\"bar\" />" +
                  "<meta http-equiv=\"goo\" content=\"car\" />" +
                  "</head>",
                  HCRenderer.getAsHTMLString (aHead));

    // Removing a non-existing name is a no-op; removal is by name/http-equiv
    assertEquals (EChange.UNCHANGED, aHead.removeMetaElement ("any"));
    assertEquals (2, aHead.metaElements ().size ());
    assertEquals (EChange.CHANGED, aHead.removeMetaElement ("foo"));
    assertEquals (1, aHead.metaElements ().size ());
    // Second removal of the same name must report UNCHANGED
    assertEquals (EChange.UNCHANGED, aHead.removeMetaElement ("foo"));
    assertEquals (1, aHead.metaElements ().size ());
    assertEquals ("<head xmlns=\"http://www.w3.org/1999/xhtml\">" + "<meta http-equiv=\"goo\" content=\"car\" />" + "</head>",
                  HCRenderer.getAsHTMLString (aHead));
    assertEquals (EChange.CHANGED, aHead.removeMetaElement ("goo"));
    assertEquals (0, aHead.metaElements ().size ());
    assertEquals ("<head xmlns=\"http://www.w3.org/1999/xhtml\"></head>", HCRenderer.getAsHTMLString (aHead));
  }

  @Test
  public void testGenerate ()
  {
    final HCHead aHead = new HCHead ();
    assertEquals ("<head xmlns=\"http://www.w3.org/1999/xhtml\"></head>", HCRenderer.getAsHTMLString (aHead));

    aHead.setPageTitle ("test");
    assertEquals ("<head xmlns=\"http://www.w3.org/1999/xhtml\"><title>test</title></head>", HCRenderer.getAsHTMLString (aHead));

    aHead.setBaseHref (new SimpleURL ("/root"));
    assertEquals ("<head xmlns=\"http://www.w3.org/1999/xhtml\"><title>test</title><base href=\"/root\" /></head>",
                  HCRenderer.getAsHTMLString (aHead));

    // base href and base target may be set independently; resetting to null
    // removes the respective attribute again
    aHead.setBaseHref (null);
    aHead.setBaseTarget (HC_Target.BLANK);
    assertEquals ("<head xmlns=\"http://www.w3.org/1999/xhtml\"><title>test</title><base target=\"_blank\" /></head>",
                  HCRenderer.getAsHTMLString (aHead));

    aHead.setBaseTarget (null);
    assertEquals ("<head xmlns=\"http://www.w3.org/1999/xhtml\"><title>test</title></head>", HCRenderer.getAsHTMLString (aHead));

    // A shortcut icon emits two <link> elements ("shortcut icon" + "icon")
    aHead.setShortcutIconHref (new SimpleURL ("/favicon.ico"));
    assertEquals ("<head xmlns=\"http://www.w3.org/1999/xhtml\"><title>test</title><link rel=\"shortcut icon\" href=\"/favicon.ico\"></link><link rel=\"icon\" type=\"image/icon\" href=\"/favicon.ico\"></link></head>",
                  HCRenderer.getAsHTMLString (aHead));

    aHead.setShortcutIconHref (null);
    aHead.addJS (new HCScriptFile ().setSrc (new SimpleURL ("/my.js")));
    assertEquals ("<head xmlns=\"http://www.w3.org/1999/xhtml\"><title>test</title><script type=\"text/javascript\" src=\"/my.js\"></script></head>",
                  HCRenderer.getAsHTMLString (aHead));

    // CSS links are emitted before JS scripts
    aHead.addCSS (HCLink.createCSSLink (new SimpleURL ("/my.css")));
    assertEquals ("<head xmlns=\"http://www.w3.org/1999/xhtml\"><title>test</title><link rel=\"stylesheet\" type=\"text/css\" href=\"/my.css\"></link><script type=\"text/javascript\" src=\"/my.js\"></script></head>",
                  HCRenderer.getAsHTMLString (aHead));
  }

  @Test
  public void testChildrenStuff ()
  {
    final HCHead aHead = new HCHead ();

    // Even an "empty" HCHead reports 2 children (title and base placeholders),
    // as asserted below - the child list is a synthetic ordered view
    assertTrue (aHead.hasChildren ());
    assertEquals (2, aHead.getChildCount ());
    assertTrue (aHead.getChildAtIndex (0) instanceof HCTitle);
    assertTrue (aHead.getChildAtIndex (1) instanceof HCBase);
    // Out-of-range index returns null instead of throwing
    assertNull (aHead.getChildAtIndex (2));
    assertTrue (aHead.getFirstChild () instanceof HCTitle);
    assertTrue (aHead.getLastChild () instanceof HCBase);

    aHead.addCSS (new HCStyle ("bla{}"));
    assertEquals (3, aHead.getChildCount ());
    assertTrue (aHead.getChildAtIndex (0) instanceof HCTitle);
    assertTrue (aHead.getChildAtIndex (1) instanceof HCBase);
    assertTrue (aHead.getChildAtIndex (2) instanceof HCStyle);
    assertNull (aHead.getChildAtIndex (3));
    assertTrue (aHead.getFirstChild () instanceof HCTitle);
    assertTrue (aHead.getLastChild () instanceof HCStyle);

    aHead.addCSS (new HCStyle ("foo{}"));
    assertEquals (4, aHead.getChildCount ());
    assertTrue (aHead.getChildAtIndex (0) instanceof HCTitle);
    assertTrue (aHead.getChildAtIndex (1) instanceof HCBase);
    assertTrue (aHead.getChildAtIndex (2) instanceof HCStyle);
    assertTrue (aHead.getChildAtIndex (3) instanceof HCStyle);
    assertNull (aHead.getChildAtIndex (4));
    assertTrue (aHead.getFirstChild () instanceof HCTitle);
    assertTrue (aHead.getLastChild () instanceof HCStyle);

    // Links are ordered BEFORE CSS style nodes in the child view
    aHead.links ().add (new HCLink ().setRev (EHCLinkType.APPENDIX));
    assertEquals (5, aHead.getChildCount ());
    assertTrue (aHead.getChildAtIndex (0) instanceof HCTitle);
    assertTrue (aHead.getChildAtIndex (1) instanceof HCBase);
    assertTrue (aHead.getChildAtIndex (2) instanceof HCLink);
    assertTrue (aHead.getChildAtIndex (3) instanceof HCStyle);
    assertTrue (aHead.getChildAtIndex (4) instanceof HCStyle);
    assertNull (aHead.getChildAtIndex (5));
    assertTrue (aHead.getFirstChild () instanceof HCTitle);
    assertTrue (aHead.getLastChild () instanceof HCStyle);

    // JS nodes come last
    aHead.addJS (new HCScriptInline (new UnparsedJSCodeProvider ("window.x=1;")));
    assertEquals (6, aHead.getChildCount ());
    assertTrue (aHead.getChildAtIndex (0) instanceof HCTitle);
    assertTrue (aHead.getChildAtIndex (1) instanceof HCBase);
    assertTrue (aHead.getChildAtIndex (2) instanceof HCLink);
    assertTrue (aHead.getChildAtIndex (3) instanceof HCStyle);
    assertTrue (aHead.getChildAtIndex (4) instanceof HCStyle);
    assertTrue (aHead.getChildAtIndex (5) instanceof HCScriptInline);
    assertNull (aHead.getChildAtIndex (6));
    assertTrue (aHead.getFirstChild () instanceof HCTitle);
    assertTrue (aHead.getLastChild () instanceof HCScriptInline);
  }
}
/******************************************************************************* * Copyright (c) 2010 Haifeng Li * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *******************************************************************************/ package smile.stat.distribution; import smile.math.Math; /** * The hypergeometric distribution is a discrete probability distribution that * describes the number of successes in a sequence of n draws from a finite * population without replacement, just as the binomial distribution describes * the number of successes for draws with replacement. * <p> * Suppose you are to draw "n" balls without replacement from an urn containing * "N" balls in total, "m" of which are white. The hypergeometric distribution * describes the distribution of the number of white balls drawn from the urn. * A random variable X follows the hypergeometric distribution with parameters * N, m and n if the probability is given by * <pre> * <sub>m</sub>C<sub>k</sub> <sub>(N-m)</sub>C<sub>(n-k)</sub> * P(X = k) = ---------------- * <sub>N</sub>C<sub>n</sub> * </pre> * where <sub>n</sub>C<sub>k</sub> is n choose k. * * @author Haifeng Li */ public class HyperGeometricDistribution extends DiscreteDistribution { private int N; private int m; private int n; private RandomNumberGenerator rng; /** * Constructor. * @param N the number of total samples. * @param m the number of defects. * @param n the number of draws. 
*/ public HyperGeometricDistribution(int N, int m, int n) { if (N < 0) { throw new IllegalArgumentException("Invalid N: " + N); } if (m < 0 || m > N) { throw new IllegalArgumentException("Invalid m: " + m); } if (n < 0 || n > N) { throw new IllegalArgumentException("Invalid n: " + n); } this.N = N; this.m = m; this.n = n; } @Override public int npara() { return 3; } @Override public double mean() { return (double) m * n / N; } @Override public double var() { double r = (double) m / N; return n * (N - n) * r * (1 - r) / (N - 1); } @Override public double sd() { return Math.sqrt(var()); } @Override public double entropy() { throw new UnsupportedOperationException("Hypergeometric distribution does not support entropy()"); } @Override public String toString() { return String.format("Hypergeometric Distribution(%d, %d, %d)", N, m, n); } @Override public double p(int k) { if (k < Math.max(0, m + n - N) || k > Math.min(m, n)) { return 0.0; } else { return Math.exp(logp(k)); } } @Override public double logp(int k) { if (k < Math.max(0, m + n - N) || k > Math.min(m, n)) { return Double.NEGATIVE_INFINITY; } else { return Math.logChoose(m, k) + Math.logChoose(N - m, n - k) - Math.logChoose(N, n); } } @Override public double cdf(double k) { if (k < Math.max(0, m + n - N)) { return 0.0; } else if (k >= Math.min(m, n)) { return 1.0; } double p = 0.0; for (int i = Math.max(0, m + n - N); i <= k; i++) { p += p(i); } return p; } @Override public double quantile(double p) { if (p < 0.0 || p > 1.0) { throw new IllegalArgumentException("Invalid p: " + p); } if (p == 0.0) { return Math.max(0, m+n-N); } if (p == 1.0) { return Math.min(m,n); } // Starting guess near peak of density. // Expand interval until we bracket. 
int kl, ku, inc = 1; int k = Math.max(0, Math.min(n, (int) (n * p))); if (p < cdf(k)) { do { k = Math.max(k - inc, 0); inc *= 2; } while (p < cdf(k) && k > 0); kl = k; ku = k + inc / 2; } else { do { k = Math.min(k + inc, n + 1); inc *= 2; } while (p > cdf(k)); ku = k; kl = k - inc / 2; } return quantile(p, kl, ku); } /** * Uses inversion by chop-down search from the mode when the mean &lt; 20 * and the patchwork-rejection method when the mean &gt; 20. */ @Override public double rand() { int mm = m; int nn = n; if (mm > N / 2) { // invert mm mm = N - mm; } if (nn > N / 2) { // invert nn nn = N - nn; } if (nn > mm) { // swap nn and mm int swap = nn; nn = mm; mm = swap; } if (rng == null) { if ((double) nn * mm >= 20 * N) { // use ratio-of-uniforms method rng = new Patchwork(N, mm, nn); } else { // inversion method, using chop-down search from mode rng = new Inversion(N, mm, nn); } } return rng.rand(); } abstract class RandomNumberGenerator { protected int N, m, n; protected int fak; protected int addd; RandomNumberGenerator(int N, int m, int n) { this.N = N; this.m = m; this.n = n; // transformations fak = 1; // used for undoing transformations addd = 0; if (m > N / 2) { // invert mm m = N - m; fak = -1; addd = n; } if (n > N / 2) { // invert nn n = N - n; addd += fak * m; fak = -fak; } if (n > m) { // swap nn and mm int swap = n; n = m; m = swap; } } public int rand() { // cases with only one possible result end here if (n == 0) { return addd; } int x = random(); // undo transformations return x * fak + addd; } protected abstract int random(); } class Patchwork extends RandomNumberGenerator { private int L, k1, k2, k4, k5; private double dl, dr, r1, r2, r4, r5, ll, lr, cPm, f1, f2, f4, f5, p1, p2, p3, p4, p5, p6; /** * Initialize random number generator. 
*/ Patchwork(int N, int m, int n) { super(N, m, n); double Mp, np, p, modef, U; // (X, Y) <-> (V, W) Mp = (double) (m + 1); np = (double) (n + 1); L = N - m - n; p = Mp / (N + 2.); modef = np * p; // approximate deviation of reflection points k2, k4 from modef - 1/2 U = Math.sqrt(modef * (1. - p) * (1. - (n + 2.) / (N + 3.)) + 0.25); // mode, reflection points k2 and k4, and points k1 and k5, which // delimit the centre region of h(x) // k2 = ceil (modef - 1/2 - U), k1 = 2*k2 - (mode - 1 + delta_ml) // k4 = floor(modef - 1/2 + U), k5 = 2*k4 - (mode + 1 - delta_mr) int mode = (int) modef; k2 = (int) Math.ceil(modef - 0.5 - U); if (k2 >= mode) { k2 = mode - 1; } k4 = (int) (modef - 0.5 + U); k1 = k2 + k2 - mode + 1; // delta_ml = 0 k5 = k4 + k4 - mode; // delta_mr = 1 // range width of the critical left and right centre region dl = (double) (k2 - k1); dr = (double) (k5 - k4); // recurrence constants r(k) = p(k)/p(k-1) at k = k1, k2, k4+1, k5+1 r1 = (np / (double) k1 - 1.) * (Mp - k1) / (double) (L + k1); r2 = (np / (double) k2 - 1.) * (Mp - k2) / (double) (L + k2); r4 = (np / (double) (k4 + 1) - 1.) * (m - k4) / (double) (L + k4 + 1); r5 = (np / (double) (k5 + 1) - 1.) * (m - k5) / (double) (L + k5 + 1); // reciprocal values of the scale parameters of expon. tail envelopes ll = Math.log(r1); // expon. tail left lr = -Math.log(r5); // expon. tail right // hypergeom. constant, necessary for computing function values f(k) cPm = lnpk(mode, L, m, n); // function values f(k) = p(k)/p(mode) at k = k2, k4, k1, k5 f2 = Math.exp(cPm - lnpk(k2, L, m, n)); f4 = Math.exp(cPm - lnpk(k4, L, m, n)); f1 = Math.exp(cPm - lnpk(k1, L, m, n)); f5 = Math.exp(cPm - lnpk(k5, L, m, n)); // area of the two centre and the two exponential tail regions // area of the two immediate acceptance regions between k2, k4 p1 = f2 * (dl + 1.); // immed. left p2 = f2 * dl + p1; // centre left p3 = f4 * (dr + 1.) + p2; // immed. right p4 = f4 * dr + p3; // centre right p5 = f1 / ll + p4; // expon. 
tail left p6 = f5 / lr + p5; // expon. tail right } /** * This method is valid only for mode &ge; 10 and 0 &le; nn &le; mm <&le; N/2. * <p> * This method is fast when called repeatedly with the same parameters, but * slow when the parameters change due to a high setup time. The computation * time hardly depends on the parameters, except that it matters a lot whether * parameters are within the range where the LnFac function is tabulated. * <p> * Uses the Patchwork Rejection method of Heinz Zechner (HPRS). * The area below the histogram function f(x) in its body is rearranged by * two point reflections. Within a large center interval variates are sampled * efficiently by rejection from uniform hats. Rectangular immediate acceptance * regions speed up the generation. The remaining tails are covered by * exponential functions. * <p> * For detailed explanation, see: * Stadlober, E & Zechner, H: "The Patchwork Rejection Technique for * Sampling from Unimodal Distributions". ACM Transactions on Modeling * and Computer Simulation, vol. 9, no. 1, 1999, p. 59-83. */ @Override protected int random() { int Dk, X, V; double U, Y, W; // (X, Y) <-> (V, W) while (true) { // generate uniform number U -- U(0, p6) // case distinction corresponding to U if ((U = Math.random() * p6) < p2) { // centre left // immediate acceptance region R2 = [k2, mode) *[0, f2), X = k2, ... mode -1 if ((W = U - p1) < 0.) { return (k2 + (int) (U / f2)); } // immediate acceptance region R1 = [k1, k2)*[0, f1), X = k1, ... k2-1 if ((Y = W / dl) < f1) { return (k1 + (int) (W / f1)); } // computation of candidate X < k2, and its reflected counterpart V > k2 // either squeeze-acceptance of X or acceptance-rejection of V Dk = (int) (dl * Math.random()) + 1; if (Y <= f2 - Dk * (f2 - f2 / r2)) { // quick accept of return (k2 - Dk); } // X = k2 - Dk if ((W = f2 + f2 - Y) < 1.) { // quick reject of V V = k2 + Dk; if (W <= f2 + Dk * (1. 
- f2) / (dl + 1.)) { // quick accept of V return (V); } if (Math.log(W) <= cPm - lnpk(V, L, m, n)) { return (V); // final accept of V } } X = k2 - Dk; // go to final accept/reject } else if (U < p4) { // centre right // immediate acceptance region R3 = [mode, k4+1)*[0, f4), X = mode, ... k4 if ((W = U - p3) < 0.) { return (k4 - (int) ((U - p2) / f4)); } // immediate acceptance region R4 = [k4+1, k5+1)*[0, f5) if ((Y = W / dr) < f5) { return (k5 - (int) (W / f5)); } // computation of candidate X > k4, and its reflected counterpart V < k4 // either squeeze-acceptance of X or acceptance-rejection of V Dk = (int) (dr * Math.random()) + 1; if (Y <= f4 - Dk * (f4 - f4 * r4)) { // quick accept of return (k4 + Dk); // X = k4 + Dk } if ((W = f4 + f4 - Y) < 1.) { // quick reject of V V = k4 - Dk; if (W <= f4 + Dk * (1. - f4) / dr) { // quick accept of return V; // V = k4 - Dk } if (Math.log(W) <= cPm - lnpk(V, L, m, n)) { return (V); // final accept of V } } X = k4 + Dk; // go to final accept/reject } else { Y = Math.random(); if (U < p5) { // expon. tail left Dk = (int) (1. - Math.log(Y) / ll); if ((X = k1 - Dk) < 0) { continue; // 0 <= X <= k1 - 1 } Y *= (U - p4) * ll; // Y -- U(0, h(x)) if (Y <= f1 - Dk * (f1 - f1 / r1)) { return X; // quick accept of X } } else { // expon. tail right Dk = (int) (1. - Math.log(Y) / lr); if ((X = k5 + Dk) > n) { continue; // k5 + 1 <= X <= nn } Y *= (U - p5) * lr; // Y -- U(0, h(x)) if (Y <= f5 - Dk * (f5 - f5 * r5)) { return X; // quick accept of X } } } // acceptance-rejection test of candidate X from the original area // test, whether Y <= f(X), with Y = U*h(x) and U -- U(0, 1) // log f(X) = log( mode! (mm - mode)! (nn - mode)! (N - mm - nn + mode)! ) // - log( X! (mm - X)! (nn - X)! (N - mm - nn + X)! ) if (Math.log(Y) <= cPm - lnpk(X, L, m, n)) { return (X); } } } /** * subfunction used by random number generator. 
*/ private double lnpk(int k, int L, int m, int n) { return Math.logFactorial(k) + Math.logFactorial(m - k) + Math.logFactorial(n - k) + Math.logFactorial(L + k); } } class Inversion extends RandomNumberGenerator { private int mode, mp; // Mode, mode+1 private int bound; // Safety upper bound private double fm; // Value at mode /** * Initialize random number generator. */ Inversion(int N, int m, int n) { super(N, m, n); int L = N - m - n; // Parameter double Mp = m + 1; double np = n + 1; double p = Mp / (N + 2.); double modef = np * p; // mode, real mode = (int) modef; // mode, integer if (mode == modef && p == 0.5) { mp = mode--; } else { mp = mode + 1; } // mode probability, using log factorial function // (may read directly from fac_table if N < FAK_LEN) fm = Math.exp(Math.logFactorial(N - m) - Math.logFactorial(L + mode) - Math.logFactorial(n - mode) + Math.logFactorial(m) - Math.logFactorial(m - mode) - Math.logFactorial(mode) - Math.logFactorial(N) + Math.logFactorial(N - n) + Math.logFactorial(n)); // safety bound - guarantees at least 17 significant decimal digits // bound = min(nn, (int)(modef + k*c')) bound = (int) (modef + 11. * Math.sqrt(modef * (1. - p) * (1. - n / (double) N) + 1.)); if (bound > n) { bound = n; } } /** * Hypergeometric distribution by inversion method, using down-up * search starting at the mode using the chop-down technique. * <p> * Assumes 0 &le; n &le; m &le; N/2. * Overflow protection is needed when N > 680 or n > 75. * <p> * This method is faster than the rejection method when the variance is low. 
*/ @Override protected int random() { // Sampling int I; // Loop counter int L = N - m - n; // Parameter double Mp, np; // mm + 1, nn + 1 double U; // uniform random double c, d; // factors in iteration double divisor; // divisor, eliminated by scaling double k1, k2; // float version of loop counter double L1 = L; // float version of L Mp = (double) (m + 1); np = (double) (n + 1); // loop until accepted while (true) { U = Math.random(); // uniform random number to be converted // start chop-down search at mode if ((U -= fm) <= 0.) { return (mode); } c = d = fm; // alternating down- and upward search from the mode k1 = mp - 1; k2 = mode + 1; for (I = 1; I <= mode; I++, k1--, k2++) { // Downward search from k1 = hyp_mp - 1 divisor = (np - k1) * (Mp - k1); // Instead of dividing c with divisor, we multiply U and d because // multiplication is faster. This will give overflow if N > 800 U *= divisor; d *= divisor; c *= k1 * (L1 + k1); if ((U -= c) <= 0.) { return (mp - I - 1); // = k1 - 1 } // Upward search from k2 = hyp_mode + 1 divisor = k2 * (L1 + k2); // re-scale parameters to avoid time-consuming division U *= divisor; c *= divisor; d *= (np - k2) * (Mp - k2); if ((U -= d) <= 0.) { return (mode + I); // = k2 } // Values of nn > 75 or N > 680 may give overflow if you leave out this.. // overflow protection // if (U > 1.E100) {U *= 1.E-100; c *= 1.E-100; d *= 1.E-100;} } // Upward search from k2 = 2*mode + 1 to bound for (k2 = I = mp + mode; I <= bound; I++, k2++) { divisor = k2 * (L1 + k2); U *= divisor; d *= (np - k2) * (Mp - k2); if ((U -= d) <= 0.) { return (I); } // more overflow protection // if (U > 1.E100) {U *= 1.E-100; d *= 1.E-100;} } } } } }
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution

// Automatically generated by LOXI from template of_class.java
// Do not modify

package org.projectfloodlight.openflow.protocol.ver10;

import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;

// Immutable OpenFlow 1.0 "set IPv4 source address" action (wire type 6,
// fixed length 8 bytes). Generated code - do not hand-edit; regenerate via
// LoxiGen if the wire format changes.
class OFActionSetNwSrcVer10 implements OFActionSetNwSrc {
    private static final Logger logger = LoggerFactory.getLogger(OFActionSetNwSrcVer10.class);
    // version: 1.0
    final static byte WIRE_VERSION = 1;
    final static int LENGTH = 8;

    private final static IPv4Address DEFAULT_NW_ADDR = IPv4Address.NONE;

    // OF message fields
    private final IPv4Address nwAddr;
//
    // Immutable default instance
    final static OFActionSetNwSrcVer10 DEFAULT = new OFActionSetNwSrcVer10(
        DEFAULT_NW_ADDR
    );

    // package private constructor - used by readers, builders, and factory
    OFActionSetNwSrcVer10(IPv4Address nwAddr) {
        if(nwAddr == null) {
            throw new NullPointerException("OFActionSetNwSrcVer10: property nwAddr cannot be null");
        }
        this.nwAddr = nwAddr;
    }

    // Accessors for OF message fields
    @Override
    public OFActionType getType() {
        return OFActionType.SET_NW_SRC;
    }

    @Override
    public IPv4Address getNwAddr() {
        return nwAddr;
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_10;
    }

    // Returns a builder pre-populated with this instance's field values
    public OFActionSetNwSrc.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    // Builder that falls back to the parent message's values for unset fields
    static class BuilderWithParent implements OFActionSetNwSrc.Builder {
        final OFActionSetNwSrcVer10 parentMessage;

        // OF message fields
        private boolean nwAddrSet;
        private IPv4Address nwAddr;

        BuilderWithParent(OFActionSetNwSrcVer10 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public OFActionType getType() {
            return OFActionType.SET_NW_SRC;
        }

        @Override
        public IPv4Address getNwAddr() {
            return nwAddr;
        }

        @Override
        public OFActionSetNwSrc.Builder setNwAddr(IPv4Address nwAddr) {
            this.nwAddr = nwAddr;
            this.nwAddrSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_10;
        }

        @Override
        public OFActionSetNwSrc build() {
                // use the explicitly set value, else inherit from the parent message
                IPv4Address nwAddr = this.nwAddrSet ? this.nwAddr : parentMessage.nwAddr;
                if(nwAddr == null)
                    throw new NullPointerException("Property nwAddr must not be null");
//
                return new OFActionSetNwSrcVer10(
                    nwAddr
                );
        }
    }

    // Stand-alone builder that falls back to the type's default values
    static class Builder implements OFActionSetNwSrc.Builder {
        // OF message fields
        private boolean nwAddrSet;
        private IPv4Address nwAddr;

        @Override
        public OFActionType getType() {
            return OFActionType.SET_NW_SRC;
        }

        @Override
        public IPv4Address getNwAddr() {
            return nwAddr;
        }

        @Override
        public OFActionSetNwSrc.Builder setNwAddr(IPv4Address nwAddr) {
            this.nwAddr = nwAddr;
            this.nwAddrSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_10;
        }

//
        @Override
        public OFActionSetNwSrc build() {
            // use the explicitly set value, else the static default
            IPv4Address nwAddr = this.nwAddrSet ? this.nwAddr : DEFAULT_NW_ADDR;
            if(nwAddr == null)
                throw new NullPointerException("Property nwAddr must not be null");
            return new OFActionSetNwSrcVer10(
                    nwAddr
                );
        }
    }

    final static Reader READER = new Reader();

    // Deserializes the action from the wire format; returns null (and resets
    // the reader index) when the buffer does not yet contain the full action
    static class Reader implements OFMessageReader<OFActionSetNwSrc> {
        @Override
        public OFActionSetNwSrc readFrom(ByteBuf bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property type == 6
            short type = bb.readShort();
            if(type != (short) 0x6)
                throw new OFParseError("Wrong type: Expected=OFActionType.SET_NW_SRC(6), got="+type);
            int length = U16.f(bb.readShort());
            if(length != 8)
                throw new OFParseError("Wrong length: Expected=8(8), got="+length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            IPv4Address nwAddr = IPv4Address.read4Bytes(bb);

            OFActionSetNwSrcVer10 actionSetNwSrcVer10 = new OFActionSetNwSrcVer10(
                    nwAddr
                    );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", actionSetNwSrcVer10);
            return actionSetNwSrcVer10;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFActionSetNwSrcVer10Funnel FUNNEL = new OFActionSetNwSrcVer10Funnel();

    // Guava Funnel: feeds the action's wire representation into a hash sink
    static class OFActionSetNwSrcVer10Funnel implements Funnel<OFActionSetNwSrcVer10> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFActionSetNwSrcVer10 message, PrimitiveSink sink) {
            // fixed value property type = 6
            sink.putShort((short) 0x6);
            // fixed value property length = 8
            sink.putShort((short) 0x8);
            message.nwAddr.putTo(sink);
        }
    }

    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    // Serializes the action to its fixed 8-byte wire format
    static class Writer implements OFMessageWriter<OFActionSetNwSrcVer10> {
        @Override
        public void write(ByteBuf bb, OFActionSetNwSrcVer10 message) {
            // fixed value property type = 6
            bb.writeShort((short) 0x6);
            // fixed value property length = 8
            bb.writeShort((short) 0x8);
            message.nwAddr.write4Bytes(bb);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFActionSetNwSrcVer10(");
        b.append("nwAddr=").append(nwAddr);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFActionSetNwSrcVer10 other = (OFActionSetNwSrcVer10) obj;

        if (nwAddr == null) {
            if (other.nwAddr != null)
                return false;
        } else if (!nwAddr.equals(other.nwAddr))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;

        result = prime * result + ((nwAddr == null) ? 0 : nwAddr.hashCode());
        return result;
    }

}