gt
stringclasses
1 value
context
stringlengths
2.05k
161k
package com.box.l10n.mojito.boxsdk; import com.box.sdk.BoxAPIConnection; import com.box.sdk.BoxAPIException; import com.box.sdk.BoxFile; import com.box.sdk.BoxFolder; import com.box.sdk.BoxItem; import com.box.sdk.BoxSharedLink; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import org.apache.commons.io.IOUtils; import org.joda.time.DateTime; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.stereotype.Component; import java.io.ByteArrayOutputStream; import java.io.UnsupportedEncodingException; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; /** * @author jaurambault */ @Component public class BoxSDKService { /** * logger */ static Logger logger = LoggerFactory.getLogger(BoxSDKService.class); @Autowired BoxAPIConnectionProvider boxAPIConnectionProvider; @Autowired BoxSDKServiceConfigProvider boxSDKServiceConfigProvider; public BoxSDKServiceConfig getBoxSDKServiceConfig() throws BoxSDKServiceException { return boxSDKServiceConfigProvider.getConfig(); } public BoxAPIConnection getBoxAPIConnection() throws BoxSDKServiceException { return boxAPIConnectionProvider.getConnection(); } /** * Returns the Box root folder * * @return The root folder for the current profile (or null if not found) * @throws BoxSDKServiceException When an error occurred while retrieving * the folder */ public BoxFolder getRootFolder() throws BoxSDKServiceException { try { return new BoxFolder(getBoxAPIConnection(), getBoxSDKServiceConfig().getRootFolderId()); } catch (BoxAPIException e) { throw new BoxSDKServiceException("Can't retrieve the root folder", e); } } /** * Creates a shared folder inside the drop folder * * @param folderName The name of the shared folder to create * @return The created shared folder * @throws BoxSDKServiceException 
When an error occurred while creating the * folder */ public BoxFolder createSharedFolder(String folderName) throws BoxSDKServiceException { return createSharedFolder(folderName, getBoxSDKServiceConfig().getRootFolderId()); } /** * Creates a shared folder in the given parent folder * * @param folderName The name of the shared folder to create * @param parentId The ID of the parent folder where the folder should be * created * @return The created shared folder * @throws BoxSDKServiceException When an error occurred while creating the * folder */ public BoxFolder createSharedFolder(String folderName, String parentId) throws BoxSDKServiceException { BoxFolder createFolder = createFolder(folderName, parentId); try { createFolder.createSharedLink(BoxSharedLink.Access.OPEN, null, null); return createFolder; } catch (BoxAPIException e) { throw new BoxSDKServiceException("Can't create shared link for directory: " + createFolder.getID(), e); } } /** * Creates a folder inside the drop folder * * @param folderName The name of the folder to create * @return The created folder * @throws BoxSDKServiceException When an error occurred while creating the * folder */ public BoxFolder createFolderUnderRoot(String folderName) throws BoxSDKServiceException { return createFolder(folderName, getBoxSDKServiceConfig().getRootFolderId()); } /** * Creates a folder inside the given parent folder * * @param folderName The name of the folder to create * @param parentId The ID of the parent folder where the folder should be * created * @return The created folder * @throws BoxSDKServiceException When an error occurred while creating the * folder */ public BoxFolder createFolder(String folderName, String parentId) throws BoxSDKServiceException { try { BoxFolder parentFolder = new BoxFolder(getBoxAPIConnection(), parentId); BoxFolder.Info createFolderInfo = parentFolder.createFolder(folderName); logger.debug("created: " + createFolderInfo.getID() + ", name: " + createFolderInfo.getName()); return 
createFolderInfo.getResource(); } catch (BoxAPIException e) { throw new BoxSDKServiceException("Can't create folder: " + folderName, e); } } /** * Uploads a file to the given folder. * <p> * Creates new file if needed or update existing file. * * @param folderId id of the folder where the file should be uploaded * @param filename The name of the file to be uploaded * @param filecontent The content of the file to be uploaded * @return The uploaded file * @throws BoxSDKServiceException When an error occurred while uploading the * file */ public BoxFile uploadFile(String folderId, String filename, String filecontent) throws BoxSDKServiceException { try { BoxFile uploadFile = getFileByName(folderId, filename); if (uploadFile == null) { logger.debug("Upload a new file named: {} to folder: {}", filename, folderId); BoxFolder boxFolder = new BoxFolder(getBoxAPIConnection(), folderId); BoxFile.Info uploadFileInfo = boxFolder.uploadFile(IOUtils.toInputStream(filecontent, StandardCharsets.UTF_8), filename); uploadFile = uploadFileInfo.getResource(); logger.debug("Uploaded new file, id: " + uploadFile.getID() + ", name: " + filename); } else { logger.debug("Upload a new version of file named: {} to folder: {}", filename, folderId); uploadFile.uploadVersion(IOUtils.toInputStream(filecontent, StandardCharsets.UTF_8)); logger.debug("Uploaded new version of file, id: " + uploadFile.getID() + ", name: " + filename); } return uploadFile; } catch (BoxAPIException e) { String msg = "Can't upload file: " + filename + ", in folder id: " + folderId; logger.error(msg, e); throw new BoxSDKServiceException(msg, e); } } /** * Gets a file by name in a folder. * * @param folderId the folder id in which to look for the file * @param filename the filename * @return the file if exists else {@link null}. 
* @throws BoxSDKServiceException */ private BoxFile getFileByName(String folderId, String filename) throws BoxSDKServiceException { Preconditions.checkNotNull(filename, "filename must not be null"); BoxFile boxFile = null; for (BoxFile listFile : listFiles(folderId)) { if (filename.equals(listFile.getInfo().getName())) { boxFile = listFile; break; } } return boxFile; } /** * Lists the files in a folder. * * @param folderId The folder id * @return A list of files (empty list if no files) * @throws BoxSDKServiceException When an error occurred while listing the * files */ public List<BoxFile> listFiles(String folderId) throws BoxSDKServiceException { try { BoxFolder folder = new BoxFolder(getBoxAPIConnection(), folderId); List<BoxFile> files = new ArrayList<>(); for (BoxItem.Info itemInfo : folder) { if (itemInfo instanceof BoxFile.Info) { BoxFile.Info fileInfo = (BoxFile.Info) itemInfo; files.add(fileInfo.getResource()); } } return files; } catch (BoxAPIException e) { String msg = "Can't list files in folder, id: " + folderId; logger.error(msg, e); throw new BoxSDKServiceException(msg, e); } } /** * Gets the file content of a {@link BoxFile}. 
* * @param file The file to be read * @return The file with its content * @throws BoxSDKServiceException When an error occurred while getting the * file content */ public BoxFileWithContent getFileContent(BoxFile file) throws BoxSDKServiceException { BoxFileWithContent boxFileWithContent = new BoxFileWithContent(); boxFileWithContent.setBoxFile(file); boxFileWithContent.setContent(getFileContent(file.getID())); return boxFileWithContent; } /** * Gets the file content of a file * * @param fileId id of the file to be read * @return The file content * @throws BoxSDKServiceException */ public String getFileContent(String fileId) throws BoxSDKServiceException { try { BoxFile boxFile = new BoxFile(getBoxAPIConnection(), fileId); ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); boxFile.download(byteArrayOutputStream); return byteArrayOutputStream.toString(StandardCharsets.UTF_8.toString()); } catch (BoxAPIException e) { String msg = "Can't get file content, file id: " + fileId; logger.error(msg, e); throw new BoxSDKServiceException(msg, e); } catch (UnsupportedEncodingException e) { throw new RuntimeException(e); } } /** * Returns the HTML code to create an Embed Widget pointing to the given * folder * * @param boxFolder The folder that will be used for the Embed Widget * @return The HTML code to add on a page that will display the widget */ public String getBoxEmbedWidgetIFrame(BoxFolder boxFolder) { String url = boxFolder.getInfo().getSharedLink().getURL(); url = url.replaceFirst("/s/", "/embed_widget/s/"); return "<iframe src=\"" + url + "?view=list&sort=date&theme=blue\" width=\"500\" height=\"400\" show_parent_path=\"yes\" " + "frameborder=\"0\" allowfullscreen webkitallowfullscreen mozallowfullscreen oallowfullscreen msallowfullscreen></iframe>"; } /** * Get a folder given the folder name from the shared (drop) folder, parent * folder. This only searches one level deep. 
* * @param folderName The name of the folder to get (not null) * @return The searched folder (null if no folder found) * @throws BoxSDKServiceException When an error occurred while retrieving * the folder */ public BoxFolder getFolderWithName(String folderName) throws BoxSDKServiceException { return this.getFolderWithNameAndParentFolderId(folderName, getBoxSDKServiceConfig().getRootFolderId()); } /** * Get a folder given the folder name from the parent folder. This only * searches one level deep. * * @param folderName The name of the folder to get (not null) * @param parentFolderId The parent folder in which to search the folder for * @return The searched folder (null if no folder found) * @throws BoxSDKServiceException When an error occurred while retrieving * the folder */ public BoxFolder getFolderWithNameAndParentFolderId(String folderName, String parentFolderId) throws BoxSDKServiceException { if (Strings.isNullOrEmpty(folderName)) { throw new BoxSDKServiceException("A null folder name is not acceptable"); } BoxFolder folder = null; try { BoxFolder parentFolder = new BoxFolder(getBoxAPIConnection(), parentFolderId); for (BoxItem.Info itemInfo : parentFolder) { if (itemInfo instanceof BoxFolder.Info) { BoxFolder.Info folderInfo = (BoxFolder.Info) itemInfo; if (folderName.equals(folderInfo.getName())) { folder = folderInfo.getResource(); break; } } } } catch (BoxAPIException e) { String msg = "Error trying to find folder with name = " + folderName; logger.error(msg, e); throw new BoxSDKServiceException(msg, e); } return folder; } /** * Delete the given folder and all of its content. 
* * @param folderId id of the folder that should be deleted * @throws BoxSDKServiceException When an error occurred while deleting the * folder */ public void deleteFolderAndItsContent(String folderId) throws BoxSDKServiceException { try { BoxFolder boxFolder = new BoxFolder(getBoxAPIConnection(), folderId); boxFolder.delete(true); } catch (BoxAPIException e) { String msg = "Error trying to delete folder " + folderId; logger.error(msg, e); throw new BoxSDKServiceException(msg, e); } } /** * Deletes from a folder all content that is older than a given date. * * @param folderId the folder id that contains the content to be deleted * @param olderThan an instant to check against * @throws BoxSDKServiceException */ public void deleteFolderContentOlderThan(String folderId, DateTime olderThan) throws BoxSDKServiceException { try { BoxFolder boxFolder = new BoxFolder(getBoxAPIConnection(), folderId); for (BoxItem.Info itemInfo : boxFolder) { if (itemInfo instanceof BoxFolder.Info) { BoxFolder subFolder = (BoxFolder) itemInfo.getResource(); if (olderThan.isAfter(subFolder.getInfo().getCreatedAt().getTime())) { subFolder.delete(true); } } else if (itemInfo instanceof BoxFile.Info) { BoxFile file = (BoxFile) itemInfo.getResource(); if (olderThan.isAfter(file.getInfo().getCreatedAt().getTime())) { file.delete(); } } } } catch (BoxAPIException e) { String msg = "Error trying to delete content older than: " + olderThan.toString() + " in folder: " + folderId; logger.error(msg, e); throw new BoxSDKServiceException(msg, e); } } /** * Adds a comment on a file. 
* * @param fileId the fileId (not null) * @param comment the comment to add on the file * @throws BoxSDKServiceException When an error occurred while adding the * comment */ public void addCommentToFile(String fileId, String comment) throws BoxSDKServiceException { try { BoxFile boxFile = new BoxFile(getBoxAPIConnection(), fileId); boxFile.addComment(comment); } catch (BoxAPIException e) { if (isSimilarCommentException(e)) { logger.debug("Trying to add the same comment, do nothing"); } else { String msg = "Error trying to add a comment to file " + fileId; logger.error(msg, e); throw new BoxSDKServiceException(msg, e); } } } /** * Indicates if an exception is due to trying to add a comment that is the * same as the last comment. * * @param e the exception that contains details about the error * @return {@code true} if the exception is due to trying to add a comment * that is the same as last comment else {@code false} */ private boolean isSimilarCommentException(BoxAPIException e) { //TODO(P1) would be better to properly parse the response return e.getResponseCode() == HttpStatus.CONFLICT.value() && e.getResponse().contains("recent_similar_comment"); } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.prestosql.orc.writer; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.airlift.slice.Slice; import io.prestosql.orc.checkpoint.BooleanStreamCheckpoint; import io.prestosql.orc.checkpoint.LongStreamCheckpoint; import io.prestosql.orc.metadata.ColumnEncoding; import io.prestosql.orc.metadata.CompressedMetadataWriter; import io.prestosql.orc.metadata.CompressionKind; import io.prestosql.orc.metadata.OrcColumnId; import io.prestosql.orc.metadata.RowGroupIndex; import io.prestosql.orc.metadata.Stream; import io.prestosql.orc.metadata.Stream.StreamKind; import io.prestosql.orc.metadata.statistics.ColumnStatistics; import io.prestosql.orc.stream.LongOutputStream; import io.prestosql.orc.stream.PresentOutputStream; import io.prestosql.orc.stream.StreamDataOutput; import io.prestosql.spi.block.Block; import io.prestosql.spi.block.ColumnarMap; import org.openjdk.jol.info.ClassLayout; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Optional; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import static io.prestosql.orc.metadata.ColumnEncoding.ColumnEncodingKind.DIRECT_V2; import static io.prestosql.orc.metadata.CompressionKind.NONE; import static io.prestosql.orc.stream.LongOutputStream.createLengthOutputStream; import static 
io.prestosql.spi.block.ColumnarMap.toColumnarMap; import static java.util.Objects.requireNonNull; public class MapColumnWriter implements ColumnWriter { private static final int INSTANCE_SIZE = ClassLayout.parseClass(MapColumnWriter.class).instanceSize(); private final OrcColumnId columnId; private final boolean compressed; private final ColumnEncoding columnEncoding; private final LongOutputStream lengthStream; private final PresentOutputStream presentStream; private final ColumnWriter keyWriter; private final ColumnWriter valueWriter; private final List<ColumnStatistics> rowGroupColumnStatistics = new ArrayList<>(); private int nonNullValueCount; private boolean closed; public MapColumnWriter(OrcColumnId columnId, CompressionKind compression, int bufferSize, ColumnWriter keyWriter, ColumnWriter valueWriter) { this.columnId = requireNonNull(columnId, "columnId is null"); this.compressed = requireNonNull(compression, "compression is null") != NONE; this.columnEncoding = new ColumnEncoding(DIRECT_V2, 0); this.keyWriter = requireNonNull(keyWriter, "keyWriter is null"); this.valueWriter = requireNonNull(valueWriter, "valueWriter is null"); this.lengthStream = createLengthOutputStream(compression, bufferSize); this.presentStream = new PresentOutputStream(compression, bufferSize); } @Override public List<ColumnWriter> getNestedColumnWriters() { return ImmutableList.<ColumnWriter>builder() .add(keyWriter) .addAll(keyWriter.getNestedColumnWriters()) .add(valueWriter) .addAll(valueWriter.getNestedColumnWriters()) .build(); } @Override public Map<OrcColumnId, ColumnEncoding> getColumnEncodings() { ImmutableMap.Builder<OrcColumnId, ColumnEncoding> encodings = ImmutableMap.builder(); encodings.put(columnId, columnEncoding); encodings.putAll(keyWriter.getColumnEncodings()); encodings.putAll(valueWriter.getColumnEncodings()); return encodings.build(); } @Override public void beginRowGroup() { lengthStream.recordCheckpoint(); presentStream.recordCheckpoint(); 
keyWriter.beginRowGroup(); valueWriter.beginRowGroup(); } @Override public void writeBlock(Block block) { checkState(!closed); checkArgument(block.getPositionCount() > 0, "Block is empty"); ColumnarMap columnarMap = toColumnarMap(block); writeColumnarMap(columnarMap); } private void writeColumnarMap(ColumnarMap columnarMap) { // write nulls and lengths for (int position = 0; position < columnarMap.getPositionCount(); position++) { boolean present = !columnarMap.isNull(position); presentStream.writeBoolean(present); if (present) { nonNullValueCount++; lengthStream.writeLong(columnarMap.getEntryCount(position)); } } // write keys and value Block keysBlock = columnarMap.getKeysBlock(); if (keysBlock.getPositionCount() > 0) { keyWriter.writeBlock(keysBlock); valueWriter.writeBlock(columnarMap.getValuesBlock()); } } @Override public Map<OrcColumnId, ColumnStatistics> finishRowGroup() { checkState(!closed); ColumnStatistics statistics = new ColumnStatistics((long) nonNullValueCount, 0, null, null, null, null, null, null, null, null, null); rowGroupColumnStatistics.add(statistics); nonNullValueCount = 0; ImmutableMap.Builder<OrcColumnId, ColumnStatistics> columnStatistics = ImmutableMap.builder(); columnStatistics.put(columnId, statistics); columnStatistics.putAll(keyWriter.finishRowGroup()); columnStatistics.putAll(valueWriter.finishRowGroup()); return columnStatistics.build(); } @Override public void close() { closed = true; keyWriter.close(); valueWriter.close(); lengthStream.close(); presentStream.close(); } @Override public Map<OrcColumnId, ColumnStatistics> getColumnStripeStatistics() { checkState(closed); ImmutableMap.Builder<OrcColumnId, ColumnStatistics> columnStatistics = ImmutableMap.builder(); columnStatistics.put(columnId, ColumnStatistics.mergeColumnStatistics(rowGroupColumnStatistics)); columnStatistics.putAll(keyWriter.getColumnStripeStatistics()); columnStatistics.putAll(valueWriter.getColumnStripeStatistics()); return columnStatistics.build(); } 
@Override public List<StreamDataOutput> getIndexStreams(CompressedMetadataWriter metadataWriter) throws IOException { checkState(closed); ImmutableList.Builder<RowGroupIndex> rowGroupIndexes = ImmutableList.builder(); List<LongStreamCheckpoint> lengthCheckpoints = lengthStream.getCheckpoints(); Optional<List<BooleanStreamCheckpoint>> presentCheckpoints = presentStream.getCheckpoints(); for (int i = 0; i < rowGroupColumnStatistics.size(); i++) { int groupId = i; ColumnStatistics columnStatistics = rowGroupColumnStatistics.get(groupId); LongStreamCheckpoint lengthCheckpoint = lengthCheckpoints.get(groupId); Optional<BooleanStreamCheckpoint> presentCheckpoint = presentCheckpoints.map(checkpoints -> checkpoints.get(groupId)); List<Integer> positions = createArrayColumnPositionList(compressed, lengthCheckpoint, presentCheckpoint); rowGroupIndexes.add(new RowGroupIndex(positions, columnStatistics)); } Slice slice = metadataWriter.writeRowIndexes(rowGroupIndexes.build()); Stream stream = new Stream(columnId, StreamKind.ROW_INDEX, slice.length(), false); ImmutableList.Builder<StreamDataOutput> indexStreams = ImmutableList.builder(); indexStreams.add(new StreamDataOutput(slice, stream)); indexStreams.addAll(keyWriter.getIndexStreams(metadataWriter)); indexStreams.addAll(keyWriter.getBloomFilters(metadataWriter)); indexStreams.addAll(valueWriter.getIndexStreams(metadataWriter)); indexStreams.addAll(valueWriter.getBloomFilters(metadataWriter)); return indexStreams.build(); } private static List<Integer> createArrayColumnPositionList( boolean compressed, LongStreamCheckpoint lengthCheckpoint, Optional<BooleanStreamCheckpoint> presentCheckpoint) { ImmutableList.Builder<Integer> positionList = ImmutableList.builder(); presentCheckpoint.ifPresent(booleanStreamCheckpoint -> positionList.addAll(booleanStreamCheckpoint.toPositionList(compressed))); positionList.addAll(lengthCheckpoint.toPositionList(compressed)); return positionList.build(); } @Override public List<StreamDataOutput> 
getBloomFilters(CompressedMetadataWriter metadataWriter) throws IOException { return ImmutableList.of(); } @Override public List<StreamDataOutput> getDataStreams() { checkState(closed); ImmutableList.Builder<StreamDataOutput> outputDataStreams = ImmutableList.builder(); presentStream.getStreamDataOutput(columnId).ifPresent(outputDataStreams::add); outputDataStreams.add(lengthStream.getStreamDataOutput(columnId)); outputDataStreams.addAll(keyWriter.getDataStreams()); outputDataStreams.addAll(valueWriter.getDataStreams()); return outputDataStreams.build(); } @Override public long getBufferedBytes() { return lengthStream.getBufferedBytes() + presentStream.getBufferedBytes() + keyWriter.getBufferedBytes() + valueWriter.getBufferedBytes(); } @Override public long getRetainedBytes() { long retainedBytes = INSTANCE_SIZE + lengthStream.getRetainedBytes() + presentStream.getRetainedBytes() + keyWriter.getRetainedBytes() + valueWriter.getRetainedBytes(); for (ColumnStatistics statistics : rowGroupColumnStatistics) { retainedBytes += statistics.getRetainedSizeInBytes(); } return retainedBytes; } @Override public void reset() { closed = false; lengthStream.reset(); presentStream.reset(); keyWriter.reset(); valueWriter.reset(); rowGroupColumnStatistics.clear(); nonNullValueCount = 0; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.shiro.security; import java.io.ByteArrayInputStream; import java.io.ObjectInputStream; import org.apache.camel.AsyncCallback; import org.apache.camel.CamelAuthorizationException; import org.apache.camel.CamelExchangeException; import org.apache.camel.Exchange; import org.apache.camel.Processor; import org.apache.camel.support.ExchangeHelper; import org.apache.camel.support.processor.DelegateAsyncProcessor; import org.apache.camel.util.IOHelper; import org.apache.camel.util.ObjectHelper; import org.apache.shiro.SecurityUtils; import org.apache.shiro.authc.AuthenticationException; import org.apache.shiro.authc.IncorrectCredentialsException; import org.apache.shiro.authc.LockedAccountException; import org.apache.shiro.authc.UnknownAccountException; import org.apache.shiro.authc.UsernamePasswordToken; import org.apache.shiro.authz.Permission; import org.apache.shiro.codec.Base64; import org.apache.shiro.subject.Subject; import org.apache.shiro.util.ByteSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * {@link Processor} that executes the authentication and authorization of the {@link Subject} accordingly * to the {@link ShiroSecurityPolicy}. 
*/ public class ShiroSecurityProcessor extends DelegateAsyncProcessor { private static final Logger LOG = LoggerFactory.getLogger(ShiroSecurityProcessor.class); private final ShiroSecurityPolicy policy; public ShiroSecurityProcessor(Processor processor, ShiroSecurityPolicy policy) { super(processor); this.policy = policy; } @Override public boolean process(Exchange exchange, AsyncCallback callback) { try { applySecurityPolicy(exchange); } catch (Exception e) { // exception occurred so break out exchange.setException(e); callback.done(true); return true; } return super.process(exchange, callback); } private void applySecurityPolicy(Exchange exchange) throws Exception { ByteSource encryptedToken; // if we have username and password as headers then use them to create a token String username = exchange.getIn().getHeader(ShiroSecurityConstants.SHIRO_SECURITY_USERNAME, String.class); String password = exchange.getIn().getHeader(ShiroSecurityConstants.SHIRO_SECURITY_PASSWORD, String.class); if (username != null && password != null) { ShiroSecurityToken token = new ShiroSecurityToken(username, password); // store the token as header, either as base64 or as the object as-is if (policy.isBase64()) { ByteSource bytes = ShiroSecurityHelper.encrypt(token, policy.getPassPhrase(), policy.getCipherService()); String base64 = bytes.toBase64(); exchange.getIn().setHeader(ShiroSecurityConstants.SHIRO_SECURITY_TOKEN, base64); } else { exchange.getIn().setHeader(ShiroSecurityConstants.SHIRO_SECURITY_TOKEN, token); } // and now remove the headers as we turned those into the token instead exchange.getIn().removeHeader(ShiroSecurityConstants.SHIRO_SECURITY_USERNAME); exchange.getIn().removeHeader(ShiroSecurityConstants.SHIRO_SECURITY_PASSWORD); } Object token = ExchangeHelper.getMandatoryHeader(exchange, ShiroSecurityConstants.SHIRO_SECURITY_TOKEN, Object.class); // we support the token in a number of ways if (token instanceof ShiroSecurityToken) { ShiroSecurityToken sst = 
(ShiroSecurityToken) token; encryptedToken = ShiroSecurityHelper.encrypt(sst, policy.getPassPhrase(), policy.getCipherService()); // Remove unencrypted token + replace with an encrypted token exchange.getIn().removeHeader(ShiroSecurityConstants.SHIRO_SECURITY_TOKEN); exchange.getIn().setHeader(ShiroSecurityConstants.SHIRO_SECURITY_TOKEN, encryptedToken); } else if (token instanceof String) { String data = (String) token; if (policy.isBase64()) { byte[] bytes = Base64.decode(data); encryptedToken = ByteSource.Util.bytes(bytes); } else { encryptedToken = ByteSource.Util.bytes(data); } } else if (token instanceof ByteSource) { encryptedToken = (ByteSource) token; } else { throw new CamelExchangeException("Shiro security header " + ShiroSecurityConstants.SHIRO_SECURITY_TOKEN + " is unsupported type: " + ObjectHelper.classCanonicalName(token), exchange); } ByteSource decryptedToken = policy.getCipherService().decrypt(encryptedToken.getBytes(), policy.getPassPhrase()); ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(decryptedToken.getBytes()); ObjectInputStream objectInputStream = new ObjectInputStream(byteArrayInputStream); ShiroSecurityToken securityToken; try { securityToken = (ShiroSecurityToken)objectInputStream.readObject(); } finally { IOHelper.close(objectInputStream, byteArrayInputStream); } Subject currentUser = SecurityUtils.getSubject(); // Authenticate user if not authenticated try { authenticateUser(currentUser, securityToken); // Test whether user's role is authorized to perform functions in the permissions list authorizeUser(currentUser, exchange); } finally { if (policy.isAlwaysReauthenticate()) { currentUser.logout(); } } } private void authenticateUser(Subject currentUser, ShiroSecurityToken securityToken) { boolean authenticated = currentUser.isAuthenticated(); boolean sameUser = securityToken.getUsername().equals(currentUser.getPrincipal()); LOG.trace("Authenticated: {}, same Username: {}", authenticated, sameUser); if 
(!authenticated || !sameUser) { UsernamePasswordToken token = new UsernamePasswordToken(securityToken.getUsername(), securityToken.getPassword()); if (policy.isAlwaysReauthenticate()) { token.setRememberMe(false); } else { token.setRememberMe(true); } try { currentUser.login(token); LOG.debug("Current user {} successfully authenticated", currentUser.getPrincipal()); } catch (UnknownAccountException uae) { throw new UnknownAccountException("Authentication Failed. There is no user with username of " + token.getPrincipal(), uae.getCause()); } catch (IncorrectCredentialsException ice) { throw new IncorrectCredentialsException("Authentication Failed. Password for account " + token.getPrincipal() + " was incorrect!", ice.getCause()); } catch (LockedAccountException lae) { throw new LockedAccountException("Authentication Failed. The account for username " + token.getPrincipal() + " is locked." + " Please contact your administrator to unlock it.", lae.getCause()); } catch (AuthenticationException ae) { throw new AuthenticationException("Authentication Failed.", ae.getCause()); } } } private void authorizeUser(Subject currentUser, Exchange exchange) throws CamelAuthorizationException { boolean authorized = false; if (!policy.getPermissionsList().isEmpty()) { if (policy.isAllPermissionsRequired()) { authorized = currentUser.isPermittedAll(policy.getPermissionsList()); } else { for (Permission permission : policy.getPermissionsList()) { if (currentUser.isPermitted(permission)) { authorized = true; break; } } } } else if (!policy.getRolesList().isEmpty()) { if (policy.isAllRolesRequired()) { authorized = currentUser.hasAllRoles(policy.getRolesList()); } else { for (String role : policy.getRolesList()) { if (currentUser.hasRole(role)) { authorized = true; break; } } } } else { LOG.trace("Valid Permissions or Roles List not specified for ShiroSecurityPolicy. 
" + "No authorization checks will be performed for current user."); authorized = true; } if (!authorized) { throw new CamelAuthorizationException("Authorization Failed. Subject's role set does " + "not have the necessary roles or permissions to perform further processing.", exchange); } LOG.debug("Current user {} is successfully authorized.", currentUser.getPrincipal()); } }
package io.bxbxbai.zhuanlan.utils; import android.content.Context; import android.os.AsyncTask; import com.google.gson.Gson; /** * The main reservoir class. * @author anupcowkur */ public class Reservoir { public static final int DEFAULT_DIRECTOR_SIZE = 20 * 1024 *1024; private static SimpleDiskCache cache; /** * Initialize Reservoir * * @param context context. * @param maxSize the maximum size in bytes. */ public static synchronized void init(Context context, long maxSize) { try { cache = SimpleDiskCache.open(context.getFilesDir(), 1, maxSize); } catch (Exception e) { e.printStackTrace(); } } /** * Check if an object with the given key exists in the Reservoir. * * @param key the key string. * @return true if object with given key exists. */ public static boolean contains(String key) throws Exception { if (cache == null) { throw new IllegalStateException("You must call Reservoir.init(context, maxSize) first!"); } return cache.contains(key); } /** * Put an object into Reservoir with the given key. This a blocking IO operation. Previously * stored object with the same * key (if any) will be overwritten. * * @param key the key string. * @param object the object to be stored. */ public static void put(String key, Object object) throws Exception { String json = new Gson().toJson(object); cache.put(key, json); } /** * Put an object into Reservoir with the given key asynchronously. Previously * stored object with the same * key (if any) will be overwritten. * * @param key the key string. * @param object the object to be stored. * @param callback a callback of type {@link PutCallback} * which is called upon completion. */ public static void putAsync(String key, Object object, PutCallback callback) { new PutTask(key, object, callback).execute(); } /** * Get an object from Reservoir with the given key. This a blocking IO operation. * * @param key the key string. * @param classOfT the Class type of the expected return object. 
* @return the object of the given type if it exists. */ public static <T> T get(String key, Class<T> classOfT) throws Exception { String json = cache.getString(key).getString(); T value = new Gson().fromJson(json, classOfT); if (value == null) throw new NullPointerException(); return value; } /** * Get an object from Reservoir with the given key asynchronously. * * @param key the key string. * @param callback a callback of type {@link GetCallback} * which is called upon completion. */ public static <T> void getAsync(String key, Class<T> classOfT, GetCallback<T> callback) { new GetTask<T>(key, classOfT, callback).execute(); } /** * Delete an object from Reservoir with the given key. This a blocking IO operation. Previously * stored object with the same * key (if any) will be deleted. * * @param key the key string. */ public static void delete(String key) throws Exception { cache.delete(key); } /** * Delete an object into Reservoir with the given key asynchronously. Previously * stored object with the same * key (if any) will be deleted. * * @param key the key string. * @param callback a callback of type {@link DeleteCallback} * which is called upon completion. */ public static void deleteAsync(String key, DeleteCallback callback) { new DeleteTask(key, callback).execute(); } /** * AsyncTask to perform put operation in a background thread. */ private static class PutTask extends AsyncTask<Void, Void, Void> { private final String key; private Exception e; private final PutCallback callback; final Object object; private PutTask(String key, Object object, PutCallback callback) { this.key = key; this.callback = callback; this.object = object; this.e = null; } @Override protected Void doInBackground(Void... 
params) { try { String json = new Gson().toJson(object); cache.put(key, json); } catch (Exception e) { this.e = e; } return null; } @Override protected void onPostExecute(Void aVoid) { if (callback != null) { if (e == null) { callback.onSuccess(); } else { callback.onFailure(e); } } } } /** * AsyncTask to perform get operation in a background thread. */ private static class GetTask<T> extends AsyncTask<Void, Void, T> { private final String key; private final GetCallback callback; private final Class<T> classOfT; private Exception e; private GetTask(String key, Class<T> classOfT, GetCallback callback) { this.key = key; this.callback = callback; this.classOfT = classOfT; this.e = null; } @Override protected T doInBackground(Void... params) { try { String json = cache.getString(key).getString(); T value = new Gson().fromJson(json, classOfT); if (value == null) throw new NullPointerException(); return value; } catch (Exception e) { this.e = e; return null; } } @Override protected void onPostExecute(T object) { if (callback != null) { if (e == null) { callback.onSuccess(object); } else { callback.onFailure(e); } } } } /** * AsyncTask to perform delete operation in a background thread. */ private static class DeleteTask extends AsyncTask<Void, Void, Void> { private final String key; private Exception e; private final DeleteCallback callback; private DeleteTask(String key, DeleteCallback callback) { this.key = key; this.callback = callback; this.e = null; } @Override protected Void doInBackground(Void... 
params) { try { cache.delete(key); } catch (Exception e) { this.e = e; } return null; } @Override protected void onPostExecute(Void aVoid) { if (callback != null) { if (e == null) { callback.onSuccess(); } else { callback.onFailure(e); } } } } public static class DeleteCallback { public void onSuccess() {} public void onFailure(Exception e) {} } public static class GetCallback<T> { public void onSuccess(T object) {} public void onFailure(Exception e) {} } public static class PutCallback { public void onSuccess() {} public void onFailure(Exception e) {} } }
/** * The MIT License (MIT) * <p> * Copyright (c) 2015-2017 the original author or authors. * <p> * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * <p> * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * <p> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package com.bernardomg.example.swss.test.util.factory; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStreamWriter; import java.io.UnsupportedEncodingException; import java.nio.ByteBuffer; import java.security.KeyStore; import java.security.PrivateKey; import java.security.SecureRandom; import java.security.cert.X509Certificate; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.Date; import java.util.HashMap; import java.util.Map; import java.util.TimeZone; import javax.xml.soap.MessageFactory; import javax.xml.soap.MimeHeaders; import javax.xml.soap.SOAPElement; import javax.xml.soap.SOAPEnvelope; import javax.xml.soap.SOAPException; import javax.xml.soap.SOAPHeader; import javax.xml.soap.SOAPHeaderElement; import javax.xml.soap.SOAPMessage; import javax.xml.soap.SOAPPart; import javax.xml.transform.Source; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerConfigurationException; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMResult; import javax.xml.transform.dom.DOMSource; import org.apache.commons.codec.binary.Base64; import org.apache.commons.codec.digest.DigestUtils; import org.apache.tools.ant.util.Base64Converter; import org.apache.xml.security.exceptions.XMLSecurityException; import org.apache.xml.security.signature.XMLSignature; import org.apache.xml.security.transforms.Transforms; import org.apache.xml.security.utils.Constants; import org.apache.xml.security.utils.XMLUtils; import org.springframework.core.io.ClassPathResource; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import com.bernardomg.example.swss.test.util.SoapMessageUtils; import freemarker.template.Configuration; import freemarker.template.Template; /** * Factory method for secured SOAP messages. 
* <p> * These can be contained in a {@code SOAPMessage}, or be the input of an * {@code InputStream}. In the second case the {@code InputStream} will point to * a string containing the full message. * * @author Bernardo Mart&iacute;nez Garrido */ public final class SecureSoapMessages { /** * Creates a SOAP message with a digested password, username and nonce. * <p> * The nonce will be generated during the securing process. * <p> * A freemarker template should be provided, it will be used to generate the * final message from the received parameters. * * @param path * path to the freemarker template * @param user * user to include in the message * @param password * password to include in the message * @return a digested password {@code SOAPMessage} * @return a SOAP message with a digested password, username and nonce * @throws Exception * if any error occurs during the message creation */ public static final SOAPMessage getDigestedPasswordMessage( final String path, final String user, final String password) throws Exception { return MessageFactory.newInstance().createMessage(new MimeHeaders(), getDigestedPasswordStream(path, user, password)); } /** * Creates a SOAP message with a digested password, username and nonce. * <p> * The nonce will be generated during the securing process. * <p> * A freemarker template should be provided, it will be used to generate the * final message from the received parameters. 
* * @param path * path to the freemarker template * @param user * user to include in the message * @param password * password to include in the message * @return a SOAP message with a digested password, username and nonce * @throws Exception * if any error occurs during the message creation */ public static final InputStream getDigestedPasswordStream(final String path, final String user, final String password) throws Exception { return new ByteArrayInputStream( getDigestedPasswordMessageContent(path, user, password) .getBytes("UTF-8")); } /** * Creates a SOAP message with a plain password and username. * <p> * A freemarker template should be provided, it will be used to generate the * final message from the received parameters. * * @param path * path to the freemarker template * @param user * user to include in the message * @param password * password to include in the message * @return a SOAP message with a plain password and username * @throws Exception * if any error occurs during the message creation */ public static final SOAPMessage getPlainPasswordMessage(final String path, final String user, final String password) throws Exception { return MessageFactory.newInstance().createMessage(new MimeHeaders(), getPlainPasswordStream(path, user, password)); } /** * Creates a SOAP message with a plain password and username. * <p> * A freemarker template should be provided, it will be used to generate the * final message from the received parameters. 
* * @param path * path to the freemarker template * @param user * user to include in the message * @param password * password to include in the message * @return a SOAP message with a plain password and username * @throws Exception * if any error occurs during the message creation */ public static final InputStream getPlainPasswordStream(final String path, final String user, final String password) throws Exception { return new ByteArrayInputStream( getPlainPasswordMessageContent(path, user, password) .getBytes("UTF-8")); } /** * Creates a SOAP message with a signature. * <p> * A valid SOAP message is required, this will be the message to be signed. * * @param pathBase * path to the SOAP message to sign * @param privateKeyAlias * alias for the private key * @param privateKeyPass * password for the private key * @param certificateAlias * alias for the certificate * @param keystore * key store for the signing * @return a singed SOAP message * @throws Exception * if any error occurs during the message creation */ public static final SOAPMessage getSignedMessage(final String pathBase, final String privateKeyAlias, final String privateKeyPass, final String certificateAlias, final KeyStore keystore) throws Exception { Element root = null; final String BaseURI = new ClassPathResource(pathBase).getURI() .toString(); SOAPMessage soapMessage; final Base64Converter base64 = new Base64Converter(); String token; Node binaryToken; X509Certificate cert; PrivateKey privateKey; XMLSignature sig; soapMessage = getMessageToSign(pathBase); // get the private key used to sign, from the keystore privateKey = (PrivateKey) keystore.getKey(privateKeyAlias, privateKeyPass.toCharArray()); cert = (X509Certificate) keystore.getCertificate(certificateAlias); // create basic structure of signature final Document doc = toDocument(soapMessage); org.apache.xml.security.Init.init(); sig = getSignature(doc, BaseURI, cert, privateKey); // optional, but better root = doc.getDocumentElement(); 
root.normalize(); root.getElementsByTagName("wsse:Security").item(0) .appendChild(sig.getElement()); token = base64.encode(cert.getEncoded()); binaryToken = root.getElementsByTagName("wsse:BinarySecurityToken") .item(0); binaryToken.setTextContent(token); // write signature to file XMLUtils.outputDOMc14nWithComments(doc, System.out); return toMessage(doc); } /** * Creates a SOAP message with a signature. * <p> * A valid SOAP message is required, this will be the message to be signed. * * @param pathBase * path to the SOAP message to sign * @param privateKeyAlias * alias for the private key * @param privateKeyPass * password for the private key * @param certificateAlias * alias for the certificate * @param keystore * key store for the signing * @return a singed SOAP message * @throws Exception * if any error occurs during the message creation */ public static final InputStream getSignedStream(final String pathBase, final String privateKeyAlias, final String privateKeyPass, final String certificateAlias, final KeyStore keystore) throws Exception { final SOAPMessage msg = SecureSoapMessages.getSignedMessage(pathBase, privateKeyAlias, privateKeyPass, certificateAlias, keystore); final ByteArrayOutputStream out = new ByteArrayOutputStream(); msg.writeTo(out); final String strMsg = new String(out.toByteArray()); return new ByteArrayInputStream(strMsg.getBytes()); } /** * Generates the digest value for the SOAP secure header. * <p> * This is a codified password, with the help of the date and nonce values. * Both of these values should be found on the SOAP secure header. 
* * @param password * password to digest * @param date * date used on the SOAP header * @param nonce * nonce used on the SOAP header * @return the digested password * @throws UnsupportedEncodingException * if the UTF-8 encoding is not supported */ private static final String generateDigest(final String password, final String date, final String nonce) throws UnsupportedEncodingException { final ByteBuffer buf; // Buffers storing the data to digest byte[] toHash; // Bytes to generate the hash // Fills buffer with data to digest buf = ByteBuffer.allocate(1000); buf.put(Base64.decodeBase64(nonce)); buf.put(date.getBytes("UTF-8")); buf.put(password.getBytes("UTF-8")); // Initializes hash bytes to the correct size toHash = new byte[buf.position()]; buf.rewind(); // Copies bytes from the buffer to the hash bytes buf.get(toHash); return Base64.encodeBase64String(DigestUtils.sha1(toHash)); } /** * Generates the current date in the format expected by the SOAP message. * * @return the current date */ private static final String getCurrentDate() { final DateFormat format; // Format to apply // Zulu time format format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'"); format.setTimeZone(TimeZone.getTimeZone("UTC")); return format.format(new Date()); } /** * Generates the text content for the digested password SOAP message. * <p> * This will be created from a freemarker template. 
* * @param path * path to the freemarker template * @param user * username to use * @param password * password to use * @return the text content for the digested password message * @throws Exception * if any error occurs during the message creation */ private static final String getDigestedPasswordMessageContent( final String path, final String user, final String password) throws Exception { final String nonce; // Nonce for the message final String date; // Current date final String digest; // Digested password final Template template; // Freemarker template final Map<String, Object> data; // Data for the template final ByteArrayOutputStream out; // Steam with the message // Generates security data nonce = getNonce(); date = getCurrentDate(); digest = generateDigest(password, date, nonce); // Prepares the data for the template data = new HashMap<String, Object>(); data.put("user", user); data.put("password", password); data.put("nonce", nonce); data.put("date", date); data.put("digest", digest); // Processes the template to the output out = new ByteArrayOutputStream(); template = new Configuration(Configuration.VERSION_2_3_0) .getTemplate(path); template.process(data, new OutputStreamWriter(out)); return new String(out.toByteArray()); } private static final SOAPMessage getMessageToSign(final String pathBase) throws SOAPException, IOException { final SOAPMessage soapMessage; final SOAPPart soapPart; final SOAPEnvelope soapEnvelope; final SOAPHeader soapHeader; final SOAPHeaderElement secElement; final SOAPElement binaryTokenElement; soapMessage = SoapMessageUtils.getMessage(pathBase); soapPart = soapMessage.getSOAPPart(); soapEnvelope = soapPart.getEnvelope(); soapHeader = soapEnvelope.getHeader(); secElement = soapHeader.addHeaderElement(soapEnvelope.createName( "Security", "wsse", "http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd")); binaryTokenElement = secElement.addChildElement(soapEnvelope.createName( "BinarySecurityToken", 
"wsse", "http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd")); binaryTokenElement.setAttribute("EncodingType", "http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-soap-message-security-1.0#Base64Binary"); binaryTokenElement.setAttribute("ValueType", "http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-x509-token-profile-1.0#X509v3"); return soapMessage; } /** * Generates a nonce value for the SOAP secure header. * * @return the nonce value * @throws Exception * if any error occurs while generating the nonce */ private static final String getNonce() throws Exception { final SecureRandom random; // Random value generator final byte[] nonceBytes; // Bytes to generate the nonce random = SecureRandom.getInstance("SHA1PRNG"); random.setSeed(System.currentTimeMillis()); nonceBytes = new byte[16]; random.nextBytes(nonceBytes); return new String(Base64.encodeBase64(nonceBytes), "UTF-8"); } /** * Generates the text content for the plain password SOAP message. * <p> * This will be created from a freemarker template. 
* * @param path * path to the freemarker template * @param user * username to use * @param password * password to use * @return the text content for the plain passworde message * @throws Exception * if any error occurs during the message creation */ private static final String getPlainPasswordMessageContent( final String path, final String user, final String password) throws Exception { final Template template; // Freemarker template final Map<String, Object> data; // Data for the template final ByteArrayOutputStream out; // Steam with the message // Prepares the data for the template data = new HashMap<String, Object>(); data.put("user", user); data.put("password", password); // Processes the template to the output out = new ByteArrayOutputStream(); template = new Configuration(Configuration.VERSION_2_3_0) .getTemplate(path); template.process(data, new OutputStreamWriter(out)); return new String(out.toByteArray()); } private static final XMLSignature getSignature(final Document doc, final String BaseURI, final X509Certificate cert, final PrivateKey privateKey) throws XMLSecurityException { final XMLSignature sig; sig = new XMLSignature(doc, BaseURI, XMLSignature.ALGO_ID_SIGNATURE_RSA_SHA1); final Transforms transforms = new Transforms(doc); transforms.addTransform(Transforms.TRANSFORM_C14N_OMIT_COMMENTS); // Sign the content of SOAP Envelope sig.addDocument("", transforms, Constants.ALGO_ID_DIGEST_SHA1); sig.addKeyInfo(cert); sig.addKeyInfo(cert.getPublicKey()); sig.sign(privateKey); return sig; } private static final Document toDocument(final SOAPMessage soapMsg) throws TransformerConfigurationException, TransformerException, SOAPException, IOException { final Source src = soapMsg.getSOAPPart().getContent(); final TransformerFactory tf = TransformerFactory.newInstance(); final Transformer transformer = tf.newTransformer(); final DOMResult result = new DOMResult(); transformer.transform(src, result); return (Document) result.getNode(); } private static final 
SOAPMessage toMessage(final Document jdomDocument) throws IOException, SOAPException { final SOAPMessage message = MessageFactory.newInstance() .createMessage(); final SOAPPart sp = message.getSOAPPart(); sp.setContent(new DOMSource(jdomDocument.getFirstChild())); return message; } /** * Private constructor to avoid initialization. */ private SecureSoapMessages() { super(); } }
package ie.ibuttimer.pmat;

import ie.ibuttimer.pmat.db.AccountCurrency;
import ie.ibuttimer.pmat.db.AccountType;
import ie.ibuttimer.pmat.db.Bank;
import ie.ibuttimer.pmat.db.DatabaseManager;
import ie.ibuttimer.pmat.util.DateTimeFormat;
import ie.ibuttimer.pmat.util.DeviceConfiguration;
import ie.ibuttimer.pmat.util.Logger;
import ie.ibuttimer.widget.AmountEditText;
import ie.ibuttimer.widget.SelectDateFragment;
import ie.ibuttimer.widget.TextViewAdapter;

import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Comparator;
import java.util.Currency;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.Locale;

import android.os.Bundle;
import android.app.Activity;
import android.app.DatePickerDialog;
import android.app.DialogFragment;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.View.OnFocusChangeListener;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.Button;
import android.widget.DatePicker;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import android.widget.Spinner;
import android.widget.TextView;
import android.text.Editable;
import android.text.TextWatcher;
import android.content.ContentResolver;
import android.content.ContentValues;
import android.database.SQLException;

/**
 * Activity for creating a new account record: collects name, type, currency,
 * bank, opening date/balance and optional credit/overdraft limits, then inserts
 * the account through the content provider.
 *
 * NOTE(review): this chunk is truncated — the class continues past the visible
 * range (setupCancelButton is cut mid-statement below).
 */
public class AddAccountActivity extends BaseActivity implements DatePickerDialog.OnDateSetListener {

    // widget variables and other related variables
    private EditText editTextName;
    private EditText editTextNickname;
    private GregorianCalendar accountDate = null;
    private Spinner spinnerType;
    private ArrayList<AccountType> accTypes = new ArrayList<AccountType>();
    private TextViewAdapter<AccountType> accTypeAdapter;
    private AccountType accType;                // currently selected account type
    private Spinner spinnerCurrency;
    private ArrayList<AccountCurrency> currencies = new ArrayList<AccountCurrency>();
    private TextViewAdapter<AccountCurrency> currencyAdapter;
    private AccountCurrency currentCurrency;    // currently selected currency
    private Spinner spinnerBank;
    private ArrayList<Bank> banks = new ArrayList<Bank>();
    private TextViewAdapter<Bank> bankAdapter;
    private Bank currentBank;                   // currently selected bank
    private TextView textViewDate;
    private AmountEditText editTextInitialBal;
    private LinearLayout layoutCreditLimit;
    private AmountEditText editTextCreditLimit;
    private LinearLayout layoutOverdraftLimit;
    private AmountEditText editTextOverdraftLimit;
    private Button buttonSave;
    private Button buttonCancel;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_add_account);
        // Show the Up button in the action bar.
        setupActionBar();
        // get references to the activity views
        editTextName = (EditText)this.findViewById(R.id.addAccount_editTextName);
        editTextNickname = (EditText)this.findViewById(R.id.addAccount_editTextNickname);
        spinnerType = (Spinner)this.findViewById(R.id.addAccount_spinnerType);
        spinnerCurrency = (Spinner)this.findViewById(R.id.addAccount_spinnerCurrency);
        spinnerBank = (Spinner)this.findViewById(R.id.addAccount_spinnerBank);
        textViewDate = (TextView)this.findViewById(R.id.addAccount_textViewDate);
        editTextInitialBal = (AmountEditText)this.findViewById(R.id.addAccount_editTextInitialBal);
        editTextCreditLimit = (AmountEditText)this.findViewById(R.id.addAccount_editTextCreditLimit);
        layoutCreditLimit = (LinearLayout)this.findViewById(R.id.addAccount_layoutCredit);
        editTextOverdraftLimit = (AmountEditText)this.findViewById(R.id.addAccount_editTextOverdraftLimit);
        layoutOverdraftLimit = (LinearLayout)this.findViewById(R.id.addAccount_layoutOverdraft);
        buttonSave = (Button)this.findViewById(R.id.addAccount_buttonSave);
        buttonCancel = (Button)this.findViewById(R.id.addAccount_buttonCancel);
        // setup the activity views
        setupLayout();
        setupAccountTypeSpinner();
        setupCurrencySpinner();
        setupBankSpinner();
        setupInitialBalanceEditText();
        setupCreditLimitEditText();
        setupOverdraftLimitEditText();
        setupDateButton();
        setupSaveButton();
        setupCancelButton();
    }

    /**
     * Setup the account type spinner in this activity.
     * Loads the types from the content provider and tracks the selection in
     * accType, toggling the optional limit fields on each change.
     */
    private void setupAccountTypeSpinner() {
        accTypes = AccountType.loadAccountTypesFromProvider(getContentResolver());
        accTypeAdapter = new TextViewAdapter<AccountType>(this, R.layout.text_view_adapter_item, R.id.text_view_adapter_item_textViewItem, accTypes);
        spinnerType.setAdapter(accTypeAdapter);
        spinnerType.setOnFocusChangeListener( focusChangeListener );
        // create a listener to retrieve the selected account type
        spinnerType.setOnItemSelectedListener(new OnItemSelectedListener () {
            @Override
            public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
                // An item was selected, retrieve the selected item
                accType = (AccountType) parent.getItemAtPosition(position);
                // enable/disable optional fields
                enableOptionalFields();
            }
            @Override
            public void onNothingSelected(AdapterView<?> parent) {
                // nop
            }
        });
        // get the initial selection
        // NOTE(review): getSelectedItem() returns null when the adapter is
        // empty — presumably the provider always returns at least one type;
        // verify against the data setup.
        accType = (AccountType) spinnerType.getSelectedItem();
        // enable/disable optional fields
        enableOptionalFields();
    }

    // Shared focus listener: re-evaluates the optional fields whenever any of
    // the spinners loses focus.
    private OnFocusChangeListener focusChangeListener = new OnFocusChangeListener() {
        @Override
        public void onFocusChange(View v, boolean hasFocus) {
            if ( !hasFocus ) {
                // enable/disable optional fields
                enableOptionalFields();
            }
        }
    };

    /**
     * Setup the currency spinner in this activity.
     * Loads and sorts the available currencies and propagates the selected
     * currency's minor units to the amount entry widgets.
     */
    private void setupCurrencySpinner() {
        // populate currency spinner
        currencies = AccountCurrency.loadCurrenciesFromProvider(getContentResolver());
        // sort list
        if ( currencies.size() > 0 )
            Collections.sort(currencies, new CompareAccountCurrency());
        currencyAdapter = new TextViewAdapter<AccountCurrency>(this, R.layout.text_view_adapter_item, R.id.text_view_adapter_item_textViewItem, currencies);
        spinnerCurrency.setAdapter(currencyAdapter);
        spinnerCurrency.setOnFocusChangeListener( focusChangeListener );
        // create a listener to retrieve the selected currency
        spinnerCurrency.setOnItemSelectedListener(new OnItemSelectedListener () {
            @Override
            public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
                // An item was selected, retrieve the selected item
                currentCurrency = (AccountCurrency) parent.getItemAtPosition(position);
                // set minor units on amount entry fields
                setAmountsMinorUnits(currentCurrency.getMinorUnits());
            }
            @Override
            public void onNothingSelected(AdapterView<?> parent) {
                // nop
            }
        });
        // get the initial currency selection
        // NOTE(review): will NPE if the currency list is empty — presumably the
        // provider is pre-populated; confirm.
        currentCurrency = (AccountCurrency) spinnerCurrency.getSelectedItem();
        // set minor units on amount entry fields
        setAmountsMinorUnits(currentCurrency.getMinorUnits());
    }

    /**
     * Setup the bank spinner in this activity.
     */
    private void setupBankSpinner() {
        // populate bank spinner
        banks = Bank.loadBanksFromProvider(getContentResolver());
        bankAdapter = new TextViewAdapter<Bank>(this, R.layout.text_view_adapter_item, R.id.text_view_adapter_item_textViewItem, banks);
        spinnerBank.setAdapter(bankAdapter);
        spinnerBank.setOnFocusChangeListener( focusChangeListener );
        // create a listener to retrieve the selected bank
        spinnerBank.setOnItemSelectedListener(new OnItemSelectedListener () {
            @Override
            public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
                // An item was selected, retrieve the selected item
                currentBank = (Bank) parent.getItemAtPosition(position);
            }
            @Override
            public void onNothingSelected(AdapterView<?> parent) {
                // nop
            }
        });
        // get the initial bank selection
        currentBank = (Bank) spinnerBank.getSelectedItem();
    }

    /**
     * Set the minor units for amount entry fields.
     */
    private void setAmountsMinorUnits( int minorUnits ) {
        editTextInitialBal.setMinorUnits(minorUnits);
        editTextCreditLimit.setMinorUnits(minorUnits);
        editTextOverdraftLimit.setMinorUnits(minorUnits);
    }

    /**
     * Setup the layout of this activity: on large screens the labelled rows
     * are laid out horizontally instead of vertically.
     */
    private void setupLayout() {
        if ( DeviceConfiguration.isLargeScreen(getApplicationContext()) ) {
            int[] layouts = new int[] {
                R.id.addAccount_layoutName,
                R.id.addAccount_layoutNickname,
                R.id.addAccount_layoutType,
                R.id.addAccount_layoutCurrency,
                R.id.addAccount_layoutBank,
                R.id.addAccount_layoutInitBal,
                R.id.addAccount_layoutCredit,
                R.id.addAccount_layoutOverdraft,
            };
            for ( int i = layouts.length - 1; i >= 0; --i ) {
                LinearLayout layout = (LinearLayout)findViewById(layouts[i]);
                layout.setOrientation(LinearLayout.HORIZONTAL);
            }
        }
    }

    /**
     * Setup the initial balance edit text in this activity.
     */
    private void setupInitialBalanceEditText() {
        // re-validate the form (and hence the save button) on every edit;
        // allRequiredDataEntered() is defined outside this chunk
        editTextInitialBal.addTextChangedListener(new TextWatcher(){
            public void afterTextChanged(Editable s) {
                // update save button state
                buttonSave.setEnabled( allRequiredDataEntered() );
            }
            public void beforeTextChanged(CharSequence s, int start, int count, int after){}
            public void onTextChanged(CharSequence s, int start, int before, int count){}
        });
    }

    /**
     * Setup the credit limit edit text in this activity.
     */
    private void setupCreditLimitEditText() {
        // re-validate the form on every edit
        editTextCreditLimit.addTextChangedListener(new TextWatcher(){
            public void afterTextChanged(Editable s) {
                // update save button state
                buttonSave.setEnabled( allRequiredDataEntered() );
            }
            public void beforeTextChanged(CharSequence s, int start, int count, int after){}
            public void onTextChanged(CharSequence s, int start, int before, int count){}
        });
    }

    /**
     * Setup the overdraft limit edit text in this activity.
     */
    private void setupOverdraftLimitEditText() {
        // re-validate the form on every edit
        editTextOverdraftLimit.addTextChangedListener(new TextWatcher(){
            public void afterTextChanged(Editable s) {
                // update save button state
                buttonSave.setEnabled( allRequiredDataEntered() );
            }
            public void beforeTextChanged(CharSequence s, int start, int count, int after){}
            public void onTextChanged(CharSequence s, int start, int before, int count){}
        });
    }

    /**
     * Setup the save button in this activity: builds the ContentValues from
     * the form and inserts the new account through the content provider,
     * finishing with RESULT_OK on success or RESULT_CANCELED on failure.
     */
    private void setupSaveButton() {
        // update save button state
        buttonSave.setEnabled( allRequiredDataEntered() );
        buttonSave.setOnClickListener( new OnClickListener() {
            @Override
            public void onClick(View v) {
                // add the new account
                ContentResolver cr = getContentResolver();
                ContentValues values = new ContentValues();
                AccountType type = (AccountType)spinnerType.getSelectedItem();
                String balance = editTextInitialBal.getText().toString().trim();
                values.put(DatabaseManager.ACCOUNT_NAME, editTextName.getText().toString().trim());
                values.put(DatabaseManager.ACCOUNT_NICKNAME, editTextNickname.getText().toString().trim());
                values.put(DatabaseManager.ACCOUNT_TYPE, type.getId());
                values.put(DatabaseManager.ACCOUNT_CURRENCY, currentCurrency.getNumber());
                values.put(DatabaseManager.ACCOUNT_BANK, currentBank.getId());
                values.put(DatabaseManager.ACCOUNT_DATE, DatabaseManager.makeDatabaseTimestamp(getAccountDate()));
                // opening balance seeds initial, current and available balances
                values.put(DatabaseManager.ACCOUNT_INITBAL, balance);
                values.put(DatabaseManager.ACCOUNT_CURRENTBAL, balance);
                values.put(DatabaseManager.ACCOUNT_AVAILBAL, balance);
                // only one of the limit fields applies, depending on the type
                switch ( accType.getLimit() ) {
                    case AccountType.LIMIT_CREDIT:
                        values.put(DatabaseManager.ACCOUNT_LIMIT, editTextCreditLimit.getText().toString().trim());
                        break;
                    case AccountType.LIMIT_OVERDRAFT:
                        values.put(DatabaseManager.ACCOUNT_LIMIT, editTextOverdraftLimit.getText().toString().trim());
                        break;
                    default:
                        break;
                }
                try {
                    cr.insert(DatabaseManager.ACCOUNT_ACC_URI, values);
                    setResult(Activity.RESULT_OK);
                } catch ( SQLException e ) {
                    Logger.d("Unable to add " + values.toString());
                    setResult(Activity.RESULT_CANCELED);
                }
                finish();
            }
        });
    }

    /**
     * Setup the cancel button in this activity.
     * NOTE(review): truncated here — the rest of this method is outside the
     * visible range.
     */
    private void setupCancelButton() {
        buttonCancel.setOnClickListener( new
OnClickListener() { @Override public void onClick(View v) { setResult(Activity.RESULT_CANCELED); finish(); } }); } /** * Setup the date button in this activity */ private void setupDateButton() { OnClickListener dateListener = new OnClickListener() { @Override public void onClick(View v) { DialogFragment newFragment = new SelectDateFragment(); Bundle b = new Bundle(); b.putInt(SelectDateFragment.TITLE, R.string.addaccount_date); b.putSerializable(SelectDateFragment.DATE, getAccountDate()); newFragment.setArguments(b); newFragment.show(getFragmentManager(), "datePicker"); } }; RelativeLayout layout = (RelativeLayout)findViewById(R.id.addAccount_layoutDate); layout.setOnClickListener( dateListener ); ImageButton buttonDate = (ImageButton)this.findViewById(R.id.addAccount_buttonDate); buttonDate.setOnClickListener( dateListener ); textViewDate.setOnClickListener( dateListener ); } /** * Verify if all the required data has been entered * @return <code>true</code> is all the required data has been entered, <code>false</code> otherwise. 
*/ private boolean allRequiredDataEntered() { boolean allOK = false; if ( (editTextName.getText().length() > 0) && (editTextNickname.getText().length() > 0) && (editTextInitialBal.getText().length() > 0) ) { switch ( accType.getLimit() ) { case AccountType.LIMIT_CREDIT: if ( editTextCreditLimit.getText().length() > 0 ) allOK = true; break; case AccountType.LIMIT_OVERDRAFT: if ( editTextOverdraftLimit.getText().length() > 0 ) allOK = true; break; default: allOK = true; break; } } return allOK; } /** * Enable/disable optional fields based on selections made */ private void enableOptionalFields() { int creditVisibility; int overdraftVisibility; switch ( accType.getLimit() ) { case AccountType.LIMIT_CREDIT: creditVisibility = View.VISIBLE; overdraftVisibility = View.GONE; break; case AccountType.LIMIT_OVERDRAFT: creditVisibility = View.GONE; overdraftVisibility = View.VISIBLE; break; default: creditVisibility = View.GONE; overdraftVisibility = View.GONE; break; } layoutCreditLimit.setVisibility(creditVisibility); layoutOverdraftLimit.setVisibility(overdraftVisibility); // textViewCreditLimit.setVisibility(creditVisibility); // editTextCreditLimit.setVisibility(creditVisibility); // textViewOverdraftLimit.setVisibility(overdraftVisibility); // editTextOverdraftLimit.setVisibility(overdraftVisibility); // update save button state buttonSave.setEnabled( allRequiredDataEntered() ); } /** * Compare AccountCurrency objects based on follow order of precedence:<br> * <ol> * <li>default currency for current locale</li> * <li>code alphabetic order</li> * </ol> * @author Ian Buttimer * */ private class CompareAccountCurrency implements Comparator<AccountCurrency> { // TODO add default currency to settings private String defaultCode = Currency.getInstance(Locale.getDefault()).getCurrencyCode(); @Override public int compare(AccountCurrency lhs, AccountCurrency rhs) { if ( lhs.equals(rhs) ) return 0; // equal if ( defaultCode.compareToIgnoreCase(lhs.getCode()) == 0 ) return (-1); // 
lhs first as its the default currency if ( defaultCode.compareToIgnoreCase(rhs.getCode()) == 0 ) return (1); // rhs first as its the default currency // compare alphabetically return lhs.getCode().compareToIgnoreCase(rhs.getCode()); } }; /* (non-Javadoc) * @see android.app.Activity#onCreateOptionsMenu(android.view.Menu) */ @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.add_account, menu); return true; } /* (non-Javadoc) * @see ie.ibuttimer.pmat.BaseActivity#onOptionsItemSelected(android.view.MenuItem) */ @Override public boolean onOptionsItemSelected(MenuItem item) { return super.onOptionsItemSelected(item); } /** * Get the open date of this account * @return */ public GregorianCalendar getAccountDate() { GregorianCalendar date; if ( accountDate == null ) { date = (GregorianCalendar) Calendar.getInstance(); // Use the current time as the default date.set(Calendar.HOUR_OF_DAY, 0); date.set(Calendar.MINUTE, 0); date.set(Calendar.SECOND, 0); } else date = (GregorianCalendar) accountDate.clone(); return date; } @Override public void onDateSet(DatePicker view, int year, int monthOfYear, int dayOfMonth) { if ( accountDate == null ) accountDate = new GregorianCalendar(year, monthOfYear, dayOfMonth); else accountDate.set(year, monthOfYear, dayOfMonth); // update date display with selected date DateTimeFormat df = new DateTimeFormat(this); Date date = accountDate.getTime(); textViewDate.setText(df.formatMediumDate(date)); } }
// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.api.ads.admanager.jaxws.v202202; import java.util.ArrayList; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlSeeAlso; import javax.xml.bind.annotation.XmlType; /** * * The API error base class that provides details about an error that occurred * while processing a service request. * * <p>The OGNL field path is provided for parsers to identify the request data * element that may have caused the error.</p> * * * <p>Java class for ApiError complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
 * 
 * <pre>
 * &lt;complexType name="ApiError">
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;sequence>
 *         &lt;element name="fieldPath" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
 *         &lt;element name="fieldPathElements" type="{https://www.google.com/apis/ads/publisher/v202202}FieldPathElement" maxOccurs="unbounded" minOccurs="0"/>
 *         &lt;element name="trigger" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
 *         &lt;element name="errorString" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
 *       &lt;/sequence>
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 * 
 * 
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ApiError", propOrder = {
    "fieldPath",
    "fieldPathElements",
    "trigger",
    "errorString"
})
@XmlSeeAlso({
    AdBreakMarkupError.class,
    AdRuleTargetingError.class,
    PackageActionError.class,
    TemplateInstantiatedCreativeError.class,
    PublisherQueryLanguageContextError.class,
    DistinctError.class,
    CustomFieldError.class,
    AdSenseAccountError.class,
    CreativeTemplateError.class,
    ActivityError.class,
    CurrencyCodeError.class,
    ForecastError.class,
    DateTimeRangeTargetingError.class,
    GenericTargetingError.class,
    ServerError.class,
    DaiEncodingProfileNameError.class,
    PrecisionError.class,
    NativeStyleError.class,
    AdRuleFrequencyCapError.class,
    SiteError.class,
    UserDomainTargetingError.class,
    CustomCreativeError.class,
    RequiredSizeError.class,
    RequiredCollectionError.class,
    ProposalLineItemActionError.class,
    LiveStreamEventCustomAssetKeyError.class,
    CompanyCreditStatusError.class,
    TrafficForecastSegmentError.class,
    ContactError.class,
    ContentFilterError.class,
    RequestPlatformTargetingError.class,
    QuotaError.class,
    CreativeWrapperError.class,
    IdError.class,
    ClickTrackingLineItemError.class,
    ParseError.class,
    MobileApplicationActionError.class,
    SetTopBoxCreativeError.class,
    DateError.class,
    AdRuleDateError.class,
    TeamError.class,
    LiveStreamEventActionError.class,
    SamSessionError.class,
    ProgrammaticError.class,
    CompanyError.class,
    TypeError.class,
    InvalidPhoneNumberError.class,
    MobileApplicationTargetingError.class,
    TimeZoneError.class,
    AdRuleSlotError.class,
    FrequencyCapError.class,
    DaiEncodingProfileUpdateError.class,
    DaiEncodingProfileVariantSettingsError.class,
    ProductError.class,
    CreativeAssetMacroError.class,
    RequestError.class,
    AdUnitHierarchyError.class,
    ImageError.class,
    StatementError.class,
    PoddingError.class,
    AuthenticationError.class,
    ReservationDetailsError.class,
    MobileApplicationError.class,
    InternalApiError.class,
    CreativeError.class,
    LineItemFlightDateError.class,
    PublisherQueryLanguageSyntaxError.class,
    CdnConfigurationError.class,
    HtmlBundleProcessorError.class,
    NotNullError.class,
    AdRulePriorityError.class,
    TechnologyTargetingError.class,
    ForecastAdjustmentError.class,
    DaiEncodingProfileContainerSettingsError.class,
    AudienceExtensionError.class,
    LiveStreamEventDvrWindowError.class,
    RangeError.class,
    MetadataMergeSpecError.class,
    BillingError.class,
    LineItemCreativeAssociationOperationError.class,
    LiveStreamEventCdnSettingsError.class,
    ExchangeRateError.class,
    LineItemCreativeAssociationError.class,
    PreferredDealError.class,
    DaiAuthenticationKeyActionError.class,
    PlacementError.class,
    ProposalActionError.class,
    SwiffyConversionError.class,
    LabelError.class,
    NullError.class,
    ApiVersionError.class,
    StringLengthError.class,
    CollectionSizeError.class,
    InvalidEmailError.class,
    FeatureError.class,
    GrpSettingsError.class,
    TokenError.class,
    RequiredError.class,
    OrderActionError.class,
    LiveStreamEventSlateError.class,
    OrderError.class,
    CrossSellError.class,
    InventoryTargetingError.class,
    DayPartTargetingError.class,
    LineItemOperationError.class,
    CustomTargetingError.class,
    DaiEncodingProfileAdMatchingError.class,
    AssetError.class,
    CreativeSetError.class,
    InvalidUrlError.class,
    DealError.class,
    EntityChildrenLimitReachedError.class,
    CustomFieldValueError.class,
    RichMediaStudioCreativeError.class,
    LineItemError.class,
    UrlError.class,
    InventoryUnitError.class,
    InvalidColorError.class,
    CreativePreviewError.class,
    InventoryUnitSizesError.class,
    StringFormatError.class,
    ProposalLineItemError.class,
    PackageError.class,
    CommonError.class,
    AudienceSegmentError.class,
    GeoTargetingError.class,
    FileError.class,
    ReportError.class,
    LiveStreamEventDateTimeError.class,
    ProposalLineItemProgrammaticError.class,
    AdUnitCodeError.class,
    SetTopBoxLineItemError.class,
    EntityLimitReachedError.class,
    RequiredNumberError.class,
    ProposalError.class,
    LineItemActivityAssociationError.class,
    RegExError.class,
    UniqueError.class,
    PermissionError.class,
    ProposalLineItemMakegoodError.class,
    VideoPositionTargetingError.class,
    AdRuleError.class,
    CreativeTemplateOperationError.class,
    YieldError.class,
    NetworkError.class,
    InventoryUnitRefreshRateError.class,
    LabelEntityAssociationError.class,
    VideoAdTagError.class
})
public abstract class ApiError {

    // NOTE(review): this looks like JAXB tooling-generated code (wsimport-style
    // accessors and schema javadoc) — presumably it should be regenerated from
    // the WSDL rather than hand-edited; confirm before modifying.

    // OGNL path of the request field that caused the error (may be null).
    protected String fieldPath;
    // Parsed form of fieldPath; lazily initialized by getFieldPathElements().
    protected List<FieldPathElement> fieldPathElements;
    // The data that caused the error (may be null).
    protected String trigger;
    // Human-readable description of the error (may be null).
    protected String errorString;

    /**
     * Gets the value of the fieldPath property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getFieldPath() {
        return fieldPath;
    }

    /**
     * Sets the value of the fieldPath property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setFieldPath(String value) {
        this.fieldPath = value;
    }

    /**
     * Gets the value of the fieldPathElements property.
     * 
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the fieldPathElements property.
     * 
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getFieldPathElements().add(newItem);
     * </pre>
     * 
     * 
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link FieldPathElement }
     * 
     * 
     */
    public List<FieldPathElement> getFieldPathElements() {
        if (fieldPathElements == null) {
            fieldPathElements = new ArrayList<FieldPathElement>();
        }
        return this.fieldPathElements;
    }

    /**
     * Gets the value of the trigger property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getTrigger() {
        return trigger;
    }

    /**
     * Sets the value of the trigger property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setTrigger(String value) {
        this.trigger = value;
    }

    /**
     * Gets the value of the errorString property.
     * 
     * @return
     *     possible object is
     *     {@link String }
     *     
     */
    public String getErrorString() {
        return errorString;
    }

    /**
     * Sets the value of the errorString property.
     * 
     * @param value
     *     allowed object is
     *     {@link String }
     *     
     */
    public void setErrorString(String value) {
        this.errorString = value;
    }

}
package ca.uhn.fhir.rest.server.provider; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor; import ca.uhn.fhir.interceptor.api.Pointcut; import ca.uhn.fhir.rest.client.api.IGenericClient; import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor; import ca.uhn.fhir.rest.gclient.IDeleteTyped; import ca.uhn.fhir.rest.server.IResourceProvider; import ca.uhn.fhir.rest.server.RestfulServer; import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.test.utilities.JettyUtil; import ca.uhn.fhir.util.TestUtil; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.servlet.ServletHolder; import org.hl7.fhir.instance.model.api.IAnyResource; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.Bundle; import org.hl7.fhir.r4.model.Observation; import org.hl7.fhir.r4.model.Patient; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.servlet.ServletException; import java.util.List; import java.util.stream.Collectors; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.matchesPattern; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.verify; @ExtendWith(MockitoExtension.class) public class 
HashMapResourceProviderTest {

	private static final Logger ourLog = LoggerFactory.getLogger(HashMapResourceProviderTest.class);

	// Shared server/client fixtures, created once in startListenerServer().
	private static MyRestfulServer ourRestServer;
	private static Server ourListenerServer;
	private static IGenericClient ourClient;
	private static FhirContext ourCtx = FhirContext.forR4();
	private static HashMapResourceProvider<Patient> myPatientResourceProvider;
	private static HashMapResourceProvider<Observation> myObservationResourceProvider;

	@Mock
	private IAnonymousInterceptor myAnonymousInterceptor;

	/**
	 * Reset stored resources and per-method invocation counters so each test
	 * starts from an empty provider.
	 */
	@BeforeEach
	public void before() {
		ourRestServer.clearData();
		myPatientResourceProvider.clearCounts();
		myObservationResourceProvider.clearCounts();
	}

	/**
	 * Create a resource with a server-assigned ID, verify the storage pointcuts
	 * fire, then read it back.
	 */
	@Test
	public void testCreateAndRead() {
		ourRestServer.getInterceptorService().registerAnonymousInterceptor(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED, myAnonymousInterceptor);
		ourRestServer.getInterceptorService().registerAnonymousInterceptor(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED, myAnonymousInterceptor);

		// Create
		Patient p = new Patient();
		p.setActive(true);
		IIdType id = ourClient.create().resource(p).execute().getId();
		assertThat(id.getIdPart(), matchesPattern("[0-9]+"));
		assertEquals("1", id.getVersionIdPart());
		verify(myAnonymousInterceptor, Mockito.times(1)).invoke(eq(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED), any());
		verify(myAnonymousInterceptor, Mockito.times(1)).invoke(eq(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED), any());

		// Read
		p = (Patient) ourClient.read().resource("Patient").withId(id).execute();
		assertEquals(true, p.getActive());
		assertEquals(1, myPatientResourceProvider.getCountRead());
	}

	/**
	 * Creating via update with a client-assigned ID keeps that ID and starts
	 * at version 1.
	 */
	@Test
	public void testCreateWithClientAssignedIdAndRead() {
		// Create
		Patient p = new Patient();
		p.setId("ABC");
		p.setActive(true);
		IIdType id = ourClient.update().resource(p).execute().getId();
		assertEquals("ABC", id.getIdPart());
		assertEquals("1", id.getVersionIdPart());

		// Read
		p = (Patient) ourClient.read().resource("Patient").withId(id).execute();
		assertEquals(true, p.getActive());
	}

	/**
	 * Deleting a resource bumps the delete counter; the prior version stays
	 * readable while the deleted (next) version reports gone.
	 */
	@Test
	public void testDelete() {
		// Create
		Patient p = new Patient();
		p.setActive(true);
		IIdType id = ourClient.create().resource(p).execute().getId();
		assertThat(id.getIdPart(), matchesPattern("[0-9]+"));
		assertEquals("1", id.getVersionIdPart());

		assertEquals(0, myPatientResourceProvider.getCountDelete());

		IDeleteTyped iDeleteTyped = ourClient.delete().resourceById(id.toUnqualifiedVersionless());
		ourLog.info("About to execute");
		try {
			iDeleteTyped.execute();
		} catch (NullPointerException e) {
			ourLog.error("NPE", e);
			fail(e.toString());
		}

		assertEquals(1, myPatientResourceProvider.getCountDelete());

		// Read: version 1 still resolves, the post-delete version is gone
		ourClient.read().resource("Patient").withId(id.withVersion("1")).execute();
		try {
			ourClient.read().resource("Patient").withId(id.withVersion("2")).execute();
			fail();
		} catch (ResourceGoneException e) {
			// good
		}
	}

	/**
	 * Instance-level history returns the updated resource's versions, newest
	 * first.
	 */
	@Test
	public void testHistoryInstance() {
		// Create Res 1
		Patient p = new Patient();
		p.setActive(true);
		IIdType id1 = ourClient.create().resource(p).execute().getId();
		assertThat(id1.getIdPart(), matchesPattern("[0-9]+"));
		assertEquals("1", id1.getVersionIdPart());

		// Create Res 2
		p = new Patient();
		p.setActive(true);
		IIdType id2 = ourClient.create().resource(p).execute().getId();
		assertThat(id2.getIdPart(), matchesPattern("[0-9]+"));
		assertEquals("1", id2.getVersionIdPart());

		// Update Res 2
		p = new Patient();
		p.setId(id2);
		p.setActive(false);
		id2 = ourClient.update().resource(p).execute().getId();
		assertThat(id2.getIdPart(), matchesPattern("[0-9]+"));
		assertEquals("2", id2.getVersionIdPart());

		Bundle history = ourClient
			.history()
			.onInstance(id2.toUnqualifiedVersionless())
			.andReturnBundle(Bundle.class)
			.encodedJson()
			.prettyPrint()
			.execute();
		ourLog.debug(ourCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(history));
		List<String> ids = history
			.getEntry()
			.stream()
			.map(t -> t.getResource().getIdElement().toUnqualified().getValue())
			.collect(Collectors.toList());
		assertThat(ids, contains(
			id2.toUnqualified().withVersion("2").getValue(),
			id2.toUnqualified().withVersion("1").getValue()
		));
	}

	/**
	 * Type-level history returns all versions of all Patients, newest first.
	 */
	@Test
	public void testHistoryType() {
		// Create Res 1
		Patient p = new Patient();
		p.setActive(true);
		IIdType id1 = ourClient.create().resource(p).execute().getId();
		assertThat(id1.getIdPart(), matchesPattern("[0-9]+"));
		assertEquals("1", id1.getVersionIdPart());

		// Create Res 2
		p = new Patient();
		p.setActive(true);
		IIdType id2 = ourClient.create().resource(p).execute().getId();
		assertThat(id2.getIdPart(), matchesPattern("[0-9]+"));
		assertEquals("1", id2.getVersionIdPart());

		// Update Res 2
		p = new Patient();
		p.setId(id2);
		p.setActive(false);
		id2 = ourClient.update().resource(p).execute().getId();
		assertThat(id2.getIdPart(), matchesPattern("[0-9]+"));
		assertEquals("2", id2.getVersionIdPart());

		Bundle history = ourClient
			.history()
			.onType(Patient.class)
			.andReturnBundle(Bundle.class)
			.execute();
		List<String> ids = history
			.getEntry()
			.stream()
			.map(t -> t.getResource().getIdElement().toUnqualified().getValue())
			.collect(Collectors.toList());
		ourLog.info("Received IDs: {}", ids);
		assertThat(ids, contains(
			id2.toUnqualified().withVersion("2").getValue(),
			id2.toUnqualified().withVersion("1").getValue(),
			id1.toUnqualified().withVersion("1").getValue()
		));
	}

	/**
	 * An unfiltered search returns every stored Patient and counts exactly one
	 * search invocation.
	 */
	@Test
	public void testSearchAll() {
		// Create
		for (int i = 0; i < 100; i++) {
			Patient p = new Patient();
			p.addName().setFamily("FAM" + i);
			// NOTE(review): this registers a new LoggingInterceptor on every loop
			// iteration (100 total) — presumably unintentional; confirm whether it
			// should be hoisted out of the loop.
			ourClient.registerInterceptor(new LoggingInterceptor(true));
			IIdType id = ourClient.create().resource(p).execute().getId();
			assertThat(id.getIdPart(), matchesPattern("[0-9]+"));
			assertEquals("1", id.getVersionIdPart());
		}

		// Search
		Bundle resp = ourClient
			.search()
			.forResource("Patient")
			.returnBundle(Bundle.class)
			.execute();
		assertEquals(100, resp.getTotal());
		assertEquals(100, resp.getEntry().size());

		assertEquals(1, myPatientResourceProvider.getCountSearch());
	}

	/**
	 * Searching by _id: repeated identical clauses are ANDed without changing
	 * the result, and differing clauses intersect.
	 */
	@Test
	public void testSearchById() {
		// Create
		for (int i = 0; i < 100; i++) {
			Patient p = new Patient();
			p.addName().setFamily("FAM" + i);
			IIdType id = ourClient.create().resource(p).execute().getId();
			assertThat(id.getIdPart(), matchesPattern("[0-9]+"));
			assertEquals("1", id.getVersionIdPart());
		}

		// Search
		Bundle resp = ourClient
			.search()
			.forResource("Patient")
			.where(IAnyResource.RES_ID.exactly().codes("2", "3"))
			.returnBundle(Bundle.class).execute();
		assertEquals(2, resp.getTotal());
		assertEquals(2, resp.getEntry().size());
		List<String> respIds = resp.getEntry().stream().map(t -> t.getResource().getIdElement().toUnqualifiedVersionless().getValue()).collect(Collectors.toList());
		assertThat(respIds, containsInAnyOrder("Patient/2", "Patient/3"));

		// Search with the same clause twice: results unchanged
		resp = ourClient
			.search()
			.forResource("Patient")
			.where(IAnyResource.RES_ID.exactly().codes("2", "3"))
			.where(IAnyResource.RES_ID.exactly().codes("2", "3"))
			.returnBundle(Bundle.class).execute();
		assertEquals(2, resp.getTotal());
		assertEquals(2, resp.getEntry().size());
		respIds = resp.getEntry().stream().map(t -> t.getResource().getIdElement().toUnqualifiedVersionless().getValue()).collect(Collectors.toList());
		assertThat(respIds, containsInAnyOrder("Patient/2", "Patient/3"));

		// Differing clauses intersect: {2,3} AND {4,3} -> {3}
		resp = ourClient
			.search()
			.forResource("Patient")
			.where(IAnyResource.RES_ID.exactly().codes("2", "3"))
			.where(IAnyResource.RES_ID.exactly().codes("4", "3"))
			.returnBundle(Bundle.class).execute();
		respIds = resp.getEntry().stream().map(t -> t.getResource().getIdElement().toUnqualifiedVersionless().getValue()).collect(Collectors.toList());
		assertThat(respIds, containsInAnyOrder("Patient/3"));
		assertEquals(1, resp.getTotal());
		assertEquals(1, resp.getEntry().size());
	}

	/**
	 * Updating bumps the version, fires the update pointcuts, and keeps prior
	 * versions readable; a nonexistent version reports not-found.
	 */
	@Test
	public void testUpdate() {
		// Create
		Patient p = new Patient();
		p.setActive(true);
		IIdType id = ourClient.create().resource(p).execute().getId();
		assertThat(id.getIdPart(), matchesPattern("[0-9]+"));
		assertEquals("1", id.getVersionIdPart());

		// Update
		ourRestServer.getInterceptorService().registerAnonymousInterceptor(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED, myAnonymousInterceptor);
		ourRestServer.getInterceptorService().registerAnonymousInterceptor(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, myAnonymousInterceptor);
		p = new Patient();
		p.setId(id);
		p.setActive(false);
		id = ourClient.update().resource(p).execute().getId();
		assertThat(id.getIdPart(), matchesPattern("[0-9]+"));
		assertEquals("2", id.getVersionIdPart());
		verify(myAnonymousInterceptor, Mockito.times(1)).invoke(eq(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED), any());
		verify(myAnonymousInterceptor, Mockito.times(1)).invoke(eq(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED), any());

		assertEquals(1, myPatientResourceProvider.getCountCreate());
		assertEquals(1, myPatientResourceProvider.getCountUpdate());

		// Read back both versions
		p = (Patient) ourClient.read().resource("Patient").withId(id.withVersion("1")).execute();
		assertEquals(true, p.getActive());
		p = (Patient) ourClient.read().resource("Patient").withId(id.withVersion("2")).execute();
		assertEquals(false, p.getActive());
		try {
			ourClient.read().resource("Patient").withId(id.withVersion("3")).execute();
			fail();
		} catch (ResourceNotFoundException e) {
			// good
		}
	}

	/**
	 * Test server that registers one HashMapResourceProvider each for Patient
	 * and Observation, with a hook to wipe their stored data between tests.
	 */
	private static class MyRestfulServer extends RestfulServer {

		MyRestfulServer() {
			super(ourCtx);
		}

		// Remove all stored resources from every HashMapResourceProvider.
		void clearData() {
			for (IResourceProvider next : getResourceProviders()) {
				if (next instanceof HashMapResourceProvider) {
					((HashMapResourceProvider) next).clear();
				}
			}
		}

		@Override
		protected void initialize() throws ServletException {
			super.initialize();

			myPatientResourceProvider = new HashMapResourceProvider<>(ourCtx, Patient.class);
			myObservationResourceProvider = new HashMapResourceProvider<>(ourCtx, Observation.class);
			registerProvider(myPatientResourceProvider);
			registerProvider(myObservationResourceProvider);
		}
	}

	/** Shut down Jetty and clear static state after the whole class has run. */
	@AfterAll
	public static void afterClassClearContext() throws Exception {
		JettyUtil.closeServer(ourListenerServer);
		TestUtil.clearAllStaticFieldsForUnitTest();
	}

	/**
	 * Start an embedded Jetty server on an ephemeral port hosting the restful
	 * server servlet, and build a generic client pointed at it.
	 */
	@BeforeAll
	public static void startListenerServer() throws Exception {
		ourRestServer = new MyRestfulServer();

		ourListenerServer = new Server(0);

		ServletContextHandler proxyHandler = new ServletContextHandler();
		proxyHandler.setContextPath("/");

		ServletHolder servletHolder = new ServletHolder();
		servletHolder.setServlet(ourRestServer);
		proxyHandler.addServlet(servletHolder, "/*");

		ourListenerServer.setHandler(proxyHandler);
		JettyUtil.startServer(ourListenerServer);
		int ourListenerPort = JettyUtil.getPortForStartedServer(ourListenerServer);
		String ourBase = "http://localhost:" + ourListenerPort + "/";
		ourCtx.getRestfulClientFactory().setSocketTimeout(120000);
		ourClient = ourCtx.newRestfulGenericClient(ourBase);
	}

}
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package com.ayax.website.persistencia.controladores;

import com.ayax.website.persistencia.controladores.exceptions.IllegalOrphanException;
import com.ayax.website.persistencia.controladores.exceptions.NonexistentEntityException;
import com.ayax.website.persistencia.controladores.exceptions.PreexistingEntityException;
import com.ayax.website.persistencia.entidades.FacturaOferta;
import java.io.Serializable;
import javax.persistence.Query;
import javax.persistence.EntityNotFoundException;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;
import com.ayax.website.persistencia.entidades.Oferta;
import java.util.ArrayList;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;

/**
 * Generated JPA CRUD controller for the {@link FacturaOferta} entity.
 * <p>
 * {@code FacturaOferta} appears to be in a one-to-one relationship with
 * {@link Oferta} (each method maintains both sides of the link and the
 * orphan checks reject an {@code Oferta} that already owns a
 * {@code FacturaOferta}) — NOTE(review): inferred from the visible
 * getter/setter calls; confirm against the entity mappings.
 * <p>
 * Each operation opens a fresh {@link EntityManager}, runs inside its own
 * resource-local transaction, and closes the manager in a {@code finally}
 * block. Instances are therefore safe to share only as far as the
 * underlying {@link EntityManagerFactory} is.
 *
 * @author hmcarvajal@ayax.co
 */
public class FacturaOfertaJpaController implements Serializable {

    /**
     * @param emf factory used to open a new EntityManager per operation
     */
    public FacturaOfertaJpaController(EntityManagerFactory emf) {
        this.emf = emf;
    }
    // Factory injected at construction time; never closed by this class.
    private EntityManagerFactory emf = null;

    /** Opens a brand-new EntityManager; the caller is responsible for closing it. */
    public EntityManager getEntityManager() {
        return emf.createEntityManager();
    }

    /**
     * Persists a new FacturaOferta and wires the inverse side of its Oferta link.
     *
     * @param facturaOferta the transient entity to persist
     * @throws IllegalOrphanException   if the referenced Oferta already owns a
     *                                  different FacturaOferta (would orphan it)
     * @throws PreexistingEntityException if an entity with the same id already exists
     * @throws Exception                any other persistence failure, rethrown as-is
     */
    public void create(FacturaOferta facturaOferta) throws IllegalOrphanException, PreexistingEntityException, Exception {
        // Orphan check: the target Oferta must not already own a FacturaOferta,
        // because the oferta column on the old row cannot be set to null.
        List<String> illegalOrphanMessages = null;
        Oferta ofertaOrphanCheck = facturaOferta.getOferta();
        if (ofertaOrphanCheck != null) {
            FacturaOferta oldFacturaOfertaOfOferta = ofertaOrphanCheck.getFacturaOferta();
            if (oldFacturaOfertaOfOferta != null) {
                if (illegalOrphanMessages == null) {
                    illegalOrphanMessages = new ArrayList<String>();
                }
                illegalOrphanMessages.add("The Oferta " + ofertaOrphanCheck + " already has an item of type FacturaOferta whose oferta column cannot be null. Please make another selection for the oferta field.");
            }
        }
        if (illegalOrphanMessages != null) {
            throw new IllegalOrphanException(illegalOrphanMessages);
        }
        EntityManager em = null;
        try {
            em = getEntityManager();
            em.getTransaction().begin();
            Oferta oferta = facturaOferta.getOferta();
            if (oferta != null) {
                // Re-resolve the Oferta as a managed reference so the link
                // points at an attached instance, not a detached one.
                oferta = em.getReference(oferta.getClass(), oferta.getId());
                facturaOferta.setOferta(oferta);
            }
            em.persist(facturaOferta);
            if (oferta != null) {
                // Maintain the inverse side of the one-to-one relationship.
                oferta.setFacturaOferta(facturaOferta);
                oferta = em.merge(oferta);
            }
            em.getTransaction().commit();
        } catch (Exception ex) {
            // If a row with this id already exists, surface that specifically;
            // otherwise propagate the original failure.
            if (findFacturaOferta(facturaOferta.getIdOferta()) != null) {
                throw new PreexistingEntityException("FacturaOferta " + facturaOferta + " already exists.", ex);
            }
            throw ex;
        } finally {
            if (em != null) {
                em.close();
            }
        }
    }

    /**
     * Merges changes to an existing FacturaOferta, re-pointing the Oferta
     * relationship on both sides when it changed.
     *
     * @param facturaOferta detached entity carrying the new state
     * @throws IllegalOrphanException     if the new Oferta already owns another FacturaOferta
     * @throws NonexistentEntityException if the entity no longer exists
     * @throws Exception                  any other persistence failure, rethrown as-is
     */
    public void edit(FacturaOferta facturaOferta) throws IllegalOrphanException, NonexistentEntityException, Exception {
        EntityManager em = null;
        try {
            em = getEntityManager();
            em.getTransaction().begin();
            // Load the current persistent state to discover the old Oferta link.
            // NOTE(review): if the id is stale this find() returns null and the
            // next line throws NPE, which the catch below converts to
            // NonexistentEntityException only when the NPE carries no message.
            FacturaOferta persistentFacturaOferta = em.find(FacturaOferta.class, facturaOferta.getIdOferta());
            Oferta ofertaOld = persistentFacturaOferta.getOferta();
            Oferta ofertaNew = facturaOferta.getOferta();
            List<String> illegalOrphanMessages = null;
            // Orphan check only matters when the relationship is being re-pointed.
            if (ofertaNew != null && !ofertaNew.equals(ofertaOld)) {
                FacturaOferta oldFacturaOfertaOfOferta = ofertaNew.getFacturaOferta();
                if (oldFacturaOfertaOfOferta != null) {
                    if (illegalOrphanMessages == null) {
                        illegalOrphanMessages = new ArrayList<String>();
                    }
                    illegalOrphanMessages.add("The Oferta " + ofertaNew + " already has an item of type FacturaOferta whose oferta column cannot be null. Please make another selection for the oferta field.");
                }
            }
            if (illegalOrphanMessages != null) {
                throw new IllegalOrphanException(illegalOrphanMessages);
            }
            if (ofertaNew != null) {
                ofertaNew = em.getReference(ofertaNew.getClass(), ofertaNew.getId());
                facturaOferta.setOferta(ofertaNew);
            }
            facturaOferta = em.merge(facturaOferta);
            // Detach the old inverse side, then attach the new one.
            if (ofertaOld != null && !ofertaOld.equals(ofertaNew)) {
                ofertaOld.setFacturaOferta(null);
                ofertaOld = em.merge(ofertaOld);
            }
            if (ofertaNew != null && !ofertaNew.equals(ofertaOld)) {
                ofertaNew.setFacturaOferta(facturaOferta);
                ofertaNew = em.merge(ofertaNew);
            }
            em.getTransaction().commit();
        } catch (Exception ex) {
            String msg = ex.getLocalizedMessage();
            if (msg == null || msg.length() == 0) {
                // A message-less exception here is treated as "row vanished".
                String id = facturaOferta.getIdOferta();
                if (findFacturaOferta(id) == null) {
                    throw new NonexistentEntityException("The facturaOferta with id " + id + " no longer exists.");
                }
            }
            throw ex;
        } finally {
            if (em != null) {
                em.close();
            }
        }
    }

    /**
     * Deletes the FacturaOferta with the given id, clearing the inverse
     * Oferta link first so the relationship is not left dangling.
     *
     * @param id primary key of the row to remove
     * @throws NonexistentEntityException if no row with that id exists
     */
    public void destroy(String id) throws NonexistentEntityException {
        EntityManager em = null;
        try {
            em = getEntityManager();
            em.getTransaction().begin();
            FacturaOferta facturaOferta;
            try {
                facturaOferta = em.getReference(FacturaOferta.class, id);
                // Touch a field to force the lazy reference to resolve now,
                // so a missing row fails here rather than at remove().
                facturaOferta.getIdOferta();
            } catch (EntityNotFoundException enfe) {
                throw new NonexistentEntityException("The facturaOferta with id " + id + " no longer exists.", enfe);
            }
            Oferta oferta = facturaOferta.getOferta();
            if (oferta != null) {
                oferta.setFacturaOferta(null);
                oferta = em.merge(oferta);
            }
            em.remove(facturaOferta);
            em.getTransaction().commit();
        } finally {
            if (em != null) {
                em.close();
            }
        }
    }

    /** Returns every FacturaOferta row. */
    public List<FacturaOferta> findFacturaOfertaEntities() {
        return findFacturaOfertaEntities(true, -1, -1);
    }

    /**
     * Returns a page of FacturaOferta rows.
     *
     * @param maxResults  maximum rows to return
     * @param firstResult zero-based offset of the first row
     */
    public List<FacturaOferta> findFacturaOfertaEntities(int maxResults, int firstResult) {
        return findFacturaOfertaEntities(false, maxResults, firstResult);
    }

    // Shared implementation: when 'all' is true the paging arguments are ignored.
    private List<FacturaOferta> findFacturaOfertaEntities(boolean all, int maxResults, int firstResult) {
        EntityManager em = getEntityManager();
        try {
            CriteriaQuery cq = em.getCriteriaBuilder().createQuery();
            cq.select(cq.from(FacturaOferta.class));
            Query q = em.createQuery(cq);
            if (!all) {
                q.setMaxResults(maxResults);
                q.setFirstResult(firstResult);
            }
            return q.getResultList();
        } finally {
            em.close();
        }
    }

    /**
     * Looks up a single FacturaOferta by primary key.
     *
     * @param id primary key value
     * @return the entity, or {@code null} if not found
     */
    public FacturaOferta findFacturaOferta(String id) {
        EntityManager em = getEntityManager();
        try {
            return em.find(FacturaOferta.class, id);
        } finally {
            em.close();
        }
    }

    /** Returns the total number of FacturaOferta rows (COUNT query). */
    public int getFacturaOfertaCount() {
        EntityManager em = getEntityManager();
        try {
            CriteriaQuery cq = em.getCriteriaBuilder().createQuery();
            Root<FacturaOferta> rt = cq.from(FacturaOferta.class);
            cq.select(em.getCriteriaBuilder().count(rt));
            Query q = em.createQuery(cq);
            return ((Long) q.getSingleResult()).intValue();
        } finally {
            em.close();
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tajo.storage; import com.google.protobuf.Message; import org.apache.commons.codec.binary.Base64; import org.apache.tajo.catalog.Column; import org.apache.tajo.common.TajoDataTypes; import org.apache.tajo.datum.*; import org.apache.tajo.datum.protobuf.ProtobufJsonFormat; import org.apache.tajo.util.Bytes; import java.io.IOException; import java.io.OutputStream; //Compatibility with Apache Hive public class TextSerializerDeserializer implements SerializerDeserializer { public static final byte[] trueBytes = "true".getBytes(); public static final byte[] falseBytes = "false".getBytes(); private ProtobufJsonFormat protobufJsonFormat = ProtobufJsonFormat.getInstance(); @Override public int serialize(Column col, Datum datum, OutputStream out, byte[] nullCharacters) throws IOException { byte[] bytes; int length = 0; TajoDataTypes.DataType dataType = col.getDataType(); if (datum == null || datum instanceof NullDatum) { switch (dataType.getType()) { case CHAR: case TEXT: length = nullCharacters.length; out.write(nullCharacters); break; default: break; } return length; } switch (dataType.getType()) { case BOOLEAN: out.write(datum.asBool() ? 
trueBytes : falseBytes); length = trueBytes.length; break; case CHAR: byte[] pad = new byte[dataType.getLength() - datum.size()]; bytes = datum.asTextBytes(); out.write(bytes); out.write(pad); length = bytes.length + pad.length; break; case TEXT: case BIT: case INT2: case INT4: case INT8: case FLOAT4: case FLOAT8: case INET4: case DATE: case TIME: case TIMESTAMP: bytes = datum.asTextBytes(); length = bytes.length; out.write(bytes); break; case INET6: case BLOB: bytes = Base64.encodeBase64(datum.asByteArray(), false); length = bytes.length; out.write(bytes, 0, length); break; case PROTOBUF: ProtobufDatum protobuf = (ProtobufDatum) datum; byte[] protoBytes = protobufJsonFormat.printToString(protobuf.get()).getBytes(); length = protoBytes.length; out.write(protoBytes, 0, protoBytes.length); break; case NULL_TYPE: default: break; } return length; } @Override public Datum deserialize(Column col, byte[] bytes, int offset, int length, byte[] nullCharacters) throws IOException { Datum datum; switch (col.getDataType().getType()) { case BOOLEAN: datum = isNull(bytes, offset, length, nullCharacters) ? NullDatum.get() : DatumFactory.createBool(bytes[offset] == 't' || bytes[offset] == 'T'); break; case BIT: datum = isNull(bytes, offset, length, nullCharacters) ? NullDatum.get() : DatumFactory.createBit(Byte.parseByte(new String(bytes, offset, length))); break; case CHAR: datum = isNullText(bytes, offset, length, nullCharacters) ? NullDatum.get() : DatumFactory.createChar(new String(bytes, offset, length).trim()); break; case INT2: datum = isNull(bytes, offset, length, nullCharacters) ? NullDatum.get() : DatumFactory.createInt2((short) Bytes.parseInt(bytes, offset, length)); break; case INT4: datum = isNull(bytes, offset, length, nullCharacters) ? NullDatum.get() : DatumFactory.createInt4(Bytes.parseInt(bytes, offset, length)); break; case INT8: datum = isNull(bytes, offset, length, nullCharacters) ? 
NullDatum.get() : DatumFactory.createInt8(new String(bytes, offset, length)); break; case FLOAT4: datum = isNull(bytes, offset, length, nullCharacters) ? NullDatum.get() : DatumFactory.createFloat4(new String(bytes, offset, length)); break; case FLOAT8: datum = isNull(bytes, offset, length, nullCharacters) ? NullDatum.get() : DatumFactory.createFloat8(Bytes.parseDouble(bytes, offset, length)); break; case TEXT: { byte[] chars = new byte[length]; System.arraycopy(bytes, offset, chars, 0, length); datum = isNullText(bytes, offset, length, nullCharacters) ? NullDatum.get() : DatumFactory.createText(chars); break; } case DATE: datum = isNull(bytes, offset, length, nullCharacters) ? NullDatum.get() : DatumFactory.createDate(new String(bytes, offset, length)); break; case TIME: datum = isNull(bytes, offset, length, nullCharacters) ? NullDatum.get() : DatumFactory.createTime(new String(bytes, offset, length)); break; case TIMESTAMP: datum = isNull(bytes, offset, length, nullCharacters) ? NullDatum.get() : DatumFactory.createTimeStamp(new String(bytes, offset, length)); break; case PROTOBUF: { if (isNull(bytes, offset, length, nullCharacters)) { datum = NullDatum.get(); } else { ProtobufDatumFactory factory = ProtobufDatumFactory.get(col.getDataType()); Message.Builder builder = factory.newBuilder(); try { byte[] protoBytes = new byte[length]; System.arraycopy(bytes, offset, protoBytes, 0, length); protobufJsonFormat.merge(protoBytes, builder); datum = factory.createDatum(builder.build()); } catch (IOException e) { e.printStackTrace(); throw new RuntimeException(e); } } break; } case INET4: datum = isNull(bytes, offset, length, nullCharacters) ? 
NullDatum.get() : DatumFactory.createInet4(new String(bytes, offset, length)); break; case BLOB: { if (isNull(bytes, offset, length, nullCharacters)) { datum = NullDatum.get(); } else { byte[] blob = new byte[length]; System.arraycopy(bytes, offset, blob, 0, length); datum = DatumFactory.createBlob(Base64.decodeBase64(blob)); } break; } default: datum = NullDatum.get(); break; } return datum; } private static boolean isNull(byte[] val, int offset, int length, byte[] nullBytes) { return length == 0 || ((length == nullBytes.length) && Bytes.equals(val, offset, length, nullBytes, 0, nullBytes.length)); } private static boolean isNullText(byte[] val, int offset, int length, byte[] nullBytes) { return length > 0 && length == nullBytes.length && Bytes.equals(val, offset, length, nullBytes, 0, nullBytes.length); } }
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.hive;

import com.facebook.presto.hive.HiveSplitSourceProvider.HiveSplitSource;
import com.facebook.presto.hive.util.SuspendingExecutor;
import com.facebook.presto.spi.HostAddress;
import com.facebook.presto.spi.Split;
import com.google.common.util.concurrent.SettableFuture;
import org.testng.annotations.Test;

import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;

import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertSame;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;

/**
 * Unit tests for {@code HiveSplitSource}: split queue accounting,
 * backpressure via the {@link SuspendingExecutor} (suspend when full at
 * 10 splits, resume when drained below), failure propagation, and a
 * reader blocking until a split arrives.
 */
public class TestHiveSplitSource
{
    /** Verifies the outstanding-split counter tracks adds and batched removals. */
    @Test
    public void testOutstandingSplitCount()
            throws Exception
    {
        SuspendingExecutor suspendingExecutor = createSuspendingExecutor();
        HiveSplitSource hiveSplitSource = new HiveSplitSource("test", 10, suspendingExecutor);

        // add 10 splits
        for (int i = 0; i < 10; i++) {
            hiveSplitSource.addToQueue(new TestSplit(i));
            assertEquals(hiveSplitSource.getOutstandingSplitCount(), i + 1);
        }

        // remove 1 split
        assertEquals(hiveSplitSource.getNextBatch(1).size(), 1);
        assertEquals(hiveSplitSource.getOutstandingSplitCount(), 9);

        // remove 4 splits
        assertEquals(hiveSplitSource.getNextBatch(4).size(), 4);
        assertEquals(hiveSplitSource.getOutstandingSplitCount(), 5);

        // try to remove 20 splits, and verify we only got 5
        assertEquals(hiveSplitSource.getNextBatch(20).size(), 5);
        assertEquals(hiveSplitSource.getOutstandingSplitCount(), 0);
    }

    /**
     * Verifies backpressure: the executor suspends exactly when the queue
     * reaches its capacity of 10 and resumes once it drops below again.
     */
    @Test
    public void testSuspendResume()
            throws Exception
    {
        SuspendingExecutor suspendingExecutor = createSuspendingExecutor();
        HiveSplitSource hiveSplitSource = new HiveSplitSource("test", 10, suspendingExecutor);

        // almost fill the source
        for (int i = 0; i < 9; i++) {
            hiveSplitSource.addToQueue(new TestSplit(i));
            assertEquals(hiveSplitSource.getOutstandingSplitCount(), i + 1);
            assertFalse(suspendingExecutor.isSuspended());
        }

        // add one more split so the source is now full and verify that the executor is suspended
        hiveSplitSource.addToQueue(new TestSplit(10));
        assertEquals(hiveSplitSource.getOutstandingSplitCount(), 10);
        assertTrue(suspendingExecutor.isSuspended());

        // remove one split so the source is no longer full and verify the executor is resumed
        assertEquals(hiveSplitSource.getNextBatch(1).size(), 1);
        assertEquals(hiveSplitSource.getOutstandingSplitCount(), 9);
        assertFalse(suspendingExecutor.isSuspended());

        // add two more splits so the source is now full and verify that the executor is suspended
        hiveSplitSource.addToQueue(new TestSplit(11));
        hiveSplitSource.addToQueue(new TestSplit(12));
        assertEquals(hiveSplitSource.getOutstandingSplitCount(), 11);
        assertTrue(suspendingExecutor.isSuspended());

        // remove two splits so the source is no longer full and verify the executor is resumed
        assertEquals(hiveSplitSource.getNextBatch(2).size(), 2);
        assertEquals(hiveSplitSource.getOutstandingSplitCount(), 9);
        assertFalse(suspendingExecutor.isSuspended());
    }

    /**
     * Verifies failure semantics: after fail(), getNextBatch throws the
     * FIRST recorded failure, further adds are ignored, and a second
     * fail() does not replace the original exception.
     */
    @Test
    public void testFail()
            throws Exception
    {
        SuspendingExecutor suspendingExecutor = createSuspendingExecutor();
        HiveSplitSource hiveSplitSource = new HiveSplitSource("test", 10, suspendingExecutor);

        // add some splits
        for (int i = 0; i < 5; i++) {
            hiveSplitSource.addToQueue(new TestSplit(i));
            assertEquals(hiveSplitSource.getOutstandingSplitCount(), i + 1);
        }

        // remove a split and verify
        assertEquals(hiveSplitSource.getNextBatch(1).size(), 1);
        assertEquals(hiveSplitSource.getOutstandingSplitCount(), 4);

        // fail source
        hiveSplitSource.fail(new RuntimeException("test"));
        assertEquals(hiveSplitSource.getOutstandingSplitCount(), 4);

        // try to remove a split and verify we got the expected exception
        try {
            hiveSplitSource.getNextBatch(1);
            fail("expected RuntimeException");
        }
        catch (RuntimeException e) {
            assertEquals(e.getCause().getMessage(), "test");
        }
        assertEquals(hiveSplitSource.getOutstandingSplitCount(), 4);

        // attempt to add another split and verify it does not work
        hiveSplitSource.addToQueue(new TestSplit(99));
        assertEquals(hiveSplitSource.getOutstandingSplitCount(), 4);

        // fail source again
        hiveSplitSource.fail(new RuntimeException("another failure"));
        assertEquals(hiveSplitSource.getOutstandingSplitCount(), 4);

        // try to remove a split and verify we got the first exception
        try {
            hiveSplitSource.getNextBatch(1);
            fail("expected RuntimeException");
        }
        catch (RuntimeException e) {
            assertEquals(e.getCause().getMessage(), "test");
        }
    }

    /**
     * Verifies a reader blocked in getNextBatch wakes up when a split is
     * queued. Uses a latch to know the reader started and a short sleep to
     * observe that it is still blocked before the split is added.
     */
    @Test
    public void testReaderWaitsForSplits()
            throws Exception
    {
        SuspendingExecutor suspendingExecutor = createSuspendingExecutor();
        final HiveSplitSource hiveSplitSource = new HiveSplitSource("test", 10, suspendingExecutor);

        final SettableFuture<Split> splits = SettableFuture.create();

        // create a thread that will get a split
        final CountDownLatch started = new CountDownLatch(1);
        Thread getterThread = new Thread(new Runnable()
        {
            @Override
            public void run()
            {
                try {
                    started.countDown();
                    // blocks until a split becomes available
                    List<Split> batch = hiveSplitSource.getNextBatch(1);
                    assertEquals(batch.size(), 1);
                    splits.set(batch.get(0));
                }
                catch (Throwable e) {
                    // surface assertion failures/interrupts to the main thread
                    splits.setException(e);
                }
            }
        });
        getterThread.start();

        try {
            // wait for the thread to be started
            assertTrue(started.await(1, TimeUnit.SECONDS));

            // sleep for a bit, and assure the thread is blocked
            TimeUnit.MILLISECONDS.sleep(200);
            assertTrue(!splits.isDone());

            // add a split
            hiveSplitSource.addToQueue(new TestSplit(33));

            // wait for thread to get the split
            Split split = splits.get(200, TimeUnit.MILLISECONDS);
            // NOTE(review): assertSame on a boxed int relies on the Integer
            // cache (-128..127) for identity; assertEquals would be safer.
            assertSame(split.getInfo(), 33);
        }
        finally {
            // make sure the thread exits
            getterThread.interrupt();
        }
    }

    // Executor stub: tests only observe isSuspended(); nothing may actually run.
    private SuspendingExecutor createSuspendingExecutor()
    {
        return new SuspendingExecutor(new Executor()
        {
            @Override
            public void execute(Runnable command)
            {
                throw new UnsupportedOperationException();
            }
        });
    }

    // Minimal Split whose only usable member is getInfo() returning its id;
    // every other method throws, proving the source never touches them.
    private static class TestSplit
            implements Split
    {
        private final int id;

        private TestSplit(int id)
        {
            this.id = id;
        }

        @Override
        public boolean isRemotelyAccessible()
        {
            throw new UnsupportedOperationException();
        }

        @Override
        public List<HostAddress> getAddresses()
        {
            throw new UnsupportedOperationException();
        }

        @Override
        public Object getInfo()
        {
            return id;
        }
    }
}
package com.bob.utils; import java.lang.invoke.MethodHandles; import java.lang.reflect.AccessibleObject; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Member; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.lang.reflect.Proxy; import java.util.Arrays; import java.util.LinkedHashMap; import java.util.Map; /** * Created by wangxiang on 17/6/29. */ public class Reflect { // --------------------------------------------------------------------- // Static API used as entrance points to the fluent API // --------------------------------------------------------------------- /** * Wrap a class name. * <p> * This is the same as calling <code>on(Class.forName(name))</code> * * @param name A fully qualified class name * @return A wrapped class object, to be used for further reflection. * @throws ReflectException If any reflection exception occurred. * @see #on(Class) */ public static Reflect on(String name) throws ReflectException { return on(forName(name)); } /** * Wrap a class name, loading it via a given class loader. * <p> * This is the same as calling * <code>on(Class.forName(name, classLoader))</code> * * @param name A fully qualified class name. * @param classLoader The class loader in whose context the class should be * loaded. * @return A wrapped class object, to be used for further reflection. * @throws ReflectException If any reflection exception occurred. * @see #on(Class) */ public static Reflect on(String name, ClassLoader classLoader) throws ReflectException { return on(forName(name, classLoader)); } /** * Wrap a class. * <p> * Use this when you want to access static fields and methods on a * {@link Class} object, or as a basis for constructing objects of that * class using {@link #create(Object...)} * * @param clazz The class to be wrapped * @return A wrapped class object, to be used for further reflection. 
*/ public static Reflect on(Class<?> clazz) { return new Reflect(clazz); } /** * Wrap an object. * <p> * Use this when you want to access instance fields and methods on any * {@link Object} * * @param object The object to be wrapped * @return A wrapped object, to be used for further reflection. */ public static Reflect on(Object object) { return new Reflect(object == null ? Object.class : object.getClass(), object); } private static Reflect on(Class<?> type, Object object) { return new Reflect(type, object); } /** * Conveniently render an {@link AccessibleObject} accessible. * <p> * To prevent {@link SecurityException}, this is only done if the argument * object and its declaring class are non-public. * * @param accessible The object to render accessible * @return The argument object rendered accessible */ public static <T extends AccessibleObject> T accessible(T accessible) { if (accessible == null) { return null; } if (accessible instanceof Member) { Member member = (Member) accessible; if (Modifier.isPublic(member.getModifiers()) && Modifier.isPublic(member.getDeclaringClass().getModifiers())) { return accessible; } } // [jOOQ #3392] The accessible flag is set to false by default, also for public members. if (!accessible.isAccessible()) { accessible.setAccessible(true); } return accessible; } // --------------------------------------------------------------------- // Members // --------------------------------------------------------------------- /* [java-8] */ private static final Constructor<MethodHandles.Lookup> CACHED_LOOKUP_CONSTRUCTOR; static { try { CACHED_LOOKUP_CONSTRUCTOR = MethodHandles.Lookup.class.getDeclaredConstructor(Class.class); if (!CACHED_LOOKUP_CONSTRUCTOR.isAccessible()) CACHED_LOOKUP_CONSTRUCTOR.setAccessible(true); } catch (Exception e) { throw new IllegalStateException(e); } } /* [/java-8] */ /** * The type of the wrapped object. */ private final Class<?> type; /** * The wrapped object. 
*/ private final Object object; // --------------------------------------------------------------------- // Constructors // --------------------------------------------------------------------- private Reflect(Class<?> type) { this(type, type); } private Reflect(Class<?> type, Object object) { this.type = type; this.object = object; } // --------------------------------------------------------------------- // Fluent Reflection API // --------------------------------------------------------------------- /** * Get the wrapped object * * @param <T> A convenience generic parameter for automatic unsafe casting */ @SuppressWarnings("unchecked") public <T> T get() { return (T) object; } /** * Set a field value. * <p> * This is roughly equivalent to {@link Field#set(Object, Object)}. If the * wrapped object is a {@link Class}, then this will set a value to a static * member field. If the wrapped object is any other {@link Object}, then * this will set a value to an instance member field. * <p> * This method is also capable of setting the value of (static) final * fields. This may be convenient in situations where no * {@link SecurityManager} is expected to prevent this, but do note that * (especially static) final fields may already have been inlined by the * javac and/or JIT and relevant code deleted from the runtime verison of * your program, so setting these fields might not have any effect on your * execution. * <p> * For restrictions of usage regarding setting values on final fields check: * <a href= * "http://stackoverflow.com/questions/3301635/change-private-static-final-field-using-java-reflection">http://stackoverflow.com/questions/3301635/change-private-static-final-field-using-java-reflection</a> * ... 
and <a href= * "http://pveentjer.blogspot.co.at/2017/01/final-static-boolean-jit.html">http://pveentjer.blogspot.co.at/2017/01/final-static-boolean-jit.html</a> * * @param name The field name * @param value The new field value * @return The same wrapped object, to be used for further reflection. * @throws ReflectException If any reflection exception occurred. */ public Reflect set(String name, Object value) throws ReflectException { try { Field field = field0(name); if ((field.getModifiers() & Modifier.FINAL) == Modifier.FINAL) { Field modifiersField = Field.class.getDeclaredField("modifiers"); modifiersField.setAccessible(true); modifiersField.setInt(field, field.getModifiers() & ~Modifier.FINAL); } field.set(object, unwrap(value)); return this; } catch (Exception e) { throw new ReflectException(e); } } /** * Get a field value. * <p> * This is roughly equivalent to {@link Field#get(Object)}. If the wrapped * object is a {@link Class}, then this will get a value from a static * member field. If the wrapped object is any other {@link Object}, then * this will get a value from an instance member field. * <p> * If you want to "navigate" to a wrapped version of the field, use * {@link #field(String)} instead. * * @param name The field name * @return The field value * @throws ReflectException If any reflection exception occurred. * @see #field(String) */ public <T> T get(String name) throws ReflectException { return field(name).<T>get(); } /** * Get a wrapped field. * <p> * This is roughly equivalent to {@link Field#get(Object)}. If the wrapped * object is a {@link Class}, then this will wrap a static member field. If * the wrapped object is any other {@link Object}, then this wrap an * instance member field. * * @param name The field name * @return The wrapped field * @throws ReflectException If any reflection exception occurred. 
*/ public Reflect field(String name) throws ReflectException { try { Field field = field0(name); return on(field.getType(), field.get(object)); } catch (Exception e) { throw new ReflectException(e); } } private Field field0(String name) throws ReflectException { Class<?> t = type(); // Try getting a public field try { return accessible(t.getField(name)); } // Try again, getting a non-public field catch (NoSuchFieldException e) { do { try { return accessible(t.getDeclaredField(name)); } catch (NoSuchFieldException ignore) { } t = t.getSuperclass(); } while (t != null); throw new ReflectException(e); } } /** * Get a Map containing field names and wrapped values for the fields' * values. * <p> * If the wrapped object is a {@link Class}, then this will return static * fields. If the wrapped object is any other {@link Object}, then this will * return instance fields. * <p> * These two calls are equivalent <code><pre> * on(object).field("myField"); * on(object).fields().get("myField"); * </pre></code> * * @return A map containing field names and wrapped values. */ public Map<String, Reflect> fields() { Map<String, Reflect> result = new LinkedHashMap<String, Reflect>(); Class<?> t = type(); do { for (Field field : t.getDeclaredFields()) { if (type != object ^ Modifier.isStatic(field.getModifiers())) { String name = field.getName(); if (!result.containsKey(name)) result.put(name, field(name)); } } t = t.getSuperclass(); } while (t != null); return result; } /** * Call a method by its name. * <p> * This is a convenience method for calling * <code>call(name, new Object[0])</code> * * @param name The method name * @return The wrapped method result or the same wrapped object if the * method returns <code>void</code>, to be used for further * reflection. * @throws ReflectException If any reflection exception occurred. * @see #call(String, Object...) */ public Reflect call(String name) throws ReflectException { return call(name, new Object[0]); } /** * Call a method by its name. 
* <p> * This is roughly equivalent to {@link Method#invoke(Object, Object...)}. * If the wrapped object is a {@link Class}, then this will invoke a static * method. If the wrapped object is any other {@link Object}, then this will * invoke an instance method. * <p> * Just like {@link Method#invoke(Object, Object...)}, this will try to wrap * primitive types or unwrap primitive type wrappers if applicable. If * several methods are applicable, by that rule, the first one encountered * is called. i.e. when calling <code><pre> * on(...).call("method", 1, 1); * </pre></code> The first of the following methods will be called: * <code><pre> * public void method(int param1, Integer param2); * public void method(Integer param1, int param2); * public void method(Number param1, Number param2); * public void method(Number param1, Object param2); * public void method(int param1, Object param2); * </pre></code> * <p> * The best matching method is searched for with the following strategy: * <ol> * <li>public method with exact signature match in class hierarchy</li> * <li>non-public method with exact signature match on declaring class</li> * <li>public method with similar signature in class hierarchy</li> * <li>non-public method with similar signature on declaring class</li> * </ol> * * @param name The method name * @param args The method arguments * @return The wrapped method result or the same wrapped object if the * method returns <code>void</code>, to be used for further * reflection. * @throws ReflectException If any reflection exception occurred. */ public Reflect call(String name, Object... args) throws ReflectException { Class<?>[] types = types(args); // Try invoking the "canonical" method, i.e. 
the one with exact // matching argument types try { Method method = exactMethod(name, types); return on(method, object, args); } // If there is no exact match, try to find a method that has a "similar" // signature if primitive argument types are converted to their wrappers catch (NoSuchMethodException e) { try { Method method = similarMethod(name, types); return on(method, object, args); } catch (NoSuchMethodException e1) { throw new ReflectException(e1); } } } /** * Searches a method with the exact same signature as desired. * <p> * If a public method is found in the class hierarchy, this method is returned. * Otherwise a private method with the exact same signature is returned. * If no exact match could be found, we let the {@code NoSuchMethodException} pass through. */ private Method exactMethod(String name, Class<?>[] types) throws NoSuchMethodException { Class<?> t = type(); // first priority: find a public method with exact signature match in class hierarchy try { return t.getMethod(name, types); } // second priority: find a private method with exact signature match on declaring class catch (NoSuchMethodException e) { do { try { return t.getDeclaredMethod(name, types); } catch (NoSuchMethodException ignore) { } t = t.getSuperclass(); } while (t != null); throw new NoSuchMethodException(); } } /** * Searches a method with a similar signature as desired using * {@link #isSimilarSignature(java.lang.reflect.Method, String, Class[])}. * <p> * First public methods are searched in the class hierarchy, then private * methods on the declaring class. If a method could be found, it is * returned, otherwise a {@code NoSuchMethodException} is thrown. 
*/ private Method similarMethod(String name, Class<?>[] types) throws NoSuchMethodException { Class<?> t = type(); // first priority: find a public method with a "similar" signature in class hierarchy // similar interpreted in when primitive argument types are converted to their wrappers for (Method method : t.getMethods()) { if (isSimilarSignature(method, name, types)) { return method; } } // second priority: find a non-public method with a "similar" signature on declaring class do { for (Method method : t.getDeclaredMethods()) { if (isSimilarSignature(method, name, types)) { return method; } } t = t.getSuperclass(); } while (t != null); throw new NoSuchMethodException("No similar method " + name + " with params " + Arrays.toString(types) + " could be found on type " + type() + "."); } /** * Determines if a method has a "similar" signature, especially if wrapping * primitive argument types would result in an exactly matching signature. */ private boolean isSimilarSignature(Method possiblyMatchingMethod, String desiredMethodName, Class<?>[] desiredParamTypes) { return possiblyMatchingMethod.getName().equals(desiredMethodName) && match(possiblyMatchingMethod.getParameterTypes(), desiredParamTypes); } /** * Call a constructor. * <p> * This is a convenience method for calling * <code>create(new Object[0])</code> * * @return The wrapped new object, to be used for further reflection. * @throws ReflectException If any reflection exception occurred. * @see #create(Object...) */ public Reflect create() throws ReflectException { return create(new Object[0]); } /** * Call a constructor. * <p> * This is roughly equivalent to {@link Constructor#newInstance(Object...)}. * If the wrapped object is a {@link Class}, then this will create a new * object of that class. If the wrapped object is any other {@link Object}, * then this will create a new object of the same type. 
 * <p>
 * Just like {@link Constructor#newInstance(Object...)}, this will try to
 * wrap primitive types or unwrap primitive type wrappers if applicable. If
 * several constructors are applicable by that rule, the first one
 * encountered is called. i.e. when calling <code><pre>
 * on(C.class).create(1, 1);
 * </pre></code> The first of the following constructors will be applied:
 * <code><pre>
 * public C(int param1, Integer param2);
 * public C(Integer param1, int param2);
 * public C(Number param1, Number param2);
 * public C(Number param1, Object param2);
 * public C(int param1, Object param2);
 * </pre></code>
 *
 * @param args The constructor arguments
 * @return The wrapped new object, to be used for further reflection.
 * @throws ReflectException If any reflection exception occurred.
 */
public Reflect create(Object... args) throws ReflectException {
    Class<?>[] types = types(args);

    // Try invoking the "canonical" constructor, i.e. the one with exactly
    // matching argument types.
    try {
        Constructor<?> constructor = type().getDeclaredConstructor(types);
        return on(constructor, args);
    }

    // If there is no exact match, try to find a constructor with a "similar"
    // signature, where primitive argument types are converted to their
    // wrappers (see match()). The first match in declaration order wins.
    catch (NoSuchMethodException e) {
        for (Constructor<?> constructor : type().getDeclaredConstructors()) {
            if (match(constructor.getParameterTypes(), types)) {
                return on(constructor, args);
            }
        }

        throw new ReflectException(e);
    }
}

/**
 * Create a proxy for the wrapped object allowing to typesafely invoke
 * methods on it using a custom interface.
 * <p>
 * If the wrapped object is a {@link Map}, getter/setter calls that cannot
 * be resolved as real methods are emulated as map lookups/stores (see the
 * fallback branch in {@code invoke()} below).
 *
 * @param proxyType The interface type that is implemented by the proxy
 * @return A proxy for the wrapped object
 */
@SuppressWarnings("unchecked")
public <P> P as(final Class<P> proxyType) {
    final boolean isMap = (object instanceof Map);
    final InvocationHandler handler = new InvocationHandler() {
        @SuppressWarnings("null")
        @Override
        public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
            String name = method.getName();

            // Actual method name matches always come first
            try {
                return on(type, object).call(name, args).get();
            }

            // [#14] Emulate POJO behaviour on wrapped map objects:
            // getXxx()/isXxx() read from the map, setXxx(v) writes to it.
            catch (ReflectException e) {
                if (isMap) {
                    Map<String, Object> map = (Map<String, Object>) object;
                    int length = (args == null ? 0 : args.length);

                    if (length == 0 && name.startsWith("get")) {
                        return map.get(property(name.substring(3)));
                    }
                    else if (length == 0 && name.startsWith("is")) {
                        return map.get(property(name.substring(2)));
                    }
                    else if (length == 1 && name.startsWith("set")) {
                        map.put(property(name.substring(3)), args[0]);
                        return null;
                    }
                }

                // Deliberately kept as a comment: Java-8-only support for
                // invoking interface default methods through the proxy.
                /* [java-8] */
                // if (method.isDefault()) {
                //     return CACHED_LOOKUP_CONSTRUCTOR
                //         .newInstance(proxyType)
                //         .unreflectSpecial(method, proxyType)
                //         .bindTo(proxy)
                //         .invokeWithArguments(args);
                // }
                /* [/java-8] */

                throw e;
            }
        }
    };

    return (P) Proxy.newProxyInstance(proxyType.getClassLoader(), new Class[]{proxyType}, handler);
}

/**
 * Get the POJO property name of a getter/setter, i.e. lower-case the first
 * character of the remainder after the "get"/"is"/"set" prefix.
 */
private static String property(String string) {
    int length = string.length();

    if (length == 0) {
        return "";
    }
    else if (length == 1) {
        return string.toLowerCase();
    }
    else {
        return string.substring(0, 1).toLowerCase() + string.substring(1);
    }
}

// ---------------------------------------------------------------------
// Object API
// ---------------------------------------------------------------------

/**
 * Check whether two arrays of types match, converting primitive types to
 * their corresponding wrappers. A NULL.class actual type (standing in for
 * a null argument, see types()) matches any declared type.
 */
private boolean match(Class<?>[] declaredTypes, Class<?>[] actualTypes) {
    if (declaredTypes.length == actualTypes.length) {
        for (int i = 0; i < actualTypes.length; i++) {
            if (actualTypes[i] == NULL.class)
                continue;
            if (wrapper(declaredTypes[i]).isAssignableFrom(wrapper(actualTypes[i])))
                continue;
            return false;
        }
        return true;
    }
    else {
        return false;
    }
}

/**
 * {@inheritDoc}
 */
@Override
public int hashCode() {
    return object.hashCode();
}

/**
 * {@inheritDoc}
 */
@Override
public boolean equals(Object obj) {
    if (obj instanceof Reflect) {
        return object.equals(((Reflect) obj).get());
    }

    return false;
}

/**
 * {@inheritDoc}
 */
@Override
public String toString() {
    return object.toString();
}

// ---------------------------------------------------------------------
// Utility methods
// ---------------------------------------------------------------------

/**
 * Wrap an object created from a constructor.
 */
private static Reflect on(Constructor<?> constructor, Object... args) throws ReflectException {
    try {
        return on(constructor.getDeclaringClass(), accessible(constructor).newInstance(args));
    }
    catch (Exception e) {
        throw new ReflectException(e);
    }
}

/**
 * Wrap an object returned from a method. A void method returns the wrapped
 * receiver itself, so that calls can still be chained.
 */
private static Reflect on(Method method, Object object, Object... args) throws ReflectException {
    try {
        accessible(method);

        if (method.getReturnType() == void.class) {
            method.invoke(object, args);
            return on(object);
        }
        else {
            return on(method.invoke(object, args));
        }
    }
    catch (Exception e) {
        throw new ReflectException(e);
    }
}

/**
 * Unwrap an object, i.e. extract the underlying object from a Reflect
 * wrapper; any other object is returned unchanged.
 */
private static Object unwrap(Object object) {
    if (object instanceof Reflect) {
        return ((Reflect) object).get();
    }

    return object;
}

/**
 * Get an array of types for an array of objects. A null argument array
 * yields an empty type array; a null element yields NULL.class, which
 * match() treats as compatible with any declared type.
 *
 * @see Object#getClass()
 */
private static Class<?>[] types(Object... values) {
    if (values == null) {
        return new Class[0];
    }

    Class<?>[] result = new Class[values.length];

    for (int i = 0; i < values.length; i++) {
        Object value = values[i];
        result[i] = value == null ? NULL.class : value.getClass();
    }

    return result;
}

/**
 * Load a class, wrapping any failure in a ReflectException.
 *
 * @see Class#forName(String)
 */
private static Class<?> forName(String name) throws ReflectException {
    try {
        return Class.forName(name);
    }
    catch (Exception e) {
        throw new ReflectException(e);
    }
}

/**
 * Load (and initialize) a class through a specific class loader, wrapping
 * any failure in a ReflectException.
 */
private static Class<?> forName(String name, ClassLoader classLoader) throws ReflectException {
    try {
        return Class.forName(name, true, classLoader);
    }
    catch (Exception e) {
        throw new ReflectException(e);
    }
}

/**
 * Get the type of the wrapped object.
 *
 * @see Object#getClass()
 */
public Class<?> type() {
    return type;
}

/**
 * Get a wrapper type for a primitive type, or the argument type itself, if
 * it is not a primitive type.
 */
public static Class<?> wrapper(Class<?> type) {
    if (type == null) {
        return null;
    }
    else if (type.isPrimitive()) {
        if (boolean.class == type) {
            return Boolean.class;
        }
        else if (int.class == type) {
            return Integer.class;
        }
        else if (long.class == type) {
            return Long.class;
        }
        else if (short.class == type) {
            return Short.class;
        }
        else if (byte.class == type) {
            return Byte.class;
        }
        else if (double.class == type) {
            return Double.class;
        }
        else if (float.class == type) {
            return Float.class;
        }
        else if (char.class == type) {
            return Character.class;
        }
        else if (void.class == type) {
            return Void.class;
        }
    }

    return type;
}

/**
 * Marker type standing in for a null argument in types()/match(): a
 * declared parameter of any type accepts it.
 */
private static class NULL {
}
}
package com.smartdevicelink.proxy.rpc;

import com.smartdevicelink.protocol.enums.FunctionID;
import com.smartdevicelink.proxy.RPCRequest;
import com.smartdevicelink.proxy.rpc.enums.AppHMIType;
import com.smartdevicelink.proxy.rpc.enums.Language;

import java.util.Hashtable;
import java.util.List;

/**
 * Registers the application's interface with SDL&reg;, declaring the properties
 * of the registration: the messaging interface version, the app name, TTS name,
 * voice-recognition synonyms, desired languages, media flag, HMI types, hash ID
 * and app ID.
 * <p>
 * The mobile application must establish its interface registration with
 * SDL&reg; before any other interaction with SDL&reg; can take place. The
 * registration lasts until it is terminated either by the application calling
 * {@linkplain UnregisterAppInterface}, by SDL&reg; sending an
 * {@linkplain OnAppInterfaceUnregistered} notification, or by loss of the
 * underlying transport connection or RPC session.
 * <p>
 * Until the application receives its first {@linkplain OnHMIStatus}
 * notification, its HMI status is assumed to be HMILevel=NONE,
 * AudioStreamingState=NOT_AUDIBLE and SystemContext=MAIN.
 * <p>
 * All SDL&reg; resources which the application creates or uses (choice sets,
 * command menus, media clock timer display value, etc.) are associated with the
 * application's interface registration and are disposed of when the
 * registration ends; the application must re-register and re-create them to use
 * the SDL&reg; HMI again.
 * <p>
 * <b>Notes:</b> the autoActivateID parameter, and associated behavior, is
 * currently ignored by SDL&reg;. If the application intends to stream audio it
 * should indicate so via the isMediaApplication parameter so that audio streams
 * reliably without user configuration.
 *
 * @since SmartDeviceLink 1.0
 * @see UnregisterAppInterface
 * @see OnAppInterfaceUnregistered
 */
public class RegisterAppInterface extends RPCRequest {
    // Parameter keys as defined by the RPC spec; values must not change.
    public static final String KEY_TTS_NAME = "ttsName";
    public static final String KEY_HMI_DISPLAY_LANGUAGE_DESIRED = "hmiDisplayLanguageDesired";
    public static final String KEY_APP_HMI_TYPE = "appHMIType";
    public static final String KEY_APP_ID = "appID";
    public static final String KEY_LANGUAGE_DESIRED = "languageDesired";
    public static final String KEY_DEVICE_INFO = "deviceInfo";
    public static final String KEY_APP_NAME = "appName";
    public static final String KEY_NGN_MEDIA_SCREEN_APP_NAME = "ngnMediaScreenAppName";
    public static final String KEY_IS_MEDIA_APPLICATION = "isMediaApplication";
    public static final String KEY_VR_SYNONYMS = "vrSynonyms";
    public static final String KEY_SDL_MSG_VERSION = "syncMsgVersion";
    public static final String KEY_HASH_ID = "hashID";

    /**
     * Constructs a new RegisterAppInterface object.
     */
    public RegisterAppInterface() {
        super(FunctionID.REGISTER_APP_INTERFACE.toString());
    }

    /**
     * Constructs a new RegisterAppInterface object from a Hashtable of
     * deserialized parameters.
     *
     * @param hash the Hashtable to use
     */
    public RegisterAppInterface(Hashtable<String, Object> hash) {
        super(hash);
    }

    /**
     * Gets the version of the SDL&reg; SmartDeviceLink interface.
     *
     * @return a SdlMsgVersion object representing the interface version
     */
    public SdlMsgVersion getSdlMsgVersion() {
        return (SdlMsgVersion) getObject(SdlMsgVersion.class, KEY_SDL_MSG_VERSION);
    }

    /**
     * Sets the version of the SDL&reg; SmartDeviceLink interface.
     * <p>
     * <b>Notes:</b> to be compatible, the app's msg major version number must
     * be less than or equal to SDL&reg;'s major version number. If versions are
     * incompatible, the app has 20 seconds to attempt a successful
     * RegisterAppInterface on the underlying protocol session, else it will be
     * terminated. The minor version number sent from the app to SDL&reg; is
     * ignored by SDL&reg;.
     *
     * @param sdlMsgVersion a SdlMsgVersion object representing the version of
     *                      the SDL&reg; SmartDeviceLink interface
     */
    public void setSdlMsgVersion(SdlMsgVersion sdlMsgVersion) {
        setParameters(KEY_SDL_MSG_VERSION, sdlMsgVersion);
    }

    /**
     * Gets the information about the connecting device.
     *
     * @return a DeviceInfo object, or null if not set
     */
    public DeviceInfo getDeviceInfo() {
        return (DeviceInfo) getObject(DeviceInfo.class, KEY_DEVICE_INFO);
    }

    /**
     * Sets the information about the connecting device.
     *
     * @param deviceInfo a DeviceInfo object
     */
    public void setDeviceInfo(DeviceInfo deviceInfo) {
        setParameters(KEY_DEVICE_INFO, deviceInfo);
    }

    /**
     * Gets the mobile application's name.
     *
     * @return a String representing the mobile application's name
     */
    public String getAppName() {
        return getString(KEY_APP_NAME);
    }

    /**
     * Sets the mobile application's name. This name is displayed in the
     * SDL&reg; Mobile Applications menu and also serves as the unique
     * identifier of the application for SmartDeviceLink.
     * <p>
     * <b>Notes:</b> must be 1-100 characters in length; may not be the same
     * (by case-insensitive comparison) as the name or any synonym of any
     * currently-registered application.
     *
     * @param appName a String value representing the mobile application's name
     */
    public void setAppName(String appName) {
        setParameters(KEY_APP_NAME, appName);
    }

    /**
     * Gets the TTS string for voice recognition of the mobile application
     * name.
     *
     * @return a List of TTSChunk values representing the TTS name
     * @since SmartDeviceLink 2.0
     */
    @SuppressWarnings("unchecked")
    public List<TTSChunk> getTtsName() {
        return (List<TTSChunk>) getObject(TTSChunk.class, KEY_TTS_NAME);
    }

    /**
     * Sets the TTS string for voice recognition of the mobile application
     * name, meant to overcome any failing of the speech engine in properly
     * pronouncing or understanding the app name.
     * <p>
     * <b>Notes:</b> size must be 1-100; may not be empty; may not start with a
     * new-line character; must be unique over all applications and may not
     * interfere with any name or synonym of a previously registered
     * application — applications with the same name will be rejected.
     *
     * @param ttsName a List of TTSChunk values representing the TTS name
     * @since SmartDeviceLink 2.0
     */
    public void setTtsName(List<TTSChunk> ttsName) {
        setParameters(KEY_TTS_NAME, ttsName);
    }

    /**
     * Gets the abbreviated version of the mobile application's name (if
     * necessary) that will be displayed on the NGN media screen.
     *
     * @return a String value representing the abbreviated app name
     */
    public String getNgnMediaScreenAppName() {
        return getString(KEY_NGN_MEDIA_SCREEN_APP_NAME);
    }

    /**
     * Sets the abbreviated version of the mobile application's name (if
     * necessary) that will be displayed on the NGN media screen.
     * <p>
     * <b>Notes:</b> must be 1-5 characters; if not provided, the value is
     * derived from appName truncated to 5 characters.
     *
     * @param ngnMediaScreenAppName a String value representing the abbreviated
     *                              app name
     */
    public void setNgnMediaScreenAppName(String ngnMediaScreenAppName) {
        setParameters(KEY_NGN_MEDIA_SCREEN_APP_NAME, ngnMediaScreenAppName);
    }

    /**
     * Gets the array of 1-100 voice-recognition synonyms by which this app can
     * be called when being addressed in the mobile applications menu.
     *
     * @return a List of String synonyms
     */
    @SuppressWarnings("unchecked")
    public List<String> getVrSynonyms() {
        return (List<String>) getObject(String.class, KEY_VR_SYNONYMS);
    }

    /**
     * Sets the array of 1-100 voice-recognition synonyms.
     * <p>
     * <b>Notes:</b> each synonym is limited to 40 characters; may not be the
     * same (by case-insensitive comparison) as the name or any synonym of any
     * currently-registered application.
     *
     * @param vrSynonyms a List of 1-100 String synonyms
     */
    public void setVrSynonyms(List<String> vrSynonyms) {
        setParameters(KEY_VR_SYNONYMS, vrSynonyms);
    }

    /**
     * Gets whether the application is a media application.
     *
     * @return a Boolean value indicating whether the application streams audio
     */
    public Boolean getIsMediaApplication() {
        return getBoolean(KEY_IS_MEDIA_APPLICATION);
    }

    /**
     * Sets whether the application is a media application, i.e. whether it
     * will be streaming audio to SDL (via A2DP) that is audible outside of the
     * BT media source.
     *
     * @param isMediaApplication a Boolean value
     */
    public void setIsMediaApplication(Boolean isMediaApplication) {
        setParameters(KEY_IS_MEDIA_APPLICATION, isMediaApplication);
    }

    /**
     * Gets the language the application intends to use for user interaction
     * (Display, TTS and VR).
     *
     * @return a Language enumeration value
     */
    public Language getLanguageDesired() {
        return (Language) getObject(Language.class, KEY_LANGUAGE_DESIRED);
    }

    /**
     * Sets the language the application intends to use for user interaction
     * (Display, TTS and VR). If it does not match the active language on SDL,
     * the interface registration will be rejected.
     *
     * @param languageDesired a Language enumeration value
     */
    public void setLanguageDesired(Language languageDesired) {
        setParameters(KEY_LANGUAGE_DESIRED, languageDesired);
    }

    /**
     * Gets the language the application intends to use for user interaction
     * (Display only).
     *
     * @return a Language enumeration value
     * @since SmartDeviceLink 2.0
     */
    public Language getHmiDisplayLanguageDesired() {
        return (Language) getObject(Language.class, KEY_HMI_DISPLAY_LANGUAGE_DESIRED);
    }

    /**
     * Sets the language the application intends to use for user interaction
     * (Display only).
     *
     * @param hmiDisplayLanguageDesired a Language enumeration value
     * @since SmartDeviceLink 2.0
     */
    public void setHmiDisplayLanguageDesired(Language hmiDisplayLanguageDesired) {
        setParameters(KEY_HMI_DISPLAY_LANGUAGE_DESIRED, hmiDisplayLanguageDesired);
    }

    /**
     * Gets the list of all applicable app types stating which classifications
     * are to be given to the app; e.g. for platforms like GEN2 this determines
     * which "corner(s)" the app can populate.
     *
     * @return a List of AppHMIType values
     * @since SmartDeviceLink 2.0
     */
    @SuppressWarnings("unchecked")
    public List<AppHMIType> getAppHMIType() {
        return (List<AppHMIType>) getObject(AppHMIType.class, KEY_APP_HMI_TYPE);
    }

    /**
     * Sets the list of all applicable app types stating which classifications
     * are to be given to the app.
     * <p>
     * <b>Notes:</b> array min size 1, max size 100.
     *
     * @param appHMIType a List of AppHMIType values
     * @since SmartDeviceLink 2.0
     */
    public void setAppHMIType(List<AppHMIType> appHMIType) {
        setParameters(KEY_APP_HMI_TYPE, appHMIType);
    }

    /**
     * Gets the ID used to uniquely identify the current state of all app data
     * that can persist through connection cycles (e.g. ignition cycles).
     *
     * @return a String hash ID, max length 100
     */
    public String getHashID() {
        return getString(KEY_HASH_ID);
    }

    /**
     * Sets the ID used to uniquely identify the current state of all app data
     * that can persist through connection cycles. When sending a hash ID, all
     * RegisterAppInterface parameters should still be provided.
     *
     * @param hashID a String hash ID, max length 100
     */
    public void setHashID(String hashID) {
        setParameters(KEY_HASH_ID, hashID);
    }

    /**
     * Gets the unique ID, which an app will be given when approved.
     *
     * @return a String value representing the unique app ID
     * @since SmartDeviceLink 2.0
     */
    public String getAppID() {
        return getString(KEY_APP_ID);
    }

    /**
     * Sets the unique ID, which an app will be given when approved, used to
     * validate the app against policy table entries.
     * <p>
     * <b>Notes:</b> max length 100.
     *
     * @param appID a String value representing the unique app ID
     * @since SmartDeviceLink 2.0
     */
    public void setAppID(String appID) {
        setParameters(KEY_APP_ID, appID);
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.common.util;

import com.carrotsearch.hppc.ObjectArrayList;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefArray;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.InPlaceMergeSorter;
import org.apache.lucene.util.IntroSorter;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Iterators;

import java.nio.file.Path;
import java.util.AbstractList;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.RandomAccess;
import java.util.Set;

/** Collections-related utility methods. */
public class CollectionUtils {

    /**
     * Checks if the given array contains any elements.
     *
     * @param array The array to check
     *
     * @return false if the array contains an element, true if not or the array is null.
     */
    public static boolean isEmpty(Object[] array) {
        return array == null || array.length == 0;
    }

    /**
     * Return a rotated view of the given list with the given distance.
     * The distance may be negative or larger than the list size; it is
     * normalized modulo the size. A distance that normalizes to 0 (or an
     * empty list) returns the original list unchanged.
     */
    public static <T> List<T> rotate(final List<T> list, int distance) {
        if (list.isEmpty()) {
            return list;
        }

        int d = distance % list.size();
        if (d < 0) {
            d += list.size();
        }

        if (d == 0) {
            return list;
        }

        return new RotatedList<>(list, d);
    }

    /**
     * Sorts the given byte-array list lexicographically and removes adjacent
     * duplicates in place, shrinking the list's element count accordingly.
     */
    public static void sortAndDedup(final ObjectArrayList<byte[]> array) {
        int len = array.size();
        if (len > 1) {
            sort(array);
            int uniqueCount = 1;
            for (int i = 1; i < len; ++i) {
                if (Arrays.equals(array.get(i), array.get(i - 1)) == false) {
                    array.set(uniqueCount++, array.get(i));
                }
            }
            // hppc exposes the logical size directly; truncate to the unique prefix
            array.elementsCount = uniqueCount;
        }
    }

    /**
     * Sorts the given byte-array list in place using unsigned lexicographic
     * byte order (shorter arrays sort before longer ones with equal prefixes).
     */
    public static void sort(final ObjectArrayList<byte[]> array) {
        new IntroSorter() {

            byte[] pivot;

            @Override
            protected void swap(int i, int j) {
                final byte[] tmp = array.get(i);
                array.set(i, array.get(j));
                array.set(j, tmp);
            }

            @Override
            protected int compare(int i, int j) {
                return compare(array.get(i), array.get(j));
            }

            @Override
            protected void setPivot(int i) {
                pivot = array.get(i);
            }

            @Override
            protected int comparePivot(int j) {
                return compare(pivot, array.get(j));
            }

            /** Unsigned byte-wise comparison; ties broken by length. */
            private int compare(byte[] left, byte[] right) {
                for (int i = 0, j = 0; i < left.length && j < right.length; i++, j++) {
                    // mask to compare as unsigned bytes
                    int a = left[i] & 0xFF;
                    int b = right[j] & 0xFF;
                    if (a != b) {
                        return a - b;
                    }
                }
                return left.length - right.length;
            }

        }.sort(0, array.size());
    }

    /**
     * Converts a collection of boxed Integers into a primitive int array.
     *
     * @throws NullPointerException if the collection is null
     */
    public static int[] toArray(Collection<Integer> ints) {
        Objects.requireNonNull(ints);
        return ints.stream().mapToInt(s -> s).toArray();
    }

    /**
     * Deeply inspects a Map, Iterable, or Object array looking for references back to itself.
     * @throws IllegalArgumentException if a self-reference is found
     * @param value The object to evaluate looking for self references
     * @param messageHint A string to be included in the exception message if the call fails, to provide
     *                    more context to the handler of the exception
     */
    public static void ensureNoSelfReferences(Object value, String messageHint) {
        Iterable<?> it = convert(value);
        if (it != null) {
            ensureNoSelfReferences(it, value, Collections.newSetFromMap(new IdentityHashMap<>()), messageHint);
        }
    }

    /**
     * Views the given value as an Iterable for self-reference traversal:
     * maps iterate both keys and values, arrays are wrapped, Paths are
     * deliberately excluded (they are Iterable but never self-referential),
     * and anything else yields null (nothing to traverse).
     */
    private static Iterable<?> convert(Object value) {
        if (value == null) {
            return null;
        }
        if (value instanceof Map) {
            Map<?, ?> map = (Map<?, ?>) value;
            return () -> Iterators.concat(map.keySet().iterator(), map.values().iterator());
        } else if ((value instanceof Iterable) && (value instanceof Path == false)) {
            return (Iterable<?>) value;
        } else if (value instanceof Object[]) {
            return Arrays.asList((Object[]) value);
        } else {
            return null;
        }
    }

    /**
     * Recursive depth-first traversal; {@code ancestors} is an identity set of
     * containers on the current path, so re-encountering one means a cycle.
     */
    private static void ensureNoSelfReferences(
        final Iterable<?> value,
        Object originalReference,
        final Set<Object> ancestors,
        String messageHint
    ) {
        if (value != null) {
            if (ancestors.add(originalReference) == false) {
                String suffix = Strings.isNullOrEmpty(messageHint) ? "" : String.format(Locale.ROOT, " (%s)", messageHint);
                throw new IllegalArgumentException("Iterable object is self-referencing itself" + suffix);
            }
            for (Object o : value) {
                ensureNoSelfReferences(convert(o), o, ancestors, messageHint);
            }
            // backtrack so that sibling branches may legitimately share objects
            ancestors.remove(originalReference);
        }
    }

    /** Read-only rotated view over a random-access list. */
    private static class RotatedList<T> extends AbstractList<T> implements RandomAccess {

        private final List<T> in;
        private final int distance;

        /**
         * @param list     the backing list; must be {@link RandomAccess}
         * @param distance normalized rotation distance in [0, size)
         */
        RotatedList(List<T> list, int distance) {
            if (distance < 0 || distance >= list.size()) {
                throw new IllegalArgumentException();
            }
            if ((list instanceof RandomAccess) == false) {
                throw new IllegalArgumentException();
            }
            this.in = list;
            this.distance = distance;
        }

        @Override
        public T get(int index) {
            int idx = distance + index;
            if (idx < 0 || idx >= in.size()) {
                idx -= in.size();
            }
            return in.get(idx);
        }

        @Override
        public int size() {
            return in.size();
        }
    }

    /**
     * Sorts {@code indices} so that they enumerate the values of {@code bytes}
     * in natural {@link BytesRef} order. The array itself is not modified.
     */
    public static void sort(final BytesRefArray bytes, final int[] indices) {
        sort(new BytesRefBuilder(), new BytesRefBuilder(), bytes, indices);
    }

    /** Stable in-place merge sort of {@code indices} by the referenced bytes. */
    private static void sort(final BytesRefBuilder scratch, final BytesRefBuilder scratch1, final BytesRefArray bytes, final int[] indices) {

        final int numValues = bytes.size();
        assert indices.length >= numValues;
        if (numValues > 1) {
            new InPlaceMergeSorter() {
                final Comparator<BytesRef> comparator = Comparator.naturalOrder();

                @Override
                protected int compare(int i, int j) {
                    return comparator.compare(bytes.get(scratch, indices[i]), bytes.get(scratch1, indices[j]));
                }

                @Override
                protected void swap(int i, int j) {
                    int value_i = indices[i];
                    indices[i] = indices[j];
                    indices[j] = value_i;
                }
            }.sort(0, numValues);
        }
    }

    /**
     * Sorts {@code indices} by the referenced bytes and compacts duplicates to
     * the front of the array.
     *
     * @return the number of unique values (the valid prefix of {@code indices})
     */
    public static int sortAndDedup(final BytesRefArray bytes, final int[] indices) {
        final BytesRefBuilder scratch = new BytesRefBuilder();
        final BytesRefBuilder scratch1 = new BytesRefBuilder();
        final int numValues = bytes.size();
        assert indices.length >= numValues;
        if (numValues <= 1) {
            return numValues;
        }
        sort(scratch, scratch1, bytes, indices);

        int uniqueCount = 1;
        // ping-pong the two builders so "previous" always holds the last kept value
        BytesRefBuilder previous = scratch;
        BytesRefBuilder current = scratch1;
        bytes.get(previous, indices[0]);
        for (int i = 1; i < numValues; ++i) {
            bytes.get(current, indices[i]);
            if (previous.get().equals(current.get()) == false) {
                indices[uniqueCount++] = indices[i];
            }
            BytesRefBuilder tmp = previous;
            previous = current;
            current = tmp;
        }
        return uniqueCount;
    }

    /**
     * Copies the given elements into a new mutable ArrayList.
     *
     * @throws NullPointerException if {@code elements} is null
     */
    @SuppressWarnings("unchecked")
    public static <E> ArrayList<E> iterableAsArrayList(Iterable<? extends E> elements) {
        if (elements == null) {
            throw new NullPointerException("elements");
        }
        if (elements instanceof Collection) {
            // bulk copy; cast is safe because the iterable only produces ? extends E
            return new ArrayList<>((Collection<? extends E>) elements);
        } else {
            ArrayList<E> list = new ArrayList<>();
            for (E element : elements) {
                list.add(element);
            }
            return list;
        }
    }

    /**
     * Copies the given varargs into a new mutable ArrayList.
     *
     * @throws NullPointerException if {@code elements} is null
     */
    @SafeVarargs
    @SuppressWarnings("varargs")
    public static <E> ArrayList<E> arrayAsArrayList(E... elements) {
        if (elements == null) {
            throw new NullPointerException("elements");
        }
        return new ArrayList<>(Arrays.asList(elements));
    }

    /**
     * Creates an unmodifiable copy of the given collection with the given element appended.
     *
     * @param collection collection to copy
     * @param element    element to append
     */
    @SuppressWarnings("unchecked")
    public static <E> List<E> appendToCopy(Collection<E> collection, E element) {
        final int size = collection.size() + 1;
        // toArray fills our oversized Object[] and leaves the last slot null
        final E[] array = collection.toArray((E[]) new Object[size]);
        array[size - 1] = element;
        return Collections.unmodifiableList(Arrays.asList(array));
    }

    /** Creates a mutable single-element ArrayList. */
    public static <E> ArrayList<E> newSingletonArrayList(E element) {
        return new ArrayList<>(Collections.singletonList(element));
    }

    /**
     * Eagerly splits {@code list} into consecutive sublists of at most
     * {@code size} elements; the final sublist may be shorter.
     *
     * @throws NullPointerException     if {@code list} is null
     * @throws IllegalArgumentException if {@code size} is not positive
     */
    public static <E> List<List<E>> eagerPartition(List<E> list, int size) {
        if (list == null) {
            throw new NullPointerException("list");
        }
        if (size <= 0) {
            throw new IllegalArgumentException("size <= 0");
        }

        // ceil(list.size() / size) via integer arithmetic; the previous
        // Math.ceil(list.size() / size) divided two ints first, truncating
        // before the ceil and under-sizing the capacity hint.
        List<List<E>> result = new ArrayList<>((list.size() + size - 1) / size);

        List<E> accumulator = new ArrayList<>(size);
        int count = 0;
        for (E element : list) {
            if (count == size) {
                result.add(accumulator);
                accumulator = new ArrayList<>(size);
                count = 0;
            }
            accumulator.add(element);
            count++;
        }
        if (count > 0) {
            result.add(accumulator);
        }

        return result;
    }

    /** Returns a new mutable list containing all of {@code listA} followed by all of {@code listB}. */
    public static <E> List<E> concatLists(List<E> listA, List<E> listB) {
        List<E> concatList = new ArrayList<>(listA.size() + listB.size());
        concatList.addAll(listA);
        concatList.addAll(listB);
        return concatList;
    }

}
package org.drip.analytics.date; /* * -*- mode: java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */ /*! * Copyright (C) 2012 Lakshmi Krishnamurthy * Copyright (C) 2011 Lakshmi Krishnamurthy * * This file is part of CreditAnalytics, a free-software/open-source library for fixed income analysts and * developers - http://www.credit-trader.org * * CreditAnalytics is a free, full featured, fixed income credit analytics library, developed with a special * focus towards the needs of the bonds and credit products community. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. 
 */

/**
 * Comprehensive representation of a Julian date (stored as a double day number) together with
 * calendar conversion and date-manipulation helpers. The conversion routines follow the classic
 * Numerical-Recipes-style Julian/Gregorian day-number algorithm.
 *
 * NOTE(review): every leap-year test in this class uses (year % 4 == 0) only and ignores the
 * Gregorian century rule (so e.g. 1900 and 2100 are treated as leap years) — confirm intended.
 *
 * @author Lakshmi Krishnamurthy
 */

public class JulianDate implements java.lang.Comparable<JulianDate> {
    // Gates stack-trace logging in the string-parsing factory methods
    private static final boolean s_bLog = false;

    // Rounding fudge of half a second; divided by 86400 at the use sites to express it in days
    private static double HALFSECOND = 0.5;

    // Gregorian calendar switch-over (Oct 15, 1582), encoded as day + 31 * (month + 12 * year)
    private static int JGREG = 15 + 31 * (10 + 12 * 1582);

    /**
     * LEFT_INCLUDE includes the start date in the Feb29 check
     */
    public static final int LEFT_INCLUDE = 1;

    /**
     * RIGHT_INCLUDE includes the end date in the Feb29 check
     */
    public static final int RIGHT_INCLUDE = 2;

    /**
     * Days of the week - Monday
     */
    public static final int MONDAY = 0;

    /**
     * Days of the week - Tuesday
     */
    public static final int TUESDAY = 1;

    /**
     * Days of the week - Wednesday
     */
    public static final int WEDNESDAY = 2;

    /**
     * Days of the week - Thursday
     */
    public static final int THURSDAY = 3;

    /**
     * Days of the week - Friday
     */
    public static final int FRIDAY = 4;

    /**
     * Days of the week - Saturday
     */
    public static final int SATURDAY = 5;

    /**
     * Days of the week - Sunday
     */
    public static final int SUNDAY = 6;

    /**
     * Integer Month - January
     */
    public static final int JANUARY = 1;

    /**
     * Integer Month - February
     */
    public static final int FEBRUARY = 2;

    /**
     * Integer Month - March
     */
    public static final int MARCH = 3;

    /**
     * Integer Month - April
     */
    public static final int APRIL = 4;

    /**
     * Integer Month - May
     */
    public static final int MAY = 5;

    /**
     * Integer Month - June
     */
    public static final int JUNE = 6;

    /**
     * Integer Month - July
     */
    public static final int JULY = 7;

    /**
     * Integer Month - August
     */
    public static final int AUGUST = 8;

    /**
     * Integer Month - September
     */
    public static final int SEPTEMBER = 9;

    /**
     * Integer Month - October
     */
    public static final int OCTOBER = 10;

    /**
     * Integer Month - November
     */
    public static final int NOVEMBER = 11;

    /**
     * Integer Month - December
     */
    public static final int DECEMBER = 12;

    // The wrapped Julian day number; NaN until set by the constructor
    private double _dblJulian = java.lang.Double.NaN;

    /**
     * Converts a year/month/day triple to a Julian day number (truncated to a whole day).
     *
     * @param iYear Year
     * @param iMonth Month
     * @param iDay Day
     *
     * @return double representing the Julian date
     */
    public static double toJulian (
        final int iYear,
        final int iMonth,
        final int iDay)
    {
        int iJulianYear = iYear;
        int iJulianMonth = iMonth;

        if (iYear < 0) ++iJulianYear;

        // shift Jan/Feb to months 14/15 of the prior year, per the standard algorithm
        if (iMonth > 2)
            ++iJulianMonth;
        else {
            --iJulianYear;
            iJulianMonth += 13;
        }

        double dblJulian = (java.lang.Math.floor (365.25 * iJulianYear) + java.lang.Math.floor
            (30.6001 * iJulianMonth) + iDay + 1720995.0);

        // Gregorian correction applies only on/after the Oct 15, 1582 switch-over
        if (iDay + 31 * (iMonth + 12 * iYear) >= JGREG) {
            int iJA = (int)(0.01 * iJulianYear);

            dblJulian += 2 - iJA + (0.25 * iJA);
        }

        return java.lang.Math.floor (dblJulian);
    }

    /**
     * Creates a MM/DD/YYYY string from the input Julian double.
     *
     * @param dblJulianIn double representing Julian date
     *
     * @return MM/DD/YYYY date string, or null if the input is NaN
     */
    public static java.lang.String fromJulian (
        final double dblJulianIn)
    {
        if (java.lang.Double.isNaN (dblJulianIn)) return null;

        int iJA = (int) (dblJulianIn + HALFSECOND / 86400.0);

        // undo the Gregorian correction for dates past the switch-over
        if (iJA >= JGREG) {
            int iJAlpha = (int) (((iJA - 1867216) - 0.25) / 36524.25);

            iJA = iJA + 1 + iJAlpha - iJAlpha / 4;
        }

        // standard inverse decode: iJB..iJE recover day/month/year from the day number
        int iJB = iJA + 1524;
        int iJC = (int) (6680.0 + ((iJB - 2439870) - 122.1) / 365.25);
        int iJD = 365 * iJC + iJC / 4;
        int iJE = (int) ((iJB - iJD) / 30.6001);
        int iDay = iJB - iJD - (int) (30.6001 * iJE);
        int iMonth = iJE - 1;
        int iYear = iJC - 4715;

        if (iMonth > 12) iMonth -= 12;

        if (iMonth > 2) --iYear;

        if (iYear <= 0) --iYear;

        return org.drip.analytics.support.GenericUtil.PrePad (iMonth) + "/" +
            org.drip.analytics.support.GenericUtil.PrePad (iDay) + "/" + iYear;
    }

    /**
     * Returns the year corresponding to the Julian double.
     *
     * @param dblJulianIn double representing the Julian date
     *
     * @return integer representing the year
     *
     * @throws java.lang.Exception thrown if the input date is invalid (NaN)
     */
    public static int Year (
        final double dblJulianIn)
        throws java.lang.Exception
    {
        if (java.lang.Double.isNaN (dblJulianIn))
            throw new java.lang.Exception ("JulianDate.Year got NaN input!");

        int iJA = (int) (dblJulianIn + HALFSECOND / 86400.0);

        if (iJA >= JGREG) {
            int iJAlpha = (int) (((iJA - 1867216) - 0.25) / 36524.25);

            iJA = iJA + 1 + iJAlpha - iJAlpha / 4;
        }

        // same decode as fromJulian; the month is needed to adjust the year boundary
        int iJB = iJA + 1524;
        int iJC = (int) (6680.0 + ((iJB - 2439870) - 122.1) / 365.25);
        int iJD = 365 * iJC + iJC / 4;
        int iJE = (int) ((iJB - iJD) / 30.6001);
        int iMonth = iJE - 1;
        int iYear = iJC - 4715;

        if (iMonth > 12) iMonth -= 12;

        if (iMonth > 2) --iYear;

        if (iYear <= 0) --iYear;

        return iYear;
    }

    /**
     * Returns the month given the date represented by the Julian double.
     *
     * @param dblJulianIn double representing the Julian date
     *
     * @return integer representing the month (1..12)
     *
     * @throws java.lang.Exception thrown if input date is invalid (NaN)
     */
    public static int Month (
        final double dblJulianIn)
        throws java.lang.Exception
    {
        if (java.lang.Double.isNaN (dblJulianIn))
            throw new java.lang.Exception ("JulianDate.Month got NaN input!");

        int iJA = (int) (dblJulianIn + HALFSECOND / 86400.0);

        if (iJA >= JGREG) {
            int iJAlpha = (int) (((iJA - 1867216) - 0.25) / 36524.25);

            iJA = iJA + 1 + iJAlpha - iJAlpha / 4;
        }

        int iJB = iJA + 1524;
        int iJC = (int) (6680.0 + ((iJB - 2439870) - 122.1) / 365.25);
        int iJD = 365 * iJC + iJC / 4;
        int iMonth = (int) ((iJB - iJD) / 30.6001) - 1;

        if (iMonth > 12) iMonth -= 12;

        return iMonth;
    }

    /**
     * Returns the day-of-month corresponding to the Julian double.
     *
     * NOTE(review): the exception message below says "JulianDate.Month" — copy-paste slip from
     * Month(); left unchanged here since it is runtime-visible text.
     *
     * @param dblJulianIn double representing the Julian date
     *
     * @return integer representing the day of the month
     *
     * @throws java.lang.Exception thrown if input date is invalid (NaN)
     */
    public static int Day (
        final double dblJulianIn)
        throws java.lang.Exception
    {
        if (java.lang.Double.isNaN (dblJulianIn))
            throw new java.lang.Exception ("JulianDate.Month got NaN input!");

        int iJA = (int) (dblJulianIn + HALFSECOND / 86400.0);

        if (iJA >= JGREG) {
            int iJAlpha = (int) (((iJA - 1867216) - 0.25) / 36524.25);

            iJA = iJA + 1 + iJAlpha - iJAlpha / 4;
        }

        int iJB = iJA + 1524;
        int iJC = (int) (6680.0 + ((iJB - 2439870) - 122.1) / 365.25);
        int iJD = 365 * iJC + iJC / 4;
        int iJE = (int) ((iJB - iJD) / 30.6001);

        return iJB - iJD - (int) (30.6001 * iJE);
    }

    /**
     * Number of days elapsed in the year represented by the given Julian date.
     *
     * @param dblDate Double representing the Julian date
     *
     * @return integer number of days elapsed since Jan 1 of that year
     *
     * @throws java.lang.Exception Thrown if the input date is invalid (NaN)
     */
    public static final int DaysElapsed (
        final double dblDate)
        throws java.lang.Exception
    {
        if (java.lang.Double.isNaN (dblDate))
            throw new java.lang.Exception ("JulianDate.DaysElapsed got NaN input!");

        return (int) (dblDate - toJulian (Year (dblDate), JANUARY, 1));
    }

    /**
     * Number of days remaining in the year represented by the given Julian date.
     *
     * @param dblDate Double representing the Julian date
     *
     * @return integer number of days remaining until Dec 31 of that year
     *
     * @throws java.lang.Exception Thrown if input date is invalid (NaN)
     */
    public static final int DaysRemaining (
        final double dblDate)
        throws java.lang.Exception
    {
        if (java.lang.Double.isNaN (dblDate))
            throw new java.lang.Exception ("JulianDate.DaysRemaining got NaN input!");

        return (int) (toJulian (Year (dblDate), DECEMBER, 31) - dblDate);
    }

    /**
     * Indicates if the year in the given Julian date is a leap year.
     *
     * NOTE(review): uses the %4 rule only — century years (1900, 2100, ...) are misclassified
     * under the Gregorian calendar. Consistent with DaysInMonth below; confirm intended.
     *
     * @param dblDate Double representing the input Julian date
     *
     * @return True indicates leap year
     *
     * @throws java.lang.Exception Thrown if input date is invalid (NaN)
     */
    public static final boolean IsLeapYear (
        final double dblDate)
        throws java.lang.Exception
    {
        if (java.lang.Double.isNaN (dblDate))
            throw new java.lang.Exception ("JulianDate.IsLeapYear got NaN input!");

        return 0 == (Year (dblDate) % 4);
    }

    /**
     * Indicates whether there is at least one Feb 29 between the two given Julian dates.
     *
     * @param dblStart Double representing the starting Julian date
     * @param dblEnd Double representing the ending Julian date
     * @param iIncludeSide LEFT_INCLUDE and/or RIGHT_INCLUDE flags indicating whether the start
     *  date, the end date, or both are included in the check
     *
     * @return True indicates there is at least one Feb29 between the dates
     *
     * @throws java.lang.Exception If inputs are invalid (NaN)
     */
    public static final boolean ContainsFeb29 (
        final double dblStart,
        final double dblEnd,
        final int iIncludeSide)
        throws java.lang.Exception
    {
        if (java.lang.Double.isNaN (dblStart) || java.lang.Double.isNaN (dblEnd))
            throw new java.lang.Exception ("JulianDate.ContainsFeb29 got NaN input!");

        if (dblStart >= dblEnd) return false;

        double dblLeft = dblStart;
        double dblRight = dblEnd;

        // shrink the interval on whichever sides are excluded
        if (0 == (iIncludeSide & LEFT_INCLUDE)) ++dblLeft;

        if (0 == (iIncludeSide & RIGHT_INCLUDE)) --dblRight;

        // linear day-by-day scan of the (inclusive) adjusted interval
        for (double dblDate = dblLeft; dblDate <= dblRight; ++dblDate) {
            if (FEBRUARY == Month (dblDate) && 29 == Day (dblDate)) return true;
        }

        return false;
    }

    /**
     * Counts how many leap days (Feb 29) exist between the two given Julian dates, scanning in
     * 365-day windows.
     *
     * @param dblStart Double representing the starting Julian date
     * @param dblEnd Double representing the ending Julian date
     * @param iIncludeSide LEFT_INCLUDE and/or RIGHT_INCLUDE inclusion flags
     *
     * @return Integer representing the number of leap days
     *
     * @throws java.lang.Exception Thrown if the inputs are invalid (NaN)
     */
    public static final int NumFeb29 (
        final double dblStart,
        final double dblEnd,
        final int iIncludeSide)
        throws java.lang.Exception
    {
        if (java.lang.Double.isNaN (dblStart) || java.lang.Double.isNaN (dblEnd))
            throw new java.lang.Exception ("JulianDate.NumFeb29 got NaN input!");

        int iNumFeb29 = 0;
        boolean bLoop = true;
        double dblDate = dblStart;

        while (bLoop) {
            double dblEndDate = dblDate + 365;

            // clamp the final window to dblEnd and stop after processing it
            if (dblEndDate > dblEnd) {
                bLoop = false;
                dblEndDate = dblEnd;
            }

            // at most one Feb 29 can fall inside a 365-day window
            if (ContainsFeb29 (dblDate, dblEndDate, iIncludeSide)) ++iNumFeb29;

            dblDate = dblEndDate;
        }

        return iNumFeb29;
    }

    /**
     * Returns the English word corresponding to the input integer month.
     *
     * @param iMonth Integer representing the month
     *
     * @return String of the English word
     *
     * @throws java.lang.Exception Thrown if the input month is invalid
     */
    public static java.lang.String getMonthChar (
        final int iMonth)
        throws java.lang.Exception
    {
        if (JANUARY == iMonth) return "January";

        if (FEBRUARY == iMonth) return "February";

        if (MARCH == iMonth) return "March";

        if (APRIL == iMonth) return "April";

        if (MAY == iMonth) return "May";

        if (JUNE == iMonth) return "June";

        if (JULY == iMonth) return "July";

        if (AUGUST == iMonth) return "August";

        if (SEPTEMBER == iMonth) return "September";

        if (OCTOBER == iMonth) return "October";

        if (NOVEMBER == iMonth) return "November";

        if (DECEMBER == iMonth) return "December";

        throw new java.lang.Exception ("Invalid month number " + iMonth);
    }

    /**
     * Returns the Oracle DB trigram corresponding to the input integer month.
     *
     * @param iMonth Integer representing the month
     *
     * @return String representing the Oracle DB trigram
     *
     * @throws java.lang.Exception thrown if the input month is invalid
     */
    public static java.lang.String getMonthOracleChar (
        final int iMonth)
        throws java.lang.Exception
    {
        if (JANUARY == iMonth) return "JAN";

        if (FEBRUARY == iMonth) return "FEB";

        if (MARCH == iMonth) return "MAR";

        if (APRIL == iMonth) return "APR";

        if (MAY == iMonth) return "MAY";

        if (JUNE == iMonth) return "JUN";

        if (JULY == iMonth) return "JUL";

        if (AUGUST == iMonth) return "AUG";

        if (SEPTEMBER == iMonth) return "SEP";

        if (OCTOBER == iMonth) return "OCT";

        if (NOVEMBER == iMonth) return "NOV";

        if (DECEMBER == iMonth) return "DEC";

        throw new java.lang.Exception ("Invalid month number " + iMonth);
    }

    /**
     * Converts the month trigram/word (case-insensitive) to the corresponding month integer.
     *
     * @param strMonth Month trigram or English word
     *
     * @return Integer representing the month
     *
     * @throws java.lang.Exception Thrown on invalid input month
     */
    public static int MonthFromMonthChars (
        final java.lang.String strMonth)
        throws java.lang.Exception
    {
        if (null == strMonth) throw new java.lang.Exception ("Null month!");

        if (strMonth.equalsIgnoreCase ("JAN") || strMonth.equalsIgnoreCase ("JANUARY"))
            return JANUARY;

        if (strMonth.equalsIgnoreCase ("FEB") || strMonth.equalsIgnoreCase ("FEBRUARY"))
            return FEBRUARY;

        if (strMonth.equalsIgnoreCase ("MAR") || strMonth.equalsIgnoreCase ("MARCH"))
            return MARCH;

        if (strMonth.equalsIgnoreCase ("APR") || strMonth.equalsIgnoreCase ("APRIL"))
            return APRIL;

        if (strMonth.equalsIgnoreCase ("MAY")) return MAY;

        if (strMonth.equalsIgnoreCase ("JUN") || strMonth.equalsIgnoreCase ("JUNE"))
            return JUNE;

        if (strMonth.equalsIgnoreCase ("JUL") || strMonth.equalsIgnoreCase ("JULY"))
            return JULY;

        if (strMonth.equalsIgnoreCase ("AUG") || strMonth.equalsIgnoreCase ("AUGUST"))
            return AUGUST;

        // September also accepts the four-letter "SEPT" abbreviation
        if (strMonth.equalsIgnoreCase ("SEP") || strMonth.equalsIgnoreCase ("SEPTEMBER") ||
            strMonth.equalsIgnoreCase ("SEPT"))
            return SEPTEMBER;

        if (strMonth.equalsIgnoreCase ("OCT") || strMonth.equalsIgnoreCase ("OCTOBER"))
            return OCTOBER;

        if (strMonth.equalsIgnoreCase ("NOV") || strMonth.equalsIgnoreCase ("NOVEMBER"))
            return NOVEMBER;

        if (strMonth.equalsIgnoreCase ("DEC") || strMonth.equalsIgnoreCase ("DECEMBER"))
            return DECEMBER;

        throw new java.lang.Exception ("Invalid month " + strMonth);
    }

    /**
     * Gets the English word for the day-of-week corresponding to the input integer.
     *
     * @param iDay Integer representing the day (MONDAY..SUNDAY, i.e. 0..6)
     *
     * @return String representing the English word for the day
     *
     * @throws java.lang.Exception Thrown if the input day is invalid
     */
    public static java.lang.String getDayChars (
        final int iDay)
        throws java.lang.Exception
    {
        if (MONDAY == iDay) return "Monday";

        if (TUESDAY == iDay) return "Tuesday";

        if (WEDNESDAY == iDay) return "Wednesday";

        if (THURSDAY == iDay) return "Thursday";

        if (FRIDAY == iDay) return "Friday";

        if (SATURDAY == iDay) return "Saturday";

        if (SUNDAY == iDay) return "Sunday";

        throw new java.lang.Exception ("Invalid WeekDay number " + iDay);
    }

    /**
     * Gets the maximum number of days in the given month and year.
     *
     * NOTE(review): February uses the %4-only leap rule — see the class-level note.
     *
     * @param iMonth Integer representing the month
     * @param iYear Integer representing the year
     *
     * @return Integer representing the maximum days
     *
     * @throws java.lang.Exception Thrown if inputs are invalid
     */
    public static int DaysInMonth (
        final int iMonth,
        final int iYear)
        throws java.lang.Exception
    {
        if (JANUARY == iMonth) return 31;

        if (FEBRUARY == iMonth) {
            if (0 == (iYear % 4)) return 29;

            return 28;
        }

        if (MARCH == iMonth) return 31;

        if (APRIL == iMonth) return 30;

        if (MAY == iMonth) return 31;

        if (JUNE == iMonth) return 30;

        if (JULY == iMonth) return 31;

        if (AUGUST == iMonth) return 31;

        if (SEPTEMBER == iMonth) return 30;

        if (OCTOBER == iMonth) return 31;

        if (NOVEMBER == iMonth) return 30;

        if (DECEMBER == iMonth) return 31;

        throw new java.lang.Exception ("Invalid Month: " + iMonth);
    }

    /**
     * Indicates if the given Julian double corresponds to an end-of-month day.
     *
     * @param dblDate Double representing the Julian date
     *
     * @return True indicates EOM is true
     *
     * @throws java.lang.Exception Thrown if input date is invalid (NaN)
     */
    public static final boolean IsEOM (
        final double dblDate)
        throws java.lang.Exception
    {
        if (java.lang.Double.isNaN (dblDate))
            throw new java.lang.Exception ("JulianDate.IsEOM got NaN input!");

        return Day (dblDate) == DaysInMonth (Month (dblDate), Year (dblDate)) ? true : false;
    }

    /**
     * Returns a JulianDate corresponding to today (system clock).
     *
     * @return JulianDate corresponding to today, or null on conversion failure
     */
    public static final JulianDate Today()
    {
        java.util.Date dtNow = new java.util.Date();

        try {
            return CreateFromYMD (org.drip.analytics.support.GenericUtil.GetYear (dtNow),
                org.drip.analytics.support.GenericUtil.GetMonth (dtNow),
                    org.drip.analytics.support.GenericUtil.GetDate (dtNow));
        } catch (java.lang.Exception e) {
            e.printStackTrace();
        }

        return null;
    }

    /**
     * Creates a JulianDate from year, month, and day.
     *
     * @param iYear Integer year
     * @param iMonth Integer month
     * @param iDay Integer day
     *
     * @return JulianDate corresponding to the specified year, month, and day; null on failure
     */
    public static final JulianDate CreateFromYMD (
        final int iYear,
        final int iMonth,
        final int iDay)
    {
        try {
            return new JulianDate (toJulian (iYear, iMonth, iDay));
        } catch (java.lang.Exception e) {
            e.printStackTrace();
        }

        return null;
    }

    /**
     * Creates a JulianDate from a string in the DD-MMM-YYYY format (e.g. "15-JAN-2012"): the
     * input is split on '-' and the middle token parsed via MonthFromMonthChars.
     *
     * @param strDate String containing the date in the DD-MMM-YYYY format
     *
     * @return JulianDate, or null on any parse failure
     */
    public static final JulianDate CreateFromDDMMMYYYY (
        final java.lang.String strDate)
    {
        if (null == strDate || strDate.isEmpty()) return null;

        java.lang.String[] astrParts = strDate.split ("-");

        if (3 != astrParts.length) return null;

        try {
            int iDay = new java.lang.Integer (astrParts[0]);

            int iYear = new java.lang.Integer (astrParts[2]);

            return CreateFromYMD (iYear, MonthFromMonthChars (astrParts[1]), iDay);
        } catch (java.lang.Exception e) {
            if (s_bLog) e.printStackTrace();
        }

        return null;
    }

    /**
     * Creates a JulianDate from a double Julian day number.
     *
     * @param dblJulian Double representing the JulianDate
     *
     * @throws java.lang.Exception Thrown if the input date is invalid (NaN)
     */
    public JulianDate (
        final double dblJulian)
        throws java.lang.Exception
    {
        if (java.lang.Double.isNaN (dblJulian))
            throw new java.lang.Exception ("JulianDate ctr got NaN input!");

        _dblJulian = dblJulian;
    }

    /**
     * Returns the wrapped double Julian day number.
     *
     * @return The double Julian
     */
    public double getJulian()
    {
        return _dblJulian;
    }

    /**
     * Adds the given number of days and returns a new JulianDate.
     *
     * @param iDays Integer representing the number of days to be added (may be negative)
     *
     * @return The new JulianDate, or null on failure
     */
    public JulianDate addDays (
        final int iDays)
    {
        try {
            return new JulianDate (_dblJulian + iDays);
        } catch (java.lang.Exception e) {
            e.printStackTrace();
        }

        return null;
    }

    /**
     * Subtracts the given number of days and returns a new JulianDate.
     *
     * @param iDays Integer representing the number of days to be subtracted
     *
     * @return The new JulianDate, or null on failure
     */
    public JulianDate subtractDays (
        final int iDays)
    {
        try {
            return new JulianDate (_dblJulian - iDays);
        } catch (java.lang.Exception e) {
            e.printStackTrace();
        }

        return null;
    }

    /**
     * Adds the given number of business days (skipping holidays per the calendar set) and
     * returns a new JulianDate.
     *
     * NOTE(review): the (0 != iNumDaysToAdd--) loop does not terminate for negative iDays —
     * callers should use subtractBusDays instead; confirm intended.
     *
     * @param iDays Integer representing the number of business days to be added
     * @param strCalendarSet String representing the calendar set containing the business days
     *
     * @return The new JulianDate, or null on failure
     */
    public JulianDate addBusDays (
        final int iDays,
        final java.lang.String strCalendarSet)
    {
        int iNumDaysToAdd = iDays;
        double dblAdjusted = _dblJulian;

        try {
            while (0 != iNumDaysToAdd--) {
                ++dblAdjusted;

                // roll forward over any holiday run
                while (org.drip.analytics.daycount.Convention.IsHoliday (dblAdjusted,
                    strCalendarSet))
                    ++dblAdjusted;
            }

            return new JulianDate (dblAdjusted);
        } catch (java.lang.Exception e) {
            e.printStackTrace();
        }

        return null;
    }

    /**
     * Subtracts the given number of business days (skipping holidays per the calendar set) and
     * returns a new JulianDate.
     *
     * NOTE(review): same non-termination caveat as addBusDays for negative iDays.
     *
     * @param iDays Integer representing the number of business days to be subtracted
     * @param strCalendarSet String representing the calendar set containing the business days
     *
     * @return The new JulianDate, or null on failure
     */
    public JulianDate subtractBusDays (
        final int iDays,
        final java.lang.String strCalendarSet)
    {
        int iNumDaysToAdd = iDays;
        double dblAdjusted = _dblJulian;

        try {
            while (0 != iNumDaysToAdd--) {
                --dblAdjusted;

                // roll backward over any holiday run
                while (org.drip.analytics.daycount.Convention.IsHoliday (dblAdjusted,
                    strCalendarSet))
                    --dblAdjusted;
            }

            return new JulianDate (dblAdjusted);
        } catch (java.lang.Exception e) {
            e.printStackTrace();
        }

        return null;
    }

    /**
     * Adds the given number of years and returns a new JulianDate (same month/day).
     *
     * @param iNumYears Integer representing the number of years to be added
     *
     * @return The new JulianDate, or null on failure
     */
    public JulianDate addYears (
        final int iNumYears)
    {
        // decode the current date (same algorithm as fromJulian), then rebuild with year shifted
        int iJA = (int) (_dblJulian + HALFSECOND / 86400.);

        if (iJA >= JGREG) {
            int iJAlpha = (int) (((iJA - 1867216) - 0.25) / 36524.25);

            iJA = iJA + 1 + iJAlpha - iJAlpha / 4;
        }

        int iJB = iJA + 1524;
        int iJC = (int) (6680.0 + ((iJB - 2439870) - 122.1) / 365.25);
        int iJD = 365 * iJC + iJC / 4;
        int iJE = (int) ((iJB - iJD) / 30.6001);
        int iDay = iJB - iJD - (int) (30.6001 * iJE);
        int iMonth = iJE - 1;
        int iYear = iJC - 4715;

        if (iMonth > 12) iMonth -= 12;

        if (iMonth > 2) --iYear;

        if (iYear <= 0) --iYear;

        try {
            return CreateFromYMD (iYear + iNumYears, iMonth, iDay);
        } catch (java.lang.Exception e) {
            e.printStackTrace();
        }

        return null;
    }

    /**
     * Adds the given number of months (positive or negative) and returns a new JulianDate; the
     * day-of-month is clamped down to the target month's length.
     *
     * @param iNumMonths Integer representing the number of months to be added
     *
     * @return The new JulianDate, or null on failure
     */
    public JulianDate addMonths (
        final int iNumMonths)
    {
        int iJA = (int) (_dblJulian + HALFSECOND / 86400.);

        if (iJA >= JGREG) {
            int iJAlpha = (int) (((iJA - 1867216) - 0.25) / 36524.25);

            iJA = iJA + 1 + iJAlpha - iJAlpha / 4;
        }

        int iJB = iJA + 1524;
        int iJC = (int) (6680.0 + ((iJB - 2439870) - 122.1) / 365.25);
        int iJD = 365 * iJC + iJC / 4;
        int iJE = (int) ((iJB - iJD) / 30.6001);
        int iDay = iJB - iJD - (int) (30.6001 * iJE);
        int iMonth = iJE - 1;
        int iYear = iJC - 4715;

        if (iMonth > 12) iMonth -= 12;

        if (iMonth > 2) --iYear;

        if (iYear <= 0) --iYear;

        // roll the (month, year) pair forward/backward into the 1..12 range
        if (12 < (iMonth += iNumMonths)) {
            while (12 < iMonth) {
                ++iYear;
                iMonth -= 12;
            }
        } else if (0 >= iMonth) {
            --iYear;
            iMonth += 12;
        }

        try {
            // clamp e.g. Jan 31 + 1M down to Feb 28/29
            while (iDay > DaysInMonth (iMonth, iYear))
                --iDay;

            return CreateFromYMD (iYear, iMonth, iDay);
        } catch (java.lang.Exception e) {
            e.printStackTrace();
        }

        return null;
    }

    /**
     * Generates the first EDSF (Eurodollar synthetic futures) start date from this JulianDate:
     * the 20th of the next month boundary aligned to the roll frequency.
     *
     * NOTE(review): the alignment loop (++iMonth until iMonth % iNumRollMonths == 0) can push
     * iMonth past 12 without adjusting the year — confirm against CreateFromYMD behavior.
     *
     * @param iNumRollMonths Integer representing number of months to roll
     *
     * @return The new JulianDate, or null on failure
     */
    public JulianDate getFirstEDFStartDate (
        final int iNumRollMonths)
    {
        int iJA = (int) (_dblJulian + HALFSECOND / 86400.);

        if (iJA >= JGREG) {
            int iJAlpha = (int) (((iJA - 1867216) - 0.25) / 36524.25);

            iJA = iJA + 1 + iJAlpha - iJAlpha / 4;
        }

        int iJB = iJA + 1524;
        int iJC = (int) (6680. + ((iJB - 2439870) - 122.1) / 365.25);
        int iJD = 365 * iJC + iJC / 4;
        int iJE = (int) ((iJB - iJD) / 30.6001);
        int iDay = iJB - iJD - (int) (30.6001 * iJE);
        int iMonth = iJE - 1;
        int iYear = iJC - 4715;

        if (iMonth > 12) iMonth -= 12;

        if (iMonth > 2) --iYear;

        if (iYear <= 0) --iYear;

        // past the 20th: the current month's contract has started, move to the next month
        if (20 <= iDay) {
            if (12 < ++iMonth) {
                ++iYear;
                iMonth -= 12;
            }
        }

        // align to the roll frequency (e.g. quarterly for iNumRollMonths = 3)
        while (0 != iMonth % iNumRollMonths)
            ++iMonth;

        try {
            return CreateFromYMD (iYear, iMonth, 20);
        } catch (java.lang.Exception e) {
            e.printStackTrace();
        }

        return null;
    }

    /**
     * Adds the tenor (e.g. "3M", "10Y", "2W", "7D") to the JulianDate to create a new date.
     *
     * @param strTenor String representing the tenor to add
     *
     * @return The new JulianDate, or null on a bad tenor
     */
    public JulianDate addTenor (
        final java.lang.String strTenor)
    {
        if (null == strTenor || strTenor.isEmpty()) return null;

        // last character is the unit; the leading characters are the (numeric) count
        char chTenor = strTenor.charAt (strTenor.length() - 1);

        int iTimeUnit = -1;

        try {
            iTimeUnit = (int) new java.lang.Double (strTenor.substring (0, strTenor.length() -
                1)).doubleValue();
        } catch (java.lang.Exception e) {
            System.out.println ("Bad time unit " + iTimeUnit + " in tenor " + strTenor);

            return null;
        }

        if ('d' == chTenor || 'D' == chTenor) return addDays (iTimeUnit);

        if ('w' == chTenor || 'W' == chTenor) return addDays (iTimeUnit * 7);

        if ('m' == chTenor || 'M' == chTenor) return addMonths (iTimeUnit);

        if ('y' == chTenor || 'Y' == chTenor) return addYears (iTimeUnit);

        System.out.println ("Unknown tenor format " + strTenor);

        return null;
    }

    /**
     * Subtracts the tenor (e.g. "3M", "10Y") from the JulianDate to create a new date; mirror
     * of addTenor with the count negated.
     *
     * @param strTenor String representing the tenor to subtract
     *
     * @return The new JulianDate, or null on a bad tenor
     */
    public JulianDate subtractTenor (
        final java.lang.String strTenor)
    {
        if (null == strTenor || strTenor.isEmpty()) return null;

        char chTenor = strTenor.charAt (strTenor.length() - 1);

        int iTimeUnit = -1;

        try {
            iTimeUnit = (int) new java.lang.Double (strTenor.substring (0, strTenor.length() -
                1)).doubleValue();
        } catch (java.lang.Exception e) {
            System.out.println ("Bad time unit " + iTimeUnit + " in tenor " + strTenor);

            return null;
        }

        if ('d' == chTenor || 'D' == chTenor) return addDays (-iTimeUnit);

        if ('w' == chTenor || 'W' == chTenor) return addDays (-iTimeUnit * 7);

        if ('m' == chTenor || 'M' == chTenor) return addMonths (-iTimeUnit);

        if ('y' == chTenor || 'Y' == chTenor) return addYears (-iTimeUnit);

        System.out.println ("Unknown tenor format " + strTenor);

        return null;
    }

    /**
     * Difference in days between this date and the input date (this minus that, truncated).
     *
     * @param dt JulianDate representing the input date
     *
     * @return Integer representing the difference in days
     *
     * @throws java.lang.Exception Thrown if input date is null
     */
    public int daysDiff (
        final JulianDate dt)
        throws java.lang.Exception
    {
        if (null == dt) throw new java.lang.Exception ("JulianDate.daysDiff got NaN date input!");

        return (int) (_dblJulian - dt.getJulian());
    }

    /**
     * Returns a DD-MMM-YYYY Oracle-style trigram representation of the date.
     *
     * @return String representing the trigram representation of date, or null on failure
     */
    public java.lang.String toOracleDate()
    {
        try {
            return Day (_dblJulian) + "-" + getMonthOracleChar (Month (_dblJulian)) + "-" +
                Year (_dblJulian);
        } catch (java.lang.Exception e) {
            e.printStackTrace();
        }

        return null;
    }

    // Equality is at whole-day resolution: both Julian doubles are truncated to int first
    @Override public boolean equals (
        final java.lang.Object o)
    {
        if (!(o instanceof JulianDate)) return false;

        return (int) _dblJulian == (int) ((JulianDate) o)._dblJulian;
    }

    // Hash of the truncated (whole-day) Julian value, consistent with equals above
    @Override public int hashCode()
    {
        long lBits = java.lang.Double.doubleToLongBits ((int) _dblJulian);

        return (int) (lBits ^ (lBits >>> 32));
    }

    @Override public java.lang.String toString()
    {
        return fromJulian (_dblJulian);
    }

    // Whole-day comparison, consistent with equals (fractional days compare equal)
    @Override public int compareTo (
        final JulianDate dtOther)
    {
        if ((int) _dblJulian > (int) (dtOther._dblJulian)) return 1;

        if ((int) _dblJulian < (int) (dtOther._dblJulian)) return -1;

        return 0;
    }

    // Smoke test: prints today's date
    public static final void main (
        final java.lang.String[] astrArgs)
    {
        System.out.println (JulianDate.Today());
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.cloudera.llama.am.impl;

import com.cloudera.llama.am.api.LlamaAM;
import com.cloudera.llama.am.api.LlamaAMEvent;
import com.cloudera.llama.util.Clock;
import com.cloudera.llama.util.ErrorCode;
import com.cloudera.llama.util.LlamaException;
import com.cloudera.llama.util.ManualClock;
import com.cloudera.llama.am.api.LlamaAMListener;
import com.cloudera.llama.am.api.PlacedReservation;
import com.cloudera.llama.am.api.PlacedResource;
import com.cloudera.llama.am.spi.RMResource;
import com.cloudera.llama.am.api.TestUtils;
import com.cloudera.llama.am.spi.RMConnector;
import com.cloudera.llama.am.spi.RMEvent;
import com.cloudera.llama.util.UUID;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;

import java.util.Arrays;
import java.util.Collection;
import java.util.List;

/**
 * Tests for the multi-queue LlamaAM: delegation of calls to per-queue connectors, failure
 * injection via configuration flags, listener fan-out, and queue expiry/cleanup.
 */
public class TestMultiQueueLlamaAM {

  // The exact call sequence a full reserve/release round-trip should record on the connector
  private static List<String> EXPECTED = Arrays.asList("setConf",
      "setRMListener", "start", "register", "reserve", "release",
      "unregister", "stop");

  // Captures the connector instance the framework constructs so tests can inspect it
  private static MyRMConnector rmConnector;

  /**
   * Recording connector whose start/register/release can be made to fail through the
   * "fail.start" / "fail.register" / "fail.release" configuration booleans.
   */
  public static class MyRMConnector extends RecordingMockRMConnector
      implements Configurable {
    private Configuration conf;

    public MyRMConnector() {
      // expose the framework-created instance to the test methods
      rmConnector = this;
    }

    @Override
    public void setConf(Configuration conf) {
      invoked.add("setConf");
      this.conf = conf;
    }

    @Override
    public Configuration getConf() {
      return null;
    }

    @Override
    public void start() throws LlamaException {
      super.start();
      if (conf.getBoolean("fail.start", false)) {
        throw new LlamaException(ErrorCode.TEST, "fail.start","TRUE");
      }
    }

    @Override
    public void register(String queue) throws LlamaException {
      super.register(queue);
      if (conf.getBoolean("fail.register", false)) {
        throw new LlamaException(ErrorCode.TEST, "fail.register", "TRUE");
      }
    }

    @Override
    public void release(Collection<RMResource> resources, boolean doNotCache)
        throws LlamaException {
      super.release(resources, doNotCache);
      if (conf.getBoolean("fail.release", false)) {
        throw new LlamaException(ErrorCode.TEST, "fail.release", "TRUE");
      }
    }
  }

  @After
  public void tearDown() {
    // restore the real clock for tests that install a ManualClock
    Clock.setClock(Clock.SYSTEM);
  }

  @Test
  public void testMultiQueueDelegation() throws Exception {
    Configuration conf = new Configuration(false);
    conf.setClass(LlamaAM.RM_CONNECTOR_CLASS_KEY, MyRMConnector.class,
        RMConnector.class);
    multiQueueTestImpl(conf);
  }

  @Test
  public void testMultiQueueDelegationWithoutThrottling() throws Exception {
    Configuration conf = new Configuration(false);
    conf.setClass(LlamaAM.RM_CONNECTOR_CLASS_KEY, MyRMConnector.class,
        RMConnector.class);
    conf.setBoolean(LlamaAM.THROTTLING_ENABLED_KEY, false);
    multiQueueTestImpl(conf);
  }

  // Shared body: runs a full reserve/release round-trip and asserts the connector saw the
  // EXPECTED call sequence.
  private void multiQueueTestImpl(Configuration conf) throws Exception {
    LlamaAM am = LlamaAM.create(conf);
    try {
      am.start();
      LlamaAMListener listener = new LlamaAMListener() {
        @Override
        public void onEvent(LlamaAMEvent event) {
        }
      };
      UUID handle = UUID.randomUUID();
      UUID id = am.reserve(TestUtils.createReservation(handle, "q", 1, true));
      am.getNodes();
      am.addListener(listener);
      am.removeListener(listener);
      am.getReservation(id);
      am.releaseReservation(handle, id, false);
      am.releaseReservationsForHandle(UUID.randomUUID(), false);
      am.stop();
      Assert.assertEquals(EXPECTED, rmConnector.invoked);
    } finally {
      am.stop();
    }
  }

  @Test(expected = LlamaException.class)
  public void testReleaseReservationForClientException() throws Exception {
    Configuration conf = new Configuration(false);
    conf.setClass(LlamaAM.RM_CONNECTOR_CLASS_KEY, MyRMConnector.class,
        RMConnector.class);
    conf.setBoolean("fail.release", true);
    LlamaAM am = LlamaAM.create(conf);
    try {
      am.start();
      UUID cId = UUID.randomUUID();
      am.reserve(TestUtils.createReservation(cId, "q", 1, true));
      am.releaseReservationsForHandle(cId, false);
    } finally {
      am.stop();
    }
  }

  @Test(expected = LlamaException.class)
  public void testReleaseReservationForClientDiffQueuesException()
      throws Exception {
    Configuration conf = new Configuration(false);
    conf.setClass(LlamaAM.RM_CONNECTOR_CLASS_KEY, MyRMConnector.class,
        RMConnector.class);
    conf.setBoolean("fail.release", true);
    LlamaAM am = LlamaAM.create(conf);
    try {
      am.start();
      UUID cId = UUID.randomUUID();
      am.reserve(TestUtils.createReservation(cId, "q1", 1, true));
      am.reserve(TestUtils.createReservation(cId, "q2", 1, true));
      am.releaseReservationsForHandle(cId, false);
    } finally {
      am.stop();
    }
  }

  @Test(expected = LlamaException.class)
  public void testStartOfDelegatedLlamaAmFail() throws Exception {
    Configuration conf = new Configuration(false);
    conf.setClass(LlamaAM.RM_CONNECTOR_CLASS_KEY, MyRMConnector.class,
        RMConnector.class);
    conf.setBoolean("fail.start", true);
    conf.set(LlamaAM.CORE_QUEUES_KEY, "q");
    LlamaAM am = LlamaAM.create(conf);
    am.start();
  }

  @Test(expected = LlamaException.class)
  public void testRegisterOfDelegatedLlamaAmFail() throws Exception {
    Configuration conf = new Configuration(false);
    conf.setClass(LlamaAM.RM_CONNECTOR_CLASS_KEY, MyRMConnector.class,
        RMConnector.class);
    conf.setBoolean("fail.register", true);
    conf.set(LlamaAM.CORE_QUEUES_KEY, "q");
    LlamaAM am = LlamaAM.create(conf);
    am.start();
  }

  @Test
  public void testGetReservationUnknown() throws Exception {
    Configuration conf = new Configuration(false);
    conf.setClass(LlamaAM.RM_CONNECTOR_CLASS_KEY, MyRMConnector.class,
        RMConnector.class);
    LlamaAM am = LlamaAM.create(conf);
    am.start();
    Assert.assertNull(am.getReservation(UUID.randomUUID()));
  }

  @Test
  public void testReleaseReservationUnknown() throws Exception {
    Configuration conf = new Configuration(false);
    conf.setClass(LlamaAM.RM_CONNECTOR_CLASS_KEY, MyRMConnector.class,
        RMConnector.class);
    LlamaAM am = LlamaAM.create(conf);
    am.start();
    am.releaseReservation(UUID.randomUUID(), UUID.randomUUID(), false);
  }

  // Set by the anonymous listener in testMultiQueueListener when an event is delivered
  private boolean listenerCalled;

  @SuppressWarnings("unchecked")
  @Test
  public void testMultiQueueListener() throws Exception {
    Configuration conf = new Configuration(false);
    conf.setClass(LlamaAM.RM_CONNECTOR_CLASS_KEY, MyRMConnector.class,
        RMConnector.class);
    LlamaAM am = LlamaAM.create(conf);
    try {
      am.start();
      LlamaAMListener listener = new LlamaAMListener() {
        @Override
        public void onEvent(LlamaAMEvent event) {
          listenerCalled = true;
        }
      };
      UUID handle = UUID.randomUUID();
      PlacedReservation rr = am.getReservation(
          am.reserve(TestUtils.createReservation(handle, "q", 1, true)));
      UUID id = rr.getReservationId();
      am.getNodes();
      am.addListener(listener);
      am.getReservation(id);
      Assert.assertFalse(listenerCalled);
      // fire a REJECTED status change through the connector callback and verify fan-out
      List<RMResource> resources = (List<RMResource>) rmConnector.args.get(3);
      rmConnector.callback.onEvent(Arrays.asList(RMEvent
          .createStatusChangeEvent(resources.get(0).getResourceId(),
              PlacedResource.Status.REJECTED)));
      Assert.assertTrue(listenerCalled);
      am.releaseReservation(handle, id, false);
      am.releaseReservationsForHandle(UUID.randomUUID(), false);
      am.removeListener(listener);
      listenerCalled = false;
      Assert.assertFalse(listenerCalled);
      am.stop();
    } finally {
      am.stop();
    }
  }

  @Test
  public void testQueueExpiry() throws Exception {
    ManualClock clock = new ManualClock();
    Clock.setClock(clock);
    Configuration conf = new Configuration(false);
    conf.setClass(LlamaAM.RM_CONNECTOR_CLASS_KEY, MyRMConnector.class,
        RMConnector.class);
    conf.set(LlamaAM.CORE_QUEUES_KEY, "root.corequeue");
    MultiQueueLlamaAM am = new MultiQueueLlamaAM(conf);
    am.amCheckExpiryIntervalMs = 20;
    am.start();
    // Core queue should exist
    Assert.assertEquals(1, am.ams.keySet().size());
    UUID handle = UUID.randomUUID();
    UUID resId = am.reserve(TestUtils.createReservation(handle,
        "root.someotherqueue", 1, true));
    Assert.assertEquals(2, am.ams.keySet().size());
    am.releaseReservation(handle, resId, true);
    clock.increment(LlamaAM.QUEUE_AM_EXPIRE_DEFAULT * 2);
    Thread.sleep(300); // am expiry check should run in this time
    // Other queue should get cleaned up
    Assert.assertEquals(1, am.ams.keySet().size());
    handle = UUID.randomUUID();
    resId = am.reserve(TestUtils.createReservation(handle,
        "root.corequeue", 1, true));
    am.releaseReservation(handle, resId, true);
    clock.increment(LlamaAM.QUEUE_AM_EXPIRE_DEFAULT * 2);
    Thread.sleep(300); // am expiry check should run in this time
    // Core queue should still exist
    Assert.assertEquals(1, am.ams.keySet().size());
    Assert.assertFalse(am.ams.containsKey("root.someotherqueue"));
    handle = UUID.randomUUID();
    am.reserve(TestUtils.createReservation(handle,
        "root.someotherqueue", 1, true));
    Assert.assertTrue(am.ams.containsKey("root.someotherqueue"));
  }

  @Test
  public void testReleaseReservationsForQueue() throws Exception {
    Configuration conf = new Configuration(false);
    conf.setClass(LlamaAM.RM_CONNECTOR_CLASS_KEY, MyRMConnector.class,
        RMConnector.class);
    MultiQueueLlamaAM am = new MultiQueueLlamaAM(conf);
    am.start();
    UUID handle = UUID.randomUUID();
    am.reserve(TestUtils.createReservation(handle, "root.q1", 1, true));
    Assert.assertTrue(am.ams.containsKey("root.q1"));
    // Release the queue without caching.
    am.releaseReservationsForQueue("root.q1", true);
    Assert.assertFalse(am.ams.containsKey("root.q1"));
    // Try to release it again, the queue does not exist. without cache.
    am.releaseReservationsForQueue("root.q1", true);
    // Try to release it again, the queue does not exist. with cache.
am.releaseReservationsForQueue("root.q1", false); Assert.assertFalse(am.ams.containsKey("root.q1")); // Now create another reservation and try to release the queue by using cache, am.reserve(TestUtils.createReservation(handle, "root.q2", 1, true)); Assert.assertTrue(am.ams.containsKey("root.q2")); // Release the queue without caching. am.releaseReservationsForQueue("root.q2", false); Assert.assertTrue(am.ams.containsKey("root.q2")); // Try to release it again, the queue does not exist. with cache. am.releaseReservationsForQueue("root.q2", false); // Try to release it again, the queue does not exist. without cache. am.releaseReservationsForQueue("root.q2", true); Assert.assertFalse(am.ams.containsKey("root.q2")); } }
/******************************************************************************* * Copyright 2016 NovaTec Consulting GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *******************************************************************************/ package info.novatec.ita.check; import javax.enterprise.context.ApplicationScoped; import javax.interceptor.Interceptors; import org.junit.Test; import com.puppycrawl.tools.checkstyle.DefaultConfiguration; import info.novatec.ita.check.testclasses.app1.main.bl.bs.SampleBsBean; import info.novatec.ita.check.testclasses.app1.main.bl.bs.SampleTest; import info.novatec.ita.check.testclasses.app1.main.bl.bs.StreamBsBean; import info.novatec.ita.check.testclasses.app1.main.bl.is.Sample2Interfaces2Is; import info.novatec.ita.check.testclasses.app1.main.bl.is.Sample2InterfacesIs; import info.novatec.ita.check.testclasses.app1.main.bl.is.SampleIs; import info.novatec.ita.check.testclasses.app1.main.bl.is.SampleIsWithPackageNames; import info.novatec.ita.check.testclasses.app1.main.bl.is.SampleNoStereotype; import info.novatec.ita.check.testclasses.app1.main.bl.is.SampleWithInnerClassIs; import info.novatec.ita.check.testclasses.app1.main.bl.is.SampleWithPackageNamesIs; import info.novatec.ita.check.testclasses.app1.main.bl.is.SampleWithoutAnnotationIs; import info.novatec.ita.check.testclasses.app1.main.bl.is.SampleWithoutInterfaceIs; import info.novatec.ita.check.testclasses.app1.main.bl.is.SampleWithoutPostfix; import 
info.novatec.ita.check.testclasses.app1.main.bl.is.tf.BaseTf; import info.novatec.ita.check.testclasses.app1.main.bl.is.tf.SampleApplicationScoped; import info.novatec.ita.check.testclasses.app1.main.bl.is.tf.SampleTf; import info.novatec.ita.check.testclasses.app1.main.bl.is.tf.SampleTfExtendsSampleNoTf; import info.novatec.ita.check.testclasses.app1.main.bl.is.tf.SampleTfExtendsSampleTf; import info.novatec.ita.check.testclasses.app1.main.bl.wrong.SampleWrongPackageIs; import info.novatec.ita.check.testclasses.app1.main.data.dto.SampleDto; import info.novatec.ita.check.testclasses.app1.main.data.dto.SampleDtoExtendsSampleDto; import info.novatec.ita.check.testclasses.app1.main.data.dto.SampleDtoExtendsSampleNoDto; import info.novatec.ita.check.testclasses.app1.main.data.dto.SampleNoBaseClassDto; import info.novatec.ita.check.testclasses.app1.main.data.entity.AdditionalOperation; import info.novatec.ita.check.testclasses.app1.main.data.entity.CustomerConcern; import info.novatec.ita.check.testclasses.app1.main.data.entity.SampleEntity; import info.novatec.ita.check.testclasses.app1.main.data.entity.SampleEntityWithoutBaseclass; import info.novatec.ita.check.testclasses.app1.main.data.entity.common.BaseEntity; import info.novatec.ita.check.testclasses.app1.main.init.SampleIf; import info.novatec.ita.check.testclasses.app1.main.init.SampleNoStartupIf; import info.novatec.ita.check.testclasses.app1.main.ul.App1TypedViewStereotype; import info.novatec.ita.check.testclasses.app1.main.ul.wt.test.SampleApp1View; import info.novatec.ita.check.testclasses.app1.main.ul.wt.test.SampleCoreView; import info.novatec.ita.check.testclasses.app1.shared.bl.bs.SampleBs; import info.novatec.ita.check.testclasses.app1.shared.bl.bs.SampleNoInterfaceBs; import info.novatec.ita.check.testclasses.core.fwk.common.bl.is.IntegrationService; import info.novatec.ita.check.testclasses.core.fwk.common.bl.is.IntegrationServiceStereotype; import 
info.novatec.ita.check.testclasses.core.fwk.common.ul.ViewStereotype; import info.novatec.ita.check.testclasses.core.fwk.common.util.DateUtil; import info.novatec.ita.check.testclasses.core.fwk.common.util.Util; import info.novatec.ita.check.testclasses.core.fwk.main.ts.VersionTsBean; /** * Tests to validate stereotypes. * */ public class StereotypeCheckTest extends AbstractStereotypeCheckTest { /** * The {@link SampleIs} is a valid integration service. * * * @throws Exception * in case of an unexpected test execution */ @Test public void testStreamBsBean() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String[] expected = {}; verify(main, getPath(StreamBsBean.class), expected); } /** * The {@link SampleIs} is a valid integration service. * * * @throws Exception * in case of an unexpected test execution */ @Test public void testOK() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String[] expected = {}; verify(main, getPath(SampleIs.class), expected); } /** * The {@link SampleEntity} is a valid entity. * * @throws Exception * in case of an unexpected test execution */ @Test public void testOKWithBaseclass() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String[] expected = {}; verify(main, getPath(SampleEntity.class), expected); } /** * The {@link SampleNoBaseClassDto} has no baseclass for entiies * * @throws Exception * in case of an unexpected test execution */ @Test public void testFailWithoutBaseclass() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String expected = "Stereotype dto: does not extend info.novatec.ita.check.testclasses.core.fwk.api.data.dto.Pojo"; verify(main, getPath(SampleNoBaseClassDto.class), expected); } /** * The {@link SampleEntity} is a valid dto. 
* * @throws Exception * in case of an unexpected test execution */ @Test public void testOKWithoutAnnotation() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String[] expected = {}; verify(main, getPath(SampleDto.class), expected); } /** * The {@link SampleWithPackageNamesIs} is a not a valid integration * service. The integration service interface is specified by full * classname. * * @throws Exception * in case of an unexpected test execution */ @Test public void testOKWithPackageNames() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String[] expected = {}; verify(main, getPath(SampleWithPackageNamesIs.class), expected); } /** * The {@link SampleIsWithPackageNames} is a not a valid integration * service. The integration service interface is specified by full * classname. * * @throws Exception * in case of an unexpected test execution */ @Test public void testMissingWithPackageNames() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String expected = "Stereotype integrationservice: missing postfix Is"; verify(main, getPath(SampleIsWithPackageNames.class), expected); } /** * The class {@link SampleNoStereotype} is in the same package as * integrationservices but is not an integration service which is allowed * * @throws Exception * in case of an unexpected test execution */ @Test public void testNoStereotype() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String[] expected = {}; verify(main, getPath(SampleNoStereotype.class), expected); } /** * The InnerClass of {@link SampleWithInnerClassIs} should not throw an * error. 
* * @throws Exception * in case of an unexpected test execution */ @Test public void testWithInnerClass() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String[] expected = {}; verify(main, getPath(SampleWithInnerClassIs.class), expected); } /** * The {@link SampleWithoutAnnotationIs} is not annotated by * {@link IntegrationServiceStereotype}. * * @throws Exception * in case of an unexpected test execution */ @Test public void testWithoutAnnotation() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String expected = "Stereotype integrationservice: missing annotation info.novatec.ita.check.testclasses.core.fwk.common.bl.is.IntegrationServiceStereotype"; verify(main, getPath(SampleWithoutAnnotationIs.class), expected); } /** * The class {@link SampleWithoutInterfaceIs} does not implement * {@link IntegrationService}. * * @throws Exception * in case of an unexpected test execution */ @Test public void testWithoutInterface() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String expected = "Stereotype integrationservice: does not implement info.novatec.ita.check.testclasses.core.fwk.common.bl.is.IntegrationService"; verify(main, getPath(SampleWithoutInterfaceIs.class), expected); } /** * The classes {@link Sample2InterfacesIs} and {@link Sample2Interfaces2Is} * can implement more interface than the interface * {@link IntegrationService}. * * @throws Exception * in case of an unexpected test execution */ @Test public void testWith2Interfaces() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String[] expected = {}; verify(main, getPath(Sample2InterfacesIs.class), expected); verify(main, getPath(Sample2Interfaces2Is.class), expected); } /** * The {@link SampleWithoutPostfix} has no Postfix Is. 
* * @throws Exception * in case of an unexpected test execution */ @Test public void testWithoutPostfix() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String expected = "Stereotype integrationservice: missing postfix Is"; verify(main, getPath(SampleWithoutPostfix.class), expected); } /** * The {@link SampleWrongPackageIs} is not in package *.main.bl.is.* * * @throws Exception * in case of an unexpected test execution */ @Test public void testWrongPackage() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String expected = "Stereotype integrationservice: is not in package ^([a-z]+[a-z0-9\\.]*)\\.main(\\.[a-z][a-z0-9]*)*\\.bl\\.is(\\.[a-z][a-z0-9]*)*$"; verify(main, getPath(SampleWrongPackageIs.class), expected); } /** * The{@link SampleTf} is a valid transformer. * * @throws Exception * in case of an unexpected test execution */ @Test public void testOKTf() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String[] expected = {}; verify(main, getPath(SampleTf.class), expected); } /** * The {@link SampleApplicationScoped} is a not a transformer but uses the * annotation {@link ApplicationScoped}. * * @throws Exception * in case of an unexpected test execution */ @Test public void testOKApplicationScoped() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String[] expected = {}; verify(main, getPath(SampleApplicationScoped.class), expected); } /** * The annotation {@link IntegrationServiceStereotype} is ignored. * * @throws Exception * in case of an unexpected test execution */ @Test public void testAnnotationClass() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String[] expected = {}; verify(main, getPath(IntegrationServiceStereotype.class), expected); } /** * The view stereotype can be annotated with either * {@link App1TypedViewStereotype} or {@link ViewStereotype}. 
* * @throws Exception * in case of an unexpected test execution */ @Test public void testTwoPossibleAnnotations() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String[] expected = {}; verify(main, getPath(SampleApp1View.class), expected); verify(main, getPath(SampleCoreView.class), expected); } /** * The init function stereotype must be annotated with three annotations. * * @throws Exception * in case of an unexpected test execution */ @Test public void testTwoSeparatAnnotations() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String[] expected = {}; verify(main, getPath(SampleIf.class), expected); } /** * The init function stereotype must be annotated with three annotations bu * one is missing. * * @throws Exception * in case of an unexpected test execution */ @Test public void testTwoSeparatAnnotationsNoStartup() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String expected = "Stereotype initfunction: missing annotation javax.ejb.LocalBean or javax.ejb.Singleton or javax.ejb.Startup"; verify(main, getPath(SampleNoStartupIf.class), expected); } /** * The technical service bean must be annotated with {@link Interceptors} * and one of three possible others. * * @throws Exception * in case of an unexpected test execution */ @Test public void testAnnotationsAndOr() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String[] expected = {}; verify(main, getPath(VersionTsBean.class), expected); } /** * Check stereotype for interfaces. * * @throws Exception * in case of an unexpected test execution */ @Test public void testInterfaceOk() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String[] expected = {}; verify(main, getPath(SampleBs.class), expected); } /** * Check stereotype for interfaces, but has no base interface. 
* * @throws Exception * in case of an unexpected test execution */ @Test public void testInterfaceNoInterface() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String expected = "Stereotype businessservice: does not extend info.novatec.ita.check.testclasses.core.fwk.api.bl.bs.BusinessService"; verify(main, getPath(SampleNoInterfaceBs.class), expected); } /** * Because checkstyle only analyses source code, an indirect extension from * a base class cannot be validated. All classes that can be extended have * to be in the configuraton if the stereotype has no suffix. * * @throws Exception * in case of an unexpected test execution */ @Test public void testThreePossibleBaseclasses() throws Exception { DefaultConfiguration main = createDefaultConfig(); String expectedFail = "Stereotype entity: does not extend info.novatec.ita.check.testclasses.app1.main.data.entity.CustomerConcern or info.novatec.ita.check.testclasses.app1.main.data.entity.common.BaseEntity or info.novatec.ita.check.testclasses.core.fwk.api.data.dto.Pojo"; verify(main, getPath(SampleEntityWithoutBaseclass.class), expectedFail); String[] expected = new String[] {}; verify(main, getPath(BaseEntity.class), expected); verify(main, getPath(CustomerConcern.class), expected); verify(main, getPath(AdditionalOperation.class), expected); } /** * If the stereotype has a postfix and a class A extends from another class * B of the same stereotype, A must not have the implements/extends * statement in its own file, because for B this check is already done. * * @throws Exception * in case of an unexpected test execution */ @Test public void testBaseClassIsStereotypeViaBaseClass() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String[] expected = {}; verify(main, getPath(SampleDtoExtendsSampleDto.class), expected); } /** * The baseclass of dto {@link SampleDtoExtendsSampleNoDto} is no dto. 
* * @throws Exception * in case of an unexpected test execution */ @Test public void testBaseClassIsNotStereotypeViaBaseClass() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String expected = "Stereotype dto: does not extend info.novatec.ita.check.testclasses.core.fwk.api.data.dto.Pojo"; verify(main, getPath(SampleDtoExtendsSampleNoDto.class), expected); } /** * The base interface of {@link SampleTfExtendsSampleTf} is a transformer. * * @throws Exception * in case of an unexpected test execution */ @Test public void testBaseClassIsStereotypeViaInterface() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String[] expected = {}; verify(main, getPath(SampleTfExtendsSampleTf.class), expected); } /** * The base interface of {@link SampleTfExtendsSampleNoTf} is not a * transformer. * * @throws Exception * in case of an unexpected test execution */ /** * The base interface of {@link SampleTfExtendsSampleNoTf} is not a * transformer. * * @throws Exception * in case of an unexpected test execution */ public void testBaseClassIsNotStereotypeViaInterface() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String expected = "Stereotype transformer: does not implement org.apache.commons.collections4.Transformer"; verify(main, getPath(SampleTfExtendsSampleNoTf.class), expected); } /** * If the class is abstract but fulfills some condition of a stereotype, no * errors occur. * * @throws Exception * in case of an unexpected test execution */ @Test public void testAbstractClass() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String[] expected = {}; verify(main, getPath(BaseTf.class), expected); } /** * Ignore all classes that end to Test. 
* * @throws Exception * in case of an unexpected test execution */ @Test public void testTestClassExcluded() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String[] expected = {}; verify(main, getPath(SampleTest.class), expected); } /** * Don't ignore classes that end to Test. * * @throws Exception * in case of an unexpected test execution */ @Test public void testTestClassNotExcluded() throws Exception { DefaultConfiguration main = createConfig("src/test/resources/stereotypeWithoutExcludedClass.xml"); final String[] expected = {"Stereotype businessservicebean: missing annotation javax.ejb.Stateless or javax.interceptor.Interceptors", "Stereotype businessservicebean: missing postfix BsBean"}; verify(main, getPath(SampleTest.class), expected); } /** * The {@link SampleBsBean} is a valid business service bean. * * @throws Exception * in case of an unexpected test execution */ @Test public void testBsBean() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String[] expected = {}; verify(main, getPath(SampleBsBean.class), expected); } /** * The interface {@link Util} is in the same package as {@link DateUtil}, * which is an util stereotype. * * @throws Exception * in case of an unexpected test execution */ @Test public void testInterfaceInSamePackage() throws Exception { DefaultConfiguration main = createDefaultConfig(); final String[] expected = {}; verify(main, getPath(DateUtil.class), expected); } }
/* * Copyright 2015 John Persano * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.org.crimetalk.adapters; import android.content.Context; import android.graphics.Typeface; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ArrayAdapter; import android.widget.TextView; import uk.org.crimetalk.R; import uk.org.crimetalk.utils.PreferenceUtils; /** * {@link android.widget.ArrayAdapter} that will be used to display primary items in the * {@link uk.org.crimetalk.fragments.NavigationDrawerFragment}. */ public class NavigationDrawerPrimaryAdapter extends ArrayAdapter<String> { private final Context mContext; private final LayoutInflater mLayoutInflater; private final String[] mStrings; private int mSelection; /** * ViewHolder pattern as described * <a href="http://developer.android.com/training/improving-layouts/smooth-scrolling.html">here</a>. */ private static class ViewHolder { TextView textView; } /** * Public constructor for the {@link uk.org.crimetalk.adapters.NavigationDrawerPrimaryAdapter}. * The resource parameter is required but it will be ignored. 
* * @param context A valid {@link android.content.Context} * @param resource Can be any int as it will be ignored * @param strings The items to ce shown in the adapter */ @SuppressWarnings("SameParameterValue") public NavigationDrawerPrimaryAdapter(Context context, int resource, String[] strings) { super(context, resource, strings); this.mContext = context; this.mLayoutInflater = LayoutInflater.from(context); this.mStrings = strings; } @Override public View getView(int position, View view, ViewGroup parent) { ViewHolder viewHolder; if (view != null) { viewHolder = (ViewHolder) view.getTag(); } else { view = mLayoutInflater.inflate(R.layout.row_primary_navigation, parent, false); viewHolder = new ViewHolder(); viewHolder.textView = (TextView) view.findViewById(R.id.text); view.setTag(viewHolder); } viewHolder.textView.setText(mStrings[position]); viewHolder.textView.setCompoundDrawablesWithIntrinsicBounds(getIcon(position), 0, 0, 0); // If current item is not selected, use default text and icon color values if (position != mSelection) { viewHolder.textView.setTypeface(Typeface.create(viewHolder.textView.getTypeface(), Typeface.NORMAL)); // If user has selected the dark theme, set text color as white if (PreferenceUtils.getDarkTheme(getContext())) { viewHolder.textView.setTextColor(mContext.getResources().getColor(android.R.color.white)); } else { viewHolder.textView.setTextColor(mContext.getResources().getColor(android.R.color.black)); } } else { // Current item is selected so bold the text, make it red, and change the icon to red viewHolder.textView.setTypeface(viewHolder.textView.getTypeface(), Typeface.BOLD); viewHolder.textView.setTextColor(mContext.getResources().getColor(R.color.crimetalk_red)); viewHolder.textView.setCompoundDrawablesWithIntrinsicBounds(getSelectedIcon(position), 0, 0, 0); } return view; } /** * Private method. * Returns a black or white icon depending on the theme preference. 
* Throws a {@link java.lang.IllegalArgumentException} if the position * is not within the required index. * * @param position The current {@link uk.org.crimetalk.adapters.NavigationDrawerPrimaryAdapter#getView(int, * android.view.View, android.view.ViewGroup)} position * @return The icon resource of the current item */ private int getIcon(int position) { switch (position) { case 0: if (PreferenceUtils.getDarkTheme(getContext())) { return R.drawable.ic_library_dark; } return R.drawable.ic_library_light; case 1: if (PreferenceUtils.getDarkTheme(getContext())) { return R.drawable.ic_world_dark; } return R.drawable.ic_world_light; case 2: if (PreferenceUtils.getDarkTheme(getContext())) { return R.drawable.ic_shop_dark; } return R.drawable.ic_shop_light; default: throw new IllegalArgumentException("No icon can be produced for the selection."); } } /** * Private method. * Returns a red icon for the selected position. * Throws a {@link java.lang.IllegalArgumentException} if the position * is not within the required index. * * @param position The current {@link uk.org.crimetalk.adapters.NavigationDrawerPrimaryAdapter#getView(int, * android.view.View, android.view.ViewGroup)} position * @return The icon resource of the selected item */ private int getSelectedIcon(int position) { switch (position) { case 0: return R.drawable.ic_library_selected; case 1: return R.drawable.ic_world_selected; case 2: return R.drawable.ic_shop_selected; default: throw new IllegalArgumentException("No icon can be produced for the selection."); } } /** * Sets the currently selected item of the {@link uk.org.crimetalk.adapters.NavigationDrawerPrimaryAdapter}. * * @param selection The current selection */ public void setSelection(int selection) { this.mSelection = selection; notifyDataSetChanged(); } }
/* * Copyright (c) 2009 - 2019 by Oli B. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * (c)reated 04.10.2009 by Oli B. (ob@aosd.de) */ package gdv.xport.satz; import gdv.xport.Datenpaket; import gdv.xport.feld.Bezeichner; import gdv.xport.feld.Datum; import gdv.xport.feld.Feld; import gdv.xport.feld.Version; import gdv.xport.util.SatzRegistry; import gdv.xport.util.SatzTyp; import org.junit.Test; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.StringWriter; import java.util.Map; import static org.junit.Assert.*; /** * @author oliver * @since 04.10.2009 * @version $Revision$ * */ public final class VorsatzTest extends AbstractSatzTest { private final Vorsatz vorsatz = new Vorsatz(); /** * Hier erzeugen wir einen Satz zum Testen. * * @return Satz zum Testen * @see gdv.xport.satz.AbstractSatzTest#getSatz() */ @Override protected Satz getSatz() { return new Vorsatz(); } /** * Test method for {@link gdv.xport.satz.Vorsatz#Vorsatz()}. * * @throws IOException falls der Export schief geht */ @Test public void testVorsatz() throws IOException { vorsatz.setVuNummer("08/15"); String expected = "000108/15"; checkExport(1, 9, expected); checkExport(257, 265, expected); checkExport(256+246, 256+256, " 2"); checkExport(768, 768, "3"); } /** * Wird das Absender-Feld richtig gesetzt? Schau 'mer mal. 
* * @throws IOException falls der Export schief geht */ @Test public void testSetAbsender() throws IOException { String absender = "agentes AG "; vorsatz.setAbsender(absender.trim()); Feld absenderFeld = vorsatz.getFeld(Bezeichner.ABSENDER); assertEquals(absenderFeld.getInhalt().trim(), vorsatz.getAbsender()); checkExport(10, 39, absender); } /** * Hier wird das Start- und End-Datum ueberprueft. * * @throws IOException falls der Export schief geht */ @Test public void testSetErstellungsZeitraum() throws IOException { String startDatum = "01011900"; String endDatum = "09102009"; vorsatz.setErstellungsZeitraum(startDatum, endDatum); checkExport(70, 85, startDatum + endDatum); assertEquals(startDatum + endDatum, vorsatz.getErstellungsZeitraum()); assertEquals(startDatum, vorsatz.getFeld(Bezeichner.ERSTELLUNGSDAT_ZEITRAUM_VOM).getInhalt()); assertEquals(endDatum, vorsatz.getFeld(Bezeichner.ERSTELLUNGSDAT_ZEITRAUM_BIS).getInhalt()); } @Test public void testGetFeldErstellungsdat() { Datum von = new Datum(Bezeichner.ERSTELLUNGSDAT_ZEITRAUM_VOM, 8, 70); Datum bis = new Datum(Bezeichner.ERSTELLUNGSDAT_ZEITRAUM_BIS, 8, 78); von.setInhalt("20201220"); bis.setInhalt("20201222"); vorsatz.setErstellungsZeitraum(von, bis); assertEquals(von, vorsatz.getFeld(Bezeichner.ERSTELLUNGSDAT_ZEITRAUM_VOM)); assertEquals(bis, vorsatz.getFeld(Bezeichner.ERSTELLUNGSDAT_ZEITRAUM_BIS)); } /** * Hier ueberpruefen wir den Export. * Damit ein Datensatz auch 256 Bytes lang ist, setzen wir das * EOD-Zeichen auf nichts (""). * * @param startByte beginnend bei 1 * @param endByte beginnend bei 1 * @param expected erwartetes Ergebnis * @throws IOException falls was schief geht */ private void checkExport(final int startByte, final int endByte, final String expected) throws IOException { super.checkExport(this.vorsatz, startByte, endByte, expected, 768); } /** * Hier testen wir den Import. 
*/ @Test public void testImport() { String content = vorsatz.toLongString(); Vorsatz imported = new Vorsatz(content); assertEquals(content, imported.toLongString()); assertEquals(vorsatz, imported); } @Test public void testImportVersion() throws IOException { File musterdatei = new File("src/test/resources/musterdatei_041222.txt"); vorsatz.importFrom(musterdatei); Map<SatzTyp, Version> versionen = vorsatz.getSatzartVersionen(); assertEquals("1.9", versionen.get(SatzTyp.of("0001")).getInhalt()); assertEquals("1.9", versionen.get(SatzTyp.of("0100")).getInhalt()); assertEquals("1.9", versionen.get(SatzTyp.of("0200")).getInhalt()); assertEquals("2.1", versionen.get(SatzTyp.of("0210.050")).getInhalt()); } /** * Zum Testen verwenden wir hier die Musterdatei. * * @throws IOException falls die Musterdatei nicht importiert werden kann */ @Test public void testImportReader() throws IOException { try (InputStream istream = this.getClass().getResourceAsStream("/musterdatei_041222.txt")) { vorsatz.importFrom(istream); assertTrue(vorsatz + " should be valid", vorsatz.isValid()); assertEquals("9999", vorsatz.getVuNummer()); assertEquals("XXX Versicherung AG", vorsatz.getAbsender()); assertEquals("BRBRIENNEE,J\u00dcRGEN", vorsatz.getAdressat()); } } /** * Zum Testen nehmen wir hier den Vorsatz aus der Musterdatei, allerdings * ohne Umlaute. 
* * @throws IOException falls der Im- oder Export schief geht */ @Test public void testExport() throws IOException { String input = "00019999 XXX Versicherung AG BRBRIENNEE,JURGEN " + " 220720042207200499990099991.91.91.92.12.12.12.12.1 1.51.3" + "1.62.0 1.51.4 " + " 1.1 1 0000 Z0ZAG0011" + "\n" + "00019999 XXX Versicherung AG BRBRIENNEE,JURGEN " + " 220720042207200499990099991.01.01.01.01.0 1.01.01.1 " + " 1.01.0 " + " Z0ZAG0022" + "\n"; vorsatz.importFrom(input); StringWriter swriter = new StringWriter(input.length()); vorsatz.export(swriter); swriter.close(); assertEquals(input, swriter.toString()); } /** * Test-Methode fuer {@link Vorsatz#setAdressat(String)}. */ @Test public void testSetAdressat() { String adressat = "Obelix"; vorsatz.setAdressat(adressat); assertEquals(adressat, vorsatz.getAdressat()); } @Test public void testSetVersion() { SatzTyp satzTyp = SatzTyp.of("0100"); vorsatz.setVersion(satzTyp); String expected = SatzRegistry.getInstance().getSatz(satzTyp).getSatzversion().getInhalt(); assertEquals(expected, vorsatz.getVersion(100)); } @Test public void testSetVersionSatzartSparte() { SatzTyp satzTyp = SatzTyp.of("0210.050"); vorsatz.setVersion(satzTyp); String expected = SatzRegistry.getInstance().getSatz(satzTyp).getSatzversion().getInhalt(); assertEquals(expected, vorsatz.getVersion(210, 50)); } @Test public void testSetVersionVorsatz() { assertNotNull(vorsatz.getVersion(Bezeichner.VERSION_SATZART_0001)); } @Test public void testSetVersionNachsatz() { assertNotNull(vorsatz.getVersion(Bezeichner.VERSION_SATZART_9999)); } @Test public void testSetVersion100() { vorsatz.setVersion(Bezeichner.VERSION_SATZART_0100, "2.1"); assertEquals("2.1", vorsatz.getVersion(100)); } @Test public void testSetVersionString() { vorsatz.setVersion(Bezeichner.VERSION_SATZART_0102, "1.2"); assertEquals("1.2", vorsatz.getVersion(102)); } @Test public void testSetVersionSatzart() { vorsatz.setVersion(Bezeichner.SATZART_0200, "2.0"); assertEquals("2.0", 
vorsatz.getVersion(200)); } @Test public void testSetVersionSaztartSparte() { vorsatz.setVersion(210, 50,"2.5"); assertEquals("2.5", vorsatz.getVersion(210, 50)); } @Test public void testBezeichner() { assertNotNull(vorsatz.getFeld(Bezeichner.ABSENDER)); assertNotNull(vorsatz.getFeld(Bezeichner.ADRESSAT)); assertNotNull(vorsatz.getFeld(Bezeichner.ERSTELLUNGSDAT_ZEITRAUM_VOM)); assertNotNull(vorsatz.getFeld(Bezeichner.ERSTELLUNGSDAT_ZEITRAUM_BIS)); assertNotNull(vorsatz.getFeld(Bezeichner.VERMITTLER)); } @Test public void testImportVersionPreferSparte() throws IOException { Datenpaket dp = SatzRegistry.getInstance("VUVM2018.xml").getAllSupportedSaetze(); for (Satz satz : dp.getAllSaetze()) { AbstractSatzTest.setUp(satz); } File exportFile = new File("target/export/testVersionenHashMapPreferSpalte.txt"); dp.export(exportFile); vorsatz.importFrom(exportFile); Map<SatzTyp, Version> versionen = vorsatz.getSatzartVersionen(); assertEquals("2.4", versionen.get(SatzTyp.of(1)).getInhalt()); assertEquals("2.4", versionen.get(SatzTyp.of(220, 30)).getInhalt()); assertEquals("1.7", versionen.get(SatzTyp.of(210, 190)).getInhalt()); assertEquals("1.3", versionen.get(SatzTyp.of(220, 0)).getInhalt()); assertEquals("1.5", versionen.get(SatzTyp.of(220, 80)).getInhalt()); assertEquals("1.5", versionen.get(SatzTyp.of(220, 81)).getInhalt()); assertEquals("1.3", versionen.get(SatzTyp.of(220, 296)).getInhalt()); } @Test public void testSetVermittler() { vorsatz.setVermittler("12345"); assertEquals("12345", vorsatz.getVermittler()); assertEquals("12345", vorsatz.getFeld(Bezeichner.VERMITTLER).getInhalt().trim()); } }
/****************************************************************************
Copyright 2010, Colorado School of Mines and others.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
****************************************************************************/
package edu.mines.engelsma.sgl;

import edu.mines.jtk.sgl.*;
import edu.mines.jtk.awt.Mode;
import edu.mines.jtk.awt.ModeManager;

import java.awt.Component;
import java.awt.event.*;
import java.util.Iterator;
import javax.swing.*;

/**
 * A mode for painting ellipsoids representing eigen-tensors.
 * While active, mouse presses/drags pick a point in the scene and pull
 * (or, with Alt held, erase) the nearest tensor; the mouse wheel scales
 * all ellipsoids, and the 'c' key clears them.
 * @author Chris Engelsma, Colorado School of Mines.
 * @version 2010.01.05
 */
public class EigenTensorsPaintMode extends Mode {

  private static final long serialVersionUID = 1L;

  /**
   * Constructs an eigen-tensors paint mode with specified manager.
   * @param modeManager the mode manager for this mode.
   */
  public EigenTensorsPaintMode(ModeManager modeManager) {
    super(modeManager);
    setName("Paint");
    Class<EigenTensorsPaintMode> cls = EigenTensorsPaintMode.class;
    setIcon(loadIcon(cls,"resources/EigenTensorsPaintIcon16.png"));
    setCursor(loadCursor(cls,"resources/EigenTensorsPaintCursor16.png",1,1));
    setMnemonicKey(KeyEvent.VK_P);
    setAcceleratorKey(KeyStroke.getKeyStroke(KeyEvent.VK_P,0));
    setShortDescription("Paint Tensors");
  }

  ///////////////////////////////////////////////////////////////////////////
  // protected

  /**
   * Installs or removes this mode's mouse/key listeners on the canvas.
   * Only {@link ViewCanvas} components are handled.
   */
  @Override
  protected void setActive(Component component, boolean active) {
    if (component instanceof ViewCanvas) {
      if (active) {
        component.addMouseListener(_ml);
        component.addKeyListener(_kl);
        component.addMouseWheelListener(_mwl);
      } else {
        component.removeMouseListener(_ml);
        component.removeKeyListener(_kl);
        component.removeMouseWheelListener(_mwl);
      }
    }
  }

  ///////////////////////////////////////////////////////////////////////////
  // private

  private ViewCanvas _canvas;     // canvas when mouse pressed
  private View _view;             // view when mouse pressed; null, if none
  private World _world;           // world when mouse pressed; null, if none
  private PickResult _pickResult; // pick result when mouse is pressed
  private EigenTensorsGroup _etg; // eigen-tensors; null until first found
  private boolean _sticking;      // true iff sticking ellipsoids
  private boolean _erasing;       // true iff erasing ellipsoids

  private KeyListener _kl = new KeyListener() {
    @Override
    public void keyPressed(KeyEvent e) {
      // 'c' clears all painted ellipsoids; guard against the case where
      // no eigen-tensors group has been found yet (was an NPE).
      if (_etg!=null && e.getKeyChar()=='c') {
        _etg.clearAll();
      }
    }

    // Not used
    @Override
    public void keyTyped(KeyEvent e) {}
    @Override
    public void keyReleased(KeyEvent e) {}
  };

  private MouseListener _ml = new MouseAdapter() {
    @Override
    public void mousePressed(MouseEvent e) {
      // We first assume that we are not sticking or erasing ellipsoids
      _sticking = false;
      _erasing = false;

      // Pick and look in the result for the eigen-tensors group
      _pickResult = pick(e,true);
      _canvas = (ViewCanvas)e.getSource();
      _view = _canvas.getView();
      if (_view!=null)
        _world = _view.getWorld();

      // The check for eigen-tensors group...
      if (_pickResult!=null) {
        Class<EigenTensorsGroup> clsetg = EigenTensorsGroup.class;
        Iterator<Node> itn = _world.getChildren();
        while (itn.hasNext()) {
          Node n = itn.next();
          if (n.getClass()==clsetg) {
            _etg = (EigenTensorsGroup)n;
          }
        }
        if (_etg!=null) {
          Point3 p = _pickResult.getPointLocal();
          // If shift selected, stick the ellipsoids.
          _sticking = e.isShiftDown();
          // If alt selected, erase ellipsoids.
          _erasing = e.isAltDown();
          if (_erasing) _etg.clearClosestTensor(p.x,p.y,p.z);
          else          _etg.pullTensor(p.x,p.y,p.z,_sticking);
        }
      }
      // Begin listening for mouse movement.
      _canvas.addMouseMotionListener(_mml);
    }

    @Override
    public void mouseReleased(MouseEvent e) {
      // Make sure the temporary ellipsoid returns to null.
      // Guard: _etg is null until an eigen-tensors group has been picked
      // at least once (unguarded call was an NPE).
      if (_etg!=null)
        _etg.clearTempTensor();
      // No longer painting.
      _canvas.removeMouseMotionListener(_mml);
    }
  };

  // The mouse wheel changes the size of all the ellipsoids.
  private MouseWheelListener _mwl = new MouseWheelListener() {
    @Override
    public void mouseWheelMoved(MouseWheelEvent e) {
      // Measure the wheel rotations and scale by +/- rotations^-1.
      if (_etg!=null) {
        int nclicks = e.getWheelRotation();
        float size = _etg.getEllipsoidSize();
        // Don't shrink below the 0.5 floor.
        if (!(size<0.5f && nclicks<0)) {
          float scroll = (float)nclicks/10.0f;
          size += scroll;
          _etg.setEllipsoidSize(size);
        }
      }
    }
  };

  private MouseMotionListener _mml = new MouseMotionAdapter() {
    @Override
    public void mouseDragged(MouseEvent e) {
      if (_etg!=null) {
        // Same rules apply as mousePressed
        _pickResult = pick(e,false);
        if (_pickResult!=null) {
          Point3 p = _pickResult.getPointLocal();
          _sticking = e.isShiftDown();
          _erasing = e.isAltDown();
          if (_erasing) _etg.clearClosestTensor(p.x,p.y,p.z);
          else          _etg.pullTensor(p.x,p.y,p.z,_sticking);
        }
      }
    }
  };

  /**
   * Picks the closest node under the mouse event, if any.
   * @param event the mouse event; its source must be a ViewCanvas.
   * @param loud true to print pick details to stdout.
   * @return the closest pick result; null, if none.
   */
  private PickResult pick(MouseEvent event, boolean loud) {
    ViewCanvas canvas = (ViewCanvas)event.getSource();
    View view = canvas.getView();
    if (view==null)
      return null;
    World world = view.getWorld();
    if (world==null)
      return null;
    PickContext pc = new PickContext(event);
    // NOTE(review): the pick traversal below is commented out, so
    // pc.getClosest() will never see any nodes and picking appears
    // permanently disabled - confirm whether this is intentional.
    // world.pickApply(pc);
    PickResult pickResult = pc.getClosest();
    if (pickResult!=null) {
      Point3 pointLocal = pickResult.getPointLocal();
      Point3 pointWorld = pickResult.getPointWorld();
      if (loud) {
        System.out.println("Paint Pick");
        System.out.println("  local="+pointLocal);
        System.out.println("  world="+pointWorld);
      }
    } else {
      if (loud)
        System.out.println("Paint Pick nothing");
    }
    return pickResult;
  }
}
/*
 * Copyright (c) 2017 Intel Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intel.icecp.node.security.keymanagement.impl;

import com.intel.icecp.core.Channel;
import com.intel.icecp.core.management.Channels;
import com.intel.icecp.core.metadata.Persistence;
import com.intel.icecp.core.misc.ChannelIOException;
import com.intel.icecp.core.misc.ChannelLifetimeException;
import com.intel.icecp.core.misc.Configuration;
import com.intel.icecp.core.permissions.KeyManagementPermissions;
import com.intel.icecp.node.messages.security.CertificateMessage;
import com.intel.icecp.core.security.keymanagement.exception.KeyManagerException;
import com.intel.icecp.core.security.crypto.key.asymmetric.PrivateKey;
import com.intel.icecp.core.security.crypto.key.asymmetric.PublicKey;
import com.intel.icecp.core.security.crypto.key.symmetric.SymmetricKey;
import com.intel.icecp.node.security.SecurityConstants;
import com.intel.icecp.node.security.utils.PemEncodingUtils;
import com.intel.icecp.node.utils.SecurityUtils;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.InvalidAlgorithmParameterException;
import java.security.InvalidKeyException;
import java.security.Key;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.SignatureException;
import java.security.UnrecoverableKeyException;
import java.security.cert.CertPath;
import java.security.cert.CertPathValidator;
import java.security.cert.CertPathValidatorException;
import java.security.cert.Certificate;
import java.security.cert.CertificateException;
import java.security.cert.CertificateExpiredException;
import java.security.cert.CertificateFactory;
import java.security.cert.CertificateNotYetValidException;
import java.security.cert.PKIXBuilderParameters;
import java.security.cert.X509CertSelector;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ExecutionException;
import javax.crypto.SecretKey;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import com.intel.icecp.core.security.keymanagement.KeyManager;

/**
 * Implementation of key manager using Java {@link java.security.KeyStore}, which
 * works ONLY with JCEKS format.
 * This implementation uses two separate instances of {@link java.security.KeyStore}
 * <ol>
 * <li> A key store contains all the keys, i.e., both node's keys and channel keys;
 * this store can persist both public/private keys and symmetric keys; </li>
 * <li> A trust store contains the trust anchors X.509 certificates (e.g., administrator
 * and/or CA); node's certificate (if any) goes here as well. </li>
 * </ol>
 * Access control is performed on keys by key ID using {@link SecurityUtils}
 * and the operations specified in {@link KeyManagementPermissions} class.
 * <p>
 * Callers must invoke {@link #load()} once before any key/certificate
 * operation; the methods below assert this precondition.
 */
public class KeyStoreBasedManager implements KeyManager, AutoCloseable {

    /** Constant error messages */
    private static final String ERR_MSG_UNABLE_TO_FETCH_CERTIFICATE = "Unable to retrieve certificate ";

    /** Store settings (both stores use the JCEKS format) */
    private static final String STORE_TYPE = "JCEKS";

    /** Constants and default parameters for properties parsing */
    // @TODO: Move these constants to SecurityConstants
    private static final String TRUSTORE_TAG = "truststore";
    private static final String DEFAULT_TRUSTSTORE = "keystores/stores/truststore.jceks";
    private static final String KEYSTORE_TAG = "keystore";
    private static final String DEFAULT_KEYSTORE = "keystores/stores/keystore.jceks";
    private static final String PASSWORD_TAG = "storespassword";

    /** Logger */
    private static final Logger LOGGER = LogManager.getLogger();

    /** Key store and trust store */
    // @TODO: Move trust store into a separate TrustManager, to
    // reduce the functions provided by this class
    private KeyStore trustStore = null;
    private KeyStore keyStore = null;

    /** Local location of the key stores (e.g., path in the file system) */
    private final String trustStoreLocation;
    private final String keyStoreLocation;

    /** Key stores password (same password for both stores) */
    // @TODO: We should (try to) remove the need to keep the password in memory all the time
    private final char[] password;

    /** handles configuration for the key store */
    private final Configuration configuration;

    /** Reference to channels that may be used to fetch certificates remotely */
    private final Channels channels;

    /** Tells whether key stores have been initialized/loaded */
    private boolean init = false;

    /**
     * Creates the manager and reads store locations and password from the
     * given configuration, falling back to the built-in defaults if the
     * configuration cannot be loaded.
     *
     * @param channels channels used to fetch certificates remotely
     * @param configuration source of store locations/password
     */
    public KeyStoreBasedManager(Channels channels, Configuration configuration) {
        this.channels = channels;
        // Set a file configuration manager to look for configuration file in the default path
        this.configuration = configuration;
        try {
            // Load the configuration
            this.configuration.load();
        } catch(NullPointerException | ChannelIOException ex) {
            // If fails, we use the default parameters
            LOGGER.warn("Unable to read key manager configuration from file.", ex);
        }
        // Load parameters
        // NOTE(review): argument order here assumes icecp's
        // Configuration.getOrDefault(defaultValue, propertyPath) signature - confirm.
        this.trustStoreLocation = this.configuration.getOrDefault(DEFAULT_TRUSTSTORE, TRUSTORE_TAG);
        this.keyStoreLocation = this.configuration.getOrDefault(DEFAULT_KEYSTORE, KEYSTORE_TAG);
        this.password = this.configuration.getOrDefault(SecurityConstants.getKeyStoreDefaultPassword(), PASSWORD_TAG);
    }

    /**
     * One-time initialization of the key manager. Idempotent: subsequent
     * calls are no-ops until {@link #close()} resets the state.
     * The key store may be created empty; the trust store file must exist.
     *
     * @throws KeyManagerException In case of initialization error
     */
    public synchronized void load() throws KeyManagerException {
        // It has not been initialized yet
        if (!init) {
            try {
                keyStore = KeyStore.getInstance(STORE_TYPE);
                trustStore = KeyStore.getInstance(STORE_TYPE);
            } catch (KeyStoreException ex) {
                throw new KeyManagerException("Error loading keystore or truststore.", ex);
            }
            // Initalize both key stores
            init(keyStore, this.password, Paths.get(keyStoreLocation), true);
            // This call may fail if we find no trust store file
            init(trustStore, this.password, Paths.get(trustStoreLocation), false);
            init = true;
        }
    }

    /**
     * Perform teardown operations: wipes the in-memory password and marks
     * the manager as uninitialized.
     *
     * {@inheritDoc }
     *
     */
    @Override
    public void close() {
        // Erase the content of password
        Arrays.fill(password, '0');
        init = false;
    }

    /**
     * Utility method that fetches a key from a given {@link java.security.KeyStore},
     * and returns it in the following cases:
     * <ol>
     * <li> The entry exists </li>
     * <li> The entry is a key entry </li>
     * <li> The key is of (or assignable from) the given {@link java.security.Key} type </li>
     * </ol>
     *
     * @param <T> Expected key type (subclass of {@link java.security.Key})
     * @param keyAlias Alias under which the key is stored inside the {@link java.security.KeyStore}
     * @param keyType Expected key type, as a subclass of {@link java.security.Key}
     * @param keyStore Key store
     * @return The key if the above requirements are met
     * @throws KeyManagerException In case of error
     */
    protected <T extends Key> T getKey(String keyAlias, Class<T> keyType, KeyStore keyStore) throws KeyManagerException {
        // The key manager must have been initialized
        assert init;
        // Check READ permission for the given key
        SecurityUtils.checkPermission(new KeyManagementPermissions(keyAlias, KeyManagementPermissions.READ));
        try {
            // First, we check whether an entry exists; if not, we throw an exception
            if (!keyStore.containsAlias(keyAlias)) {
                throw new KeyManagerException("Unable to retrieve the given entry " + keyAlias + ": key not found");
            }
            // Then, we check if the entry is a key; if not, we throw an exception
            if(!keyStore.isKeyEntry(keyAlias)) {
                throw new KeyManagerException("Unable to retrieve the given entry " + keyAlias + ": is not a key");
            }
            Key key = keyStore.getKey(keyAlias, password);
            // Check whether the type corresponds; otherwise we simply throw an exception
            if (keyType.isAssignableFrom(key.getClass())) {
                // This cast is safe (type checked just above)
                return (T) key;
            }
            throw new KeyManagerException("Unable to retrieve symmetric key " + keyAlias + ": key does not have the expected type");
        } catch (NoSuchAlgorithmException | UnrecoverableKeyException | KeyStoreException ex) {
            throw new KeyManagerException("Unable to retrieve symmetric key " + keyAlias + ".", ex);
        }
    }

    /**
     * Fetches a private key from the key store by alias.
     *
     * {@inheritDoc}
     *
     */
    @Override
    public PrivateKey getPrivateKey(URI keyAlias) throws KeyManagerException {
        return new PrivateKey(getKey(keyAlias.toASCIIString(), java.security.PrivateKey.class, keyStore));
    }

    /**
     * Fetches a symmetric key from the key store by alias.
     *
     * {@inheritDoc}
     *
     */
    @Override
    public SymmetricKey getSymmetricKey(URI keyAlias) throws KeyManagerException {
        return new SymmetricKey(getKey(keyAlias.toASCIIString(), SecretKey.class, keyStore));
    }

    /**
     * Fetches a public key: extracted from the certificate stored in the
     * key store under the given alias.
     *
     * {@inheritDoc}
     *
     */
    @Override
    public PublicKey getPublicKey(URI keyAlias) throws KeyManagerException {
        // The key manager must have been initialized
        assert init;
        // Check READ permissions for the key
        String keyAliasString = keyAlias.toASCIIString();
        SecurityUtils.checkPermission(new KeyManagementPermissions(keyAliasString, KeyManagementPermissions.READ));
        try {
            if (!keyStore.containsAlias(keyAliasString)) {
                throw new KeyManagerException("Unable to retrieve public key " + keyAliasString + ": certificate not found");
            }
            // Unless we have some KeyStore related error, we are sure to have the public key
            return new PublicKey(keyStore.getCertificate(keyAliasString).getPublicKey());
        } catch (KeyStoreException e) {
            throw new KeyManagerException("Unable to retrieve public key " + keyAliasString, e);
        }
    }

    /**
     * Adds (or replaces) a symmetric key and persists the key store to disk.
     *
     * {@inheritDoc}
     *
     */
    @Override
    public synchronized void addSymmetricKey(URI keyAlias, SymmetricKey k) throws KeyManagerException {
        // The key manager MUST have been initialized
        assert init;
        String keyAliasString = keyAlias.toASCIIString();
        try {
            // Check CREATE or UPDATE permissions, depending on whether the alias exists
            if (keyStore.containsAlias(keyAliasString)) {
                SecurityUtils.checkPermission(new KeyManagementPermissions(keyAliasString, KeyManagementPermissions.UPDATE));
            } else {
                SecurityUtils.checkPermission(new KeyManagementPermissions(keyAliasString, KeyManagementPermissions.CREATE));
            }
            // Set the entry
            keyStore.setKeyEntry(keyAliasString, k.getWrappedKey(), password, null);
            // Save changes to file
            this.write(keyStore, keyStoreLocation);
        } catch (KeyStoreException ex) {
            throw new KeyManagerException("Unable to add symmetric key " + keyAliasString, ex);
        }
    }

    /**
     * Deletes a symmetric key and persists the key store to disk.
     *
     * {@inheritDoc}
     *
     */
    @Override
    public void deleteSymmetricKey(URI keyAlias) throws KeyManagerException {
        // The key manager must have been initialized
        assert init;
        String keyAliasString = keyAlias.toASCIIString();
        SecurityUtils.checkPermission(new KeyManagementPermissions(keyAliasString, KeyManagementPermissions.DELETE));
        try {
            // Thread safe operation
            deleteStoreEntry(keyStore, keyStoreLocation, keyAliasString);
        } catch (KeyStoreException ex) {
            throw new KeyManagerException("Unable to delete key " + keyAliasString + ".", ex);
        }
    }

    /**
     * Looks up a certificate, first in the local trust store, then (on miss)
     * by fetching it from a channel named after the certificate ID; a fetched
     * chain is PKIX-verified before its head certificate is returned.
     *
     * {@inheritDoc}
     *
     */
    @Override
    public Certificate getCertificate(URI certificateID) throws KeyManagerException {
        // The key manager must have been initialized
        assert init;
        // Try first to load it from the keystore
        try {
            return getTrustedCertificate(certificateID.toASCIIString());
        } catch (KeyManagerException ex) {
            // Do nothing: fall through to the remote fetch below
            LOGGER.info("Unable to find the certificate with name '{}' inside trust store", certificateID, ex);
        }
        // Try to retrieve the certificate form a channel, using certificate's name
        CertificateMessage msg;
        try (Channel<? extends CertificateMessage> channel = channels.openChannel(certificateID, CertificateMessage.class, Persistence.FOREVER)) {
            msg = channel.latest().get();
        } catch (ChannelIOException | ChannelLifetimeException | ExecutionException | InterruptedException ex) {
            throw new KeyManagerException(ERR_MSG_UNABLE_TO_FETCH_CERTIFICATE + certificateID.toASCIIString() + " from a channel.", ex);
        }
        // Decode the certificates (assume X.509); if not X.509, returns an empty list
        List<Certificate> certs = PemEncodingUtils.decodeX509CertificateChain(msg.certificate.getBytes());
        // This call may fail (if the certificate can not be verified);
        // if the chain can be verified, we return the first in the chain (as the list
        // returned by the verify method is sorted)
        return verify(certs).get(0);
    }

    /**
     * Decodes and PKIX-verifies an encoded certificate chain; returns the
     * head (target) certificate of the verified chain.
     *
     * {@inheritDoc}
     *
     */
    @Override
    public Certificate verifyCertificateChain(byte[] certificate) throws KeyManagerException {
        // The key manager must have been initialized
        assert init;
        // Simply call verify and return the first certificate in the returned
        // (sorted) certificate path.
        List<Certificate> certs = PemEncodingUtils.decodeX509CertificateChain(certificate);
        return verify(certs).get(0);
    }

    /** PROTECTED/PRIVATE METHODS */

    /**
     * Reads (and loads if necessary) a given {@link KeyStore}.
     *
     * @param store Reference to the key store to initialize
     * @param password Password to open the key store
     * @param path Path in the file system
     * @param acceptEmpty Tells whether is acceptable to load an empty key store
     * @throws KeyManagerException If the given store name is not valid or if
     * store loading or creation fails
     */
    protected final void init(KeyStore store, char[] password, Path path, boolean acceptEmpty) throws KeyManagerException {
        // Cannot be null
        if (store == null) {
            throw new KeyManagerException("Invalid null key store " + store + ".");
        }
        // "Synchronize" on the store before proceding
        synchronized (store) {
            // Try to open the key store at the given path
            try (InputStream keystoreFileInputStream = new FileInputStream(path.toFile())) {
                // If the given inputStream is null, this call creates a new empty KeyStore
                store.load(keystoreFileInputStream, password);
            } catch (FileNotFoundException e) {
                if(acceptEmpty) {
                    // No file found, but empty store allowed.
                    // In this case we can still proceed trying to
                    // create a new empty keystore
                    LOGGER.warn("Error reading from path {}.", path.toString(), e);
                    try {
                        store.load(null, password);
                    } catch (IOException | NoSuchAlgorithmException | CertificateException ex) {
                        // Something went wrong
                        throw new KeyManagerException("Unable to load empty keystore " + store, ex);
                    }
                } else {
                    // Empty key store not allowed; throw an exception
                    throw new KeyManagerException("Unable to load keystore " + store + " from path " + path.toString(), e);
                }
            } catch (CertificateException | IOException | NoSuchAlgorithmException ex) {
                // Something went wrong
                throw new KeyManagerException("Unable to load keystore " + store + " from path " + path.toString(), ex);
            }
        }
    }

    /**
     * Writes on a given {@link KeyStore}; to use if we modified the key store and want
     * to change its state.
     *
     * Note that, this method does NOT perform any permission check, which should be
     * enforced by the caller
     *
     * @param store Key store
     * @param keyStoreLocation Key store location
     * @throws KeyManagerException If the given store name is not valid or if writing in the
     * key store fails
     */
    protected void write(KeyStore store, String keyStoreLocation) throws KeyManagerException {
        // Cannot be null
        if (store == null) {
            throw new KeyManagerException("Invalid key store " + store + ": null");
        }
        try {
            // Write the keystore to file (take a lock in the key store to prevent
            // inconsistencies while writing to file)
            synchronized(store) {
                try (FileOutputStream fos = new FileOutputStream(keyStoreLocation)) {
                    store.store(fos, password);
                }
            }
        } catch (CertificateException | IOException | KeyStoreException | NoSuchAlgorithmException ex) {
            throw new KeyManagerException("Unable to save the changes to " + keyStoreLocation + "", ex);
        }
    }

    /**
     * Utility method that removes an entry from a given store, and saves the
     * result on file. NOTE: This method DOES NOT check for permissions; the
     * caller should enforce access control
     *
     * @param keyStore Key store
     * @param keyStoreLocation Location of the key store file to rewrite
     * @param entryId Entry identifier
     * @throws KeyStoreException If call to {@link KeyStore#deleteEntry(java.lang.String) } fails
     * @throws KeyManagerException If call to {@link KeyStoreBasedManager#write(java.security.KeyStore, java.lang.String) } fails
     */
    private void deleteStoreEntry(KeyStore keyStore, String keyStoreLocation, String entryId) throws KeyStoreException, KeyManagerException {
        synchronized (keyStore) {
            // Remove the entry
            keyStore.deleteEntry(entryId);
            // Save changes (if we are allowed to do so)
            this.write(keyStore, keyStoreLocation);
        }
    }

    /**
     * Tells whether a given {@link java.security.cert.Certificate} is self signed,
     * i.e., verifies against its own public key.
     *
     * @param certificate The certificate to check
     * @return {@code true} if the certificate is self signed; {@code false} otherwise
     */
    protected boolean isSelfSigned(Certificate certificate) {
        try {
            certificate.verify(certificate.getPublicKey());
            return true;
        } catch (CertificateException | NoSuchAlgorithmException | InvalidKeyException | NoSuchProviderException | SignatureException ex) {
            // If the call fails, it means that either the certificate is
            // not self-signed, or that there are other issues with the certificate.
            // In both cases, the method will return false.
            LOGGER.warn("Self-signed test failed (Certificate is not self-signed).", ex);
            return false;
        }
    }

    /**
     * Utility method that verifies the validity of a certificate; this method
     * checks whether the certificate is a subtype of {@link X509Certificate}
     * and only in this case, tries to verify the validity of the certificate
     *
     * @param certificate The certificate to verify
     * @throws UnsupportedEncodingException If the certificate format is not supported
     * @throws CertificateExpiredException If the certificate is expired
     * @throws CertificateNotYetValidException If the certificate is not yet valid
     */
    protected void checkCertificateValidity(Certificate certificate) throws UnsupportedEncodingException, CertificateExpiredException, CertificateNotYetValidException {
        // Check if certificate is a X509Certificate, and only in this case,
        // check its validity
        if (X509Certificate.class.isAssignableFrom(certificate.getClass())) {
            // Check validity of this certificate (cast is safe)
            ((X509Certificate) certificate).checkValidity();
        } else {
            throw new UnsupportedEncodingException("Unknown certificate format");
        }
    }

    /**
     * Returns a certificate from the trust store ONLY if it is valid; throws an
     * exception otherwise.
     * If invalid, the method tries to remove it from the trust store.
     *
     * @param certificateID Certificate unique identifier
     * @return The certificate corresponding to the given id
     * @throws KeyManagerException If certificate not found inside the trust store, or
     * the certificate is expired/not yet valid
     */
    protected Certificate getTrustedCertificate(String certificateID) throws KeyManagerException {
        Certificate certificate;
        try {
            // Inside trust store we have only certificaes; therefore this test is sufficient
            if (!trustStore.containsAlias(certificateID)) {
                // Failure (a): certificate not in trust store
                throw new KeyManagerException(ERR_MSG_UNABLE_TO_FETCH_CERTIFICATE + certificateID + ": not found in trust store");
            }
            // Fetches a cert. chain from the trust store;
            // Note that this implies that the first element, in case of
            // certificate chain, is given.
            // Source: (https://docs.oracle.com/javase/8/docs/api/java/security/KeyStore.html#getCertificate(java.lang.String))
            certificate = trustStore.getCertificate(certificateID);
            // Check the validity of the certificate
            checkCertificateValidity(certificate);
            // If passed the check, we can return it
            return certificate;
        } catch (CertificateExpiredException ex) {
            // We remove the certificate since expired
            try {
                // Thread safe operation
                this.deleteStoreEntry(trustStore, trustStoreLocation, certificateID);
            } catch (KeyStoreException e) {
                // Do nothing: removal of the expired entry is best-effort
                LOGGER.warn(e);
            }
            // Failure (b): Certificate is expired
            throw new KeyManagerException(ERR_MSG_UNABLE_TO_FETCH_CERTIFICATE + certificateID, ex);
        } catch (UnsupportedEncodingException | KeyStoreException | CertificateNotYetValidException ex) {
            // Failure (c): Certificate is not yet valid or not a X509, or another key store failure occurred
            throw new KeyManagerException(ERR_MSG_UNABLE_TO_FETCH_CERTIFICATE + certificateID, ex);
        }
    }

    /**
     * Given a (possibly unsorted) certificate chain returns the corresponding ordered
     * certificates list if the chain can be verified against the trust anchors;
     * throws an exception otherwise.
     * Returned certificates are sorted according to:
     * {@literal https://docs.oracle.com/javase/8/docs/api/java/security/cert/CertPath.html}
     *
     * @param certificateChain Certificate chain as a {@link List} of {@link Certificate}
     * @return An ordered certificate chain
     * @throws KeyManagerException If the certificate chain is not verified
     */
    protected List<Certificate> verify(List<Certificate> certificateChain) throws KeyManagerException {
        // Check if we have no certs, or if the cert is self signed, to avoid useless computation
        if (certificateChain.isEmpty()) {
            throw new KeyManagerException("Empty certificate chain supplied.");
        } else if (this.isSelfSigned(certificateChain.get(0))) {
            // In this case, the first certificate of the chain is self signed, and
            // therefore there is no chain; furthermore, the certificate is untrusted
            // (otherwise we would have found it into the trusted store).
            throw new KeyManagerException("Certificate is self signed but untrusted.");
        }
        LOGGER.info("Certificate is not self signed; continue verifying the certificate chain.");
        // We can now verify the validity of the certificate chain
        try {
            // Validator used for verification of the chain
            // Every Java implementation is required to support PKIX
            // Source: https://docs.oracle.com/javase/8/docs/api/java/security/cert/CertPathValidator.html
            CertPathValidator validator = CertPathValidator.getInstance("PKIX");
            // The chain may have 1 or more certificates; this creates a path
            // to follow when verifing the certificate chain validity
            // Every Java implementation is required to support "X.509"
            // Source: https://docs.oracle.com/javase/8/docs/api/java/security/cert/CertificateFactory.html
            CertPath path = CertificateFactory.getInstance("X.509").generateCertPath(certificateChain);
            // Set Selector for the certificate to check (in our case, the first of the chain)
            X509CertSelector selector = new X509CertSelector();
            selector.setCertificate((X509Certificate) path.getCertificates().get(0));
            // We tell the verification process to load trusted certificates from trustedKeyStore.
            // The chain should terminate with, or contain a certifiacte in this store.
            PKIXBuilderParameters params = new PKIXBuilderParameters(trustStore, selector);
            // @TODO: CRL or OCSP not yet supported
            params.setRevocationEnabled(false);
            // We let the validator check the validity of the chain (throws an exception if invalid)
            validator.validate(path, params);
            LOGGER.info("Certificate chain is valid.");
            // Return the sorted certificate chain
            // (convert to non generic-wildcard type)
            List<Certificate> result = new ArrayList<>();
            result.addAll(path.getCertificates());
            return result;
        } catch (CertPathValidatorException | CertificateException | InvalidAlgorithmParameterException | NoSuchAlgorithmException | KeyStoreException ex) {
            throw new KeyManagerException("Unable to verify key chain.", ex);
        }
    }
}
package com.ralitski.util.math.geom.d2; import com.ralitski.util.math.Matrix; public class Vector2d implements Cloneable { public Vector2d getI() { return new Vector2d(1, 0); } public Vector2d getJ() { return new Vector2d(0, 1); } //obj private float x; private float y; private float magnitude; private boolean magnitudeDirty = true; public Vector2d(Matrix m) { if(m.getColumns() == 1) { x = m.getValue(1, 1); y = m.getValue(2, 1); } else { throw new IllegalArgumentException("Vectors can only be constructed from vector matrices"); } } public Vector2d() { this(0, 0); } public Vector2d(Point2d terminal) { this(terminal.getX(), terminal.getY()); } public Vector2d(Vector2d terminal) { this(terminal.getX(), terminal.getY()); } public Vector2d(Point2d initial, Point2d terminal) { this(terminal.getX() - initial.getX(), terminal.getY() - initial.getY()); } public Vector2d(float x, float y) { this.x = x; this.y = y; } //add, set, get public void addX(float x) { this.setX(getX() + x); } public void setX(float x) { this.x = x; magnitudeDirty = true; } public float getX() { return x; } public void addY(float y) { this.setY(getY() + y); } public void setY(float y) { this.y = y; magnitudeDirty = true; } public float getY() { return y; } //angles, rotation public float getAngle() { return (float)Math.atan2(y, x); } public float getAngleDegrees() { return (float)Math.toDegrees(getAngle()); } public void rotateDegrees(float angle) { rotate((float)Math.toRadians(angle)); } public void rotate(float angle) { float cos = (float)Math.cos(angle); float sin = (float)Math.sin(angle); float xPrime = cos * x - sin * y; float yPrime = sin * x + cos * y; x = xPrime; y = yPrime; } //misc public boolean isEmpty() { return x == 0 && y == 0; } public Vector2d crossProduct() { return new Vector2d(y, -x); } public float dot(Vector2d other) { return (x * other.x) + (y * other.y); } public void add(Vector2d other) { x += other.x; y += other.y; magnitudeDirty = true; } public void add(float magnitude) { 
float oldMag = magnitude(); float newMag = oldMag + magnitude; float mod = newMag / oldMag; multiply(mod); } public void normalize() { setMagnitude(1F); } public void setMagnitude(float m) { float mag = magnitude(); if(mag != 0F) multiply(m / mag); else { x = m; y = 0; } } public void multiply(float magnitude) { x *= magnitude; y *= magnitude; magnitudeDirty = true; } public void invert() { x = -x; y = -y; } public float magnitude() { recalcMagnitude(); return magnitude; } private void recalcMagnitude() { if(magnitudeDirty) { magnitude = (float)Math.sqrt(x * x + y * y); magnitudeDirty = false; } } /** * note: this method is not necessarily faster than magnitude() due to the use of a cache. * @return */ public float magnitudeSquared() { return x * x + y * y; } public float getAngleTo(Vector2d other) { float dot = this.dot(other); float div = this.magnitude() * other.magnitude(); if(div == 0) return 0; float cosAngle = dot / div; return (float)Math.acos(cosAngle); } public Matrix toMatrix(boolean extra) { return extra ? new Matrix(new float[][]{ {x}, {y}, {1} }) : new Matrix(new float[][]{ {x}, {y} }); } public boolean isAligned(Vector2d other) { //do the alignment calculation first because it's faster...maybe //also can I get a whoop whoop for nested tertiary statements return (x > 0F ? other.x > 0F : (x < 0F ? other.x < 0F : (y > 0F ? other.y > 0F : (y < 0F ? 
other.y < 0F : other.y == 0F)))) && isParallel(other); } public boolean isParallel(Vector2d other) { float xx = x / other.x; float yy = y / other.y; if(x == 0F) { if(y == 0F) { return other.x == 0F && other.y == 0F; } else return other.x == 0; } else if(y == 0F) { return other.y == 0F; } else return xx == yy; } public boolean isOrthogonal(Vector2d other) { return this.dot(other) == 0; } public boolean equals(Object o) { if(this == o) return true; if(o instanceof Vector2d) { Vector2d v = (Vector2d)o; return x == v.x && y == v.y; } return false; } public String toString() { return "<" + x + ", " + y + ">"; } public Vector2d clone() { return new Vector2d(x, y); } public Vector2d addCopy(Vector2d v) { return new Vector2d(x + v.x, y + v.y); } public Vector2d subtractCopy(Vector2d v) { return new Vector2d(x - v.x, y - v.y); } public Vector2d scaleCopy(float f) { return new Vector2d(x * f, y * f); } public Vector2d scaleCopy(Vector2d v) { return new Vector2d(x * v.x, y * v.y); } public Vector2d descaleCopy(float f) { return new Vector2d(x / f, y / f); } public Vector2d descaleCopy(Vector2d v) { return new Vector2d(x / v.x, y / v.y); } public Vector2d negateCopy() { return new Vector2d(-x, -y); } public boolean isNaN() { return Float.isNaN(x) || Float.isNaN(y); } public float[] toArray() { return new float[]{x, y}; } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.xml.security.stax.impl;

import org.apache.xml.security.exceptions.XMLSecurityException;
import org.apache.xml.security.stax.ext.OutputProcessorChain;
import org.apache.xml.security.stax.ext.SecurePart;
import org.apache.xml.security.stax.ext.XMLSecurityConstants;
import org.apache.xml.security.stax.ext.stax.XMLSecAttribute;
import org.apache.xml.security.stax.ext.stax.XMLSecEvent;
import org.apache.xml.security.stax.ext.stax.XMLSecEventFactory;
import org.apache.xml.security.stax.ext.stax.XMLSecNamespace;

import javax.xml.XMLConstants;
import javax.xml.namespace.NamespaceContext;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

/**
 * Custom XMLStreamWriter to map XMLStreamWriter method calls into XMLEvent's
 *
 * <p>A start element is buffered in {@code openStartElement} until the next
 * non-attribute/namespace call, so that attributes and namespace declarations
 * written after writeStartElement() can still be attached to it. Each event is
 * then pushed through the {@code outputProcessorChain}.
 */
public class XMLSecurityStreamWriter implements XMLStreamWriter {

    private final OutputProcessorChain outputProcessorChain;
    // innermost element currently open (parent chain reaches the root)
    private Element elementStack;
    // start element whose event has not been emitted yet (still collecting
    // attributes / namespace declarations)
    private Element openStartElement;
    // namespace context used before the first element is opened
    private NSContext namespaceContext = new NSContext(null);
    // guards against emitting the end-document event twice (close() calls
    // writeEndDocument() too)
    private boolean endDocumentWritten = false;
    // set by writeEmptyElement(): the matching end element must be emitted
    // right after the buffered start element is flushed
    private boolean haveToWriteEndElement = false;
    // when set, the document (root) element is registered as a part to sign
    private SecurePart signEntireRequestPart;
    // when set, the document (root) element is registered as a part to encrypt
    private SecurePart encryptEntireRequestPart;

    public XMLSecurityStreamWriter(OutputProcessorChain outputProcessorChain) {
        this.outputProcessorChain = outputProcessorChain;
    }

    /**
     * Pushes a single event through the (reset) processor chain, translating
     * security exceptions into XMLStreamException for the StAX contract.
     */
    private void chainProcessEvent(XMLSecEvent xmlSecEvent) throws XMLStreamException {
        try {
            outputProcessorChain.reset();
            outputProcessorChain.processEvent(xmlSecEvent);
        } catch (XMLSecurityException e) {
            throw new XMLStreamException(e);
        } catch (XMLStreamException e) {
            String msg = e.getMessage();
            if (msg != null && msg.contains("Trying to declare prefix xmlns (illegal as per NS 1.1 #4)")) {
                throw new XMLStreamException("If you hit this exception this most probably means" +
                        "you are using the javax.xml.transform.stax.StAXResult. Don't use " +
                        "it. It is buggy as hell.", e);
            }
            //NB1: net.java.dev.stax-utils also doesn't work: [Fatal Error]
            // :4:425: Attribute "xmlns" was already specified for element ...
            //NB2: The spring version also doesn't work...
            //it seems it is not trivial to write a StAXResult because I couldn't find an implementation
            // which passes the testcases...hmm
            throw e;
        }
    }

    /**
     * Flushes the buffered start element (if any) and, for an empty element,
     * immediately emits its end element as well.
     */
    private void outputOpenStartElement() throws XMLStreamException {
        if (openStartElement != null) {
            chainProcessEvent(
                    XMLSecEventFactory.createXmlSecStartElement(
                            openStartElement.getQName(),
                            openStartElement.getAttributes(),
                            openStartElement.getNamespaces()));
            openStartElement = null;
        }
        if (haveToWriteEndElement) {
            haveToWriteEndElement = false;
            writeEndElement();
        }
    }

    // resolves a prefix against the innermost element's context, or the
    // writer-level context before any element has been opened
    private String getNamespacePrefix(String namespaceURI) {
        if (elementStack == null) {
            return namespaceContext.getPrefix(namespaceURI);
        } else {
            return elementStack.getNamespaceContext().getPrefix(namespaceURI);
        }
    }

    @Override
    public void writeStartElement(String localName) throws XMLStreamException {
        writeStartElement(XMLConstants.DEFAULT_NS_PREFIX, localName, XMLConstants.NULL_NS_URI);
    }

    @Override
    public void writeStartElement(String namespaceURI, String localName) throws XMLStreamException {
        writeStartElement(getNamespacePrefix(namespaceURI), localName, namespaceURI);
    }

    @Override
    public void writeStartElement(String prefix, String localName, String namespaceURI)
            throws XMLStreamException {
        outputOpenStartElement();

        Element element;
        if (elementStack == null) {
            // root element: seed it with the writer-level namespace context
            // (elementStack is null here, i.e. the new element has no parent)
            element = new Element(elementStack, namespaceContext, namespaceURI, localName, prefix);
            // if requested, register the whole document (= the root element)
            // as a signature and/or encryption part
            if (signEntireRequestPart != null) {
                signEntireRequestPart.setName(new QName(namespaceURI, localName, prefix));
                outputProcessorChain.getSecurityContext().putAsMap(
                        XMLSecurityConstants.SIGNATURE_PARTS,
                        signEntireRequestPart.getName(),
                        signEntireRequestPart
                );
            }
            if (encryptEntireRequestPart != null) {
                encryptEntireRequestPart.setName(new QName(namespaceURI, localName, prefix));
                outputProcessorChain.getSecurityContext().putAsMap(
                        XMLSecurityConstants.ENCRYPTION_PARTS,
                        encryptEntireRequestPart.getName(),
                        encryptEntireRequestPart
                );
            }
        } else {
            element = new Element(elementStack, namespaceURI, localName, prefix);
        }
        elementStack = element;
        // buffer the element: its event is emitted on the next non-attribute call
        openStartElement = element;
    }

    @Override
    public void writeEmptyElement(String localName) throws XMLStreamException {
        writeEmptyElement(XMLConstants.DEFAULT_NS_PREFIX, localName, XMLConstants.NULL_NS_URI);
    }

    @Override
    public void writeEmptyElement(String namespaceURI, String localName) throws XMLStreamException {
        writeEmptyElement(getNamespacePrefix(namespaceURI), localName, namespaceURI);
    }

    @Override
    public void writeEmptyElement(String prefix, String localName, String namespaceURI)
            throws XMLStreamException {
        // an empty element is a start element whose end element is emitted
        // automatically when the buffered start element gets flushed
        writeStartElement(prefix, localName, namespaceURI);
        openStartElement.setEmptyElement(true);
        haveToWriteEndElement = true;
    }

    @Override
    public void writeEndElement() throws XMLStreamException {
        outputOpenStartElement();
        Element element = this.elementStack;
        this.elementStack = this.elementStack.getParentElement();
        chainProcessEvent(XMLSecEventFactory.createXmlSecEndElement(element.getQName()));
    }

    @Override
    public void writeEndDocument() throws XMLStreamException {
        if (!endDocumentWritten) {
            outputOpenStartElement();
            // close any elements still open, innermost first
            while (this.elementStack != null) {
                Element element = this.elementStack;
                this.elementStack = element.getParentElement();
                chainProcessEvent(XMLSecEventFactory.createXmlSecEndElement(element.getQName()));
            }
            chainProcessEvent(XMLSecEventFactory.createXMLSecEndDocument());
            endDocumentWritten = true;
        }
    }

    @Override
    public void close() throws XMLStreamException {
        try {
            // idempotent thanks to the endDocumentWritten flag
            writeEndDocument();
            outputProcessorChain.reset();
            outputProcessorChain.doFinal();
        } catch (XMLSecurityException e) {
            throw new XMLStreamException(e);
        }
    }

    @Override
    public void flush() throws XMLStreamException {
        // intentionally a no-op: events are forwarded as they are produced
    }

    @Override
    public void writeAttribute(String localName, String value) throws XMLStreamException {
        writeAttribute(XMLConstants.DEFAULT_NS_PREFIX, XMLConstants.NULL_NS_URI, localName, value);
    }

    @Override
    public void writeAttribute(String namespaceURI, String localName, String value)
            throws XMLStreamException {
        writeAttribute(getNamespacePrefix(namespaceURI), namespaceURI, localName, value);
    }

    @Override
    public void writeAttribute(String prefix, String namespaceURI, String localName, String value)
            throws XMLStreamException {
        // attributes can only be attached while the start element is buffered
        if (openStartElement == null) {
            throw new XMLStreamException("No open start element.");
        }
        openStartElement.addAttribute(
                XMLSecEventFactory.createXMLSecAttribute(
                        new QName(namespaceURI, localName, prefix), value));
    }

    @Override
    public void writeNamespace(String prefix, String namespaceURI) throws XMLStreamException {
        if (openStartElement == null) {
            throw new XMLStreamException("No open start element.");
        }
        this.openStartElement.addNamespace(XMLSecEventFactory.createXMLSecNamespace(prefix, namespaceURI));
    }

    @Override
    public void writeDefaultNamespace(String namespaceURI) throws XMLStreamException {
        if (openStartElement == null) {
            throw new XMLStreamException("No open start element.");
        }
        //workaround for sun's stax parser
        // NOTE(review): if the buffered element has no prefix, its namespace is
        // rewritten to the default namespace being declared here
        if (this.openStartElement.getElementPrefix().equals(XMLConstants.DEFAULT_NS_PREFIX)) {
            this.openStartElement.setElementNamespace(namespaceURI);
            this.openStartElement.setElementPrefix(XMLConstants.DEFAULT_NS_PREFIX);
        }
        this.openStartElement.addNamespace(
                XMLSecEventFactory.createXMLSecNamespace(XMLConstants.DEFAULT_NS_PREFIX, namespaceURI));
    }

    @Override
    public void writeComment(String data) throws XMLStreamException {
        outputOpenStartElement();
        chainProcessEvent(XMLSecEventFactory.createXMLSecComment(data));
    }

    @Override
    public void writeProcessingInstruction(String target) throws XMLStreamException {
        // no data: reuse the empty string constant as the PI data
        writeProcessingInstruction(target, XMLConstants.DEFAULT_NS_PREFIX);
    }

    @Override
    public void writeProcessingInstruction(String target, String data) throws XMLStreamException {
        outputOpenStartElement();
        chainProcessEvent(XMLSecEventFactory.createXMLSecProcessingInstruction(target, data));
    }

    @Override
    public void writeCData(String data) throws XMLStreamException {
        outputOpenStartElement();
        chainProcessEvent(XMLSecEventFactory.createXMLSecCData(data));
    }

    @Override
    public void writeDTD(String dtd) throws XMLStreamException {
        // a DTD is only legal in the prolog, i.e. before the root element
        // (the message text "proLOG" is kept as-is; it is a runtime string)
        if (elementStack != null) {
            throw new XMLStreamException("Not in proLOG");
        }
        chainProcessEvent(XMLSecEventFactory.createXMLSecDTD(dtd));
    }

    @Override
    public void writeEntityRef(final String name) throws XMLStreamException {
        outputOpenStartElement();
        chainProcessEvent(
                XMLSecEventFactory.createXMLSecEntityReference(
                        name,
                        XMLSecEventFactory.createXmlSecEntityDeclaration(name)
                )
        );
    }

    @Override
    public void writeStartDocument() throws XMLStreamException {
        writeStartDocument(null, null);
    }

    @Override
    public void writeStartDocument(String version) throws XMLStreamException {
        writeStartDocument(null, version);
    }

    @Override
    public void writeStartDocument(String encoding, String version) throws XMLStreamException {
        chainProcessEvent(XMLSecEventFactory.createXmlSecStartDocument(null, encoding, null, version));
    }

    @Override
    public void writeCharacters(String text) throws XMLStreamException {
        outputOpenStartElement();
        chainProcessEvent(XMLSecEventFactory.createXmlSecCharacters(text));
    }

    @Override
    public void writeCharacters(char[] text, int start, int len) throws XMLStreamException {
        outputOpenStartElement();
        chainProcessEvent(XMLSecEventFactory.createXmlSecCharacters(text, start, len));
    }

    @Override
    public String getPrefix(String uri) throws XMLStreamException {
        return getNamespacePrefix(uri);
    }

    @Override
    public void setPrefix(String prefix, String uri) throws XMLStreamException {
        // bind on the innermost element's context when inside the document,
        // otherwise on the writer-level context
        if (elementStack == null) {
            this.namespaceContext.add(prefix, uri);
        } else {
            this.elementStack.getNamespaceContext().add(prefix, uri);
        }
    }

    @Override
    public void setDefaultNamespace(String uri) throws XMLStreamException {
        if (elementStack == null) {
            this.namespaceContext.add(XMLConstants.DEFAULT_NS_PREFIX, uri);
        } else {
            this.elementStack.getNamespaceContext().add(XMLConstants.DEFAULT_NS_PREFIX, uri);
        }
    }

    @Override
    public void setNamespaceContext(NamespaceContext context) throws XMLStreamException {
        if (context == null) {
            throw new NullPointerException("context must not be null");
        }
        // the supplied context becomes the parent of a fresh writable context
        this.namespaceContext = new NSContext(context);
    }

    @Override
    public NamespaceContext getNamespaceContext() {
        if (this.elementStack == null) {
            return namespaceContext;
        }
        return elementStack.getNamespaceContext();
    }

    @Override
    public Object getProperty(String name) throws IllegalArgumentException {
        throw new IllegalArgumentException("Properties not supported");
    }

    public SecurePart getSignEntireRequestPart() {
        return signEntireRequestPart;
    }

    public void setSignEntireRequestPart(SecurePart signEntireRequestPart) {
        this.signEntireRequestPart = signEntireRequestPart;
    }

    public SecurePart getEncryptEntireRequestPart() {
        return encryptEntireRequestPart;
    }

    public void setEncryptEntireRequestPart(SecurePart encryptEntireRequestPart) {
        this.encryptEntireRequestPart = encryptEntireRequestPart;
    }

    /**
     * A pending/open element: its QName, collected attributes and namespace
     * declarations, and a lazily-built namespace context chained to the parent.
     */
    private static class Element {

        private Element parentElement;
        // cached QName; reset to null whenever namespace or prefix changes
        private QName qName;
        private String elementName;
        private String elementNamespace;
        private String elementPrefix;
        private boolean emptyElement;
        // start as the shared immutable empty list; swapped for an ArrayList
        // on first add (identity-compared in addNamespace/addAttribute)
        private List<XMLSecNamespace> namespaces = Collections.emptyList();
        private List<XMLSecAttribute> attributes = Collections.emptyList();
        private NSContext namespaceContext;

        public Element(Element parentElement, String elementNamespace, String elementName,
                       String elementPrefix) {
            this(parentElement, null, elementNamespace, elementName, elementPrefix);
        }

        public Element(Element parentElement, NSContext namespaceContext, String elementNamespace,
                       String elementName, String elementPrefix) {
            this.parentElement = parentElement;
            this.namespaceContext = namespaceContext;
            this.elementName = elementName;
            setElementNamespace(elementNamespace);
            setElementPrefix(elementPrefix);
        }

        private Element getParentElement() {
            return parentElement;
        }

        private void setEmptyElement(boolean emptyElement) {
            this.emptyElement = emptyElement;
        }

        private String getElementName() {
            return elementName;
        }

        private String getElementNamespace() {
            return elementNamespace;
        }

        private void setElementNamespace(String elementNamespace) {
            // normalize null to the NULL_NS_URI constant required by QName
            if (elementNamespace == null) {
                this.elementNamespace = XMLConstants.NULL_NS_URI;
            } else {
                this.elementNamespace = elementNamespace;
            }
            this.qName = null;
        }

        private String getElementPrefix() {
            return elementPrefix;
        }

        private void setElementPrefix(String elementPrefix) {
            // normalize null to the default (empty) prefix
            if (elementPrefix == null) {
                this.elementPrefix = XMLConstants.DEFAULT_NS_PREFIX;
            } else {
                this.elementPrefix = elementPrefix;
            }
            this.qName = null;
        }

        private List<XMLSecNamespace> getNamespaces() {
            return namespaces;
        }

        private void addNamespace(XMLSecNamespace namespace) {
            if (this.namespaces == Collections.<XMLSecNamespace>emptyList()) {
                this.namespaces = new ArrayList<>(1);
            }
            this.namespaces.add(namespace);
            //also add namespace to namespace-context
            getNamespaceContext().add(namespace.getPrefix(), namespace.getNamespaceURI());
        }

        private List<XMLSecAttribute> getAttributes() {
            return attributes;
        }

        private void addAttribute(XMLSecAttribute attribute) {
            if (this.attributes == Collections.<XMLSecAttribute>emptyList()) {
                this.attributes = new ArrayList<>(1);
            }
            this.attributes.add(attribute);
        }

        private NSContext getNamespaceContext() {
            if (this.namespaceContext == null) {
                if (emptyElement) {
                    // NOTE(review): an empty element shares the parent's
                    // context directly instead of opening a child scope —
                    // presumably because it can never contain children; verify
                    this.namespaceContext = parentElement.getNamespaceContext();
                } else if (parentElement != null) {
                    this.namespaceContext = new NSContext(parentElement.getNamespaceContext());
                } else {
                    this.namespaceContext = new NSContext(null);
                }
            }
            return this.namespaceContext;
        }

        private QName getQName() {
            if (this.qName == null) {
                this.qName = new QName(this.getElementNamespace(), this.getElementName(),
                        this.getElementPrefix());
            }
            return this.qName;
        }
    }

    /**
     * Simple chained namespace context. Bindings are kept in a flat list of
     * alternating (prefix, namespaceURI) pairs; lookups fall back to the
     * parent context when no local binding matches.
     */
    private static class NSContext implements NamespaceContext {

        private NamespaceContext parentNamespaceContext;
        // flat alternating storage: even index = prefix, odd index = namespace
        private List<String> prefixNsList = Collections.emptyList();

        NSContext(NamespaceContext parentNamespaceContext) {
            this.parentNamespaceContext = parentNamespaceContext;
        }

        @Override
        public String getNamespaceURI(String prefix) {
            // scan local prefixes (even slots), then delegate to the parent
            for (int i = 0; i < prefixNsList.size(); i += 2) {
                String s = prefixNsList.get(i);
                if (s.equals(prefix)) {
                    return prefixNsList.get(i + 1);
                }
            }
            if (parentNamespaceContext != null) {
                return parentNamespaceContext.getNamespaceURI(prefix);
            }
            return null;
        }

        @Override
        public String getPrefix(String namespaceURI) {
            // scan local namespaces (odd slots), then delegate to the parent
            for (int i = 1; i < prefixNsList.size(); i += 2) {
                String s = prefixNsList.get(i);
                if (s.equals(namespaceURI)) {
                    return prefixNsList.get(i - 1);
                }
            }
            if (parentNamespaceContext != null) {
                return parentNamespaceContext.getPrefix(namespaceURI);
            }
            return null;
        }

        @SuppressWarnings("rawtypes")
        @Override
        public Iterator getPrefixes(String namespaceURI) {
            // collect all local matches, then append the parent's matches
            List<String> prefixes = new ArrayList<>(1);
            for (int i = 1; i < prefixNsList.size(); i += 2) {
                String s = prefixNsList.get(i);
                if (s.equals(namespaceURI)) {
                    prefixes.add(prefixNsList.get(i - 1));
                }
            }
            if (parentNamespaceContext != null) {
                @SuppressWarnings("unchecked")
                Iterator<String> parentPrefixes = parentNamespaceContext.getPrefixes(namespaceURI);
                while (parentPrefixes.hasNext()) {
                    prefixes.add(parentPrefixes.next());
                }
            }
            return prefixes.iterator();
        }

        private void add(String prefix, String namespace) {
            if (this.prefixNsList == Collections.<String>emptyList()) {
                this.prefixNsList = new ArrayList<>(1);
            }
            this.prefixNsList.add(prefix);
            this.prefixNsList.add(namespace);
        }
    }
}
/*
 * Copyright 2018-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.stream.function;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Date;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;

import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Ignore;
import org.junit.Test;

import org.springframework.boot.WebApplicationType;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.boot.test.web.client.TestRestTemplate;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.binder.test.InputDestination;
import org.springframework.cloud.stream.binder.test.OutputDestination;
import org.springframework.cloud.stream.binder.test.TestChannelBinderConfiguration;
import org.springframework.cloud.stream.converter.CompositeMessageConverterFactory;
import org.springframework.cloud.stream.messaging.Source;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.http.HttpMethod;
import org.springframework.integration.channel.DirectChannel;
import org.springframework.integration.channel.QueueChannel;
import org.springframework.integration.dsl.IntegrationFlow;
import org.springframework.integration.dsl.IntegrationFlows;
import org.springframework.integration.http.dsl.Http;
import org.springframework.integration.http.dsl.HttpRequestHandlerEndpointSpec;
import org.springframework.integration.http.inbound.HttpRequestHandlingEndpointSupport;
import org.springframework.messaging.Message;
import org.springframework.messaging.MessageChannel;
import org.springframework.messaging.PollableChannel;
import org.springframework.messaging.support.GenericMessage;
import org.springframework.messaging.support.MessageBuilder;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * This test validates proper function binding for applications where EnableBinding is
 * declared.
 *
 * @author Oleg Zhurakousky
 * @author Artem Bilan
 */
public class GreenfieldFunctionEnableBindingTests {

	/**
	 * A Supplier-backed source should emit its value repeatedly; the test
	 * receives two messages and converts each back to a Date.
	 */
	@Test
	public void testSourceFromSupplier() {
		try (ConfigurableApplicationContext context = new SpringApplicationBuilder(
				TestChannelBinderConfiguration
						.getCompleteConfiguration(SourceFromSupplier.class))
								.web(WebApplicationType.NONE)
								.run("--spring.cloud.function.definition=date",
										"--spring.jmx.enabled=false")) {
			OutputDestination target = context.getBean(OutputDestination.class);
			Message<byte[]> sourceMessage = target.receive(10000);
			Date date = (Date) new CompositeMessageConverterFactory()
					.getMessageConverterForAllRegistered()
					.fromMessage(sourceMessage, Date.class);
			assertThat(date).isEqualTo(new Date(12345L));
			sourceMessage = target.receive(10000);
			date = (Date) new CompositeMessageConverterFactory()
					.getMessageConverterForAllRegistered()
					.fromMessage(sourceMessage, Date.class);
			assertThat(date).isEqualTo(new Date(12345L));
		}
	}

	/**
	 * A Function bound as a processor should transform the inbound payload
	 * and emit the result on the output destination.
	 */
	@Test
	public void testProcessorFromFunction() {
		try (ConfigurableApplicationContext context = new SpringApplicationBuilder(
				TestChannelBinderConfiguration.getCompleteConfiguration(
						ProcessorFromFunction.class)).web(WebApplicationType.NONE).run(
								"--spring.cloud.function.definition=toUpperCase",
								"--spring.jmx.enabled=false")) {
			InputDestination source = context.getBean(InputDestination.class);
			source.send(new GenericMessage<byte[]>("John Doe".getBytes()));
			OutputDestination target = context.getBean(OutputDestination.class);
			assertThat(target.receive(10000).getPayload())
					.isEqualTo("JOHN DOE".getBytes(StandardCharsets.UTF_8));
		}
	}

	/**
	 * A Consumer bound as a sink should receive the inbound payload; the
	 * consumer forwards it to a PollableChannel the test can poll.
	 */
	@Test
	public void testSinkFromConsumer() {
		try (ConfigurableApplicationContext context = new SpringApplicationBuilder(
				TestChannelBinderConfiguration
						.getCompleteConfiguration(SinkFromConsumer.class))
								.web(WebApplicationType.NONE)
								.run("--spring.cloud.function.definition=sink",
										"--spring.jmx.enabled=false")) {
			InputDestination source = context.getBean(InputDestination.class);
			PollableChannel result = context.getBean("result", PollableChannel.class);
			source.send(new GenericMessage<byte[]>("John Doe".getBytes()));
			assertThat(result.receive(10000).getPayload()).isEqualTo("John Doe");
		}
	}

	// disabled: requires a servlet container bound to a random port
	@Test
	@Ignore
	public void testHttpEndpoint() {
		try (ConfigurableApplicationContext context = new SpringApplicationBuilder(
				TestChannelBinderConfiguration.getCompleteConfiguration(
						HttpInboundEndpoint.class)).web(WebApplicationType.SERVLET).run(
								"--spring.cloud.function.definition=upperCase",
								"--spring.jmx.enabled=false", "--server.port=0")) {
			TestRestTemplate restTemplate = new TestRestTemplate();
			restTemplate.postForLocation(
					"http://localhost:" + context.getEnvironment()
							.getProperty("local.server.port"),
					"hello");
			OutputDestination target = context.getBean(OutputDestination.class);
			String result = new String(target.receive(10000).getPayload());
			System.out.println(result);
			assertThat(result).isEqualTo("HELLO");
		}
	}

	// disabled: POJO return value is serialized to JSON and deserialized back
	@Test
	@Ignore
	public void testPojoReturn() throws IOException {
		try (ConfigurableApplicationContext context = new SpringApplicationBuilder(
				TestChannelBinderConfiguration.getCompleteConfiguration(
						FooTransform.class)).web(WebApplicationType.NONE).run(
								"--spring.cloud.function.definition=fooFunction",
								"--spring.jmx" + ".enabled=false",
								"--logging.level.org.springframework.integration=TRACE")) {
			MessageChannel input = context.getBean("input", MessageChannel.class);
			OutputDestination target = context.getBean(OutputDestination.class);
			ObjectMapper mapper = context.getBean(ObjectMapper.class);
			input.send(MessageBuilder.withPayload("bar").build());
			byte[] payload = target.receive(2000).getPayload();
			Foo result = mapper.readValue(payload, Foo.class);
			assertThat(result.getBar()).isEqualTo("bar");
		}
	}

	// minimal app: a Supplier<Date> exposed under the function name "date"
	@EnableAutoConfiguration
	public static class SourceFromSupplier {

		@Bean
		public Supplier<Date> date() {
			return () -> new Date(12345L);
		}

	}

	// minimal app: a Function<String, String> exposed as "toUpperCase"
	@EnableAutoConfiguration
	public static class ProcessorFromFunction {

		@Bean
		public Function<String, String> toUpperCase() {
			return String::toUpperCase;
		}

	}

	// minimal app: a Consumer exposed as "sink" that republishes to "result"
	@EnableAutoConfiguration
	public static class SinkFromConsumer {

		@Bean
		public PollableChannel result() {
			return new QueueChannel();
		}

		@Bean
		public Consumer<String> sink(PollableChannel result) {
			return s -> {
				result.send(new GenericMessage<String>(s));
				System.out.println(s);
			};
		}

	}

	// app exposing an HTTP inbound adapter feeding the Source output channel
	@EnableAutoConfiguration
	@EnableBinding(Source.class)
	public static class HttpInboundEndpoint {

		@Bean
		public Function<String, String> upperCase() {
			return String::toUpperCase;
		}

		@Bean
		public HttpRequestHandlingEndpointSupport doFoo(Source source) {
			HttpRequestHandlerEndpointSpec httpRequestHandler = Http
					.inboundChannelAdapter("/*")
					.requestMapping(requestMapping -> requestMapping
							.methods(HttpMethod.POST).consumes("*/*"))
					.requestChannel(source.output());
			return httpRequestHandler.get();
		}

	}

	// app bridging an explicit input channel into the Source output and
	// exposing a Message-in / Message-out function returning a POJO
	@EnableAutoConfiguration
	@EnableBinding(Source.class)
	public static class FooTransform {

		@Bean
		public MessageChannel input() {
			return new DirectChannel();
		}

		@Bean
		public IntegrationFlow flow() {
			return IntegrationFlows.from(input()).bridge().channel(Source.OUTPUT).get();
		}

		@Bean
		public Function<Message<?>, Message<?>> fooFunction() {
			return m -> {
				Foo foo = new Foo();
				foo.setBar(m.getPayload().toString());
				return MessageBuilder.withPayload(foo).setHeader("foo", "foo").build();
			};
		}

	}

	// simple JSON-serializable payload used by testPojoReturn
	static class Foo {

		String bar;

		public String getBar() {
			return this.bar;
		}

		public void setBar(String bar) {
			this.bar = bar;
		}

	}

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.aries.blueprint.container;

import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.net.URI;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.xml.XMLConstants;
import javax.xml.transform.Source;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;

import org.apache.aries.blueprint.NamespaceHandler;
import org.apache.aries.blueprint.ext.impl.ExtNamespaceHandler;
import org.apache.aries.blueprint.parser.NamespaceHandlerSet;
import org.w3c.dom.ls.LSInput;
import org.w3c.dom.ls.LSResourceResolver;
import org.xml.sax.SAXException;

/**
 * A static, always-complete {@link NamespaceHandlerSet}: namespaces, their
 * schemas and their handlers are registered programmatically via
 * {@link #addNamespace(URI, URL, NamespaceHandler)}. The blueprint-ext 1.2
 * namespace is pre-registered. Listener registration is unsupported because
 * the set never changes after construction.
 */
public class SimpleNamespaceHandlerSet implements NamespaceHandlerSet {

    public static final URI EXT_1_2_NAMESPACE = URI.create("http://aries.apache.org/blueprint/xmlns/blueprint-ext/v1.2.0");

    // namespace URI -> schema resource location (insertion order preserved:
    // schema sources are compiled in registration order)
    private Map<URI, URL> namespaces;
    // namespace URI -> handler for elements of that namespace
    private Map<URI, NamespaceHandler> handlers;
    // lazily built combined schema; reset by destroy()
    private Schema schema;

    public SimpleNamespaceHandlerSet() {
        this.namespaces = new LinkedHashMap<URI, URL>();
        this.handlers = new LinkedHashMap<URI, NamespaceHandler>();
        // the blueprint-ext namespace is always available
        addNamespace(EXT_1_2_NAMESPACE,
                     getClass().getResource("/org/apache/aries/blueprint/ext/impl/blueprint-ext-1.2.xsd"),
                     new ExtNamespaceHandler());
    }

    public Set<URI> getNamespaces() {
        return Collections.unmodifiableSet(namespaces.keySet());
    }

    /**
     * Registers a namespace with its schema location and handler.
     * NOTE(review): does not invalidate a previously built {@link #schema};
     * call before the first getSchema() (or after destroy()) — verify callers.
     */
    public void addNamespace(URI namespace, URL schema, NamespaceHandler handler) {
        namespaces.put(namespace, schema);
        handlers.put(namespace, handler);
    }

    // all handlers are registered up-front, so the set is always complete
    public boolean isComplete() {
        return true;
    }

    public NamespaceHandler getNamespaceHandler(URI uri) {
        return handlers.get(uri);
    }

    /**
     * Builds (once) a combined Schema from the core blueprint XSD plus every
     * registered namespace schema. Relative schema references are resolved
     * against the registered schema URL, with a special case for jar: URLs.
     *
     * @throws SAXException if schema compilation fails
     * @throws IOException if a schema resource cannot be read
     */
    public Schema getSchema() throws SAXException, IOException {
        if (schema == null) {
            final List<StreamSource> schemaSources = new ArrayList<StreamSource>();
            // opened streams are tracked so they can all be closed afterwards
            final List<InputStream> streams = new ArrayList<InputStream>();
            try {
                // the core blueprint schema always comes first
                InputStream is = getClass().getResourceAsStream("/org/apache/aries/blueprint/blueprint.xsd");
                streams.add(is);
                schemaSources.add(new StreamSource(is));
                for (URI uri : namespaces.keySet()) {
                    is = namespaces.get(uri).openStream();
                    streams.add(is);
                    schemaSources.add(new StreamSource(is));
                }
                SchemaFactory schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
                // resolve xsd:import/xsd:include references against the
                // registered schema URLs instead of hitting the network
                schemaFactory.setResourceResolver(new LSResourceResolver() {
                    public LSInput resolveResource(String type, String namespace,
                                                   String publicId, String systemId, String baseURI) {
                        try {
                            URL namespaceURL = namespaces.get(URI.create(namespace));
                            if (systemId != null && namespaceURL != null) {
                                URI systemIdUri = namespaceURL.toURI();
                                if (!URI.create(systemId).isAbsolute()) {
                                    systemIdUri = systemIdUri.resolve(systemId);
                                }
                                if (!systemIdUri.isAbsolute() && "jar".equals(namespaceURL.getProtocol())) {
                                    // URI.resolve does not work on opaque jar: URIs;
                                    // resolve the relative reference inside the
                                    // "!"-fragment of the jar URL by hand
                                    String urlString = namespaceURL.toString();
                                    int jarFragmentIndex = urlString.lastIndexOf('!');
                                    if (jarFragmentIndex > 0 && jarFragmentIndex < urlString.length() - 1) {
                                        String jarUrlOnly = urlString.substring(0, jarFragmentIndex);
                                        String oldFragment = urlString.substring(jarFragmentIndex + 1);
                                        String newFragment = URI.create(oldFragment).resolve(systemId).toString();
                                        String newJarUri = jarUrlOnly + '!' + newFragment;
                                        systemIdUri = URI.create(newJarUri);
                                    }
                                }
                                InputStream resourceStream = systemIdUri.toURL().openStream();
                                return new LSInputImpl(publicId, systemId, resourceStream);
                            }
                        } catch (Exception ex) {
                            // ignore — fall through to the default resolution
                        }
                        return null;
                    }
                });
                schema = schemaFactory.newSchema(schemaSources.toArray(new Source[schemaSources.size()]));
            } finally {
                for (InputStream is : streams) {
                    is.close();
                }
            }
        }
        return schema;
    }

    // the set is immutable from the parser's point of view: listeners would
    // never fire, so registration is rejected outright
    public void addListener(Listener listener) {
        throw new IllegalStateException();
    }

    public void removeListener(Listener listener) {
        throw new IllegalStateException();
    }

    // drops the cached schema; it will be rebuilt on the next getSchema()
    public void destroy() {
        schema = null;
    }

    /**
     * Minimal {@link LSInput} carrying a public id, system id and byte stream
     * for the resource resolver above.
     */
    private static class LSInputImpl implements LSInput {

        protected String fPublicId;

        protected String fSystemId;

        protected String fBaseSystemId;

        protected InputStream fByteStream;

        protected Reader fCharStream;

        protected String fData;

        protected String fEncoding;

        protected boolean fCertifiedText;

        LSInputImpl(String publicId, String systemId, InputStream byteStream) {
            fPublicId = publicId;
            fSystemId = systemId;
            fByteStream = byteStream;
        }

        public InputStream getByteStream() {
            return fByteStream;
        }

        public void setByteStream(InputStream byteStream) {
            fByteStream = byteStream;
        }

        public Reader getCharacterStream() {
            return fCharStream;
        }

        public void setCharacterStream(Reader characterStream) {
            fCharStream = characterStream;
        }

        public String getStringData() {
            return fData;
        }

        public void setStringData(String stringData) {
            fData = stringData;
        }

        public String getEncoding() {
            return fEncoding;
        }

        public void setEncoding(String encoding) {
            fEncoding = encoding;
        }

        public String getPublicId() {
            return fPublicId;
        }

        public void setPublicId(String publicId) {
            fPublicId = publicId;
        }

        public String getSystemId() {
            return fSystemId;
        }

        public void setSystemId(String systemId) {
            fSystemId = systemId;
        }

        public String getBaseURI() {
            return fBaseSystemId;
        }

        public void setBaseURI(String baseURI) {
            fBaseSystemId = baseURI;
        }

        public boolean getCertifiedText() {
            return fCertifiedText;
        }

        public void setCertifiedText(boolean certifiedText) {
            fCertifiedText = certifiedText;
        }

    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.kylin.job;

import static org.junit.Assert.assertEquals;

import java.io.File;
import java.io.IOException;
import java.lang.reflect.Method;
import java.text.SimpleDateFormat;
import java.util.List;
import java.util.TimeZone;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.util.ToolRunner;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.lock.ZookeeperJobLock;
import org.apache.kylin.common.util.AbstractKylinTestCase;
import org.apache.kylin.common.util.ClassUtil;
import org.apache.kylin.common.util.HBaseMetadataTestCase;
import org.apache.kylin.invertedindex.IIInstance;
import org.apache.kylin.invertedindex.IIManager;
import org.apache.kylin.invertedindex.IISegment;
import org.apache.kylin.job.engine.JobEngineConfig;
import org.apache.kylin.job.execution.AbstractExecutable;
import org.apache.kylin.job.execution.ExecutableState;
import org.apache.kylin.job.hadoop.cube.StorageCleanupJob;
import org.apache.kylin.job.impl.threadpool.DefaultScheduler;
import org.apache.kylin.job.invertedindex.IIJob;
import org.apache.kylin.job.invertedindex.IIJobBuilder;
import org.apache.kylin.job.manager.ExecutableManager;
import org.apache.kylin.metadata.realization.RealizationStatusEnum;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

import com.google.common.collect.Lists;

/**
 * Sandbox integration test that builds the {@code test_kylin_ii} inverted
 * index through the real job engine and asserts every submitted job succeeds.
 *
 * @author shaoshi
 */
public class BuildIIWithEngineTest {

    private JobEngineConfig jobEngineConfig;
    private IIManager iiManager;
    private DefaultScheduler scheduler;

    protected ExecutableManager jobService;

    protected static final String TEST_II_NAME = "test_kylin_ii";

    // FIX: the logger was created with BuildCubeWithEngineTest.class (a
    // copy-paste from the cube test), mis-attributing every log line.
    private static final Log logger = LogFactory.getLog(BuildIIWithEngineTest.class);

    /**
     * Polls the job service every 5 seconds until the job reaches a terminal
     * state (SUCCEED or ERROR).
     *
     * @param jobId id of the job to wait for
     */
    protected void waitForJob(String jobId) {
        while (true) {
            AbstractExecutable job = jobService.getJob(jobId);
            if (job.getStatus() == ExecutableState.SUCCEED || job.getStatus() == ExecutableState.ERROR) {
                break;
            } else {
                try {
                    Thread.sleep(5000);
                } catch (InterruptedException e) {
                    // FIX: previously swallowed with printStackTrace(), losing the
                    // interrupt and looping forever. Restore the flag and fail fast.
                    Thread.currentThread().interrupt();
                    throw new RuntimeException("Interrupted while waiting for job " + jobId, e);
                }
            }
        }
    }

    @BeforeClass
    public static void beforeClass() throws Exception {
        logger.info("Adding to classpath: " + new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
        ClassUtil.addClasspath(new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath());
        System.setProperty("hdp.version", "2.2.4.2-2"); // mapred-site.xml ref this
    }

    @Before
    public void before() throws Exception {
        HBaseMetadataTestCase.staticCreateTestMetadata(AbstractKylinTestCase.SANDBOX_TEST_DATA);

        DeployUtil.initCliWorkDir();
        // DeployUtil.deployMetadata();
        DeployUtil.overrideJobJarLocations();

        final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
        jobService = ExecutableManager.getInstance(kylinConfig);
        scheduler = DefaultScheduler.getInstance();
        scheduler.init(new JobEngineConfig(kylinConfig), new ZookeeperJobLock());
        if (!scheduler.hasStarted()) {
            throw new RuntimeException("scheduler has not been started");
        }
        iiManager = IIManager.getInstance(kylinConfig);
        jobEngineConfig = new JobEngineConfig(kylinConfig);
        // Remove any leftover II jobs so previous runs cannot pollute this one.
        for (String jobId : jobService.getAllJobIds()) {
            if (jobService.getJob(jobId) instanceof IIJob) {
                jobService.deleteJob(jobId);
            }
        }
        // The II must be DISABLED while (re)building it.
        IIInstance ii = iiManager.getII(TEST_II_NAME);
        if (ii.getStatus() != RealizationStatusEnum.DISABLED) {
            ii.setStatus(RealizationStatusEnum.DISABLED);
            iiManager.updateII(ii);
        }
    }

    @After
    public void after() throws Exception {
        // Restore the II to READY and archive the sandbox HBase data.
        IIInstance ii = iiManager.getII(TEST_II_NAME);
        if (ii.getStatus() != RealizationStatusEnum.READY) {
            ii.setStatus(RealizationStatusEnum.READY);
            iiManager.updateII(ii);
        }
        backup();
    }

    @Test
    public void testBuildII() throws Exception {
        // Each entry names a build method on this class, run concurrently via
        // reflection (see TestCallable); currently just the full build.
        String[] testCase = new String[] { "buildII" };
        ExecutorService executorService = Executors.newFixedThreadPool(testCase.length);
        final CountDownLatch countDownLatch = new CountDownLatch(testCase.length);
        List<Future<List<String>>> tasks = Lists.newArrayListWithExpectedSize(testCase.length);
        for (int i = 0; i < testCase.length; i++) {
            tasks.add(executorService.submit(new TestCallable(testCase[i], countDownLatch)));
        }
        countDownLatch.await();
        for (int i = 0; i < tasks.size(); ++i) {
            Future<List<String>> task = tasks.get(i);
            final List<String> jobIds = task.get();
            for (String jobId : jobIds) {
                assertJobSucceed(jobId);
            }
        }
    }

    private void assertJobSucceed(String jobId) {
        assertEquals(ExecutableState.SUCCEED, jobService.getOutput(jobId).getState());
    }

    /**
     * Invokes a named build method of the enclosing test reflectively and
     * returns the job ids it produced; counts down the shared latch on exit.
     */
    private class TestCallable implements Callable<List<String>> {

        private final String methodName;
        private final CountDownLatch countDownLatch;

        public TestCallable(String methodName, CountDownLatch countDownLatch) {
            this.methodName = methodName;
            this.countDownLatch = countDownLatch;
        }

        @SuppressWarnings("unchecked")
        @Override
        public List<String> call() throws Exception {
            try {
                final Method method = BuildIIWithEngineTest.class.getDeclaredMethod(methodName);
                method.setAccessible(true);
                return (List<String>) method.invoke(BuildIIWithEngineTest.this);
            } finally {
                countDownLatch.countDown();
            }
        }
    }

    /**
     * Full (non-incremental) build of the test II; returns the job ids created.
     */
    protected List<String> buildII() throws Exception {
        clearSegment(TEST_II_NAME);

        SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd");
        f.setTimeZone(TimeZone.getTimeZone("GMT"));

        // this cube's start date is 0, end date is 2015-1-1
        long date1 = 0;
        long date2 = f.parse("2015-01-01").getTime();

        // this cube doesn't support incremental build, always do full build
        List<String> result = Lists.newArrayList();
        result.add(buildSegment(TEST_II_NAME, date1, date2));
        return result;
    }

    private void clearSegment(String iiName) throws Exception {
        IIInstance ii = iiManager.getII(iiName);
        ii.getSegments().clear();
        iiManager.updateII(ii);
    }

    private String buildSegment(String iiName, long startDate, long endDate) throws Exception {
        IIInstance iiInstance = iiManager.getII(iiName);
        IISegment segment = iiManager.buildSegment(iiInstance, startDate, endDate);
        iiInstance.getSegments().add(segment);
        iiManager.updateII(iiInstance);
        IIJobBuilder iiJobBuilder = new IIJobBuilder(jobEngineConfig);
        IIJob job = iiJobBuilder.buildJob(segment);
        jobService.addJob(job);
        waitForJob(job.getId());
        return job.getId();
    }

    /**
     * Runs the storage cleanup job; returns its exit code (0 = success).
     */
    private int cleanupOldStorage() throws Exception {
        String[] args = { "--delete", "true" };
        int exitCode = ToolRunner.run(new StorageCleanupJob(), args);
        return exitCode;
    }

    private void backup() throws Exception {
        int exitCode = cleanupOldStorage();
        if (exitCode == 0) {
            exportHBaseData();
        }
    }

    private void exportHBaseData() throws IOException {
        ExportHBaseData export = new ExportHBaseData();
        export.exportTables();
        export.tearDown();
    }

    public static void main(String[] args) throws Exception {
        BuildIIWithEngineTest instance = new BuildIIWithEngineTest();
        BuildIIWithEngineTest.beforeClass();
        instance.before();
        instance.testBuildII();
        instance.after();
    }
}
/**
 * Main test-control fragment for the Arduino/Bluetooth test application.
 */
package org.egokituz.arduino2android.fragments;

import java.util.ArrayList;
import java.util.Collections;

import org.egokituz.arduino2android.R;
import org.egokituz.arduino2android.TestApplication;
import org.egokituz.arduino2android.activities.MainActivity;

import android.bluetooth.BluetoothAdapter;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ListView;
import android.widget.Spinner;

/**
 * Main {@link Fragment} of the {@link MainActivity}. It contains the control buttons for a new test.
 *
 * @author Xabier Gardeazabal
 */
public class TestSectionFragment extends Fragment {

    private static final String TAG = "TestSectionFragment";

    public static final int REQUEST_ENABLE_BT_RESULT = 1;

    Spinner spinnerBluetooth;
    ListView devicesListView;

    /**
     * Main context from the MainActivity
     */
    private Context m_mainContext;

    /**
     * The main Application for centralized data management and test control
     */
    private TestApplication m_mainApp;

    // Tags stored on the test button to track its toggle state.
    private final int m_status_initial = 0;
    private final int m_status_ongoingTest = 1;

    private Button m_testButton;

    /**
     * Constructor
     */
    public TestSectionFragment() {
        super();
    }

    /**
     * Supplies the fragment's collaborators (must be called before the view is created).
     *
     * @param c   The main context
     * @param app The main Application for centralized data management and test control
     */
    public void setArguments(Context c, TestApplication app) {
        m_mainContext = c;
        m_mainApp = app;
    }

    // this method is only called once for this fragment
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // retain this fragment (so that when the activity's state changes,
        // the configuration of this fragment is not lost)
        setRetainInstance(true);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        // Load the layout of this fragment
        View rootView = inflater.inflate(R.layout.fragment_section_main_activity, container, false);

        // Action of the "Begin test" button onClick event.
        // The button doubles as a start/stop toggle, keyed by its tag.
        m_testButton = (Button) rootView.findViewById(R.id.buttonBeginTest);
        if (m_mainApp.isTestOngoing()) {
            m_testButton.setTag(m_status_ongoingTest);
            m_testButton.setText(getResources().getString(R.string.stopTestButton));
        } else {
            m_testButton.setTag(m_status_initial);
            m_testButton.setText(getResources().getString(R.string.beginTestButton));
        }
        m_testButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                int status = (Integer) view.getTag();
                switch (status) {
                case m_status_initial:
                    requestBluetoothEnable();
                    m_mainApp.beginTest();
                    m_testButton.setText(getResources().getString(R.string.stopTestButton));
                    break;
                case m_status_ongoingTest:
                    m_mainApp.stopTest();
                    m_testButton.setText(getResources().getString(R.string.beginTestButton));
                    break;
                default:
                    break;
                }
            }
        });

        // Action of the "Refresh list" button onClick event
        rootView.findViewById(R.id.buttonRefresh).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                updateSpinner();
            }
        });

        return rootView;
    }

    /**
     * Prompts the user to enable Bluetooth when the device supports it but it
     * is currently disabled. No-op on devices without a Bluetooth adapter.
     */
    private void requestBluetoothEnable() {
        BluetoothAdapter _BluetoothAdapter = BluetoothAdapter.getDefaultAdapter();

        // Check if this device supports Bluetooth
        if (_BluetoothAdapter == null) {
            // FIX: previously fell through and dereferenced the null adapter
            // below, crashing on devices without Bluetooth support.
            Log.e(TAG, "This device does not support Bluetooth");
            return;
        }

        // If Bluetooth is not already enabled, prompt and ask the user to enable it
        if (!_BluetoothAdapter.isEnabled()) {
            Log.e(TAG, "Bluetooth disabled");
            Log.v(TAG, "Asking for user permission to activate Bluetooth");
            Intent enableBtIntent = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE);
            //enableBtIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
            //m_mainContext.startActivity(enableBtIntent);
            // Start a new activity to turn Bluetooth ON
            //((Activity) m_mainContext).startActivityForResult(enableBtIntent, REQUEST_ENABLE_BT_RESULT);
            startActivityForResult(enableBtIntent, REQUEST_ENABLE_BT_RESULT);
            //TODO implement onActivityResult in main Activity
        }
    }

    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        // Check which request we're responding to
        if (requestCode == REQUEST_ENABLE_BT_RESULT) {
            // Bluetooth enable requested
            switch (resultCode) {
            case android.app.Activity.RESULT_OK:
                Log.v(TAG, "Jay! User enabled Bluetooth!");
                //this.spinnerBluetooth.setClickable(true);
                break;
            case android.app.Activity.RESULT_CANCELED:
                Log.v(TAG, "User did not enable Bluetooth");
                //this.spinnerBluetooth.setSelected(false);
                //this.spinnerBluetooth.setClickable(false);
                break;
            }
        }
    }

    /**
     * Updates the items of the Bluetooth devices' spinner
     */
    public void updateSpinner() {
        try {
            ArrayList<String> threads = new ArrayList<String>();
            Collections.addAll(threads, m_mainApp.getBTManager().getConnectedArduinos());
            ArrayAdapter<String> adapter = new ArrayAdapter<String>(m_mainContext,
                    android.R.layout.simple_spinner_item, threads);
            Spinner devSpin = (Spinner) getView().findViewById(R.id.spinnerBluetooth);
            devSpin.setAdapter(adapter);
        } catch (Exception e) {
            // Best-effort refresh: the BT manager or view may not be ready yet.
            // FIX: route through Android logging instead of printStackTrace().
            Log.e(TAG, "Failed to refresh the Bluetooth device spinner", e);
        }
    }

    /**
     * Inquires the Bluetooth-Manager for the currently connected Arduino devices.
     * @return String[] array with the connected device IDs (name-MAC), or null
     *         when the Bluetooth manager is unavailable or not running
     */
    public String[] getConnectedDevices() {
        String[] result = null;
        if (m_mainApp.getBTManager() != null && m_mainApp.getBTManager().isAlive())
            result = m_mainApp.getBTManager().getConnectedArduinos();
        return result;
    }
}
// Copyright 2016 The Bazel Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.devtools.build.lib.syntax;

import static com.google.common.truth.Truth.assertThat;

import com.google.devtools.build.lib.skylarkinterface.SkylarkCallable;
import com.google.devtools.build.lib.skylarkinterface.SkylarkInterfaceUtils;
import com.google.devtools.build.lib.skylarkinterface.SkylarkModule;
import java.lang.reflect.Method;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

/**
 * Test Skylark interface annotations and utilities.
 *
 * <p>NOTE: the mock classes/interfaces below ARE the test fixture — the exact
 * inheritance order, overrides, and presence/absence of annotations determine
 * what the reflection-based lookups return. Do not restructure them.
 */
@RunWith(JUnit4.class)
public class SkylarkInterfaceUtilsTest {

  /** MockClassA: annotated class; foo/bar annotated, baz not. */
  @SkylarkModule(name = "MockClassA", doc = "MockClassA")
  public static class MockClassA {
    @SkylarkCallable(doc = "MockClassA#foo")
    public void foo() {}

    @SkylarkCallable(doc = "MockClassA#bar")
    public void bar() {}

    public void baz() {}
  }

  /** MockInterfaceB1: annotated interface; all three methods annotated. */
  @SkylarkModule(name = "MockInterfaceB1", doc = "MockInterfaceB1")
  public static interface MockInterfaceB1 {
    @SkylarkCallable(doc = "MockInterfaceB1#foo")
    void foo();

    @SkylarkCallable(doc = "MockInterfaceB1#bar")
    void bar();

    @SkylarkCallable(doc = "MockInterfaceB1#baz")
    void baz();
  }

  /** MockInterfaceB2: second annotated interface, overlapping on baz. */
  @SkylarkModule(name = "MockInterfaceB2", doc = "MockInterfaceB2")
  public static interface MockInterfaceB2 {
    @SkylarkCallable(doc = "MockInterfaceB2#baz")
    void baz();

    @SkylarkCallable(doc = "MockInterfaceB2#qux")
    void qux();
  }

  /** MockClassC: annotated subclass combining a superclass and two interfaces. */
  @SkylarkModule(name = "MockClassC", doc = "MockClassC")
  public static class MockClassC extends MockClassA implements MockInterfaceB1, MockInterfaceB2 {
    @Override
    @SkylarkCallable(doc = "MockClassC#foo")
    public void foo() {}

    @Override
    public void bar() {}

    @Override
    public void baz() {}

    @Override
    public void qux() {}
  }

  /** MockClassD: unannotated class whose method annotation should be ignored for module lookup. */
  public static class MockClassD extends MockClassC {
    @Override
    @SkylarkCallable(doc = "MockClassD#foo")
    public void foo() {}
  }

  /** MockClassZ: no Skylark annotations anywhere in its hierarchy. */
  public static class MockClassZ {
  }

  // The tests for getSkylarkModule() double as tests for getParentWithSkylarkModule(),
  // since they share an implementation.

  @Test
  public void testGetSkylarkModuleBasic() throws Exception {
    // Normal case.
    SkylarkModule ann = SkylarkInterfaceUtils.getSkylarkModule(MockClassA.class);
    Class<?> cls = SkylarkInterfaceUtils.getParentWithSkylarkModule(MockClassA.class);
    assertThat(ann).isNotNull();
    assertThat(ann.doc()).isEqualTo("MockClassA");
    assertThat(cls).isNotNull();
    assertThat(cls).isEqualTo(MockClassA.class);
  }

  @Test
  public void testGetSkylarkModuleSubclass() throws Exception {
    // Subclass's annotation is used.
    SkylarkModule ann = SkylarkInterfaceUtils.getSkylarkModule(MockClassC.class);
    Class<?> cls = SkylarkInterfaceUtils.getParentWithSkylarkModule(MockClassC.class);
    assertThat(ann).isNotNull();
    assertThat(ann.doc()).isEqualTo("MockClassC");
    assertThat(cls).isNotNull();
    assertThat(cls).isEqualTo(MockClassC.class);
  }

  @Test
  public void testGetSkylarkModuleSubclassNoSubannotation() throws Exception {
    // Falls back on superclass's annotation.
    SkylarkModule ann = SkylarkInterfaceUtils.getSkylarkModule(MockClassD.class);
    Class<?> cls = SkylarkInterfaceUtils.getParentWithSkylarkModule(MockClassD.class);
    assertThat(ann).isNotNull();
    assertThat(ann.doc()).isEqualTo("MockClassC");
    assertThat(cls).isNotNull();
    assertThat(cls).isEqualTo(MockClassC.class);
  }

  @Test
  public void testGetSkylarkModuleNotFound() throws Exception {
    // Doesn't exist.
    SkylarkModule ann = SkylarkInterfaceUtils.getSkylarkModule(MockClassZ.class);
    Class<?> cls = SkylarkInterfaceUtils.getParentWithSkylarkModule(MockClassZ.class);
    assertThat(ann).isNull();
    assertThat(cls).isNull();
  }

  @Test
  public void testGetSkylarkCallableBasic() throws Exception {
    // Normal case. Ensure two-arg form is consistent with one-arg form.
    Method method = MockClassA.class.getMethod("foo");
    SkylarkCallable ann = SkylarkInterfaceUtils.getSkylarkCallable(method);
    assertThat(ann).isNotNull();
    assertThat(ann.doc()).isEqualTo("MockClassA#foo");

    SkylarkCallable ann2 = SkylarkInterfaceUtils.getSkylarkCallable(MockClassA.class, method);
    assertThat(ann2).isEqualTo(ann);
  }

  @Test
  public void testGetSkylarkCallableSubclass() throws Exception {
    // Subclass's annotation is used.
    Method method = MockClassC.class.getMethod("foo");
    SkylarkCallable ann = SkylarkInterfaceUtils.getSkylarkCallable(method);
    assertThat(ann).isNotNull();
    assertThat(ann.doc()).isEqualTo("MockClassC#foo");
  }

  @Test
  public void testGetSkylarkCallableSubclassNoSubannotation() throws Exception {
    // Falls back on superclass's annotation. Superclass takes precedence over interface.
    Method method = MockClassC.class.getMethod("bar");
    SkylarkCallable ann = SkylarkInterfaceUtils.getSkylarkCallable(method);
    assertThat(ann).isNotNull();
    assertThat(ann.doc()).isEqualTo("MockClassA#bar");
  }

  @Test
  public void testGetSkylarkCallableTwoargForm() throws Exception {
    // Ensure that when passing superclass in directly, we bypass subclass's annotation.
    Method method = MockClassC.class.getMethod("foo");
    SkylarkCallable ann = SkylarkInterfaceUtils.getSkylarkCallable(MockClassA.class, method);
    assertThat(ann).isNotNull();
    assertThat(ann.doc()).isEqualTo("MockClassA#foo");
  }

  @Test
  public void testGetSkylarkCallableNotFound() throws Exception {
    // Null result when no annotation present...
    Method method = MockClassA.class.getMethod("baz");
    SkylarkCallable ann = SkylarkInterfaceUtils.getSkylarkCallable(method);
    assertThat(ann).isNull();

    // ... including when it's only present in a subclass that was bypassed...
    method = MockClassC.class.getMethod("baz");
    ann = SkylarkInterfaceUtils.getSkylarkCallable(MockClassA.class, method);
    assertThat(ann).isNull();

    // ... or when the method itself is only in the subclass that was bypassed.
    method = MockClassC.class.getMethod("qux");
    ann = SkylarkInterfaceUtils.getSkylarkCallable(MockClassA.class, method);
    assertThat(ann).isNull();
  }

  @Test
  public void testGetSkylarkCallableInterface() throws Exception {
    // Search through parent interfaces. First interface takes priority.
    Method method = MockClassC.class.getMethod("baz");
    SkylarkCallable ann = SkylarkInterfaceUtils.getSkylarkCallable(method);
    assertThat(ann).isNotNull();
    assertThat(ann.doc()).isEqualTo("MockInterfaceB1#baz");

    // Make sure both are still traversed.
    method = MockClassC.class.getMethod("qux");
    ann = SkylarkInterfaceUtils.getSkylarkCallable(method);
    assertThat(ann).isNotNull();
    assertThat(ann.doc()).isEqualTo("MockInterfaceB2#qux");
  }

  @Test
  public void testGetSkylarkCallableIgnoreNonModules() throws Exception {
    // Don't return SkylarkCallable annotations in classes and interfaces
    // not marked @SkylarkModule.
    Method method = MockClassD.class.getMethod("foo");
    SkylarkCallable ann = SkylarkInterfaceUtils.getSkylarkCallable(method);
    assertThat(ann).isNotNull();
    assertThat(ann.doc()).isEqualTo("MockClassC#foo");
  }
}
/* * Copyright 2017 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.workbench.common.dmn.client.editors.expressions; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.function.Supplier; import javax.enterprise.event.Event; import com.ait.lienzo.client.core.event.INodeXYEvent; import com.ait.lienzo.client.core.types.Point2D; import com.ait.lienzo.shared.core.types.EventPropagationMode; import org.jboss.errai.ui.client.local.spi.TranslationService; import org.kie.workbench.common.dmn.api.definition.HasExpression; import org.kie.workbench.common.dmn.api.definition.HasName; import org.kie.workbench.common.dmn.api.definition.HasVariable; import org.kie.workbench.common.dmn.api.definition.model.DMNModelInstrumentedBase; import org.kie.workbench.common.dmn.api.definition.model.Expression; import org.kie.workbench.common.dmn.api.property.dmn.Name; import org.kie.workbench.common.dmn.client.commands.factory.DefaultCanvasCommandFactory; import org.kie.workbench.common.dmn.client.commands.general.ClearExpressionTypeCommand; import org.kie.workbench.common.dmn.client.commands.general.SetHasNameCommand; import org.kie.workbench.common.dmn.client.editors.expressions.types.ExpressionEditorDefinitions; import org.kie.workbench.common.dmn.client.editors.expressions.types.context.ExpressionCellValue; import 
org.kie.workbench.common.dmn.client.editors.expressions.types.context.ExpressionEditorColumn; import org.kie.workbench.common.dmn.client.editors.expressions.types.undefined.UndefinedExpressionColumn; import org.kie.workbench.common.dmn.client.resources.i18n.DMNEditorConstants; import org.kie.workbench.common.dmn.client.widgets.grid.BaseExpressionGrid; import org.kie.workbench.common.dmn.client.widgets.grid.BaseGrid; import org.kie.workbench.common.dmn.client.widgets.grid.ExpressionGridCache; import org.kie.workbench.common.dmn.client.widgets.grid.controls.container.CellEditorControlsView; import org.kie.workbench.common.dmn.client.widgets.grid.controls.list.ListSelectorView; import org.kie.workbench.common.dmn.client.widgets.grid.model.BaseUIModelMapper; import org.kie.workbench.common.dmn.client.widgets.grid.model.DMNGridColumn; import org.kie.workbench.common.dmn.client.widgets.grid.model.DMNGridData; import org.kie.workbench.common.dmn.client.widgets.grid.model.ExpressionEditorGridRow; import org.kie.workbench.common.dmn.client.widgets.grid.model.GridCellTuple; import org.kie.workbench.common.dmn.client.widgets.layer.DMNGridLayer; import org.kie.workbench.common.stunner.core.client.api.SessionManager; import org.kie.workbench.common.stunner.core.client.canvas.AbstractCanvasHandler; import org.kie.workbench.common.stunner.core.client.canvas.event.selection.DomainObjectSelectionEvent; import org.kie.workbench.common.stunner.core.client.command.SessionCommandManager; import org.kie.workbench.common.stunner.forms.client.event.RefreshFormPropertiesEvent; import org.uberfire.ext.wires.core.grids.client.model.GridCell; import org.uberfire.ext.wires.core.grids.client.model.GridCellValue; import org.uberfire.ext.wires.core.grids.client.model.GridData; import org.uberfire.ext.wires.core.grids.client.model.impl.BaseHeaderMetaData; import org.uberfire.ext.wires.core.grids.client.widget.layer.GridWidgetRegistry; import org.uberfire.mvp.ParameterizedCommand; public class 
ExpressionContainerGrid extends BaseGrid<Expression> { private static final String COLUMN_GROUP = "ExpressionContainerGrid$Expression0"; private final Supplier<ExpressionGridCache> expressionGridCache; private final GridCellTuple parent = new GridCellTuple(0, 0, this); private final ExpressionEditorColumn expressionColumn; private final ParameterizedCommand<Optional<Expression>> onHasExpressionChanged; private final ParameterizedCommand<Optional<HasName>> onHasNameChanged; private ExpressionContainerUIModelMapper uiModelMapper; private static class ExpressionEditorColumnWrapper extends ExpressionEditorColumn { public ExpressionEditorColumnWrapper(final GridWidgetRegistry registry, final HeaderMetaData headerMetaData, final double width, final BaseGrid<? extends Expression> gridWidget) { super(registry, headerMetaData, width, gridWidget); } @Override protected void setComponentWidth(final double width) { //NOP. Synchronization with the HasComponentWidths is handled by the child grids. } } public ExpressionContainerGrid(final DMNGridLayer gridLayer, final CellEditorControlsView.Presenter cellEditorControls, final TranslationService translationService, final ListSelectorView.Presenter listSelector, final SessionManager sessionManager, final SessionCommandManager<AbstractCanvasHandler> sessionCommandManager, final DefaultCanvasCommandFactory canvasCommandFactory, final Supplier<ExpressionEditorDefinitions> expressionEditorDefinitions, final Supplier<ExpressionGridCache> expressionGridCache, final ParameterizedCommand<Optional<Expression>> onHasExpressionChanged, final ParameterizedCommand<Optional<HasName>> onHasNameChanged, final Event<RefreshFormPropertiesEvent> refreshFormPropertiesEvent, final Event<DomainObjectSelectionEvent> domainObjectSelectionEvent) { super(gridLayer, new DMNGridData(), new ExpressionContainerRenderer(), sessionManager, sessionCommandManager, canvasCommandFactory, refreshFormPropertiesEvent, domainObjectSelectionEvent, cellEditorControls, 
translationService); this.expressionGridCache = expressionGridCache; this.onHasExpressionChanged = onHasExpressionChanged; this.onHasNameChanged = onHasNameChanged; this.uiModelMapper = new ExpressionContainerUIModelMapper(parent, this::getModel, getExpression(), () -> nodeUUID.get(), () -> hasExpression, () -> hasName, () -> isOnlyVisualChangeAllowed, expressionEditorDefinitions, expressionGridCache, listSelector); setEventPropagationMode(EventPropagationMode.NO_ANCESTORS); expressionColumn = new ExpressionEditorColumnWrapper(gridLayer, new BaseHeaderMetaData(COLUMN_GROUP), UndefinedExpressionColumn.DEFAULT_WIDTH, this); expressionColumn.setMovable(false); expressionColumn.setResizable(true); model.appendColumn(expressionColumn); model.appendRow(new ExpressionEditorGridRow()); getRenderer().setColumnRenderConstraint((isSelectionLayer, gridColumn) -> !isSelectionLayer || gridColumn.equals(expressionColumn)); } @Override public boolean onDragHandle(final INodeXYEvent event) { return false; } @Override public void deselect() { getModel().clearSelections(); super.deselect(); } public void setExpression(final String nodeUUID, final HasExpression hasExpression, final Optional<HasName> hasName, final boolean isOnlyVisualChangeAllowed) { this.nodeUUID = Optional.of(nodeUUID); this.hasExpression = spyHasExpression(hasExpression); this.hasName = spyHasName(hasName); this.isOnlyVisualChangeAllowed = isOnlyVisualChangeAllowed; uiModelMapper.fromDMNModel(0, 0); expressionColumn.setWidthInternal(getExistingEditorWidth()); selectExpressionEditorFirstCell(); } double getExistingEditorWidth() { double existingWidth = DMNGridColumn.DEFAULT_WIDTH; final GridCell<?> cell = model.getRow(0).getCells().get(0); if (cell != null) { final GridCellValue<?> value = cell.getValue(); if (value instanceof ExpressionCellValue) { final ExpressionCellValue ecv = (ExpressionCellValue) value; final Optional<BaseExpressionGrid<? extends Expression, ? extends GridData, ? 
extends BaseUIModelMapper>> editor = ecv.getValue(); if (editor.isPresent()) { final BaseExpressionGrid beg = editor.get(); existingWidth = Collections.max(Arrays.asList(existingWidth, beg.getWidth() + beg.getPadding() * 2, beg.getMinimumWidth() + beg.getPadding() * 2)); } } } return existingWidth; } Optional<BaseExpressionGrid> getExistingEditor() { Optional<BaseExpressionGrid> beg = Optional.empty(); final GridCell<?> cell = model.getRow(0).getCells().get(0); if (cell != null) { final GridCellValue<?> value = cell.getValue(); if (value instanceof ExpressionCellValue) { final ExpressionCellValue ecv = (ExpressionCellValue) value; final Optional<BaseExpressionGrid<? extends Expression, ? extends GridData, ? extends BaseUIModelMapper>> editor = ecv.getValue(); if (editor.isPresent()) { beg = Optional.of(editor.get()); } } } return beg; } /** * Proxy {@link HasExpression} to be able intercept interactions with the original * to update the expression label in {@link ExpressionEditorView} when the {@link Expression} changes. * @param hasExpression A {@link HasExpression} to be proxied. 
* @return A proxy that intercepts interactions with the wrapped {@link HasExpression} */ HasExpression spyHasExpression(final HasExpression hasExpression) { final HasExpression spy = new HasExpression() { @Override public Expression getExpression() { return hasExpression.getExpression(); } @Override public void setExpression(final Expression expression) { hasExpression.setExpression(expression); onHasExpressionChanged.execute(Optional.ofNullable(expression)); } @Override public DMNModelInstrumentedBase asDMNModelInstrumentedBase() { return hasExpression.asDMNModelInstrumentedBase(); } @Override public boolean isClearSupported() { return hasExpression.isClearSupported(); } }; return spy; } /** * Proxy {@link HasName} to be able intercept interactions with the original to update the * navigation label in {@link ExpressionEditorView#setExpressionNameText(Optional)} when the {@link Name} * changes. The {@link Name} changes by a {@link SetHasNameCommand#execute(AbstractCanvasHandler)} or * {@link SetHasNameCommand#undo(AbstractCanvasHandler)} that ensures the {@link HasName#setName(Name)} * method is called. * @param hasName A {@link HasName} to be proxied. 
     * @return A proxy that intercepts interactions with the wrapped {@link HasName}
     */
    Optional<HasName> spyHasName(final Optional<HasName> hasName) {
        // Anonymous proxy: reads fall back to HasName.NOP when the Optional is empty;
        // writes also propagate the Name to an associated variable (if any) and fire
        // the onHasNameChanged callback.
        final HasName spy = new HasName() {
            @Override
            public Name getName() {
                return hasName.orElse(HasName.NOP).getName();
            }

            @Override
            public void setName(final Name name) {
                hasName.ifPresent(hn -> {
                    hn.setName(name);
                    // Keep the typed variable's name in sync with the display name.
                    if (hn instanceof HasVariable) {
                        final HasVariable hv = (HasVariable) hn;
                        hv.getVariable().setName(name);
                    }
                    onHasNameChanged.execute(hasName);
                });
            }
        };
        return Optional.of(spy);
    }

    /**
     * List-selector items for this grid: a single "Clear" entry when the wrapped
     * expression supports clearing, otherwise no items.
     */
    @Override
    public List<ListSelectorItem> getItems(final int uiRowIndex, final int uiColumnIndex) {
        if (hasExpression.isClearSupported()) {
            return Collections.singletonList(ListSelectorTextItem.build(translationService.format(DMNEditorConstants.ExpressionEditor_Clear),
                                                                        true,
                                                                        () -> {
                                                                            // Hide the popup before mutating the model.
                                                                            cellEditorControls.hide();
                                                                            clearExpressionType();
                                                                        }));
        }
        return Collections.emptyList();
    }

    /** Executes the command attached to the selected list-selector item. */
    @Override
    public void onItemSelected(final ListSelectorItem item) {
        final ListSelectorTextItem li = (ListSelectorTextItem) item;
        li.getCommand().execute();
    }

    /**
     * Clears the nested expression via an undoable canvas command. Both the execute
     * and the undo callbacks resize the expression column to fit whatever editor is
     * then present and select its first cell.
     */
    void clearExpressionType() {
        sessionCommandManager.execute((AbstractCanvasHandler) sessionManager.getCurrentSession().getCanvasHandler(),
                                      new ClearExpressionTypeCommand(parent,
                                                                     nodeUUID.get(),
                                                                     hasExpression,
                                                                     uiModelMapper,
                                                                     expressionGridCache.get(),
                                                                     () -> {
                                                                         expressionColumn.setWidthInternal(getExistingEditorWidth());
                                                                         selectExpressionEditorFirstCell();
                                                                     },
                                                                     () -> {
                                                                         expressionColumn.setWidthInternal(getExistingEditorWidth());
                                                                         selectExpressionEditorFirstCell();
                                                                     }));
    }

    /** Selects the nested editor on the layer (if any) and moves focus to its first cell. */
    void selectExpressionEditorFirstCell() {
        final Optional<BaseExpressionGrid<? extends Expression, ? extends GridData, ? extends BaseUIModelMapper>> grid = getBaseExpressionGrid();
        grid.ifPresent(beg -> {
            //It's not possible to set-up GridLayer for ExpressionContainerGrid in Unit Tests so defensively handle nulls
            Optional.ofNullable(getLayer()).ifPresent(layer -> ((DMNGridLayer) layer).select(beg));
            beg.selectFirstCell();
        });
    }

    /**
     * Returns the nested editor stored in cell (0,0).
     * NOTE(review): unlike {@link #getExistingEditor()} this does not null-check the
     * cell nor verify the value is an ExpressionCellValue — presumably cell (0,0)
     * is always populated by the time this is called; confirm against callers.
     */
    public Optional<BaseExpressionGrid<? extends Expression, ? extends GridData, ? extends BaseUIModelMapper>> getBaseExpressionGrid() {
        final GridCellValue<?> value = model.getCell(0, 0).getValue();
        return ((ExpressionCellValue) value).getValue();
    }

    /** Selects this grid on the layer and fires a domain-object selection event before delegating. */
    @Override
    public boolean selectCell(final Point2D ap,
                              final boolean isShiftKeyDown,
                              final boolean isControlKeyDown) {
        gridLayer.select(this);
        fireDomainObjectSelectionEvent();
        return super.selectCell(ap, isShiftKeyDown, isControlKeyDown);
    }

    /** Index-based variant of {@link #selectCell(Point2D, boolean, boolean)}. */
    @Override
    public boolean selectCell(final int uiRowIndex,
                              final int uiColumnIndex,
                              final boolean isShiftKeyDown,
                              final boolean isControlKeyDown) {
        gridLayer.select(this);
        fireDomainObjectSelectionEvent();
        return super.selectCell(uiRowIndex, uiColumnIndex, isShiftKeyDown, isControlKeyDown);
    }
}
/* * #%L * Fabric8 :: Container :: Karaf :: Managed * %% * Copyright (C) 2014 Red Hat * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package io.fabric8.container.karaf; import static io.fabric8.api.ContainerAttributes.ATTRIBUTE_KEY_REMOTE_AGENT_URL; import static io.fabric8.spi.RuntimeService.PROPERTY_REMOTE_AGENT_URL; import io.fabric8.domain.agent.AgentLogger; import io.fabric8.spi.AgentIdentity; import io.fabric8.spi.AgentRegistration; import io.fabric8.spi.AgentTopology; import io.fabric8.spi.process.AbstractProcessHandler; import io.fabric8.spi.process.MutableManagedProcess; import io.fabric8.spi.process.ProcessHandler; import io.fabric8.spi.utils.ManagementUtils; import java.io.File; import java.io.FileReader; import java.io.FileWriter; import java.io.FilenameFilter; import java.io.IOException; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Properties; import javax.management.MBeanServer; import javax.management.ObjectName; import org.jboss.gravia.runtime.spi.RuntimePropertiesProvider; import org.jboss.gravia.utils.IllegalStateAssertion; import org.jboss.gravia.utils.ObjectNameFactory; import org.jolokia.client.J4pClient; import org.jolokia.client.request.J4pExecRequest; import org.jolokia.client.request.J4pResponse; import org.jolokia.client.request.J4pSearchRequest; import org.json.simple.JSONArray; import org.osgi.jmx.framework.FrameworkMBean; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; /** * The Karaf {@link ProcessHandler} * * @author thomas.diesler@jboss.com * @since 26-Feb-2014 */ public final class KarafProcessHandler extends AbstractProcessHandler { static final Logger LOGGER = LoggerFactory.getLogger(KarafProcessHandler.class); public KarafProcessHandler(MBeanServer mbeanServer, AgentRegistration localAgent) { super(mbeanServer, localAgent, new RuntimePropertiesProvider()); } @Override protected void doConfigure(MutableManagedProcess process) throws Exception { File karafHome = process.getHomePath().toFile(); IllegalStateAssertion.assertTrue(karafHome.isDirectory(), "Karaf home does not exist: " + karafHome); File confDir = new File(karafHome, "etc"); IllegalStateAssertion.assertTrue(confDir.isDirectory(), "Karaf conf does not exist: " + confDir); String comment = "Modified by " + getClass().getName(); configureHttpService(process, confDir, comment); configureKarafManagement(process, confDir, comment); configureZookeeper(process, confDir, comment); } protected void configureHttpService(MutableManagedProcess process, File confDir, String comment) throws IOException { // etc/org.apache.felix.http.cfg File paxwebFile = new File(confDir, "org.apache.felix.http.cfg"); if (paxwebFile.exists()) { Properties props = new Properties(); props.load(new FileReader(paxwebFile)); KarafProcessOptions createOptions = (KarafProcessOptions) process.getCreateOptions(); int httpPort = nextAvailablePort(createOptions.getHttpPort()); int httpsPort = nextAvailablePort(createOptions.getHttpsPort()); props.setProperty("org.osgi.service.http.port", "" + httpPort); props.setProperty("org.osgi.service.https.port", "" + httpsPort); FileWriter fileWriter = new FileWriter(paxwebFile); try { props.store(fileWriter, comment); } finally { fileWriter.close(); } } } protected void configureKarafManagement(MutableManagedProcess process, File confDir, String comment) throws IOException { // etc/org.apache.karaf.management.cfg File managementFile = new 
File(confDir, "org.apache.karaf.management.cfg"); IllegalStateAssertion.assertTrue(managementFile.exists(), "File does not exist: " + managementFile); Properties props = new Properties(); props.load(new FileReader(managementFile)); KarafProcessOptions createOptions = (KarafProcessOptions) process.getCreateOptions(); int rmiRegistryPort = nextAvailablePort(createOptions.getRmiRegistryPort()); int rmiServerPort = nextAvailablePort(createOptions.getRmiServerPort()); props.setProperty("rmiRegistryPort", "" + rmiRegistryPort); props.setProperty("rmiServerPort", "" + rmiServerPort); FileWriter fileWriter = new FileWriter(managementFile); try { props.store(fileWriter, comment); } finally { fileWriter.close(); } } protected void configureZookeeper(MutableManagedProcess process, File confDir, String comment) throws IOException { // etc/io.fabric8.zookeeper.server-0000.cfg File managementFile = new File(confDir, "io.fabric8.zookeeper.server-0000.cfg"); IllegalStateAssertion.assertTrue(managementFile.exists(), "File does not exist: " + managementFile); IllegalStateAssertion.assertTrue(managementFile.delete(), "Cannot delete: " + managementFile); } @Override protected void doStart(MutableManagedProcess process) throws Exception { Path karafHome = process.getHomePath(); Path karafData = karafHome.resolve("data"); Path karafEtc = karafHome.resolve("etc"); Path karafLib = karafHome.resolve("lib"); Path karafInstances = karafHome.resolve("instances"); IllegalStateAssertion.assertTrue(karafHome.toFile().isDirectory(), "Not a valid home dir: " + karafHome); List<String> cmd = new ArrayList<String>(); cmd.add("java"); // JavaVM args KarafProcessOptions createOptions = (KarafProcessOptions) process.getCreateOptions(); String javaArgs = createOptions.getJavaVmArguments(); cmd.addAll(Arrays.asList(javaArgs.split("\\s+"))); // Karaf properties cmd.add("-Druntime.id=" + process.getIdentity().getName()); // Why is the runtime.id explicitly needed? 
cmd.add("-Dkaraf.name=" + process.getIdentity().getName()); cmd.add("-Dkaraf.home=" + karafHome); cmd.add("-Dkaraf.base=" + karafHome); cmd.add("-Dkaraf.data=" + karafData); cmd.add("-Dkaraf.etc=" + karafEtc); cmd.add("-Dkaraf.instances=" + karafInstances); cmd.add("-Dkaraf.startLocalConsole=false"); cmd.add("-Dkaraf.startRemoteShell=false"); cmd.add("-D" + PROPERTY_REMOTE_AGENT_URL + "=" + process.getAttribute(ATTRIBUTE_KEY_REMOTE_AGENT_URL)); // Java properties cmd.add("-Djava.io.tmpdir=" + karafData.resolve("tmp")); cmd.add("-Djava.util.logging.config.file=" + karafEtc.resolve("java.util.logging.properties")); cmd.add("-Djava.endorsed.dirs=" + karafLib.resolve("endorsed")); // Classpath StringBuffer classPath = new StringBuffer(); File karafLibDir = karafLib.toFile(); String[] libs = karafLibDir.list(new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.startsWith("karaf"); } }); for (String lib : libs) { String separator = classPath.length() > 0 ? 
File.pathSeparator : ""; classPath.append(separator + karafLib.resolve(lib)); } cmd.add("-classpath"); cmd.add(classPath.toString()); // Main class cmd.add("org.apache.karaf.main.Main"); AgentLogger.LOGGER.info("Staring process with: {}", cmd); ProcessBuilder processBuilder = new ProcessBuilder(cmd); processBuilder.directory(karafHome.toFile()); processBuilder.redirectErrorStream(true); startProcess(processBuilder, createOptions); } @Override protected void doStop(MutableManagedProcess process) throws Exception { // [TODO] #55 Topology should manage ProcessRegistration instead of just ProcessIdentity // Every managed process also creates an agent with the same id as the process // Obtain the jolokia endpoint URL from the ProcessRegistration AgentIdentity agentId = AgentIdentity.create(process.getIdentity().getName()); AgentTopology topology = ManagementUtils.getMXBeanProxy(getMBeanServer(), AgentTopology.OBJECT_NAME, AgentTopology.class); AgentRegistration remoteAgent = topology.getAgentRegistration(agentId); String serviceURL = remoteAgent.getJolokiaEndpoint(); J4pClient client = new J4pClient(serviceURL); ObjectName oname = getFrameworkMBeanName(client); J4pExecRequest execReq = new J4pExecRequest(oname, "shutdownFramework"); client.execute(execReq); } protected ObjectName getFrameworkMBeanName(J4pClient client) throws Exception { J4pSearchRequest searchReq = new J4pSearchRequest(FrameworkMBean.OBJECTNAME + ",*"); J4pResponse<J4pSearchRequest> searchRes = client.execute(searchReq); Object firstItem = ((JSONArray) searchRes.getValue()).get(0); return ObjectNameFactory.create((String) firstItem); } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.lens.driver.hive; import static org.apache.lens.server.api.util.LensUtil.getImplementations; import java.io.*; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import org.apache.lens.api.LensConf; import org.apache.lens.api.LensSessionHandle; import org.apache.lens.api.query.QueryHandle; import org.apache.lens.api.query.QueryPrepareHandle; import org.apache.lens.cube.query.cost.FactPartitionBasedQueryCostCalculator; import org.apache.lens.server.api.LensConfConstants; import org.apache.lens.server.api.driver.*; import org.apache.lens.server.api.driver.DriverQueryStatus.DriverQueryState; import org.apache.lens.server.api.error.LensException; import org.apache.lens.server.api.events.LensEventListener; import org.apache.lens.server.api.query.AbstractQueryContext; import org.apache.lens.server.api.query.PreparedQueryContext; import org.apache.lens.server.api.query.QueryContext; import org.apache.lens.server.api.query.collect.WaitingQueriesSelectionPolicy; import org.apache.lens.server.api.query.constraint.QueryLaunchingConstraint; import 
org.apache.lens.server.api.query.cost.FactPartitionBasedQueryCost;
import org.apache.lens.server.api.query.cost.QueryCost;
import org.apache.lens.server.api.query.cost.QueryCostCalculator;
import org.apache.lens.server.api.query.priority.CostRangePriorityDecider;
import org.apache.lens.server.api.query.priority.CostToPriorityRangeConf;
import org.apache.lens.server.api.query.priority.QueryPriorityDecider;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.TaskStatus;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hive.service.cli.*;
import org.apache.hive.service.cli.thrift.TOperationHandle;
import org.apache.hive.service.cli.thrift.TProtocolVersion;
import org.apache.hive.service.cli.thrift.TSessionHandle;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.type.TypeReference;

import com.google.common.collect.ImmutableSet;

import lombok.Getter;
import lombok.extern.slf4j.Slf4j;

/**
 * The Class HiveDriver. Lens driver that submits queries to a HiveServer2
 * instance (or an embedded thrift connection) and tracks their lifecycle.
 */
@Slf4j
public class HiveDriver implements LensDriver {

  /** The Constant HIVE_CONNECTION_CLASS. */
  public static final String HIVE_CONNECTION_CLASS = "lens.driver.hive.connection.class";

  public static final String HIVE_QUERY_HOOK_CLASS = "lens.driver.hive.query.hook.class";

  /** The Constant HS2_CONNECTION_EXPIRY_DELAY. */
  public static final String HS2_CONNECTION_EXPIRY_DELAY = "lens.driver.hive.hs2.connection.expiry.delay";

  public static final String HS2_CALCULATE_PRIORITY = "lens.driver.hive.calculate.priority";

  public static final String HS2_COST_CALCULATOR = "lens.driver.hive.cost.calculator.class";

  /**
   * Config param for defining priority ranges.
   */
  public static final String HS2_PRIORITY_RANGES = "lens.driver.hive.priority.ranges";

  // Default values of conf params
  public static final long DEFAULT_EXPIRY_DELAY = 600 * 1000;

  public static final String HS2_PRIORITY_DEFAULT_RANGES = "VERY_HIGH,7.0,HIGH,30.0,NORMAL,90,LOW";

  public static final String SESSION_KEY_DELIMITER = ".";

  private static final String QUERY_LAUNCHIG_CONSTRAINT_FACTORIES_KEY
    = "lens.driver.hive.query.launching.constraint.factories";

  private static final String WAITING_QUERIES_SELECTION_POLICY_FACTORIES_KEY
    = "lens.driver.hive.waiting.queries.selection.policy.factories";

  /** The driver conf - which will be merged with query conf. */
  private Configuration driverConf;

  /** The HiveConf - used for connecting to hive server and metastore. */
  private HiveConf hiveConf;

  /** Maps a Lens query handle to its in-flight Hive operation handle. */
  private Map<QueryHandle, OperationHandle> hiveHandles = new ConcurrentHashMap<QueryHandle, OperationHandle>();

  /** The session lock. */
  private final Lock sessionLock;

  // connections need to be separate for each user and each thread
  /** The thread connections. */
  private final Map<String, ExpirableConnection> threadConnections
    = new ConcurrentHashMap<String, ExpirableConnection>();

  /** Delay queue from which the expiry thread takes timed-out connections. */
  private final DelayQueue<ExpirableConnection> thriftConnExpiryQueue = new DelayQueue<ExpirableConnection>();

  /** The connection expiry thread; started as a daemon in the constructor. */
  private final Thread connectionExpiryThread = new Thread(new ConnectionExpiryRunnable());

  // assigned only in case of embedded connection
  /** The embedded connection. */
  private ThriftConnection embeddedConnection;

  // Store mapping of Lens session ID to Hive session identifier
  /** The lens to hive session. Keys are built by {@link #sessionDbKey(String, String)}. */
  private Map<String, SessionHandle> lensToHiveSession;

  /** Keep track of resources added to the Hive session. */
  private Map<SessionHandle, Boolean> resourcesAddedForSession;

  /** The driver listeners.
   */
  private List<LensEventListener<DriverEvent>> driverListeners;

  QueryCostCalculator queryCostCalculator;

  QueryPriorityDecider queryPriorityDecider;

  // package-local. Test case can change.
  boolean whetherCalculatePriority;

  private DriverQueryHook queryHook;

  @Getter
  private ImmutableSet<QueryLaunchingConstraint> queryConstraints;

  private ImmutableSet<WaitingQueriesSelectionPolicy> selectionPolicies;

  /** Builds the composite key "&lt;sessionHandle&gt;.&lt;database&gt;" used in lensToHiveSession. */
  private String sessionDbKey(String sessionHandle, String database) {
    return sessionHandle + SESSION_KEY_DELIMITER + database;
  }

  /**
   * Return true if resources have been added to this Hive session
   * @param sessionHandle lens session identifier
   * @param database lens database
   * @return true if resources have been already added to this session + db pair
   */
  public boolean areDBResourcesAddedForSession(String sessionHandle, String database) {
    String key = sessionDbKey(sessionHandle, database);
    SessionHandle hiveSession = lensToHiveSession.get(key);
    return hiveSession != null && resourcesAddedForSession.containsKey(hiveSession)
      && resourcesAddedForSession.get(hiveSession);
  }

  /**
   * Tell Hive driver that resources have been added for this session and for the given database
   * @param sessionHandle lens session identifier
   * @param database lens database
   */
  public void setResourcesAddedForSession(String sessionHandle, String database) {
    // NOTE(review): if no Hive session exists yet for this key, this stores a
    // null key into the map — confirm callers always register the session first.
    SessionHandle hiveSession = lensToHiveSession.get(sessionDbKey(sessionHandle, database));
    resourcesAddedForSession.put(hiveSession, Boolean.TRUE);
  }

  /**
   * The Class ConnectionExpiryRunnable.
   */
  class ConnectionExpiryRunnable implements Runnable {

    /*
     * (non-Javadoc)
     *
     * @see java.lang.Runnable#run()
     */
    @Override
    public void run() {
      try {
        // Loop forever: block on the delay queue until a connection times out,
        // then mark it expired and close it. The thread exits on interrupt.
        while (true) {
          ExpirableConnection expired = thriftConnExpiryQueue.take();
          expired.setExpired();
          ThriftConnection thConn = expired.getConnection();

          if (thConn != null) {
            try {
              log.info("Closed connection: {}", expired.getConnId());
              thConn.close();
            } catch (IOException e) {
              log.error("Error closing connection", e);
            }
          }
        }
      } catch (InterruptedException intr) {
        log.warn("Connection expiry thread interrupted", intr);
        return;
      }
    }
  }

  /** The Constant CONNECTION_COUNTER. */
  private static final AtomicInteger CONNECTION_COUNTER = new AtomicInteger();

  /**
   * The Class ExpirableConnection. Wraps a ThriftConnection with an idle
   * timeout so it can live in a {@link DelayQueue}; access refreshes the timer.
   */
  static class ExpirableConnection implements Delayed {

    /** Last access time in epoch millis; refreshed by getConnection(). */
    long accessTime;

    /** The conn. */
    private final ThriftConnection conn;

    /** The idle timeout in millis. */
    private final long timeout;

    /** The expired. */
    private volatile boolean expired;

    /** The conn id. */
    private final int connId;

    /**
     * Instantiates a new expirable connection.
     *
     * @param conn    the conn
     * @param timeout the timeout
     */
    public ExpirableConnection(ThriftConnection conn, long timeout) {
      this.conn = conn;
      this.timeout = timeout;
      connId = CONNECTION_COUNTER.incrementAndGet();
      accessTime = System.currentTimeMillis();
    }

    private ThriftConnection getConnection() {
      accessTime = System.currentTimeMillis();
      return conn;
    }

    private boolean isExpired() {
      return expired;
    }

    /**
     * Sets the expired.
     */
    private void setExpired() {
      expired = true;
    }

    private int getConnId() {
      return connId;
    }

    /*
     * (non-Javadoc)
     *
     * @see java.lang.Comparable#compareTo(java.lang.Object)
     */
    @Override
    public int compareTo(Delayed other) {
      // NOTE(review): long-to-int narrowing of a subtraction can overflow for very
      // large delay differences; Long.compare would be safer. Behavior left as-is.
      return (int) (this.getDelay(TimeUnit.MILLISECONDS) - other.getDelay(TimeUnit.MILLISECONDS));
    }

    /*
     * (non-Javadoc)
     *
     * @see java.util.concurrent.Delayed#getDelay(java.util.concurrent.TimeUnit)
     */
    @Override
    public long getDelay(TimeUnit unit) {
      long age = System.currentTimeMillis() - accessTime;
      return unit.convert(timeout - age, TimeUnit.MILLISECONDS);
    }
  }

  /**
   * Open connections.
   *
   * @return the number of connections currently queued for expiry
   */
  int openConnections() {
    return thriftConnExpiryQueue.size();
  }

  /** The connection class. */
  private Class<? extends ThriftConnection> connectionClass;

  /** The is embedded. */
  private boolean isEmbedded;

  /** The connection expiry timeout. */
  private long connectionExpiryTimeout;

  /**
   * Instantiates a new hive driver. Starts the daemon connection-expiry thread.
   *
   * @throws LensException the lens exception
   */
  public HiveDriver() throws LensException {
    this.sessionLock = new ReentrantLock();
    lensToHiveSession = new HashMap<String, SessionHandle>();
    resourcesAddedForSession = new HashMap<SessionHandle, Boolean>();
    connectionExpiryThread.setDaemon(true);
    connectionExpiryThread.setName("HiveDriver-ConnectionExpiryThread");
    connectionExpiryThread.start();
    driverListeners = new ArrayList<LensEventListener<DriverEvent>>();
    log.info("Hive driver inited");
  }

  @Override
  public Configuration getConf() {
    return driverConf;
  }

  /*
   * (non-Javadoc)
   *
   * @see org.apache.lens.server.api.driver.LensDriver#configure(org.apache.hadoop.conf.Configuration)
   */
  @Override
  public void configure(Configuration conf) throws LensException {
    this.driverConf = new Configuration(conf);
    this.driverConf.addResource("hivedriver-default.xml");
    this.driverConf.addResource("hivedriver-site.xml");
    // resources have to be added separately on hiveConf again because new HiveConf() overrides hive.* properties
    // from HiveConf
    this.hiveConf = new HiveConf(conf, HiveDriver.class);
    this.hiveConf.addResource("hivedriver-default.xml");
    this.hiveConf.addResource("hivedriver-site.xml");

    connectionClass = this.driverConf.getClass(HIVE_CONNECTION_CLASS, EmbeddedThriftConnection.class,
      ThriftConnection.class);
    isEmbedded = (connectionClass.getName().equals(EmbeddedThriftConnection.class.getName()));
    connectionExpiryTimeout = this.driverConf.getLong(HS2_CONNECTION_EXPIRY_DELAY, DEFAULT_EXPIRY_DELAY);
    whetherCalculatePriority = this.driverConf.getBoolean(HS2_CALCULATE_PRIORITY, true);
    Class<? extends QueryCostCalculator> queryCostCalculatorClass = this.driverConf.getClass(HS2_COST_CALCULATOR,
      FactPartitionBasedQueryCostCalculator.class, QueryCostCalculator.class);
    try {
      queryCostCalculator = queryCostCalculatorClass.newInstance();
    } catch (InstantiationException | IllegalAccessException e) {
      throw new LensException("Can't instantiate query cost calculator of class: " + queryCostCalculatorClass, e);
    }
    queryPriorityDecider = new CostRangePriorityDecider(
      new CostToPriorityRangeConf(driverConf.get(HS2_PRIORITY_RANGES, HS2_PRIORITY_DEFAULT_RANGES))
    );
    try {
      queryHook = driverConf.getClass(
        HIVE_QUERY_HOOK_CLASS, NoOpDriverQueryHook.class, DriverQueryHook.class
      ).newInstance();
    } catch (InstantiationException | IllegalAccessException e) {
      throw new LensException("Can't instantiate driver query hook for hivedriver with given class", e);
    }
    queryConstraints = getImplementations(QUERY_LAUNCHIG_CONSTRAINT_FACTORIES_KEY, driverConf);
    selectionPolicies = getImplementations(WAITING_QUERIES_SELECTION_POLICY_FACTORIES_KEY, driverConf);
  }

  /**
   * Computes cost via the configured calculator for OLAP (cube) queries; all
   * other queries get maximum cost.
   */
  private QueryCost calculateQueryCost(AbstractQueryContext qctx) throws LensException {
    if (qctx.isOlapQuery()) {
      return queryCostCalculator.calculateCost(qctx, this);
    } else {
      return new FactPartitionBasedQueryCost(Double.MAX_VALUE);
    }
  }

  @Override
  public QueryCost estimate(AbstractQueryContext qctx) throws LensException {
    log.info("Estimate: {}", qctx.getDriverQuery(this));
    if (qctx.getDriverQuery(this) == null) {
      throw new NullPointerException("Null driver query for " + qctx.getUserQuery());
    }
    if (qctx.getDriverContext().getDriverQueryCost(this) != null) {
      // estimate called again and again
      return qctx.getDriverContext().getDriverQueryCost(this);
    }
    if (qctx.isOlapQuery()) {
      // if query is olap query and rewriting takes care of semantic validation
      // estimate is calculating cost of the query
      // the calculation is done only for cube queries
      // for all other native table queries, the cost will be maximum
      return calculateQueryCost(qctx);
    } else {
      // its native table query. validate and return cost
      return explain(qctx).getCost();
    }
  }

  /*
   * (non-Javadoc)
   *
   * @see org.apache.lens.server.api.driver.LensDriver#explain(java.lang.String, org.apache.hadoop.conf.Configuration)
   */
  @Override
  public HiveQueryPlan explain(AbstractQueryContext explainCtx) throws LensException {
    if (explainCtx.getDriverQuery(this) == null) {
      throw new NullPointerException("Null driver query for " + explainCtx.getUserQuery());
    }
    if (explainCtx.getDriverContext().getDriverQueryPlan(this) != null) {
      // explain called again and again
      return (HiveQueryPlan) explainCtx.getDriverContext().getDriverQueryPlan(this);
    }
    log.info("Explain: {}", explainCtx.getDriverQuery(this));
    // Run "EXPLAIN EXTENDED <query>" as a nested non-persistent query and collect
    // its output lines into a HiveQueryPlan which is cached on the driver context.
    Configuration explainConf = new Configuration(explainCtx.getDriverConf(this));
    explainConf.setClassLoader(explainCtx.getConf().getClassLoader());
    explainConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
    final String explainQuery = "EXPLAIN EXTENDED " + explainCtx.getDriverQuery(this);

    QueryContext explainQueryCtx = QueryContext.createContextWithSingleDriver(explainQuery,
      explainCtx.getSubmittedUser(), new LensConf(), explainConf, this, explainCtx.getLensSessionIdentifier(), false);
    // Get result set of explain
    HiveInMemoryResultSet inMemoryResultSet = (HiveInMemoryResultSet) execute(explainQueryCtx);
    List<String> explainOutput = new ArrayList<>();
    while (inMemoryResultSet.hasNext()) {
      explainOutput.add((String) inMemoryResultSet.next().getValues().get(0));
    }
    closeQuery(explainQueryCtx.getQueryHandle());
    try {
      hiveConf.setClassLoader(explainCtx.getConf().getClassLoader());
      HiveQueryPlan hqp = new HiveQueryPlan(explainOutput, null, hiveConf, calculateQueryCost(explainCtx));
      explainCtx.getDriverContext().setDriverQueryPlan(this, hqp);
      return hqp;
    } catch (HiveException e) {
      throw new LensException("Unable to create hive query plan", e);
    }
  }

  // this is used for tests
  int getHiveHandleSize() {
    return hiveHandles.size();
  }

  /*
   * (non-Javadoc)
   *
   * @see
   * org.apache.lens.server.api.driver.LensDriver#explainAndPrepare
   * (org.apache.lens.server.api.query.PreparedQueryContext)
   */
  @Override
  public DriverQueryPlan explainAndPrepare(PreparedQueryContext pContext) throws LensException {
    DriverQueryPlan plan = explain(pContext);
    plan.setPrepareHandle(pContext.getPrepareHandle());
    return plan;
  }

  /*
   * (non-Javadoc)
   *
   * @see org.apache.lens.server.api.driver.LensDriver#prepare(org.apache.lens.server.api.query.PreparedQueryContext)
   */
  @Override
  public void prepare(PreparedQueryContext pContext) throws LensException {
    // NO OP
  }

  /*
   * (non-Javadoc)
   *
   * @see org.apache.lens.server.api.driver.LensDriver#closePreparedQuery(org.apache.lens.api.query.QueryPrepareHandle)
   */
  @Override
  public void closePreparedQuery(QueryPrepareHandle handle) throws LensException {
    // NO OP
  }

  /*
   * (non-Javadoc)
   *
   * @see org.apache.lens.server.api.driver.LensDriver#execute(org.apache.lens.server.api.query.QueryContext)
   */
  // assuming this is only called for executing explain/insert/set/delete/etc... queries which don't ask to fetch data.
public LensResultSet execute(QueryContext ctx) throws LensException { try { addPersistentPath(ctx); Configuration qdconf = ctx.getDriverConf(this); qdconf.set("mapred.job.name", ctx.getQueryHandle().toString()); OperationHandle op = getClient().executeStatement(getSession(ctx), ctx.getSelectedDriverQuery(), qdconf.getValByRegex(".*")); log.info("The hive operation handle: {}", op); ctx.setDriverOpHandle(op.toString()); hiveHandles.put(ctx.getQueryHandle(), op); updateStatus(ctx); OperationStatus status = getClient().getOperationStatus(op); if (status.getState() == OperationState.ERROR) { throw new LensException("Unknown error while running query " + ctx.getUserQuery()); } LensResultSet result = createResultSet(ctx, true); // close the query immediately if the result is not inmemory result set if (result == null || !(result instanceof HiveInMemoryResultSet)) { closeQuery(ctx.getQueryHandle()); } // remove query handle from hiveHandles even in case of inmemory result set hiveHandles.remove(ctx.getQueryHandle()); return result; } catch (IOException e) { throw new LensException("Error adding persistent path", e); } catch (HiveSQLException hiveErr) { handleHiveServerError(ctx, hiveErr); throw new LensException("Error executing query", hiveErr); } } /* * (non-Javadoc) * * @see org.apache.lens.server.api.driver.LensDriver#executeAsync(org.apache.lens.server.api.query.QueryContext) */ @Override public void executeAsync(QueryContext ctx) throws LensException { try { addPersistentPath(ctx); Configuration qdconf = ctx.getDriverConf(this); qdconf.set("mapred.job.name", ctx.getQueryHandle().toString()); //Query is already explained. log.info("whetherCalculatePriority: {}", whetherCalculatePriority); if (whetherCalculatePriority) { try { // Inside try since non-data fetching queries can also be executed by async method. 
if (ctx.getDriverQueryCost(this) == null) { ctx.setDriverCost(this, queryCostCalculator.calculateCost(ctx, this)); } String priority = queryPriorityDecider.decidePriority(ctx.getDriverQueryCost(this)).toString(); qdconf.set("mapred.job.priority", priority); log.info("set priority to {}", priority); } catch (Exception e) { // not failing query launch when setting priority fails // priority will be set to usually NORMAL - the default in underlying system. log.error("could not set priority for lens session id:{} User query: {}", ctx.getLensSessionIdentifier(), ctx.getUserQuery(), e); } } queryHook.preLaunch(ctx); OperationHandle op = getClient().executeStatementAsync(getSession(ctx), ctx.getSelectedDriverQuery(), qdconf.getValByRegex(".*")); ctx.setDriverOpHandle(op.toString()); log.info("QueryHandle: {} HiveHandle:{}", ctx.getQueryHandle(), op); hiveHandles.put(ctx.getQueryHandle(), op); } catch (IOException e) { throw new LensException("Error adding persistent path", e); } catch (HiveSQLException e) { handleHiveServerError(ctx, e); throw new LensException("Error executing async query", e); } } /* * (non-Javadoc) * * @see org.apache.lens.server.api.driver.LensDriver#updateStatus(org.apache.lens.server.api.query.QueryContext) */ @Override public void updateStatus(QueryContext context) throws LensException { log.debug("GetStatus: {}", context.getQueryHandle()); if (context.getDriverStatus().isFinished()) { return; } OperationHandle hiveHandle = getHiveHandle(context.getQueryHandle()); ByteArrayInputStream in = null; try { // Get operation status from hive server log.debug("GetStatus hiveHandle: {}", hiveHandle); OperationStatus opStatus = getClient().getOperationStatus(hiveHandle); log.debug("GetStatus on hiveHandle: {} returned state:", hiveHandle, opStatus.getState().name()); switch (opStatus.getState()) { case CANCELED: context.getDriverStatus().setState(DriverQueryState.CANCELED); context.getDriverStatus().setStatusMessage("Query has been cancelled!"); break; case 
CLOSED: context.getDriverStatus().setState(DriverQueryState.CLOSED); context.getDriverStatus().setStatusMessage("Query has been closed!"); break; case ERROR: context.getDriverStatus().setState(DriverQueryState.FAILED); context.getDriverStatus().setStatusMessage("Query execution failed!"); context.getDriverStatus().setErrorMessage( "Query failed with errorCode:" + opStatus.getOperationException().getErrorCode() + " with errorMessage: " + opStatus.getOperationException().getMessage()); break; case FINISHED: context.getDriverStatus().setState(DriverQueryState.SUCCESSFUL); context.getDriverStatus().setStatusMessage("Query is successful!"); context.getDriverStatus().setResultSetAvailable(hiveHandle.hasResultSet()); break; case INITIALIZED: context.getDriverStatus().setState(DriverQueryState.INITIALIZED); context.getDriverStatus().setStatusMessage("Query is initiazed in HiveServer!"); break; case RUNNING: context.getDriverStatus().setState(DriverQueryState.RUNNING); context.getDriverStatus().setStatusMessage("Query is running in HiveServer!"); break; case PENDING: context.getDriverStatus().setState(DriverQueryState.PENDING); context.getDriverStatus().setStatusMessage("Query is pending in HiveServer"); break; case UNKNOWN: default: throw new LensException("Query is in unknown state at HiveServer"); } float progress = 0f; String jsonTaskStatus = opStatus.getTaskStatus(); String errorMsg = null; if (StringUtils.isNotBlank(jsonTaskStatus)) { ObjectMapper mapper = new ObjectMapper(); in = new ByteArrayInputStream(jsonTaskStatus.getBytes("UTF-8")); List<TaskStatus> taskStatuses = mapper.readValue(in, new TypeReference<List<TaskStatus>>() { }); int completedTasks = 0; StringBuilder errorMessage = new StringBuilder(); for (TaskStatus taskStat : taskStatuses) { String tstate = taskStat.getTaskState(); if ("FINISHED_STATE".equalsIgnoreCase(tstate)) { completedTasks++; } if ("FAILED_STATE".equalsIgnoreCase(tstate)) { appendTaskIds(errorMessage, taskStat); errorMessage.append(" has 
failed! "); } } progress = taskStatuses.size() == 0 ? 0 : (float) completedTasks / taskStatuses.size(); errorMsg = errorMessage.toString(); } else { log.warn("Empty task statuses"); } String error = null; if (StringUtils.isNotBlank(errorMsg)) { error = errorMsg; } else if (opStatus.getState().equals(OperationState.ERROR)) { error = context.getDriverStatus().getStatusMessage(); } context.getDriverStatus().setErrorMessage(error); context.getDriverStatus().setProgressMessage(jsonTaskStatus); context.getDriverStatus().setProgress(progress); context.getDriverStatus().setDriverStartTime(opStatus.getOperationStarted()); context.getDriverStatus().setDriverFinishTime(opStatus.getOperationCompleted()); } catch (Exception e) { log.error("Error getting query status", e); handleHiveServerError(context, e); throw new LensException("Error getting query status", e); } finally { if (in != null) { try { in.close(); } catch (IOException e) { log.error("Error closing stream.", e); } } } } /** * Append task ids. * * @param message the message * @param taskStat the task stat */ private void appendTaskIds(StringBuilder message, TaskStatus taskStat) { message.append(taskStat.getTaskId()).append("("); message.append(taskStat.getType()).append("):"); if (taskStat.getExternalHandle() != null) { message.append(taskStat.getExternalHandle()).append(":"); } } /* * (non-Javadoc) * * @see org.apache.lens.server.api.driver.LensDriver#fetchResultSet(org.apache.lens.server.api.query.QueryContext) */ @Override public LensResultSet fetchResultSet(QueryContext ctx) throws LensException { log.info("FetchResultSet: {}", ctx.getQueryHandle()); // This should be applicable only for a async query return createResultSet(ctx, false); } /* * (non-Javadoc) * * @see org.apache.lens.server.api.driver.LensDriver#closeResultSet(org.apache.lens.api.query.QueryHandle) */ @Override public void closeResultSet(QueryHandle handle) throws LensException { // NO OP ? 
} /* * (non-Javadoc) * * @see org.apache.lens.server.api.driver.LensDriver#closeQuery(org.apache.lens.api.query.QueryHandle) */ @Override public void closeQuery(QueryHandle handle) throws LensException { if (handle == null) { return; } log.info("CloseQuery: {}", handle); OperationHandle opHandle = hiveHandles.remove(handle); if (opHandle != null) { log.info("CloseQuery hiveHandle: {}", opHandle); try { getClient().closeOperation(opHandle); } catch (HiveSQLException e) { checkInvalidOperation(handle, e); throw new LensException("Unable to close query", e); } } } /* * (non-Javadoc) * * @see org.apache.lens.server.api.driver.LensDriver#cancelQuery(org.apache.lens.api.query.QueryHandle) */ @Override public boolean cancelQuery(QueryHandle handle) throws LensException { log.info("CancelQuery: {}", handle); OperationHandle hiveHandle = getHiveHandle(handle); try { log.info("CancelQuery hiveHandle: {}", hiveHandle); getClient().cancelOperation(hiveHandle); return true; } catch (HiveSQLException e) { checkInvalidOperation(handle, e); throw new LensException(); } } /* * (non-Javadoc) * * @see org.apache.lens.server.api.driver.LensDriver#close() */ @Override public void close() { log.info("CloseDriver"); // Close this driver and release all resources sessionLock.lock(); try { for (String lensSessionDbKey : lensToHiveSession.keySet()) { try { getClient().closeSession(lensToHiveSession.get(lensSessionDbKey)); } catch (Exception e) { checkInvalidSession(e); log.warn("Error closing session for lens session: {}, hive session: ", lensSessionDbKey, lensToHiveSession.get(lensSessionDbKey), e); } } lensToHiveSession.clear(); } finally { sessionLock.unlock(); } } /** * Add a listener for driver events. 
* * @param driverEventListener the driver event listener */ @Override public void registerDriverEventListener(LensEventListener<DriverEvent> driverEventListener) { driverListeners.add(driverEventListener); } @Override public ImmutableSet<WaitingQueriesSelectionPolicy> getWaitingQuerySelectionPolicies() { return selectionPolicies; } protected CLIServiceClient getClient() throws LensException { if (isEmbedded) { if (embeddedConnection == null) { try { embeddedConnection = connectionClass.newInstance(); embeddedConnection.init(hiveConf, null); } catch (Exception e) { throw new LensException(e); } log.info("New thrift connection {}", connectionClass); } return embeddedConnection.getClient(); } else { String user = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_USER); if (SessionState.get() != null && SessionState.get().getUserName() != null) { user = SessionState.get().getUserName(); } String connectionKey = user.toLowerCase() + Thread.currentThread().getId(); ExpirableConnection connection = threadConnections.get(connectionKey); if (connection == null || connection.isExpired()) { try { ThriftConnection tconn = connectionClass.newInstance(); tconn.init(hiveConf, user); connection = new ExpirableConnection(tconn, connectionExpiryTimeout); thriftConnExpiryQueue.offer(connection); threadConnections.put(connectionKey, connection); log.info("New thrift connection {} for thread: {} for user: {} connection ID={}", connectionClass, Thread.currentThread().getId(), user, connection.getConnId()); } catch (Exception e) { throw new LensException(e); } } else { synchronized (thriftConnExpiryQueue) { thriftConnExpiryQueue.remove(connection); thriftConnExpiryQueue.offer(connection); } } return connection.getConnection().getClient(); } } /** * Creates the result set. 
*
   * @param context the query context whose handle identifies the hive operation
   * @param closeAfterFetch whether an in-memory result set should close the operation after fetch
   * @return the lens result set, or null for operations that produce no result
   * @throws LensException the lens exception
   */
  private LensResultSet createResultSet(QueryContext context, boolean closeAfterFetch) throws LensException {
    OperationHandle op = getHiveHandle(context.getQueryHandle());
    log.info("Creating result set for hiveHandle:{}", op);
    try {
      if (context.isDriverPersistent()) {
        // Persistent results live in HDFS at the path chosen by addPersistentPath().
        return new HivePersistentResultSet(new Path(context.getHdfsoutPath()), op, getClient());
      } else if (op.hasResultSet()) {
        return new HiveInMemoryResultSet(op, getClient(), closeAfterFetch);
      } else {
        // queries that do not have result
        return null;
      }
    } catch (HiveSQLException hiveErr) {
      handleHiveServerError(context, hiveErr);
      throw new LensException("Error creating result set", hiveErr);
    }
  }

  /**
   * Adds the persistent path.
   *
   * <p>For persistent SELECT queries this rewrites the driver query into
   * "INSERT OVERWRITE DIRECTORY &lt;hdfs path&gt; ... &lt;original query&gt;" so the results land
   * in HDFS; otherwise persistence is unset and the query is left as-is.</p>
   *
   * @param context the context
   * @throws IOException Signals that an I/O exception has occurred.
   */
  void addPersistentPath(QueryContext context) throws IOException {
    String hiveQuery;
    Configuration qdconf = context.getDriverConf(this);
    boolean addInsertOverwrite = qdconf.getBoolean(
      LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, LensConfConstants.DEFAULT_ADD_INSERT_OVEWRITE);
    // NOTE(review): only all-upper "SELECT" and all-lower "select" prefixes are recognized;
    // mixed-case queries (e.g. "Select ...") skip the rewrite — confirm whether intended.
    if (context.isDriverPersistent() && addInsertOverwrite
      && (context.getSelectedDriverQuery().startsWith("SELECT")
        || context.getSelectedDriverQuery().startsWith("select"))) {
      // store persistent data into user specified location
      // If absent, take default home directory
      Path resultSetPath = context.getHDFSResultDir();
      // create query
      StringBuilder builder = new StringBuilder("INSERT OVERWRITE DIRECTORY ");
      context.setHdfsoutPath(resultSetPath.makeQualified(resultSetPath.getFileSystem(context.getConf())).toString());
      builder.append('"').append(resultSetPath).append("\" ");
      String outputDirFormat = qdconf.get(LensConfConstants.QUERY_OUTPUT_DIRECTORY_FORMAT);
      if (outputDirFormat != null) {
        builder.append(outputDirFormat);
      }
      builder.append(' ').append(context.getSelectedDriverQuery()).append(' ');
      hiveQuery = builder.toString();
    } else {
      context.unSetDriverPersistent();
      hiveQuery = context.getSelectedDriverQuery();
    }
    log.info("Hive driver query:{}", hiveQuery);
    context.setSelectedDriverQuery(hiveQuery);
  }

  /**
   * Gets the session.
   *
   * <p>Returns (opening if necessary) the Hive session mapped to the lens session +
   * database key. Opening a session also notifies registered driver event listeners.</p>
   *
   * @param ctx the ctx
   * @return the session
   * @throws LensException the lens exception
   */
  private SessionHandle getSession(QueryContext ctx) throws LensException {
    sessionLock.lock();
    try {
      String lensSession = ctx.getLensSessionIdentifier();
      // NOTE(review): sessionDbKey is computed before the null fallback below, so when the
      // context has no session id the key is derived from null even though lensSession is
      // later populated from SessionState — confirm whether that is intended.
      String sessionDbKey = sessionDbKey(lensSession, ctx.getDatabase());
      if (lensSession == null && SessionState.get() != null) {
        lensSession = SessionState.get().getSessionId();
      }
      if (lensSession == null) {
        throw new IllegalStateException("Current session state does not have a Lens session id");
      }
      SessionHandle hiveSession;
      if (!lensToHiveSession.containsKey(sessionDbKey)) {
        try {
          hiveSession = getClient().openSession(ctx.getClusterUser(), "");
          lensToHiveSession.put(sessionDbKey, hiveSession);
          log.info("New hive session for user: {} , lens session: {} hive session handle: {}", ctx.getClusterUser(),
            sessionDbKey, hiveSession.getHandleIdentifier());
          for (LensEventListener<DriverEvent> eventListener : driverListeners) {
            try {
              eventListener.onEvent(new DriverSessionStarted(System.currentTimeMillis(), this, lensSession, hiveSession
                .getSessionId().toString()));
            } catch (Exception exc) {
              // Listener failures must not fail session creation.
              log.error("Error sending driver start event to listener {}", eventListener, exc);
            }
          }
        } catch (Exception e) {
          throw new LensException(e);
        }
      } else {
        hiveSession = lensToHiveSession.get(sessionDbKey);
      }
      return hiveSession;
    } finally {
      sessionLock.unlock();
    }
  }

  /**
   * Gets the hive handle.
*
   * @param handle the lens query handle
   * @return the hive handle
   * @throws LensException if no hive operation is mapped to the handle
   */
  private OperationHandle getHiveHandle(QueryHandle handle) throws LensException {
    OperationHandle opHandle = hiveHandles.get(handle);
    if (opHandle == null) {
      throw new LensException("Query not found " + handle);
    }
    return opHandle;
  }

  /**
   * The Class QueryCompletionNotifier.
   *
   * <p>Background task that polls the hive operation state until the query finishes or a
   * timeout elapses, then invokes the listener's onCompletion/onError callback.</p>
   */
  private class QueryCompletionNotifier implements Runnable {

    /** The poll interval (a tenth of the timeout). */
    long pollInterval;

    /** The hive handle. */
    OperationHandle hiveHandle;

    /** The timeout millis. */
    long timeoutMillis;

    /** The listener. */
    QueryCompletionListener listener;

    /** The handle. */
    QueryHandle handle;

    /**
     * Instantiates a new query completion notifier.
     *
     * @param handle the handle
     * @param timeoutMillis the timeout millis
     * @param listener the listener
     * @throws LensException the lens exception
     */
    QueryCompletionNotifier(QueryHandle handle, long timeoutMillis, QueryCompletionListener listener)
      throws LensException {
      this.handle = handle;
      this.timeoutMillis = timeoutMillis;
      this.listener = listener;
      this.pollInterval = timeoutMillis / 10;
    }

    /*
     * (non-Javadoc)
     *
     * @see java.lang.Runnable#run()
     */
    @Override
    public void run() {
      // till query is complete or timeout has reached
      long timeSpent = 0;
      String error;
      try {
        while (timeSpent <= timeoutMillis) {
          try {
            hiveHandle = getHiveHandle(handle);
            if (isFinished(hiveHandle)) {
              listener.onCompletion(handle);
              return;
            }
          } catch (LensException e) {
            // Handle not yet registered in hiveHandles — keep polling until timeout.
            log.debug("query handle: {} Not yet launched on driver", handle);
          }
          Thread.sleep(pollInterval);
          timeSpent += pollInterval;
        }
        error = "timedout";
      } catch (Exception e) {
        log.warn("Error while polling for status", e);
        error = "error polling";
      }
      listener.onError(handle, error);
    }

    /**
     * Checks if is finished.
     *
     * <p>A hive operation is considered finished in any terminal state:
     * FINISHED, CANCELED, ERROR or CLOSED.</p>
     *
     * @param hiveHandle the hive handle
     * @return true, if is finished
     * @throws LensException the lens exception
     */
    private boolean isFinished(OperationHandle hiveHandle) throws LensException {
      OperationState state;
      try {
        state = getClient().getOperationStatus(hiveHandle).getState();
      } catch (HiveSQLException e) {
        throw new LensException("Could not get Status", e);
      }
      if (state.equals(OperationState.FINISHED) || state.equals(OperationState.CANCELED)
        || state.equals(OperationState.ERROR) || state.equals(OperationState.CLOSED)) {
        return true;
      }
      return false;
    }
  }

  /*
   * (non-Javadoc)
   *
   * @see
   * org.apache.lens.server.api.driver.LensDriver#registerForCompletionNotification
   * (org.apache.lens.api.query.QueryHandle, long, org.apache.lens.server.api.driver.QueryCompletionListener)
   */
  @Override
  public void registerForCompletionNotification(
    QueryHandle handle, long timeoutMillis, QueryCompletionListener listener) throws LensException {
    // Spawns a dedicated polling thread per registration.
    Thread th = new Thread(new QueryCompletionNotifier(handle, timeoutMillis, listener));
    th.start();
  }

  /*
   * (non-Javadoc)
   *
   * @see java.io.Externalizable#readExternal(java.io.ObjectInput)
   */
  @Override
  public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
    // Restores the query-handle -> hive-operation map and the lens -> hive session map
    // written by writeExternal(), in the same order.
    synchronized (hiveHandles) {
      int numHiveHnadles = in.readInt();
      for (int i = 0; i < numHiveHnadles; i++) {
        QueryHandle qhandle = (QueryHandle) in.readObject();
        OperationHandle opHandle = new OperationHandle((TOperationHandle) in.readObject());
        hiveHandles.put(qhandle, opHandle);
        log.debug("Hive driver recovered {}:{}", qhandle, opHandle);
      }
      log.info("HiveDriver recovered {} queries", hiveHandles.size());
      int numSessions = in.readInt();
      for (int i = 0; i < numSessions; i++) {
        String lensId = in.readUTF();
        SessionHandle sHandle = new SessionHandle((TSessionHandle) in.readObject(),
          TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V6);
        lensToHiveSession.put(lensId, sHandle);
      }
      log.info("HiveDriver recovered {} sessions", lensToHiveSession.size());
    }
  }

  /*
   * (non-Javadoc)
   *
   *
@see java.io.Externalizable#writeExternal(java.io.ObjectOutput) */ @Override public void writeExternal(ObjectOutput out) throws IOException { // Write the query handle to hive handle map to output synchronized (hiveHandles) { out.writeInt(hiveHandles.size()); for (Map.Entry<QueryHandle, OperationHandle> entry : hiveHandles.entrySet()) { out.writeObject(entry.getKey()); out.writeObject(entry.getValue().toTOperationHandle()); log.debug("Hive driver persisted {}:{}", entry.getKey(), entry.getValue()); } log.info("HiveDriver persisted {} queries ", hiveHandles.size()); out.writeInt(lensToHiveSession.size()); for (Map.Entry<String, SessionHandle> entry : lensToHiveSession.entrySet()) { out.writeUTF(entry.getKey()); out.writeObject(entry.getValue().toTSessionHandle()); } log.info("HiveDriver persisted {} sessions", lensToHiveSession.size()); } } /** * Checks if is session invalid. * * @param exc the exc * @param sessionHandle the session handle * @return true, if is session invalid */ protected boolean isSessionInvalid(HiveSQLException exc, SessionHandle sessionHandle) { if (exc.getMessage().contains("Invalid SessionHandle") && exc.getMessage().contains(sessionHandle.toString())) { return true; } // Check if there is underlying cause if (exc.getCause() instanceof HiveSQLException) { isSessionInvalid((HiveSQLException) exc.getCause(), sessionHandle); } return false; } /** * Check invalid session. 
* * @param e the e */ protected void checkInvalidSession(Exception e) { if (!(e instanceof HiveSQLException)) { return; } HiveSQLException exc = (HiveSQLException) e; String lensSession = null; if (SessionState.get() != null) { lensSession = SessionState.get().getSessionId(); } if (lensSession == null) { return; } // Get all hive sessions corresponding to the lens session and check if // any of those sessions have become invalid List<String> sessionKeys = new ArrayList<String>(lensToHiveSession.keySet()); List<SessionHandle> hiveSessionsToCheck = new ArrayList<SessionHandle>(); sessionLock.lock(); try { for (String key : sessionKeys) { if (key.startsWith(lensSession)) { hiveSessionsToCheck.add(lensToHiveSession.get(key)); } } } finally { sessionLock.unlock(); } for (SessionHandle session : hiveSessionsToCheck) { if (isSessionInvalid(exc, session)) { // We have to expire previous session log.info("Hive server session {} for lens session {} has become invalid", session, lensSession); sessionLock.lock(); try { // We should close all connections and clear the session map since // most likely all sessions are gone closeAllConnections(); lensToHiveSession.clear(); log.info("Cleared all sessions"); } finally { sessionLock.unlock(); } } } } /** * Check invalid operation. 
*
   * <p>Drops the cached hive operation for the query if the exception (or any nested
   * HiveSQLException cause) reports an "Invalid OperationHandle" for it.</p>
   *
   * @param queryHandle the query handle
   * @param exc the exc
   */
  protected void checkInvalidOperation(QueryHandle queryHandle, HiveSQLException exc) {
    final OperationHandle operation = hiveHandles.get(queryHandle);
    if (operation == null) {
      log.info("No hive operation available for {}", queryHandle);
      return;
    }
    if (exc.getMessage() != null && exc.getMessage().contains("Invalid OperationHandle:")
      && exc.getMessage().contains(operation.toString())) {
      log.info("Hive operation {} for query {} has become invalid", operation, queryHandle);
      hiveHandles.remove(queryHandle);
      return;
    }
    // Check nested causes recursively (the redundant trailing return was removed).
    if (exc.getCause() instanceof HiveSQLException) {
      checkInvalidOperation(queryHandle, (HiveSQLException) exc.getCause());
    }
  }

  /**
   * Handle hive server error.
   *
   * <p>Central hook for HiveSQLExceptions: invalidates the query's cached operation
   * and the hive sessions when the server reports them as invalid.</p>
   *
   * @param ctx the ctx (may be null when the error is not tied to a query)
   * @param exc the exc
   */
  protected void handleHiveServerError(QueryContext ctx, Exception exc) {
    if (exc instanceof HiveSQLException) {
      if (ctx != null) {
        checkInvalidOperation(ctx.getQueryHandle(), (HiveSQLException) exc);
      }
      checkInvalidSession((HiveSQLException) exc);
    }
  }

  /**
   * Close session.
   *
   * <p>Closes every hive session whose key belongs to the given lens session and
   * forgets its registered resources. Close failures are logged, not propagated.</p>
   *
   * @param sessionHandle the session handle
   */
  public void closeSession(LensSessionHandle sessionHandle) {
    String sessionIdentifier = sessionHandle.getPublicId().toString();
    sessionLock.lock();
    try {
      // Iterate over a snapshot since entries are removed while looping.
      for (String sessionDbKey : new ArrayList<String>(lensToHiveSession.keySet())) {
        if (sessionDbKey.startsWith(sessionIdentifier)) {
          SessionHandle hiveSession = lensToHiveSession.remove(sessionDbKey);
          if (hiveSession != null) {
            try {
              getClient().closeSession(hiveSession);
              log.info("Closed Hive session {} for lens session {}", hiveSession.getHandleIdentifier(), sessionDbKey);
            } catch (Exception e) {
              log.error("Error closing hive session {} for lens session {}", hiveSession.getHandleIdentifier(),
                sessionDbKey, e);
            }
            resourcesAddedForSession.remove(hiveSession);
          }
        }
      }
    } finally {
      sessionLock.unlock();
    }
  }

  /**
   * Close all connections.
   */
  private void closeAllConnections() {
    synchronized (thriftConnExpiryQueue) {
      for (ExpirableConnection connection : threadConnections.values()) {
        try {
          connection.getConnection().close();
        } catch (Exception ce) {
          // FIX: the caught exception was previously dropped; include it in the log.
          log.warn("Error closing connection to hive server", ce);
        }
      }
      threadConnections.clear();
    }
  }

  // For test
  /**
   * Checks for lens session.
   *
   * @param session the session
   * @return true, if successful
   */
  public boolean hasLensSession(LensSessionHandle session) {
    return lensToHiveSession.containsKey(session.getPublicId().toString());
  }
}
package d4move;

import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.sql.Connection;
import java.util.Date;
import java.util.Vector;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Properties;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Reads per-user antenna positions from the senegal_set2_small tables, buckets them into
 * hourly time slices, and writes the movement traces as a JavaScript data file
 * ("moveLatLon.js") with a jsonStruct of lat/lon pairs plus segment bookkeeping arrays.
 * All work happens in the constructor.
 */
public class trace {

    public trace() {
        Connection con = null;
        PreparedStatement pst = null;
        ResultSet rs = null;
        ResultSet rs2 = null;
        PrintWriter writerLocations = null;
        // NOTE(review): if this fails, writerLocations stays null and the writes below
        // would throw NullPointerException (caught by the broad catch) — confirm intended.
        try {
            writerLocations = new PrintWriter("moveLatLon.js", "UTF-8");
        } catch (IOException ex) {
            Logger lgr = Logger.getLogger(process.class.getName());
            lgr.log(Level.SEVERE, ex.getMessage(), ex);
        }
        // Load JDBC connection settings from the bundled db.properties resource.
        Properties props = new Properties();
        InputStream in = null;
        try {
            in = process.class.getResourceAsStream("/db.properties");
            props.load(in);
        } catch (IOException ex) {
            Logger lgr = Logger.getLogger(process.class.getName());
            lgr.log(Level.SEVERE, ex.getMessage(), ex);
        } finally {
            try {
                if (in != null) {
                    in.close();
                }
            } catch (IOException ex) {
                Logger lgr = Logger.getLogger(process.class.getName());
                lgr.log(Level.SEVERE, ex.getMessage(), ex);
            }
        }
        String url = props.getProperty("db.url");
        String user = props.getProperty("db.user");
        String passwd = props.getProperty("db.passwd");
        Date lefttime = new Date();
        Date righttime = new Date();
        try {
            con = DriverManager.getConnection(url, user, passwd);
            // Determine the earliest and latest timestamps in the data set.
            pst = con
                    .prepareStatement("SELECT time FROM senegal_set2_small ORDER BY time ASC LIMIT 1");
            rs = pst.executeQuery();
            while (rs.next()) {
                System.out.println(rs.getDate("time"));
                lefttime = rs.getDate("time");
            }
            // NOTE(review): pst/rs are reassigned throughout without closing the previous
            // statement/result set — the earlier ones leak until GC; consider
            // try-with-resources per query.
            pst = con
                    .prepareStatement("SELECT time FROM senegal_set2_small ORDER BY time DESC LIMIT 1");
            rs = pst.executeQuery();
            while (rs.next()) {
                System.out.println(rs.getDate("time"));
                righttime = rs.getDate("time");
            }
            writerLocations.println("// From: " + lefttime);
            writerLocations.println("// To: " + righttime);
            // Window parameters: dayspan hourly slices starting offsetday hours after lefttime,
            // restricted to user ids up to maxid.
            int day = 0;
            int dayspan = 1 * 24 + 1;
            int offsetday = 0 * 24;
            int maxid = 1000;
            System.out.println("User data to write: " + maxid);
            int user_id = 0;
            // NOTE(review): SQL is built by string concatenation; values here are locally
            // computed, but parameterized queries would be safer and faster.
            String sql = "SELECT DISTINCT (user_id) FROM senegal_set2_small WHERE time BETWEEN (DATE('"
                    + lefttime + "') + INTERVAL '" + (day + offsetday)
                    + " hours') AND (DATE('" + lefttime + "') + INTERVAL '"
                    + (day + offsetday + dayspan) + " hours') and user_id <= " + maxid
                    + " order by user_id";
            pst = con.prepareStatement(sql);
            rs = pst.executeQuery();
            // vx/vy hold, per hourly slice and per user id, the sequence of lon/lat points.
            Vector<Double>[][] vx = new Vector[dayspan + 1][maxid + 1];
            Vector<Double>[][] vy = new Vector[dayspan + 1][maxid + 1];
            for (int i = 0; i <= dayspan; i++) {
                for (int j = 1; j <= maxid; j++) {
                    vx[i][j] = new Vector<Double>();
                    vy[i][j] = new Vector<Double>();
                }
            }
            while (rs.next()) {
                user_id = rs.getInt("user_id");
                System.out.println("--- New user: " + user_id);
                int id = 0;
                Date time = new Date();
                double lon = 0;
                double lat = 0;
                int hour = -1;
                int prev_id = 0;
                double prev_lon = -999;
                double prev_lat = -999;
                int prev_hour = -1;
                // td = hours elapsed since (lefttime + offsetday); joins each event to its
                // antenna position via senegal_ant_pos.
                sql = "SELECT trunc((EXTRACT(EPOCH FROM senegal_set2_small.time) - (EXTRACT(EPOCH FROM (DATE('"
                        + lefttime + "') + INTERVAL '" + offsetday
                        + " hours'))))/3600)::Integer AS td,"
                        + " senegal_ant_pos.lon, senegal_ant_pos.lat, senegal_set2_small.time,senegal_set2_small.user_id"
                        + " FROM senegal_ant_pos,senegal_set2_small"
                        + " WHERE senegal_ant_pos.site_id = senegal_set2_small.antenna_id"
                        + " AND time BETWEEN (DATE('" + lefttime + "') + INTERVAL '"
                        + (day + offsetday) + " hours') AND (DATE('" + lefttime
                        + "') + INTERVAL '" + (day + offsetday + dayspan )
                        + " hours') AND senegal_set2_small.user_id = '" + user_id
                        + "' order by time";
                pst = con.prepareStatement(sql);
                rs2 = pst.executeQuery();
                while (rs2.next()) {
                    id = rs2.getInt("user_id");
                    time = rs2.getTimestamp("time");
                    lat = rs2.getDouble("lat");
                    lon = rs2.getDouble("lon");
                    hour = rs2.getInt("td");
                    System.out.println(id + " " + time + " " + hour + " (" + lat + "," + lon + ")");
                    if (hour != prev_hour) {
                        // Crossing into a new slice: close off the previous slice with this
                        // point (unless it duplicates its last point), then start the new one.
                        if (prev_hour > 0 && !(lon == vx[prev_hour][id].lastElement() && lat == vy[prev_hour][id]
                                .lastElement())) {
                            vx[prev_hour][id].add(lon);
                            vy[prev_hour][id].add(lat);
                            System.out.print("added to previous - ");
                            System.out.println(id + " " + time + " " + prev_hour + " (" + lat + "," + lon + ")");
                        }
                        vx[hour][id].add(lon);
                        vy[hour][id].add(lat);
                        System.out.println("init new slice - added");
                    }
                    // Same slice and same user: record only actual movement.
                    if ((hour == prev_hour) && id == prev_id && !(lon == prev_lon && lat == prev_lat)) {
                        vx[hour][id].add(lon);
                        vy[hour][id].add(lat);
                        System.out.println("added");
                    }
                    // Check if position is same,
                    prev_id = id;
                    prev_lon = lon;
                    prev_lat = lat;
                    prev_hour = hour;
                }
            }
            // Emit the flattened lat/lon coordinate list (lon,lat pairs in traversal order).
            writerLocations.print("var jsonStruct = {\nlatlng: [");
            boolean first = true;
            for (int i = 0; i <= dayspan; i++) {
                for (int k = 1; k <= maxid; k++) {
                    if (vx[i][k].size() > 1) {
                        for (int j = 0; j < vx[i][k].size(); j++) {
                            if (!(first)) {
                                writerLocations.print("," + vx[i][k].elementAt(j));
                            } else {
                                writerLocations.print("" + vx[i][k].elementAt(j));
                                first = false;
                            }
                            writerLocations.print("," + vy[i][k].elementAt(j));
                        }
                    }
                }
            }
            // Emit the cumulative per-slice segment counts.
            writerLocations.print("],\nsegments: [");
            int points = 0;
            first = true;
            for (int i = 0; i <= dayspan; i++) {
                for (int k = 1; k <= maxid; k++) {
                    if (vx[i][k].size() > 1) {
                        points++;
                    }
                }
                if (!(first)) {
                    writerLocations.print("," + points);
                } else {
                    writerLocations.print("" + points);
                }
                first = false;
            }
            writerLocations.print("],\nsegmentlength: [0,");
            // List the index (position) of the segments (in order)
            first = true;
            boolean add = false;
            points = 0;
            for (int i = 0; i <= dayspan; i++) {
                for (int k = 1; k <= maxid; k++) {
                    if (vx[i][k].size() > 1) {
                        points += (vx[i][k].size());
                        add = true;
                    } else {
                        points += 0;
                    }
                    if (add) {
                        if (!(first)) {
                            writerLocations.print("," + points);
                        } else {
                            writerLocations.print("" + points);
                        }
                        add = false;
                        first = false;
                    }
                }
            }
            writerLocations.println("]");
            writerLocations.println("};");
        } catch (Exception ex) {
            Logger lgr = Logger.getLogger(process.class.getName());
            lgr.log(Level.SEVERE, ex.getMessage(), ex);
        } finally {
            // Best-effort cleanup; only the most recently assigned statement/result sets
            // are closed here (see the reassignment note above).
            try {
                if (writerLocations != null) {
                    writerLocations.close();
                }
                if (rs != null) {
                    rs.close();
                }
                if (rs2 != null) {
                    rs2.close();
                }
                if (pst != null) {
                    pst.close();
                }
                if (con != null) {
                    con.close();
                }
            } catch (SQLException ex) {
                Logger lgr = Logger.getLogger(process.class.getName());
                lgr.log(Level.WARNING, ex.getMessage(), ex);
            }
        }
    }
}
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.rockagen.gnext.dao.hb;

import java.io.Serializable;
import java.util.List;
import java.util.Map;

import org.hibernate.Criteria;
import org.hibernate.Query;
import org.hibernate.criterion.DetachedCriteria;

import com.rockagen.commons.util.CommUtil;
import com.rockagen.commons.util.ReflexUtil;
import com.rockagen.gnext.dao.Hibernate4GenericDao;

/**
 * Implementation of the <code>Hibernate4GenericDao</code> interface
 *
 * <p>Generic CRUD/query DAO; the concrete entity type E is resolved reflectively from the
 * subclass's generic superclass declaration (see {@link #getEntityClass()}).</p>
 *
 * @author RA
 * @see Hibernate4GenericDao
 */
public class Hibernate4GenericDaoImpl <E, PK extends Serializable> extends BaseDao implements Hibernate4GenericDao<E, PK>{

	// ~ Instance fields ==================================================

	/**
	 * Query all
	 * <p>Default HQL ("from EntityName") used when a caller passes a blank query string.
	 * Initialized at construction time from {@link #getEntityClass()}.</p>
	 */
	public final String QUERY_ALL = "from " + getEntityClass().getSimpleName();

	@Override
	public void delete(PK id) {
		// Load-then-delete so a non-existent id is a silent no-op.
		if (id != null) {
			E instance = get(id);
			if (instance != null) {
				delete(instance);
			}
		}
	}

	@Override
	public void delete(E pojo) {
		if (pojo != null) {
			super.getSession().delete(pojo);
		}
	}

	@SuppressWarnings("unchecked")
	@Override
	public E get(PK id) {
		if (id != null) {
			return (E) super.getSession().get(getEntityClass(), id);
		}
		return null;
	}

	/**
	 * Create a Hibernate {@link Query}
	 *
	 * <p>Blank HQL falls back to {@link #QUERY_ALL}. Named parameters (map) take
	 * precedence over positional parameters (values) when both are supplied.</p>
	 *
	 * @param hql HQL string; blank means "select all entities"
	 * @param firstResult first row offset; non-positive means from the start
	 * @param maxResults
	 *            negatives meaning no limit...
	 * @param map named parameters (nullable)
	 * @param values positional parameters, used only when map is null/empty
	 * @return Query
	 */
	protected Query createQuery(String hql, int firstResult, int maxResults,final Map<String, Object> map,final Object... values) {
		if (CommUtil.isBlank(hql)) {
			hql = QUERY_ALL;
		}
		Query query = super.getSession().createQuery(hql);
		if (firstResult > 0) {
			query.setFirstResult(firstResult);
		}
		if (maxResults > 0) {
			query.setMaxResults(maxResults);
		}
		if(map!=null && !map.isEmpty()){
			for(Map.Entry<String, Object> entry : map.entrySet()){
				query.setParameter(entry.getKey(),entry.getValue());
			}
		}else if(values !=null && values.length>0){
			for (int i = 0; i < values.length; i++) {
				query.setParameter(i, values[i]);
			}
		}
		return query;
	}

	/**
	 * Create a hibernate {@link Criteria} by {@link DetachedCriteria}
	 *
	 * @param dcriteria detached criteria; null means "all entities of E"
	 * @param firstResult first row offset; non-positive means from the start
	 * @param maxResults
	 *            negatives meaning no limit...
	 * @return Criteria
	 */
	protected Criteria createCriteria(DetachedCriteria dcriteria,
			int firstResult, int maxResults) {
		if (dcriteria == null) {
			dcriteria = DetachedCriteria.forClass(getEntityClass());
		}
		Criteria criteria = dcriteria.getExecutableCriteria(super.getSession());
		if (firstResult > 0) {
			criteria.setFirstResult(firstResult);
		}
		if (maxResults > 0) {
			criteria.setMaxResults(maxResults);
		}
		return criteria;
	}

	@Override
	public List<?> query(String hql, int firstResult, int maxResults,
			Object... values) {
		List<?> list = createQuery(hql, firstResult, maxResults, null,values).list();
		return list;
	}

	@Override
	public List<?> query(String hql, Object... values) {
		return query(hql, 0, -1, values);
	}

	@Override
	public List<?> query(String hql, int firstResult, int maxResults,
			Map<String, Object> values) {
		// Binds the Map overload of createQuery (positional varargs left empty).
		List<?> list = createQuery(hql, firstResult, maxResults, values).list();
		return list;
	}

	@Override
	public List<?> query(String hql, Map<String, Object> values) {
		return query(hql, 0, -1, values);
	}

	@Override
	public List<?> queryByCriteria(DetachedCriteria dcriteria,
			final int firstResult, final int maxResults) {
		List<?> list = createCriteria(dcriteria, firstResult, maxResults)
				.list();
		return list;
	}

	@Override
	public List<?> queryByCriteria(DetachedCriteria dcriteria) {
		return queryByCriteria(dcriteria, 0, -1);
	}

	/**
	 * <b>NOte: update operation id must not null</b>
	 * <p>
	 * Recommended to persistence at first, and then call the setXxx(x)
	 * </p>
	 *
	 * @see org.hibernate.Session#saveOrUpdate(Object object)
	 */
	@Override
	public void save(E pojo) {
		if (pojo != null) {
			//XXX not ideal
			super.getSession().saveOrUpdate(pojo);
		}
	}

	@Override
	public int executeUpdate(String hql, Object... values) {
		return createQuery(hql, 0, -1, null,values).executeUpdate();
	}

	// Resolves E from the first generic type argument of the concrete subclass.
	@SuppressWarnings("unchecked")
	protected Class<E> getEntityClass() {
		return (Class<E>) ReflexUtil.getSuperClassGenricClass(getClass(), 0);
	}
}
// ASM: a very small and fast Java bytecode manipulation framework // Copyright (c) 2000-2011 INRIA, France Telecom // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // 1. Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // 2. Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // 3. Neither the name of the copyright holders nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. package org.apache.tapestry5.internal.plastic.asm.util; import java.util.EnumSet; import org.apache.tapestry5.internal.plastic.asm.Opcodes; import org.apache.tapestry5.internal.plastic.asm.signature.SignatureVisitor; /** * A {@link SignatureVisitor} that checks that its methods are properly used. 
* * @author Eric Bruneton */ public class CheckSignatureAdapter extends SignatureVisitor { /** * Type to be used to check class signatures. See {@link #CheckSignatureAdapter(int, * SignatureVisitor)}. */ public static final int CLASS_SIGNATURE = 0; /** * Type to be used to check method signatures. See {@link #CheckSignatureAdapter(int, * SignatureVisitor)}. */ public static final int METHOD_SIGNATURE = 1; /** * Type to be used to check type signatures.See {@link #CheckSignatureAdapter(int, * SignatureVisitor)}. */ public static final int TYPE_SIGNATURE = 2; /** The valid automaton states for a {@link #visitFormalTypeParameter} method call. */ private static final EnumSet<State> VISIT_FORMAL_TYPE_PARAMETER_STATES = EnumSet.of(State.EMPTY, State.FORMAL, State.BOUND); /** The valid automaton states for a {@link #visitClassBound} method call. */ private static final EnumSet<State> VISIT_CLASS_BOUND_STATES = EnumSet.of(State.FORMAL); /** The valid automaton states for a {@link #visitInterfaceBound} method call. */ private static final EnumSet<State> VISIT_INTERFACE_BOUND_STATES = EnumSet.of(State.FORMAL, State.BOUND); /** The valid automaton states for a {@link #visitSuperclass} method call. */ private static final EnumSet<State> VISIT_SUPER_CLASS_STATES = EnumSet.of(State.EMPTY, State.FORMAL, State.BOUND); /** The valid automaton states for a {@link #visitInterface} method call. */ private static final EnumSet<State> VISIT_INTERFACE_STATES = EnumSet.of(State.SUPER); /** The valid automaton states for a {@link #visitParameterType} method call. */ private static final EnumSet<State> VISIT_PARAMETER_TYPE_STATES = EnumSet.of(State.EMPTY, State.FORMAL, State.BOUND, State.PARAM); /** The valid automaton states for a {@link #visitReturnType} method call. */ private static final EnumSet<State> VISIT_RETURN_TYPE_STATES = EnumSet.of(State.EMPTY, State.FORMAL, State.BOUND, State.PARAM); /** The valid automaton states for a {@link #visitExceptionType} method call. 
*/ private static final EnumSet<State> VISIT_EXCEPTION_TYPE_STATES = EnumSet.of(State.RETURN); /** The possible states of the automaton used to check the order of method calls. */ private enum State { EMPTY, FORMAL, BOUND, SUPER, PARAM, RETURN, SIMPLE_TYPE, CLASS_TYPE, END; } private static final String INVALID = "Invalid "; /** The type of the visited signature. */ private final int type; /** The current state of the automaton used to check the order of method calls. */ private State state; /** Whether the visited signature can be 'V'. */ private boolean canBeVoid; /** The visitor to which this adapter must delegate calls. May be {@literal null}. */ private final SignatureVisitor signatureVisitor; /** * Constructs a new {@link CheckSignatureAdapter}. <i>Subclasses must not use this * constructor</i>. Instead, they must use the {@link #CheckSignatureAdapter(int, int, * SignatureVisitor)} version. * * @param type the type of signature to be checked. See {@link #CLASS_SIGNATURE}, {@link * #METHOD_SIGNATURE} and {@link #TYPE_SIGNATURE}. * @param signatureVisitor the visitor to which this adapter must delegate calls. May be {@literal * null}. */ public CheckSignatureAdapter(final int type, final SignatureVisitor signatureVisitor) { this(/* latest api = */ Opcodes.ASM9, type, signatureVisitor); } /** * Constructs a new {@link CheckSignatureAdapter}. * * @param api the ASM API version implemented by this visitor. Must be one of {@link * Opcodes#ASM4}, {@link Opcodes#ASM5}, {@link Opcodes#ASM6}, {@link Opcodes#ASM7}, {@link * Opcodes#ASM8} or {@link Opcodes#ASM9}. * @param type the type of signature to be checked. See {@link #CLASS_SIGNATURE}, {@link * #METHOD_SIGNATURE} and {@link #TYPE_SIGNATURE}. * @param signatureVisitor the visitor to which this adapter must delegate calls. May be {@literal * null}. 
*/ protected CheckSignatureAdapter( final int api, final int type, final SignatureVisitor signatureVisitor) { super(api); this.type = type; this.state = State.EMPTY; this.signatureVisitor = signatureVisitor; } // class and method signatures @Override public void visitFormalTypeParameter(final String name) { if (type == TYPE_SIGNATURE || !VISIT_FORMAL_TYPE_PARAMETER_STATES.contains(state)) { throw new IllegalStateException(); } checkIdentifier(name, "formal type parameter"); state = State.FORMAL; if (signatureVisitor != null) { signatureVisitor.visitFormalTypeParameter(name); } } @Override public SignatureVisitor visitClassBound() { if (type == TYPE_SIGNATURE || !VISIT_CLASS_BOUND_STATES.contains(state)) { throw new IllegalStateException(); } state = State.BOUND; return new CheckSignatureAdapter( TYPE_SIGNATURE, signatureVisitor == null ? null : signatureVisitor.visitClassBound()); } @Override public SignatureVisitor visitInterfaceBound() { if (type == TYPE_SIGNATURE || !VISIT_INTERFACE_BOUND_STATES.contains(state)) { throw new IllegalStateException(); } return new CheckSignatureAdapter( TYPE_SIGNATURE, signatureVisitor == null ? null : signatureVisitor.visitInterfaceBound()); } // class signatures @Override public SignatureVisitor visitSuperclass() { if (type != CLASS_SIGNATURE || !VISIT_SUPER_CLASS_STATES.contains(state)) { throw new IllegalStateException(); } state = State.SUPER; return new CheckSignatureAdapter( TYPE_SIGNATURE, signatureVisitor == null ? null : signatureVisitor.visitSuperclass()); } @Override public SignatureVisitor visitInterface() { if (type != CLASS_SIGNATURE || !VISIT_INTERFACE_STATES.contains(state)) { throw new IllegalStateException(); } return new CheckSignatureAdapter( TYPE_SIGNATURE, signatureVisitor == null ? 
null : signatureVisitor.visitInterface()); } // method signatures @Override public SignatureVisitor visitParameterType() { if (type != METHOD_SIGNATURE || !VISIT_PARAMETER_TYPE_STATES.contains(state)) { throw new IllegalStateException(); } state = State.PARAM; return new CheckSignatureAdapter( TYPE_SIGNATURE, signatureVisitor == null ? null : signatureVisitor.visitParameterType()); } @Override public SignatureVisitor visitReturnType() { if (type != METHOD_SIGNATURE || !VISIT_RETURN_TYPE_STATES.contains(state)) { throw new IllegalStateException(); } state = State.RETURN; CheckSignatureAdapter checkSignatureAdapter = new CheckSignatureAdapter( TYPE_SIGNATURE, signatureVisitor == null ? null : signatureVisitor.visitReturnType()); checkSignatureAdapter.canBeVoid = true; return checkSignatureAdapter; } @Override public SignatureVisitor visitExceptionType() { if (type != METHOD_SIGNATURE || !VISIT_EXCEPTION_TYPE_STATES.contains(state)) { throw new IllegalStateException(); } return new CheckSignatureAdapter( TYPE_SIGNATURE, signatureVisitor == null ? 
null : signatureVisitor.visitExceptionType()); } // type signatures @Override public void visitBaseType(final char descriptor) { if (type != TYPE_SIGNATURE || state != State.EMPTY) { throw new IllegalStateException(); } if (descriptor == 'V') { if (!canBeVoid) { throw new IllegalArgumentException("Base type descriptor can't be V"); } } else { if ("ZCBSIFJD".indexOf(descriptor) == -1) { throw new IllegalArgumentException("Base type descriptor must be one of ZCBSIFJD"); } } state = State.SIMPLE_TYPE; if (signatureVisitor != null) { signatureVisitor.visitBaseType(descriptor); } } @Override public void visitTypeVariable(final String name) { if (type != TYPE_SIGNATURE || state != State.EMPTY) { throw new IllegalStateException(); } checkIdentifier(name, "type variable"); state = State.SIMPLE_TYPE; if (signatureVisitor != null) { signatureVisitor.visitTypeVariable(name); } } @Override public SignatureVisitor visitArrayType() { if (type != TYPE_SIGNATURE || state != State.EMPTY) { throw new IllegalStateException(); } state = State.SIMPLE_TYPE; return new CheckSignatureAdapter( TYPE_SIGNATURE, signatureVisitor == null ? 
null : signatureVisitor.visitArrayType()); } @Override public void visitClassType(final String name) { if (type != TYPE_SIGNATURE || state != State.EMPTY) { throw new IllegalStateException(); } checkClassName(name, "class name"); state = State.CLASS_TYPE; if (signatureVisitor != null) { signatureVisitor.visitClassType(name); } } @Override public void visitInnerClassType(final String name) { if (state != State.CLASS_TYPE) { throw new IllegalStateException(); } checkIdentifier(name, "inner class name"); if (signatureVisitor != null) { signatureVisitor.visitInnerClassType(name); } } @Override public void visitTypeArgument() { if (state != State.CLASS_TYPE) { throw new IllegalStateException(); } if (signatureVisitor != null) { signatureVisitor.visitTypeArgument(); } } @Override public SignatureVisitor visitTypeArgument(final char wildcard) { if (state != State.CLASS_TYPE) { throw new IllegalStateException(); } if ("+-=".indexOf(wildcard) == -1) { throw new IllegalArgumentException("Wildcard must be one of +-="); } return new CheckSignatureAdapter( TYPE_SIGNATURE, signatureVisitor == null ? null : signatureVisitor.visitTypeArgument(wildcard)); } @Override public void visitEnd() { if (state != State.CLASS_TYPE) { throw new IllegalStateException(); } state = State.END; if (signatureVisitor != null) { signatureVisitor.visitEnd(); } } private void checkClassName(final String name, final String message) { if (name == null || name.length() == 0) { throw new IllegalArgumentException(INVALID + message + " (must not be null or empty)"); } for (int i = 0; i < name.length(); ++i) { if (".;[<>:".indexOf(name.charAt(i)) != -1) { throw new IllegalArgumentException( INVALID + message + " (must not contain . 
; [ < > or :): " + name); } } } private void checkIdentifier(final String name, final String message) { if (name == null || name.length() == 0) { throw new IllegalArgumentException(INVALID + message + " (must not be null or empty)"); } for (int i = 0; i < name.length(); ++i) { if (".;[/<>:".indexOf(name.charAt(i)) != -1) { throw new IllegalArgumentException( INVALID + message + " (must not contain . ; [ / < > or :): " + name); } } } }
package com.planet_ink.coffee_mud.Abilities.Prayers; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.core.collections.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Libraries.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright 2003-2022 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ public class Prayer_AnimateGhast extends Prayer { @Override public String ID() { return "Prayer_AnimateGhast"; } private final static String localizedName = CMLib.lang().L("Animate Ghast"); @Override public String name() { return localizedName; } @Override public int classificationCode() { return Ability.ACODE_PRAYER | Ability.DOMAIN_DEATHLORE; } @Override public int abstractQuality() { return Ability.QUALITY_INDIFFERENT; } @Override public int enchantQuality() { return Ability.QUALITY_INDIFFERENT; } @Override public long flags() { return Ability.FLAG_UNHOLY; } @Override protected int canTargetCode() { return CAN_ITEMS; } private final static String localizedDiplayText = CMLib.lang().L("Newly animate dead"); @Override public String displayText() { return localizedDiplayText; } @Override public void unInvoke() { final Physical P=affected; super.unInvoke(); if((P instanceof MOB)&&(this.canBeUninvoked)&&(this.unInvoked)) { if((!P.amDestroyed()) &&(((MOB)P).amFollowing()==null)) { final Room R=CMLib.map().roomLocation(P); if(CMLib.law().getLandOwnerName(R).length()==0) { if(!CMLib.law().doesHavePriviledgesHere(invoker(), R)) { if((R!=null)&&(!((MOB)P).amDead())) R.showHappens(CMMsg.MSG_OK_ACTION, P,L("<S-NAME> wander(s) off.")); P.destroy(); } } } } } @Override public boolean tick(final Tickable ticking, final int tickID) { final int tickSet = super.tickDown; if(!super.tick(ticking, tickID)) return false; if(ticking instanceof MOB) { final MOB mob=(MOB)ticking; if(mob.amFollowing() != null) super.tickDown = tickSet; } return true; } public int getUndeadLevel(final MOB mob, final double baseLvl, final double corpseLevel) { final ExpertiseLibrary exLib=CMLib.expertises(); final double deathLoreExpertiseLevel = super.getXLEVELLevel(mob); final double appropriateLoreExpertiseLevel = super.getX1Level(mob); final double charLevel = mob.phyStats().level(); final double maxDeathLoreExpertiseLevel = exLib.getHighestListableStageBySkill(mob,ID(),ExpertiseLibrary.XType.LEVEL); 
final double maxApproLoreExpertiseLevel = exLib.getHighestListableStageBySkill(mob,ID(),ExpertiseLibrary.XType.X1); double lvl = 0; if ((maxApproLoreExpertiseLevel > 0) && (maxDeathLoreExpertiseLevel > 0)) { lvl = (charLevel * (10 + appropriateLoreExpertiseLevel) / (10 + maxApproLoreExpertiseLevel)) -(baseLvl+4+(2*maxDeathLoreExpertiseLevel)); } if(lvl < 0.0) lvl = 0.0; lvl += baseLvl + (2*deathLoreExpertiseLevel); if(lvl > corpseLevel) lvl = corpseLevel; return (int)Math.round(lvl); } @Override public boolean invoke(final MOB mob, final List<String> commands, final Physical givenTarget, final boolean auto, final int asLevel) { final Physical target=getAnyTarget(mob,commands,givenTarget,Wearable.FILTER_UNWORNONLY); if(target==null) return false; if(target==mob) { mob.tell(L("@x1 doesn't look dead yet.",target.name(mob))); return false; } if(!(target instanceof DeadBody)) { mob.tell(L("You can't animate that.")); return false; } final DeadBody body=(DeadBody)target; if(body.isPlayerCorpse()||(body.getMobName().length()==0) ||((body.charStats()!=null)&&(body.charStats().getMyRace()!=null)&&(body.charStats().getMyRace().racialCategory().equalsIgnoreCase("Undead")))) { mob.tell(L("You can't animate that.")); return false; } String race="a"; if((body.charStats()!=null)&&(body.charStats().getMyRace()!=null)) race=CMLib.english().startWithAorAn(body.charStats().getMyRace().name()).toLowerCase(); String description=body.getMobDescription(); if(description.trim().length()==0) description="It looks dead."; else description+="\n\rIt also looks dead."; if(body.basePhyStats().level()<7) { mob.tell(L("This creature is too weak to create a ghast from.")); return false; } if(!super.invoke(mob,commands,givenTarget,auto,asLevel)) return false; final boolean success=proficiencyCheck(mob,0,auto); if(success) { final CMMsg msg=CMClass.getMsg(mob,target,this,verbalCastCode(mob,target,auto), auto?"":L("^S<S-NAME> @x1 to animate <T-NAMESELF> as a ghast.^?",prayForWord(mob))); 
if(mob.location().okMessage(mob,msg)) { mob.location().send(mob,msg); final int undeadLevel = this.getUndeadLevel(mob, 6, body.phyStats().level()); final String undeadRace = ((body.charStats()!=null) && (body.charStats().getMyRace() != null) && (body.charStats().getMyRace().useRideClass())) ? "GenRideableUndead" : "GenUndead"; final MOB newMOB=CMClass.getMOB(undeadRace); newMOB.setName(L("@x1 ghast",race)); newMOB.setDescription(description); newMOB.setDisplayText(L("@x1 ghast is here",race)); newMOB.basePhyStats().setLevel(undeadLevel); newMOB.baseCharStats().setStat(CharStats.STAT_GENDER,body.charStats().getStat(CharStats.STAT_GENDER)); newMOB.baseCharStats().setMyRace(CMClass.getRace("Undead")); newMOB.baseCharStats().setBodyPartsFromStringAfterRace(body.charStats().getBodyPartsAsString()); final Ability P=CMClass.getAbility("Prop_StatTrainer"); if(P!=null) { P.setMiscText("NOTEACH STR=20 INT=10 WIS=10 CON=10 DEX=15 CHA=2"); newMOB.addNonUninvokableEffect(P); } newMOB.recoverCharStats(); newMOB.basePhyStats().setAttackAdjustment(CMLib.leveler().getLevelAttack(newMOB)); newMOB.basePhyStats().setDamage(CMLib.leveler().getLevelMOBDamage(newMOB)); newMOB.basePhyStats().setSensesMask(PhyStats.CAN_SEE_DARK); CMLib.factions().setAlignment(newMOB,Faction.Align.EVIL); newMOB.baseState().setHitPoints(25*newMOB.basePhyStats().level()); newMOB.baseState().setMovement(CMLib.leveler().getLevelMove(newMOB)); newMOB.basePhyStats().setArmor(CMLib.leveler().getLevelMOBArmor(newMOB)); newMOB.baseState().setMana(100); newMOB.recoverCharStats(); newMOB.recoverPhyStats(); newMOB.recoverMaxState(); newMOB.resetToMaxState(); newMOB.addAbility(CMClass.getAbility("Paralysis")); Behavior B=CMClass.getBehavior("CombatAbilities"); if(B!=null) newMOB.addBehavior(B); B=CMClass.getBehavior("Aggressive"); if(B!=null) { B.setParms("+NAMES \"-"+mob.Name()+"\" -LEVEL +>"+newMOB.basePhyStats().level()); newMOB.addBehavior(B); } newMOB.addNonUninvokableEffect(CMClass.getAbility("Spell_CauseStink")); 
newMOB.addNonUninvokableEffect(CMClass.getAbility("Prop_ModExperience","0")); newMOB.addTattoo("SYSTEM_SUMMONED"); newMOB.text(); newMOB.bringToLife(mob.location(),true); CMLib.beanCounter().clearZeroMoney(newMOB,null); newMOB.setMoneyVariation(0); //newMOB.location().showOthers(newMOB,null,CMMsg.MSG_OK_ACTION,L("<S-NAME> appears!")); int it=0; while(it<newMOB.location().numItems()) { final Item item=newMOB.location().getItem(it); if((item!=null)&&(item.container()==body)) { final CMMsg msg2=CMClass.getMsg(newMOB,body,item,CMMsg.MSG_GET,null); newMOB.location().send(newMOB,msg2); final CMMsg msg4=CMClass.getMsg(newMOB,item,null,CMMsg.MSG_GET,null); newMOB.location().send(newMOB,msg4); final CMMsg msg3=CMClass.getMsg(newMOB,item,null,CMMsg.MSG_WEAR,null); newMOB.location().send(newMOB,msg3); if(!newMOB.isMine(item)) it++; else it=0; } else it++; } body.destroy(); mob.location().show(newMOB,null,CMMsg.MSG_OK_ACTION,L("<S-NAME> begin(s) to rise!")); newMOB.setStartRoom(null); beneficialAffect(mob,newMOB,0,0); mob.location().recoverRoomStats(); } } else return beneficialWordsFizzle(mob,target,L("<S-NAME> @x1 to animate <T-NAMESELF>, but fail(s) miserably.",prayForWord(mob))); // return whether it worked return success; } }
/* * Created on 04.10.2004 * */ package net.strongdesign.desij.decomposition; import java.io.File; import java.io.IOException; import java.util.Collection; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import net.strongdesign.desij.DesiJ; import net.strongdesign.desij.CLW; import net.strongdesign.desij.Messages; import net.strongdesign.desij.decomposition.partitioning.Partition; import net.strongdesign.stg.Node; import net.strongdesign.stg.Place; import net.strongdesign.stg.STG; import net.strongdesign.stg.STGException; import net.strongdesign.stg.STGFile; import net.strongdesign.stg.STGUtil; import net.strongdesign.stg.Signature; import net.strongdesign.stg.Transition; import net.strongdesign.stg.traversal.ConditionFactory; import net.strongdesign.util.FileSupport; import net.strongdesign.util.Pair; /** * An abstract implementation of the decomposition algorithm, which is highly * configurable. By an instance of * {@link desij.decomposition.DecompositionParameter} all necessary parameters are * provided to the algorithm. * * <p> * <b>History: </b> <br> * 11.04.2004: Created <br> * * <p> * * @author Mark Schaefer */ public abstract class AbstractDecomposition { private int number = 0; protected String filePrefix; protected STG specification; public String lastMessage; public AbstractDecomposition(String filePrefix) { this.filePrefix = filePrefix; } protected enum UndoMarker { ENTERED_NODE, BEFORE_DECOMPOSITION, FINAL_REDUCTION, BEFORE_CONTRACTION } public Collection<STG> decompose(STG stg, Partition partition) throws STGException, IOException { // String fileNamePrefix = decoPara.filePrefix; this.specification = stg; // specification file --> for determination of initial dummies during OutDet Decomposition //valid Partition? if (CLW.instance.ALLOW_INCOMPLETE_PARTITION.isEnabled()) { if (! partition.correctSubPartitionOf(stg)) throw new STGException("Incorrect subpartition"); } else if (! 
partition.correctPartitionOf(stg)) throw new STGException("Incorrect complete partition (try -" + CLW.instance.ALLOW_INCOMPLETE_PARTITION.getShortName() + " option for incomplete partition)"); if (! partition.feasiblePartitionOf(stg) ) throw new STGException(Messages.getString("ParametrizedDecomposition.invalid_partition")); //$NON-NLS-1$ DesiJ.logFile.info( Messages.getString("ParametrizedDecomposition.partition") + partition); DesiJ.logFile.info( Messages.getString("ParametrizedDecomposition.partition_feasible")); //Partitionen generieren List<STG> components = Partition.splitByPartition(stg, partition); //for the results List<STG> result= new LinkedList<STG>(); //und aufrufen for (STG component : components) { StringBuilder signalNames = new StringBuilder(); for (String s : component.getSignalNames(component.getSignals(Signature.OUTPUT))) signalNames.append(s.toString()); logging(stg, signalNames.toString(), DecompositionEvent.NEW_COMPONENT, signalNames); STGInOutParameter componentParameter = new STGInOutParameter(component); reduce(componentParameter); result.add(componentParameter.stg); } return result; } /** * Tries to remove a transition from an STG * * @param dP * The actual parameter set, including the STG * @param transition * The transition to remove * @return True if it was successful, false if not */ protected boolean removeIfRedundant(STG stg, Transition transition) { if (ConditionFactory.getRedundantTransitionCondition(stg).fulfilled(transition)) { stg.removeTransition(transition); return true; } return false; } protected void logging(STG stg, DecompositionEvent event, Object affectedComponents) { logging(stg, filePrefix + File.separator + "stg_" + digits(number), event, affectedComponents); if (event.writeSTG()) ++number; } @SuppressWarnings("rawtypes") protected void logging(STG stg, String fileName, DecompositionEvent event, Object affectedComponents) { DesiJ.stats.logging(stg, event, affectedComponents); //TODO move check to events // no logging 
at the console output --> you can see the ouput in the logfile too // if (event.getVerboseLevel() <= CLW.instance.VERBOSE.getIntValue()) { // if ( ! ((event == DecompositionEvent.RED_PLACE_DEL || event == DecompositionEvent.RED_TRANS_DEL) // && affectedComponents instanceof Collection && ((Collection)affectedComponents).size() == 0) ) // System.out.println(event.toString()+(affectedComponents!=null?affectedComponents:"")); // // } try { if (CLW.instance.WRITE_LOGFILE.isEnabled()) { //tried to delete redundant nodes but nothing found -> prevent empty log entry if ( (event == DecompositionEvent.RED_PLACE_DEL || event == DecompositionEvent.RED_TRANS_DEL) && affectedComponents instanceof Collection && ((Collection)affectedComponents).size() == 0) return; DesiJ.logFile.debug( "" + event + (affectedComponents!=null?affectedComponents:"") + (CLW.instance.WRITE_INTERMEDIATE_RESULTS.isEnabled() ? (" - file: " + fileName) : "")); } if (CLW.instance.WRITE_INTERMEDIATE_RESULTS.isEnabled() && event.writeSTG()) { FileSupport.saveToDisk(STGFile.convertToG(stg), fileName); } } catch (IOException e) { System.out.println("Error during logging"); } } protected Collection<Node> redDel(STG stg) { return STGUtil.redDel(stg); } public abstract List<Transition> reduce(STGInOutParameter stg) throws STGException; /** * Contracts a set of transitions; tries to contract a transition several times. * In fact, as long as no transition at all could be contracted. 
* * @param decoPara * @param contract * @param number * @return A pair with a: all transittions which could no be contracted, b: not contractable due to syntactic conflict * @throws Exception * TODO optimise handling of selftriggering inducing contractions, such transitions cannot be contracted * */ public Pair<List<Transition>, List<Transition>> contract (STG stg, List<Transition> contract) throws STGException { boolean back = false; boolean fault; List<Transition> tried, triedSyntactic; Collection<Node> removed = new HashSet<Node>(); do { triedSyntactic = new LinkedList<Transition>(); tried = new LinkedList<Transition>(); fault=false; //used to detect an increase in the number of places int nroPlaces = stg.getNumberOfPlaces(); for (Transition actTransition : contract ) { if (removed.contains(actTransition)) continue; Reason contractable = isContractable(stg, actTransition); if (contractable == Reason.OK) { if (CLW.instance.FORBID_SELFTRIGGERING.isEnabled()) { stg.addUndoMarker(UndoMarker.BEFORE_CONTRACTION); } Collection<Place> newPlaces = stg.contract(actTransition); logging(stg, DecompositionEvent.TRANS_CON, actTransition.getString(Node.UNIQUE)); if (CLW.instance.FORBID_SELFTRIGGERING.isEnabled()) { fp: for (Place place : newPlaces) { if (! 
ConditionFactory.SELF_TRIGGERING_PLACE.fulfilled(place)) continue; logging(stg, DecompositionEvent.SELF_TRIGGERING_FOUND, place); if (ConditionFactory.getRedundantPlaceCondition(stg).fulfilled(place)) { stg.removePlace(place); logging(stg, DecompositionEvent.SELF_TRIGGERING_REMOVED, place); } else { logging(stg, DecompositionEvent.SELF_TRIGGERING_NOT_REMOVED, place); stg.undoToMarker(UndoMarker.BEFORE_CONTRACTION); // a posteriori it seems syntactically impossible to contract tried.add(actTransition); //triedSyntactic.add(actTransition); // However, the signal MUST be backtracked to avoid this conflict --> also when out-det is enabled fault = true; break fp; } } } } else { if (contractable == Reason.CONFLICT) { tried.add(actTransition); } else if (contractable == Reason.SYNTACTIC) { triedSyntactic.add(actTransition); tried.add(actTransition); } fault = true; } if (CLW.instance.CHECK_RED_OFTEN.isEnabled()) { removed.addAll(redDel(stg)); } else { int newNroP = stg.getNumberOfPlaces(); if (newNroP > CLW.instance.PLACE_INCREASE.getDoubleValue() * nroPlaces) { removed.addAll(redDel(stg)); nroPlaces = newNroP; logging(stg, DecompositionEvent.PLACE_INCREASE, null); } } } if (! 
CLW.instance.CHECK_RED_OFTEN.isEnabled()) { removed.addAll(redDel(stg)); } //tried to contract all transitions, but none was successful if (tried.size() == contract.size()) back = true; //could contract some transitions, will try for the remaining again //first redundant transitions will be removed contract = tried; logging(stg, DecompositionEvent.NEW_POSTPONE_TRY, null); } while (!back && fault); // redDel(stg); return Pair.getPair(tried, triedSyntactic); } public enum Reason {SYNTACTIC, CONFLICT, OK} public Reason isContractable(Transition transition) { return isContractable(transition.getSTG(), transition); } private Reason isContractable(STG stg, Transition transition) { if (stg.getSignature(transition.getLabel().getSignal()) != Signature.DUMMY) { logging(stg, DecompositionEvent.CONTRACTION_NOT_POSSIBLE_DUMMY, transition.getString(Node.UNIQUE)); lastMessage = DecompositionEvent.CONTRACTION_NOT_POSSIBLE_DUMMY.toString(); return Reason.SYNTACTIC; } if ( ! ConditionFactory.SECURE_CONTRACTION.fulfilled(transition)) { logging(stg, DecompositionEvent.CONTRACTION_NOT_SECURE, transition.getString(Node.UNIQUE)); lastMessage = DecompositionEvent.CONTRACTION_NOT_SECURE.toString(); return Reason.SYNTACTIC; } //TODO wird das doppelt geprueft ??? if ( ConditionFactory.LOOP_NODE.fulfilled(transition)) { logging(stg, DecompositionEvent.CONTRACTION_NOT_POSSIBLE_LOOP, transition.getString(Node.UNIQUE)); lastMessage = DecompositionEvent.CONTRACTION_NOT_POSSIBLE_LOOP.toString(); return Reason.SYNTACTIC; } //TODO wird das doppelt geprueft ??? 
if ( ConditionFactory.ARC_WEIGHT.fulfilled(transition)) { logging(stg, DecompositionEvent.CONTRACTION_NOT_POSSIBLE_ARC_WEIGHT, transition.getString(Node.UNIQUE)); lastMessage = DecompositionEvent.CONTRACTION_NOT_POSSIBLE_ARC_WEIGHT.toString(); return Reason.SYNTACTIC; } if (!CLW.instance.RISKY.isEnabled() && ConditionFactory.NEW_AUTOCONFLICT_PAIR.fulfilled(transition) ) { logging(stg, DecompositionEvent.CONTRACTION_NOT_POSSIBLE_NEW_AUTOCONFLICT, transition.getString(Node.UNIQUE)); lastMessage = DecompositionEvent.CONTRACTION_NOT_POSSIBLE_NEW_AUTOCONFLICT.toString(); return Reason.CONFLICT; } if (CLW.instance.SAFE_CONTRACTIONS.isEnabled()) { if (! ConditionFactory.SAFE_CONTRACTABLE.fulfilled(transition)) { if (CLW.instance.SAFE_CONTRACTIONS_UNFOLDING.isEnabled() && stg.getSize() <= CLW.instance.MAX_STG_SIZE_FOR_UNFOLDING.getIntValue()) { if (! new ConditionFactory.SafeContraction<Transition>(stg).fulfilled(transition)) { logging(stg, DecompositionEvent.CONTRACTION_NOT_POSSIBLE_DYNAMICALLY_UNSAFE, transition.getString(Node.UNIQUE)); lastMessage = DecompositionEvent.CONTRACTION_NOT_POSSIBLE_DYNAMICALLY_UNSAFE.toString(); return Reason.SYNTACTIC; } } else { logging(stg, DecompositionEvent.CONTRACTION_NOT_POSSIBLE_SYNTACTICALLY_UNSAFE, transition.getString(Node.UNIQUE)); lastMessage = DecompositionEvent.CONTRACTION_NOT_POSSIBLE_SYNTACTICALLY_UNSAFE.toString(); return Reason.SYNTACTIC; } } } return Reason.OK; // a priori it seems OK to contract this transition } /** * Formats a number as a string with up to three leading zeros * * @param n * The number to format * @return The formated string */ protected String digits(int n) { //return String.format("%1$03d", n); return "" + (n<=9?"0":"") + (n<=99?"0":"") + (n<=999?"0":"") + n; } }
/******************************************************************************* * Copyright 2014 United States Government as represented by the * Administrator of the National Aeronautics and Space Administration. * All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ /** * <copyright> * </copyright> * * $Id$ */ package gov.nasa.ensemble.core.plan.resources.profile.provider; import gov.nasa.ensemble.common.functional.Lists; import gov.nasa.ensemble.common.functional.Predicate; import gov.nasa.ensemble.core.jscience.Profile; import gov.nasa.ensemble.core.jscience.TemporalOffset; import gov.nasa.ensemble.core.model.plan.EPlan; import gov.nasa.ensemble.core.model.plan.translator.WrapperUtils; import gov.nasa.ensemble.core.model.plan.util.EPlanUtils; import gov.nasa.ensemble.core.plan.resources.profile.ProfilePackage; import gov.nasa.ensemble.core.plan.resources.profile.ProfileReference; import gov.nasa.ensemble.core.plan.resources.profile.ResourceProfileMember; import gov.nasa.ensemble.emf.model.common.Timepoint; import gov.nasa.ensemble.emf.util.EMFUtils; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.List; import org.eclipse.emf.common.command.Command; import org.eclipse.emf.common.notify.AdapterFactory; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.common.util.EList; import 
org.eclipse.emf.common.util.ResourceLocator; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.ecore.EStructuralFeature; import org.eclipse.emf.ecore.EcoreFactory; import org.eclipse.emf.ecore.EcorePackage; import org.eclipse.emf.ecore.util.EcoreUtil; import org.eclipse.emf.edit.command.CopyCommand; import org.eclipse.emf.edit.command.CopyCommand.Helper; import org.eclipse.emf.edit.domain.EditingDomain; import org.eclipse.emf.edit.provider.ComposeableAdapterFactory; import org.eclipse.emf.edit.provider.IEditingDomainItemProvider; import org.eclipse.emf.edit.provider.IItemLabelProvider; import org.eclipse.emf.edit.provider.IItemPropertyDescriptor; import org.eclipse.emf.edit.provider.IItemPropertySource; import org.eclipse.emf.edit.provider.IStructuredItemContentProvider; import org.eclipse.emf.edit.provider.ITreeItemContentProvider; import org.eclipse.emf.edit.provider.ItemPropertyDescriptor; import org.eclipse.emf.edit.provider.ItemProviderAdapter; import org.eclipse.emf.edit.provider.ViewerNotification; /** * This is the item provider adapter for a {@link gov.nasa.ensemble.core.plan.resources.profile.ProfileReference} object. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public class ProfileReferenceItemProvider extends ItemProviderAdapter implements IEditingDomainItemProvider, IStructuredItemContentProvider, ITreeItemContentProvider, IItemLabelProvider, IItemPropertySource { /** * This constructs an instance from a factory and a notifier. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public ProfileReferenceItemProvider(AdapterFactory adapterFactory) { super(adapterFactory); } /** * This returns the property descriptors for the adapted class. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated NOT */ @Override public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object) { if (itemPropertyDescriptors == null) { super.getPropertyDescriptors(object); addProfileKeyPropertyDescriptor(object); addStartOffsetAmountPropertyDescriptor(object); addStartOffsetTimepointPropertyDescriptor(object); addEndOffsetAmountPropertyDescriptor(object); addEndOffsetTimepointPropertyDescriptor(object); } return itemPropertyDescriptors; } /** * This adds a property descriptor for the Profile Key feature. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated NOT */ protected void addProfileKeyPropertyDescriptor(Object object) { itemPropertyDescriptors.add(new ProfileKeyPropertyDescriptor((ComposeableAdapterFactory)adapterFactory)); } /** * This adds a property descriptor for the Start Offset Amount feature. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected void addStartOffsetAmountPropertyDescriptor(Object object) { itemPropertyDescriptors.add (createItemPropertyDescriptor (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(), getResourceLocator(), getString("_UI_ProfileReference_startOffsetAmount_feature"), getString("_UI_PropertyDescriptor_description", "_UI_ProfileReference_startOffsetAmount_feature", "_UI_ProfileReference_type"), ProfilePackage.Literals.PROFILE_REFERENCE__START_OFFSET_AMOUNT, true, false, false, ItemPropertyDescriptor.GENERIC_VALUE_IMAGE, null, null)); } /** * This adds a property descriptor for the Start Offset Timepoint feature. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected void addStartOffsetTimepointPropertyDescriptor(Object object) { itemPropertyDescriptors.add (createItemPropertyDescriptor (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(), getResourceLocator(), getString("_UI_ProfileReference_startOffsetTimepoint_feature"), getString("_UI_PropertyDescriptor_description", "_UI_ProfileReference_startOffsetTimepoint_feature", "_UI_ProfileReference_type"), ProfilePackage.Literals.PROFILE_REFERENCE__START_OFFSET_TIMEPOINT, true, false, false, ItemPropertyDescriptor.GENERIC_VALUE_IMAGE, null, null)); } /** * This adds a property descriptor for the End Offset Amount feature. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected void addEndOffsetAmountPropertyDescriptor(Object object) { itemPropertyDescriptors.add (createItemPropertyDescriptor (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(), getResourceLocator(), getString("_UI_ProfileReference_endOffsetAmount_feature"), getString("_UI_PropertyDescriptor_description", "_UI_ProfileReference_endOffsetAmount_feature", "_UI_ProfileReference_type"), ProfilePackage.Literals.PROFILE_REFERENCE__END_OFFSET_AMOUNT, true, false, false, ItemPropertyDescriptor.GENERIC_VALUE_IMAGE, null, null)); } /** * This adds a property descriptor for the End Offset Timepoint feature. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected void addEndOffsetTimepointPropertyDescriptor(Object object) { itemPropertyDescriptors.add (createItemPropertyDescriptor (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(), getResourceLocator(), getString("_UI_ProfileReference_endOffsetTimepoint_feature"), getString("_UI_PropertyDescriptor_description", "_UI_ProfileReference_endOffsetTimepoint_feature", "_UI_ProfileReference_type"), ProfilePackage.Literals.PROFILE_REFERENCE__END_OFFSET_TIMEPOINT, true, false, false, ItemPropertyDescriptor.GENERIC_VALUE_IMAGE, null, null)); } protected void addStartOffsetTimepointDescriptor(Object object) { itemPropertyDescriptors.add (new ItemPropertyDescriptor( ((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(), getResourceLocator(), "Start timepoint", "Relevant timepoint", ProfilePackage.Literals.PROFILE_REFERENCE__START_OFFSET, true, false, false, ItemPropertyDescriptor.GENERIC_VALUE_IMAGE, null, null) { @Override protected Collection<?> getComboBoxObjects(Object object) { return Timepoint.VALUES; } @Override public Object getPropertyValue(Object object) { TemporalOffset oldOffset = getPropertyValueUnwrapped(object); return oldOffset == null ? 
null : oldOffset.getTimepoint(); } @Override public void setPropertyValue(Object object, Object value) { TemporalOffset oldOffset = getPropertyValueUnwrapped(object); TemporalOffset newOffset = oldOffset.setTimepoint((Timepoint) value); super.setPropertyValue(object, newOffset); } private TemporalOffset getPropertyValueUnwrapped(Object object) { TemporalOffset oldOffset; Object propertyValue = super.getPropertyValue(object); if (propertyValue instanceof PropertyValueWrapper) { oldOffset = (TemporalOffset) ((PropertyValueWrapper) propertyValue).getEditableValue(object); } else { oldOffset = (TemporalOffset) propertyValue; } return oldOffset; } }); } /** * This specifies how to implement {@link #getChildren} and is used to deduce an appropriate feature for an * {@link org.eclipse.emf.edit.command.AddCommand}, {@link org.eclipse.emf.edit.command.RemoveCommand} or * {@link org.eclipse.emf.edit.command.MoveCommand} in {@link #createCommand}. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public Collection<? extends EStructuralFeature> getChildrenFeatures(Object object) { if (childrenFeatures == null) { super.getChildrenFeatures(object); childrenFeatures.add(ProfilePackage.Literals.PROFILE_REFERENCE__METADATA); } return childrenFeatures; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EStructuralFeature getChildFeature(Object object, Object child) { // Check the type of the specified child object and return the proper feature to use for // adding (see {@link AddCommand}) it as a child. return super.getChildFeature(object, child); } /** * This returns ProfileReference.gif. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public Object getImage(Object object) { return overlayImage(object, getResourceLocator().getImage("full/obj16/ProfileReference")); } /** * This returns the label text for the adapted class. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated NOT */ @Override public String getText(Object object) { String label = ((ProfileReference)object).getProfileKey(); return label == null || label.length() == 0 ? getString("_UI_ProfileReference_type") : getString("_UI_ProfileReference_type") + " " + label; } /** * This handles model notifications by calling {@link #updateChildren} to update any cached * children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void notifyChanged(Notification notification) { updateChildren(notification); switch (notification.getFeatureID(ProfileReference.class)) { case ProfilePackage.PROFILE_REFERENCE__START_OFFSET: case ProfilePackage.PROFILE_REFERENCE__START_OFFSET_AMOUNT: case ProfilePackage.PROFILE_REFERENCE__START_OFFSET_TIMEPOINT: case ProfilePackage.PROFILE_REFERENCE__END_OFFSET: case ProfilePackage.PROFILE_REFERENCE__END_OFFSET_AMOUNT: case ProfilePackage.PROFILE_REFERENCE__END_OFFSET_TIMEPOINT: case ProfilePackage.PROFILE_REFERENCE__PROFILE_KEY: fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), false, true)); return; case ProfilePackage.PROFILE_REFERENCE__METADATA: fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), true, false)); return; } super.notifyChanged(notification); } /** * This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children * that can be created under this object. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected void collectNewChildDescriptors(Collection<Object> newChildDescriptors, Object object) { super.collectNewChildDescriptors(newChildDescriptors, object); newChildDescriptors.add (createChildParameter (ProfilePackage.Literals.PROFILE_REFERENCE__METADATA, EcoreFactory.eINSTANCE.create(EcorePackage.Literals.ESTRING_TO_STRING_MAP_ENTRY))); } /** * Return the resource locator for this item provider's resources. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public ResourceLocator getResourceLocator() { return ResourceProfileEditPlugin.INSTANCE; } @Override protected Command createCopyCommand(EditingDomain domain, EObject owner, Helper helper) { return new CopyCommand(domain, owner, helper, domain.getOptimizeCopy()) { @Override public void execute() { super.execute(); for (Object object : getResult()) { if (object instanceof ProfileReference) { ((ProfileReference) object).setId(EcoreUtil.generateUUID()); } } } }; } protected Profile<?> getProfile(Object object) { ProfileReference reference = (ProfileReference)object; EPlan plan = EPlanUtils.getPlan(reference); if (plan == null) { plan = EPlanUtils.getPlanFromResourceSet(reference); } ResourceProfileMember member = WrapperUtils.getMember(plan, ResourceProfileMember.class); if (member==null) { return null; } String id = reference.getProfileKey(); Profile<?> profile = member.getProfile(id); return profile; } protected class ProfileKeyPropertyDescriptor extends ItemPropertyDescriptor { protected ProfileKeyPropertyDescriptor(ComposeableAdapterFactory adapterFactory) { super(adapterFactory.getRootAdapterFactory(), getResourceLocator(), getString("_UI_ProfileReference_profileKey_feature"), getString("_UI_PropertyDescriptor_description", "_UI_ProfileReference_profileKey_feature", "_UI_ProfileReference_type"), ProfilePackage.Literals.PROFILE_REFERENCE__PROFILE_KEY, true, false, false, 
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE, null, null); } @Override public Object getPropertyValue(Object object) { return getProfile(object); } @Override public IItemLabelProvider getLabelProvider(Object object) { ProfileReference reference = (ProfileReference) object; final EPlan plan = EPlanUtils.getPlan(reference); return new IItemLabelProvider() { @Override public String getText(Object object) { Profile<?> profile = null; if (object instanceof Profile) { profile = (Profile<?>) object; } else if (object instanceof String) { String id = (String) object; ResourceProfileMember member = WrapperUtils.getMember(plan, ResourceProfileMember.class); profile = member.getProfile(id); } if (profile != null) { IItemLabelProvider lp = EMFUtils.adapt(profile, IItemLabelProvider.class); return lp.getText(profile); } return null; } @Override public Object getImage(Object object) { Profile<?> profile = null; if (object instanceof Profile) { profile = (Profile<?>) object; } else if (object instanceof String) { String id = (String) object; ResourceProfileMember member = WrapperUtils.getMember(plan, ResourceProfileMember.class); profile = member.getProfile(id); } if (profile != null) { IItemLabelProvider lp = EMFUtils.adapt(profile, IItemLabelProvider.class); return lp.getImage(profile); } return null; } }; } @Override public Collection<?> getChoiceOfValues(Object object) { return getProfileKeys((ProfileReference) object, new Predicate<Profile<?>>() { @Override public boolean apply(Profile<?> value) { return true; } }); } protected Collection<?> getProfileKeys(ProfileReference reference, Predicate<Profile<?>> predicate) { EPlan plan = EPlanUtils.getPlan(reference); ResourceProfileMember member = WrapperUtils.getMember(plan, ResourceProfileMember.class); EList<Profile<?>> profiles = member.getResourceProfiles(); List<Profile<?>> filtered = Lists.filter(profiles, predicate); List<Profile<?>> profileKeys = new ArrayList<Profile<?>>(); for (Profile profile : filtered) { 
profileKeys.add(profile); } Collections.sort(profileKeys, new Comparator<Profile>() { @Override public int compare(Profile o1, Profile o2) { return o1.getId().compareTo(o2.getId()); } }); return profileKeys; } @Override public void setPropertyValue(Object object, Object value) { if (value instanceof Profile) { value = ((Profile)value).getId(); } super.setPropertyValue(object, value); } } }
package com.jeremyfeinstein.slidingmenu.lib; public final class R$style { public static final int AlertDialog_AppCompat = 2131165306; public static final int AlertDialog_AppCompat_Light = 2131165307; public static final int Animation_AppCompat_Dialog = 2131165309; public static final int Animation_AppCompat_DropDownUp = 2131165310; public static final int Base_AlertDialog_AppCompat = 2131165313; public static final int Base_AlertDialog_AppCompat_Light = 2131165314; public static final int Base_Animation_AppCompat_Dialog = 2131165315; public static final int Base_Animation_AppCompat_DropDownUp = 2131165316; public static final int Base_DialogWindowTitleBackground_AppCompat = 2131165318; public static final int Base_DialogWindowTitle_AppCompat = 2131165317; public static final int Base_TextAppearance_AppCompat = 2131165234; public static final int Base_TextAppearance_AppCompat_Body1 = 2131165235; public static final int Base_TextAppearance_AppCompat_Body2 = 2131165236; public static final int Base_TextAppearance_AppCompat_Button = 2131165209; public static final int Base_TextAppearance_AppCompat_Caption = 2131165237; public static final int Base_TextAppearance_AppCompat_Display1 = 2131165238; public static final int Base_TextAppearance_AppCompat_Display2 = 2131165239; public static final int Base_TextAppearance_AppCompat_Display3 = 2131165240; public static final int Base_TextAppearance_AppCompat_Display4 = 2131165241; public static final int Base_TextAppearance_AppCompat_Headline = 2131165242; public static final int Base_TextAppearance_AppCompat_Inverse = 2131165188; public static final int Base_TextAppearance_AppCompat_Large = 2131165243; public static final int Base_TextAppearance_AppCompat_Large_Inverse = 2131165189; public static final int Base_TextAppearance_AppCompat_Light_Widget_PopupMenu_Large = 2131165244; public static final int Base_TextAppearance_AppCompat_Light_Widget_PopupMenu_Small = 2131165245; public static final int 
Base_TextAppearance_AppCompat_Medium = 2131165246; public static final int Base_TextAppearance_AppCompat_Medium_Inverse = 2131165190; public static final int Base_TextAppearance_AppCompat_Menu = 2131165247; public static final int Base_TextAppearance_AppCompat_SearchResult = 2131165319; public static final int Base_TextAppearance_AppCompat_SearchResult_Subtitle = 2131165248; public static final int Base_TextAppearance_AppCompat_SearchResult_Title = 2131165249; public static final int Base_TextAppearance_AppCompat_Small = 2131165250; public static final int Base_TextAppearance_AppCompat_Small_Inverse = 2131165191; public static final int Base_TextAppearance_AppCompat_Subhead = 2131165251; public static final int Base_TextAppearance_AppCompat_Subhead_Inverse = 2131165192; public static final int Base_TextAppearance_AppCompat_Title = 2131165252; public static final int Base_TextAppearance_AppCompat_Title_Inverse = 2131165193; public static final int Base_TextAppearance_AppCompat_Widget_ActionBar_Menu = 2131165253; public static final int Base_TextAppearance_AppCompat_Widget_ActionBar_Subtitle = 2131165254; public static final int Base_TextAppearance_AppCompat_Widget_ActionBar_Subtitle_Inverse = 2131165255; public static final int Base_TextAppearance_AppCompat_Widget_ActionBar_Title = 2131165256; public static final int Base_TextAppearance_AppCompat_Widget_ActionBar_Title_Inverse = 2131165257; public static final int Base_TextAppearance_AppCompat_Widget_ActionMode_Subtitle = 2131165258; public static final int Base_TextAppearance_AppCompat_Widget_ActionMode_Title = 2131165259; public static final int Base_TextAppearance_AppCompat_Widget_DropDownItem = 2131165320; public static final int Base_TextAppearance_AppCompat_Widget_PopupMenu_Large = 2131165260; public static final int Base_TextAppearance_AppCompat_Widget_PopupMenu_Small = 2131165261; public static final int Base_TextAppearance_AppCompat_Widget_Switch = 2131165262; public static final int 
Base_TextAppearance_AppCompat_Widget_TextView_SpinnerItem = 2131165263; public static final int Base_TextAppearance_Widget_AppCompat_ExpandedMenu_Item = 2131165321; public static final int Base_TextAppearance_Widget_AppCompat_Toolbar_Subtitle = 2131165264; public static final int Base_TextAppearance_Widget_AppCompat_Toolbar_Title = 2131165265; public static final int Base_ThemeOverlay_AppCompat = 2131165330; public static final int Base_ThemeOverlay_AppCompat_ActionBar = 2131165331; public static final int Base_ThemeOverlay_AppCompat_Dark = 2131165332; public static final int Base_ThemeOverlay_AppCompat_Dark_ActionBar = 2131165333; public static final int Base_ThemeOverlay_AppCompat_Light = 2131165334; public static final int Base_Theme_AppCompat = 2131165266; public static final int Base_Theme_AppCompat_CompactMenu = 2131165322; public static final int Base_Theme_AppCompat_Dialog = 2131165194; public static final int Base_Theme_AppCompat_DialogWhenLarge = 2131165186; public static final int Base_Theme_AppCompat_Dialog_Alert = 2131165323; public static final int Base_Theme_AppCompat_Dialog_FixedSize = 2131165324; public static final int Base_Theme_AppCompat_Dialog_MinWidth = 2131165325; public static final int Base_Theme_AppCompat_Light = 2131165267; public static final int Base_Theme_AppCompat_Light_DarkActionBar = 2131165326; public static final int Base_Theme_AppCompat_Light_Dialog = 2131165195; public static final int Base_Theme_AppCompat_Light_DialogWhenLarge = 2131165187; public static final int Base_Theme_AppCompat_Light_Dialog_Alert = 2131165327; public static final int Base_Theme_AppCompat_Light_Dialog_FixedSize = 2131165328; public static final int Base_Theme_AppCompat_Light_Dialog_MinWidth = 2131165329; public static final int Base_V11_Theme_AppCompat_Dialog = 2131165196; public static final int Base_V11_Theme_AppCompat_Light_Dialog = 2131165197; public static final int Base_V12_Widget_AppCompat_AutoCompleteTextView = 2131165205; public static final int 
Base_V12_Widget_AppCompat_EditText = 2131165206; public static final int Base_V21_Theme_AppCompat = 2131165268; public static final int Base_V21_Theme_AppCompat_Dialog = 2131165269; public static final int Base_V21_Theme_AppCompat_Light = 2131165270; public static final int Base_V21_Theme_AppCompat_Light_Dialog = 2131165271; public static final int Base_V7_Theme_AppCompat = 2131165335; public static final int Base_V7_Theme_AppCompat_Dialog = 2131165336; public static final int Base_V7_Theme_AppCompat_Light = 2131165337; public static final int Base_V7_Theme_AppCompat_Light_Dialog = 2131165338; public static final int Base_V7_Widget_AppCompat_AutoCompleteTextView = 2131165339; public static final int Base_V7_Widget_AppCompat_EditText = 2131165340; public static final int Base_Widget_AppCompat_ActionBar = 2131165341; public static final int Base_Widget_AppCompat_ActionBar_Solid = 2131165342; public static final int Base_Widget_AppCompat_ActionBar_TabBar = 2131165343; public static final int Base_Widget_AppCompat_ActionBar_TabText = 2131165272; public static final int Base_Widget_AppCompat_ActionBar_TabView = 2131165273; public static final int Base_Widget_AppCompat_ActionButton = 2131165274; public static final int Base_Widget_AppCompat_ActionButton_CloseMode = 2131165275; public static final int Base_Widget_AppCompat_ActionButton_Overflow = 2131165276; public static final int Base_Widget_AppCompat_ActionMode = 2131165344; public static final int Base_Widget_AppCompat_ActivityChooserView = 2131165345; public static final int Base_Widget_AppCompat_AutoCompleteTextView = 2131165207; public static final int Base_Widget_AppCompat_Button = 2131165277; public static final int Base_Widget_AppCompat_ButtonBar = 2131165281; public static final int Base_Widget_AppCompat_ButtonBar_AlertDialog = 2131165347; public static final int Base_Widget_AppCompat_Button_Borderless = 2131165278; public static final int Base_Widget_AppCompat_Button_Borderless_Colored = 2131165279; public 
static final int Base_Widget_AppCompat_Button_ButtonBar_AlertDialog = 2131165346; public static final int Base_Widget_AppCompat_Button_Small = 2131165280; public static final int Base_Widget_AppCompat_CompoundButton_CheckBox = 2131165282; public static final int Base_Widget_AppCompat_CompoundButton_RadioButton = 2131165283; public static final int Base_Widget_AppCompat_CompoundButton_Switch = 2131165348; public static final int Base_Widget_AppCompat_DrawerArrowToggle = 2131165184; public static final int Base_Widget_AppCompat_DrawerArrowToggle_Common = 2131165349; public static final int Base_Widget_AppCompat_DropDownItem_Spinner = 2131165284; public static final int Base_Widget_AppCompat_EditText = 2131165208; public static final int Base_Widget_AppCompat_Light_ActionBar = 2131165350; public static final int Base_Widget_AppCompat_Light_ActionBar_Solid = 2131165351; public static final int Base_Widget_AppCompat_Light_ActionBar_TabBar = 2131165352; public static final int Base_Widget_AppCompat_Light_ActionBar_TabText = 2131165285; public static final int Base_Widget_AppCompat_Light_ActionBar_TabText_Inverse = 2131165286; public static final int Base_Widget_AppCompat_Light_ActionBar_TabView = 2131165287; public static final int Base_Widget_AppCompat_Light_PopupMenu = 2131165288; public static final int Base_Widget_AppCompat_Light_PopupMenu_Overflow = 2131165289; public static final int Base_Widget_AppCompat_ListPopupWindow = 2131165290; public static final int Base_Widget_AppCompat_ListView = 2131165291; public static final int Base_Widget_AppCompat_ListView_DropDown = 2131165292; public static final int Base_Widget_AppCompat_ListView_Menu = 2131165293; public static final int Base_Widget_AppCompat_PopupMenu = 2131165294; public static final int Base_Widget_AppCompat_PopupMenu_Overflow = 2131165295; public static final int Base_Widget_AppCompat_PopupWindow = 2131165353; public static final int Base_Widget_AppCompat_ProgressBar = 2131165198; public static final int 
Base_Widget_AppCompat_ProgressBar_Horizontal = 2131165199; public static final int Base_Widget_AppCompat_RatingBar = 2131165296; public static final int Base_Widget_AppCompat_SearchView = 2131165354; public static final int Base_Widget_AppCompat_SearchView_ActionBar = 2131165355; public static final int Base_Widget_AppCompat_Spinner = 2131165200; public static final int Base_Widget_AppCompat_Spinner_DropDown_ActionBar = 2131165297; public static final int Base_Widget_AppCompat_Spinner_Underlined = 2131165298; public static final int Base_Widget_AppCompat_TextView_SpinnerItem = 2131165299; public static final int Base_Widget_AppCompat_Toolbar = 2131165356; public static final int Base_Widget_AppCompat_Toolbar_Button_Navigation = 2131165300; public static final int Platform_AppCompat = 2131165201; public static final int Platform_AppCompat_Light = 2131165202; public static final int Platform_ThemeOverlay_AppCompat_Dark = 2131165301; public static final int Platform_ThemeOverlay_AppCompat_Light = 2131165302; public static final int Platform_V11_AppCompat = 2131165203; public static final int Platform_V11_AppCompat_Light = 2131165204; public static final int Platform_V14_AppCompat = 2131165210; public static final int Platform_V14_AppCompat_Light = 2131165211; public static final int RtlOverlay_DialogWindowTitle_AppCompat = 2131165218; public static final int RtlOverlay_Widget_AppCompat_ActionBar_TitleItem = 2131165219; public static final int RtlOverlay_Widget_AppCompat_ActionButton_Overflow = 2131165220; public static final int RtlOverlay_Widget_AppCompat_DialogTitle_Icon = 2131165221; public static final int RtlOverlay_Widget_AppCompat_PopupMenuItem = 2131165222; public static final int RtlOverlay_Widget_AppCompat_PopupMenuItem_InternalGroup = 2131165223; public static final int RtlOverlay_Widget_AppCompat_PopupMenuItem_Text = 2131165224; public static final int RtlOverlay_Widget_AppCompat_SearchView_MagIcon = 2131165230; public static final int 
RtlOverlay_Widget_AppCompat_Search_DropDown = 2131165225; public static final int RtlOverlay_Widget_AppCompat_Search_DropDown_Icon1 = 2131165226; public static final int RtlOverlay_Widget_AppCompat_Search_DropDown_Icon2 = 2131165227; public static final int RtlOverlay_Widget_AppCompat_Search_DropDown_Query = 2131165228; public static final int RtlOverlay_Widget_AppCompat_Search_DropDown_Text = 2131165229; public static final int RtlOverlay_Widget_AppCompat_Toolbar_Button_Navigation = 2131165231; public static final int TextAppearance_AppCompat = 2131165467; public static final int TextAppearance_AppCompat_Body1 = 2131165468; public static final int TextAppearance_AppCompat_Body2 = 2131165469; public static final int TextAppearance_AppCompat_Button = 2131165470; public static final int TextAppearance_AppCompat_Caption = 2131165471; public static final int TextAppearance_AppCompat_Display1 = 2131165472; public static final int TextAppearance_AppCompat_Display2 = 2131165473; public static final int TextAppearance_AppCompat_Display3 = 2131165474; public static final int TextAppearance_AppCompat_Display4 = 2131165475; public static final int TextAppearance_AppCompat_Headline = 2131165476; public static final int TextAppearance_AppCompat_Inverse = 2131165477; public static final int TextAppearance_AppCompat_Large = 2131165478; public static final int TextAppearance_AppCompat_Large_Inverse = 2131165479; public static final int TextAppearance_AppCompat_Light_SearchResult_Subtitle = 2131165480; public static final int TextAppearance_AppCompat_Light_SearchResult_Title = 2131165481; public static final int TextAppearance_AppCompat_Light_Widget_PopupMenu_Large = 2131165482; public static final int TextAppearance_AppCompat_Light_Widget_PopupMenu_Small = 2131165483; public static final int TextAppearance_AppCompat_Medium = 2131165484; public static final int TextAppearance_AppCompat_Medium_Inverse = 2131165485; public static final int TextAppearance_AppCompat_Menu = 2131165486; 
public static final int TextAppearance_AppCompat_SearchResult_Subtitle = 2131165487; public static final int TextAppearance_AppCompat_SearchResult_Title = 2131165488; public static final int TextAppearance_AppCompat_Small = 2131165489; public static final int TextAppearance_AppCompat_Small_Inverse = 2131165490; public static final int TextAppearance_AppCompat_Subhead = 2131165491; public static final int TextAppearance_AppCompat_Subhead_Inverse = 2131165492; public static final int TextAppearance_AppCompat_Title = 2131165493; public static final int TextAppearance_AppCompat_Title_Inverse = 2131165494; public static final int TextAppearance_AppCompat_Widget_ActionBar_Menu = 2131165495; public static final int TextAppearance_AppCompat_Widget_ActionBar_Subtitle = 2131165496; public static final int TextAppearance_AppCompat_Widget_ActionBar_Subtitle_Inverse = 2131165497; public static final int TextAppearance_AppCompat_Widget_ActionBar_Title = 2131165498; public static final int TextAppearance_AppCompat_Widget_ActionBar_Title_Inverse = 2131165499; public static final int TextAppearance_AppCompat_Widget_ActionMode_Subtitle = 2131165500; public static final int TextAppearance_AppCompat_Widget_ActionMode_Subtitle_Inverse = 2131165501; public static final int TextAppearance_AppCompat_Widget_ActionMode_Title = 2131165502; public static final int TextAppearance_AppCompat_Widget_ActionMode_Title_Inverse = 2131165503; public static final int TextAppearance_AppCompat_Widget_DropDownItem = 2131165504; public static final int TextAppearance_AppCompat_Widget_PopupMenu_Large = 2131165505; public static final int TextAppearance_AppCompat_Widget_PopupMenu_Small = 2131165506; public static final int TextAppearance_AppCompat_Widget_Switch = 2131165507; public static final int TextAppearance_AppCompat_Widget_TextView_SpinnerItem = 2131165508; public static final int TextAppearance_StatusBar_EventContent = 2131165213; public static final int TextAppearance_StatusBar_EventContent_Info = 
2131165214; public static final int TextAppearance_StatusBar_EventContent_Line2 = 2131165215; public static final int TextAppearance_StatusBar_EventContent_Time = 2131165216; public static final int TextAppearance_StatusBar_EventContent_Title = 2131165217; public static final int TextAppearance_Widget_AppCompat_ExpandedMenu_Item = 2131165515; public static final int TextAppearance_Widget_AppCompat_Toolbar_Subtitle = 2131165516; public static final int TextAppearance_Widget_AppCompat_Toolbar_Title = 2131165517; public static final int ThemeOverlay_AppCompat = 2131165532; public static final int ThemeOverlay_AppCompat_ActionBar = 2131165533; public static final int ThemeOverlay_AppCompat_Dark = 2131165534; public static final int ThemeOverlay_AppCompat_Dark_ActionBar = 2131165535; public static final int ThemeOverlay_AppCompat_Light = 2131165536; public static final int Theme_AppCompat = 2131165518; public static final int Theme_AppCompat_CompactMenu = 2131165519; public static final int Theme_AppCompat_Dialog = 2131165520; public static final int Theme_AppCompat_DialogWhenLarge = 2131165523; public static final int Theme_AppCompat_Dialog_Alert = 2131165521; public static final int Theme_AppCompat_Dialog_MinWidth = 2131165522; public static final int Theme_AppCompat_Light = 2131165524; public static final int Theme_AppCompat_Light_DarkActionBar = 2131165525; public static final int Theme_AppCompat_Light_Dialog = 2131165526; public static final int Theme_AppCompat_Light_DialogWhenLarge = 2131165529; public static final int Theme_AppCompat_Light_Dialog_Alert = 2131165527; public static final int Theme_AppCompat_Light_Dialog_MinWidth = 2131165528; public static final int Theme_AppCompat_Light_NoActionBar = 2131165530; public static final int Theme_AppCompat_NoActionBar = 2131165531; public static final int Widget_AppCompat_ActionBar = 2131165545; public static final int Widget_AppCompat_ActionBar_Solid = 2131165546; public static final int 
Widget_AppCompat_ActionBar_TabBar = 2131165547; public static final int Widget_AppCompat_ActionBar_TabText = 2131165548; public static final int Widget_AppCompat_ActionBar_TabView = 2131165549; public static final int Widget_AppCompat_ActionButton = 2131165550; public static final int Widget_AppCompat_ActionButton_CloseMode = 2131165551; public static final int Widget_AppCompat_ActionButton_Overflow = 2131165552; public static final int Widget_AppCompat_ActionMode = 2131165553; public static final int Widget_AppCompat_ActivityChooserView = 2131165554; public static final int Widget_AppCompat_AutoCompleteTextView = 2131165555; public static final int Widget_AppCompat_Button = 2131165556; public static final int Widget_AppCompat_ButtonBar = 2131165561; public static final int Widget_AppCompat_ButtonBar_AlertDialog = 2131165562; public static final int Widget_AppCompat_Button_Borderless = 2131165557; public static final int Widget_AppCompat_Button_Borderless_Colored = 2131165558; public static final int Widget_AppCompat_Button_ButtonBar_AlertDialog = 2131165559; public static final int Widget_AppCompat_Button_Small = 2131165560; public static final int Widget_AppCompat_CompoundButton_CheckBox = 2131165563; public static final int Widget_AppCompat_CompoundButton_RadioButton = 2131165564; public static final int Widget_AppCompat_CompoundButton_Switch = 2131165565; public static final int Widget_AppCompat_DrawerArrowToggle = 2131165566; public static final int Widget_AppCompat_DropDownItem_Spinner = 2131165567; public static final int Widget_AppCompat_EditText = 2131165568; public static final int Widget_AppCompat_Light_ActionBar = 2131165569; public static final int Widget_AppCompat_Light_ActionBar_Solid = 2131165570; public static final int Widget_AppCompat_Light_ActionBar_Solid_Inverse = 2131165571; public static final int Widget_AppCompat_Light_ActionBar_TabBar = 2131165572; public static final int Widget_AppCompat_Light_ActionBar_TabBar_Inverse = 2131165573; public 
static final int Widget_AppCompat_Light_ActionBar_TabText = 2131165574; public static final int Widget_AppCompat_Light_ActionBar_TabText_Inverse = 2131165575; public static final int Widget_AppCompat_Light_ActionBar_TabView = 2131165576; public static final int Widget_AppCompat_Light_ActionBar_TabView_Inverse = 2131165577; public static final int Widget_AppCompat_Light_ActionButton = 2131165578; public static final int Widget_AppCompat_Light_ActionButton_CloseMode = 2131165579; public static final int Widget_AppCompat_Light_ActionButton_Overflow = 2131165580; public static final int Widget_AppCompat_Light_ActionMode_Inverse = 2131165581; public static final int Widget_AppCompat_Light_ActivityChooserView = 2131165582; public static final int Widget_AppCompat_Light_AutoCompleteTextView = 2131165583; public static final int Widget_AppCompat_Light_DropDownItem_Spinner = 2131165584; public static final int Widget_AppCompat_Light_ListPopupWindow = 2131165585; public static final int Widget_AppCompat_Light_ListView_DropDown = 2131165586; public static final int Widget_AppCompat_Light_PopupMenu = 2131165587; public static final int Widget_AppCompat_Light_PopupMenu_Overflow = 2131165588; public static final int Widget_AppCompat_Light_SearchView = 2131165589; public static final int Widget_AppCompat_Light_Spinner_DropDown_ActionBar = 2131165590; public static final int Widget_AppCompat_ListPopupWindow = 2131165591; public static final int Widget_AppCompat_ListView = 2131165592; public static final int Widget_AppCompat_ListView_DropDown = 2131165593; public static final int Widget_AppCompat_ListView_Menu = 2131165594; public static final int Widget_AppCompat_PopupMenu = 2131165595; public static final int Widget_AppCompat_PopupMenu_Overflow = 2131165596; public static final int Widget_AppCompat_PopupWindow = 2131165597; public static final int Widget_AppCompat_ProgressBar = 2131165598; public static final int Widget_AppCompat_ProgressBar_Horizontal = 2131165599; public static 
final int Widget_AppCompat_RatingBar = 2131165600; public static final int Widget_AppCompat_SearchView = 2131165601; public static final int Widget_AppCompat_SearchView_ActionBar = 2131165602; public static final int Widget_AppCompat_Spinner = 2131165603; public static final int Widget_AppCompat_Spinner_DropDown = 2131165604; public static final int Widget_AppCompat_Spinner_DropDown_ActionBar = 2131165605; public static final int Widget_AppCompat_Spinner_Underlined = 2131165606; public static final int Widget_AppCompat_TextView_SpinnerItem = 2131165607; public static final int Widget_AppCompat_Toolbar = 2131165608; public static final int Widget_AppCompat_Toolbar_Button_Navigation = 2131165609; } /* Location: E:\Progs\Dev\Android\Decompile\apktool\zssq\zssq-dex2jar.jar * Qualified Name: com.jeremyfeinstein.slidingmenu.lib.R.style * JD-Core Version: 0.6.0 */
/*
 * Copyright 2014-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.rules;

import com.facebook.buck.io.ArchiveMemberPath;
import com.facebook.buck.io.MorePaths;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.HasOutputName;
import com.facebook.buck.util.HumanReadableException;
import com.facebook.buck.util.MoreCollectors;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.base.Preconditions;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Maps;

import java.nio.file.Path;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;
import java.util.Optional;

/**
 * Resolves {@link SourcePath}s to the concrete {@link Path}s they refer to, using a
 * {@link SourcePathRuleFinder} to look up the {@link BuildRule} behind any
 * {@link BuildTargetSourcePath}.
 */
public class SourcePathResolver {

  private final SourcePathRuleFinder ruleFinder;

  public SourcePathResolver(SourcePathRuleFinder ruleFinder) {
    this.ruleFinder = ruleFinder;
  }

  /** Resolves every value of {@code sourcePathMap} to an absolute {@link Path}. */
  public <T> ImmutableMap<T, Path> getMappedPaths(Map<T, SourcePath> sourcePathMap) {
    ImmutableMap.Builder<T, Path> builder = ImmutableMap.builder();
    for (Map.Entry<T, SourcePath> entry : sourcePathMap.entrySet()) {
      builder.put(entry.getKey(), getAbsolutePath(entry.getValue()));
    }
    return builder.build();
  }

  /**
   * @return the {@link ProjectFilesystem} associated with {@code sourcePath}.
   */
  public ProjectFilesystem getFilesystem(SourcePath sourcePath) {
    if (sourcePath instanceof PathSourcePath) {
      return ((PathSourcePath) sourcePath).getFilesystem();
    }
    if (sourcePath instanceof BuildTargetSourcePath) {
      BuildRule owner = ruleFinder.getRuleOrThrow((BuildTargetSourcePath) sourcePath);
      return owner.getProjectFilesystem();
    }
    throw new IllegalStateException();
  }

  /**
   * @return the {@link Path} for this {@code sourcePath}, resolved using its associated
   *     {@link com.facebook.buck.io.ProjectFilesystem}.
   */
  public Path getAbsolutePath(SourcePath sourcePath) {
    if (sourcePath instanceof ResourceSourcePath) {
      return ((ResourceSourcePath) sourcePath).getAbsolutePath();
    }

    Path candidate = getPathPrivateImpl(sourcePath);
    if (candidate.isAbsolute()) {
      return candidate;
    }

    // Prefer the filesystem of the owning rule; fall back to the PathSourcePath's own.
    Optional<BuildRule> owningRule = ruleFinder.getRule(sourcePath);
    if (owningRule.isPresent()) {
      return owningRule.get().getProjectFilesystem().resolve(candidate);
    }
    return ((PathSourcePath) sourcePath).getFilesystem().resolve(candidate);
  }

  /** Resolves an {@link ArchiveMemberSourcePath} to an absolute archive member path. */
  public ArchiveMemberPath getAbsoluteArchiveMemberPath(SourcePath sourcePath) {
    Preconditions.checkState(sourcePath instanceof ArchiveMemberSourcePath);
    ArchiveMemberSourcePath memberSourcePath = (ArchiveMemberSourcePath) sourcePath;

    Path archivePath = getAbsolutePath(memberSourcePath.getArchiveSourcePath());
    return ArchiveMemberPath.of(archivePath, memberSourcePath.getMemberPath());
  }

  /** Resolves an {@link ArchiveMemberSourcePath} to a filesystem-relative archive member path. */
  public ArchiveMemberPath getRelativeArchiveMemberPath(SourcePath sourcePath) {
    Preconditions.checkState(sourcePath instanceof ArchiveMemberSourcePath);
    ArchiveMemberSourcePath memberSourcePath = (ArchiveMemberSourcePath) sourcePath;

    Path archivePath = getRelativePath(memberSourcePath.getArchiveSourcePath());
    return ArchiveMemberPath.of(archivePath, memberSourcePath.getMemberPath());
  }

  /** Resolves each input to an absolute path and returns them as a sorted set. */
  public ImmutableSortedSet<Path> getAllAbsolutePaths(
      Collection<? extends SourcePath> sourcePaths) {
    return sourcePaths.stream()
        .map(this::getAbsolutePath)
        .collect(MoreCollectors.toImmutableSortedSet());
  }

  /**
   * @return The {@link Path} the {@code sourcePath} refers to, relative to its owning
   *     {@link com.facebook.buck.io.ProjectFilesystem}.
   */
  public Path getRelativePath(SourcePath sourcePath) {
    Preconditions.checkState(!(sourcePath instanceof ResourceSourcePath));

    Path relativePath = getPathPrivateImpl(sourcePath);
    Preconditions.checkState(
        !relativePath.isAbsolute(),
        "Expected path to be relative, not absolute: %s (from %s)",
        relativePath,
        sourcePath);
    return relativePath;
  }

  /**
   * @return the {@link SourcePath} as a {@link Path}, with no guarantee whether the return value is
   *     absolute or relative. This should never be exposed to users.
   */
  private Path getPathPrivateImpl(SourcePath sourcePath) {
    if (sourcePath instanceof PathSourcePath) {
      return ((PathSourcePath) sourcePath).getRelativePath();
    }

    Preconditions.checkArgument(sourcePath instanceof BuildTargetSourcePath);
    BuildTargetSourcePath targetSourcePath = (BuildTargetSourcePath) sourcePath;

    // An explicitly resolved path wins; otherwise fall back to the rule's declared output.
    Optional<Path> resolvedPath = targetSourcePath.getResolvedPath();
    Path output =
        resolvedPath.isPresent()
            ? resolvedPath.get()
            : ruleFinder.getRuleOrThrow(targetSourcePath).getPathToOutput();

    if (output == null) {
      throw new HumanReadableException(
          "No known output for: %s",
          targetSourcePath.getTarget());
    }
    return output;
  }

  /**
   * Resolved the logical names for a group of SourcePath objects into a map, throwing an
   * error on duplicates.
   */
  public ImmutableMap<String, SourcePath> getSourcePathNames(
      BuildTarget target,
      String parameter,
      Iterable<SourcePath> sourcePaths) {
    return getSourcePathNames(target, parameter, sourcePaths, Functions.identity());
  }

  /**
   * Resolves the logical names for a group of objects that have a SourcePath into a map,
   * throwing an error on duplicates.
   */
  public <T> ImmutableMap<String, T> getSourcePathNames(
      BuildTarget target,
      String parameter,
      Iterable<T> objects,
      Function<T, SourcePath> objectSourcePathFunction) {
    Map<String, T> namesToObjects = Maps.newLinkedHashMap();
    for (T object : objects) {
      SourcePath path = objectSourcePathFunction.apply(object);
      String name = getSourcePathName(target, path);
      T previous = namesToObjects.put(name, object);
      if (previous != null) {
        throw new HumanReadableException(String.format(
            "%s: parameter '%s': duplicate entries for '%s'",
            target,
            parameter,
            name));
      }
    }
    return ImmutableMap.copyOf(namesToObjects);
  }

  /** Derives the logical name of {@code sourcePath} relative to {@code target}. */
  public String getSourcePathName(BuildTarget target, SourcePath sourcePath) {
    Preconditions.checkArgument(!(sourcePath instanceof ArchiveMemberSourcePath));
    if (sourcePath instanceof BuildTargetSourcePath) {
      return getNameForBuildTargetSourcePath((BuildTargetSourcePath) sourcePath);
    }
    Preconditions.checkArgument(sourcePath instanceof PathSourcePath);
    Path relativePath = ((PathSourcePath) sourcePath).getRelativePath();
    return MorePaths.relativize(target.getBasePath(), relativePath).toString();
  }

  private String getNameForBuildTargetSourcePath(BuildTargetSourcePath sourcePath) {
    BuildRule rule = ruleFinder.getRuleOrThrow(sourcePath);

    // If this build rule implements `HasOutputName`, then return the output name
    // it provides.
    if (rule instanceof HasOutputName) {
      return ((HasOutputName) rule).getOutputName();
    }

    // If an explicit path is set, use its relative path to the build rule's output location to
    // infer a unique name.
    Optional<Path> explicitPath = sourcePath.getResolvedPath();
    if (explicitPath.isPresent()) {
      Path outputPath = explicitPath.get();
      Path genDir = rule.getProjectFilesystem().getBuckPaths().getGenDir();
      if (outputPath.startsWith(genDir)) {
        outputPath = genDir.relativize(outputPath);
      }
      Path basePath = rule.getBuildTarget().getBasePath();
      if (outputPath.startsWith(basePath)) {
        return basePath.relativize(outputPath).toString();
      }
    }

    // Otherwise, fall back to using the short name of rule's build target.
    return rule.getBuildTarget().getShortName();
  }

  /**
   * Takes an {@link Iterable} of {@link SourcePath} objects and filters those that represent
   * {@link Path}s.
   */
  public ImmutableCollection<Path> filterInputsToCompareToOutput(
      Iterable<? extends SourcePath> sources) {
    // Currently, the only implementation of SourcePath that should be included in the Iterable
    // returned by getInputsToCompareToOutput() is FileSourcePath, so it is safe to filter by that
    // and then use .asReference() to get its path.
    //
    // BuildTargetSourcePath should not be included in the output because it refers to a generated
    // file, and generated files are not hashed as part of a RuleKey.
    return FluentIterable.from(sources)
        .filter(PathSourcePath.class)
        .transform(PathSourcePath::getRelativePath)
        .toList();
  }

  public ImmutableCollection<Path> filterInputsToCompareToOutput(SourcePath... sources) {
    return filterInputsToCompareToOutput(Arrays.asList(sources));
  }
}
/* * Copyright 2014-2015 See AUTHORS file. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.kotcrab.vis.editor.module.project; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.files.FileHandle; import com.badlogic.gdx.graphics.g2d.Sprite; import com.badlogic.gdx.graphics.g2d.TextureAtlas; import com.badlogic.gdx.graphics.g2d.TextureRegion; import com.badlogic.gdx.tools.texturepacker.TexturePacker; import com.badlogic.gdx.tools.texturepacker.TexturePacker.Settings; import com.badlogic.gdx.utils.ObjectMap; import com.badlogic.gdx.utils.ObjectMap.Entry; import com.badlogic.gdx.utils.Timer; import com.badlogic.gdx.utils.Timer.Task; import com.kotcrab.vis.editor.App; import com.kotcrab.vis.editor.Assets; import com.kotcrab.vis.editor.Log; import com.kotcrab.vis.editor.event.ResourceReloadedEvent; import com.kotcrab.vis.editor.module.editor.StatusBarModule; import com.kotcrab.vis.editor.util.DirectoryWatcher.WatchListener; import com.kotcrab.vis.editor.util.FileUtils; import com.kotcrab.vis.editor.util.vis.ProjectPathUtils; import com.kotcrab.vis.runtime.assets.AtlasRegionAsset; import com.kotcrab.vis.runtime.assets.TextureRegionAsset; import com.kotcrab.vis.runtime.assets.VisAssetDescriptor; import com.kotcrab.vis.runtime.util.UnsupportedAssetDescriptorException; import org.apache.commons.io.FilenameUtils; /** * Allows to get loaded textures from project 'gfx' assets directory and allows to get loaded atlases from project 'atlas' asset directory. 
* Live reloading is fully supported, however it requires listening for {@link ResourceReloadedEvent} and manually updating * textures. * @author Kotcrab */ public class TextureCacheModule extends ProjectModule implements WatchListener { private StatusBarModule statusBar; private FileAccessModule fileAccess; private AssetsWatcherModule watcher; private String gfxPath; private String cachePath; private Settings settings; private TextureRegion loadingRegion; private TextureRegion missingRegion; private ObjectMap<String, TextureRegion> regions = new ObjectMap<>(); private FileHandle cacheFile; private FileHandle atlasesFolder; private TextureAtlas cache; private ObjectMap<String, TextureAtlas> atlases = new ObjectMap<>(); private Timer cacheWaitTimer = new Timer(); private Timer atlasWaitTimer = new Timer(); private boolean packagingEnabled = true; @Override public void init () { settings = new Settings(); settings.maxWidth = 4096; settings.maxHeight = 4096; settings.combineSubdirectories = true; settings.silent = true; settings.useIndexes = false; settings.fast = true; loadingRegion = Assets.icons.findRegion("refresh-big"); missingRegion = Assets.icons.findRegion("file-question-big"); FileHandle out = fileAccess.getModuleFolder(".textureCache"); cachePath = out.path(); cacheFile = out.child("cache.atlas"); gfxPath = fileAccess.getAssetsFolder().child("gfx").path(); atlasesFolder = fileAccess.getAssetsFolder().child("atlas"); watcher.addListener(this); try { if (cacheFile.exists()) cache = new TextureAtlas(cacheFile); } catch (Exception e) { Log.error("Error while loading texture cache, texture cache will be regenerated"); } try { if (atlasesFolder.exists()) { FileHandle[] files = atlasesFolder.list(); for (FileHandle file : files) if (file.extension().equals("atlas")) updateAtlas(file); } } catch (Exception e) { Log.error("Error encountered while loading one of atlases"); Log.exception(e); } updateCache(); } private void updateCache () { new 
Thread(this::packageAndReloadCache, "TextureCache").start(); } private void packageAndReloadCache () { if (packagingEnabled) TexturePacker.process(settings, gfxPath, cachePath, "cache"); Gdx.app.postRunnable(this::reloadCache); } private void reloadCache () { if (cacheFile.exists()) { TextureAtlas oldCache = null; if (cache != null) oldCache = cache; cache = new TextureAtlas(cacheFile); for (Entry<String, TextureRegion> e : regions.entries()) { String path = FileUtils.removeFirstSeparator(FilenameUtils.removeExtension(e.key)); TextureRegion region = e.value; TextureRegion newRegion = cache.findRegion(path); if (newRegion == null) region.setRegion(missingRegion); else region.setRegion(newRegion); } disposeCacheLater(oldCache); App.eventBus.post(new ResourceReloadedEvent(ResourceReloadedEvent.RESOURCE_TEXTURES)); } else Log.error("Texture cache not ready, probably they aren't any textures in project or packer failed"); } private void disposeCacheLater (final TextureAtlas oldCache) { Timer.instance().scheduleTask(new Task() { @Override public void run () { if (oldCache != null) oldCache.dispose(); } }, 0.5f); } private void updateAtlas (FileHandle file) { String relativePath = fileAccess.relativizeToAssetsFolder(file); TextureAtlas atlas = atlases.get(relativePath); if (atlas != null) { atlases.remove(relativePath); atlas.dispose(); } if (file.exists()) { atlases.put(relativePath, new TextureAtlas(file)); App.eventBus.post(new ResourceReloadedEvent(ResourceReloadedEvent.RESOURCE_TEXTURES)); App.eventBus.post(new ResourceReloadedEvent(ResourceReloadedEvent.RESOURCE_TEXTURE_ATLASES)); } } @Override public void dispose () { if (cache != null) cache.dispose(); for (TextureAtlas atlas : atlases.values()) atlas.dispose(); watcher.removeListener(this); } @Override public void fileChanged (FileHandle file) { String relativePath = fileAccess.relativizeToAssetsFolder(file); if (ProjectPathUtils.isTexture(relativePath, file.extension())) { cacheWaitTimer.clear(); 
cacheWaitTimer.scheduleTask(new Task() { @Override public void run () { updateCache(); } }, 0.5f); } if (ProjectPathUtils.isTextureAtlas(file, relativePath)) { atlasWaitTimer.clear(); cacheWaitTimer.scheduleTask(new Task() { @Override public void run () { updateAtlas(file); } }, 0.5f); } } public void setPackagingEnabled (boolean packagingEnabled) { this.packagingEnabled = packagingEnabled; } public TextureRegion getRegion (VisAssetDescriptor descriptor) { if (descriptor instanceof TextureRegionAsset) return getCachedGfxRegion((TextureRegionAsset) descriptor); if (descriptor instanceof AtlasRegionAsset) return getAtlasRegion((AtlasRegionAsset) descriptor); throw new UnsupportedAssetDescriptorException(descriptor); } public Sprite getSprite (VisAssetDescriptor descriptor, float pixelsPerUnit) { Sprite sprite = new Sprite(getRegion(descriptor)); sprite.setSize(sprite.getWidth() / pixelsPerUnit, sprite.getHeight() / pixelsPerUnit); sprite.setOrigin(sprite.getOriginX() / pixelsPerUnit, sprite.getOriginY() / pixelsPerUnit); return sprite; } private TextureRegion getCachedGfxRegion (TextureRegionAsset asset) { String relativePath = asset.getPath(); String regionName = FileUtils.removeFirstSeparator(FilenameUtils.removeExtension(relativePath)); TextureRegion region = regions.get(regionName); if (region == null) { if (cache != null) region = cache.findRegion(regionName); if (region == null) region = new TextureRegion(loadingRegion); regions.put(relativePath, region); } return region; } private TextureRegion getAtlasRegion (AtlasRegionAsset asset) { String relativePath = asset.getPath(); TextureAtlas atlas = atlases.get(relativePath); if (atlas == null) return missingRegion; if (asset.getRegionName() == null) { return new TextureRegion(atlas.getTextures().first()); } else { TextureRegion region = atlas.findRegion(asset.getRegionName()); if (region == null) return missingRegion; return region; } } public TextureAtlas getAtlas (String relativePath) { return 
atlases.get(relativePath); } }
package jp.blanktar.ruumusic.client.main; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.annotation.IntRange; import androidx.annotation.UiThread; import android.app.AlertDialog; import android.app.SearchManager; import android.content.DialogInterface; import android.content.Intent; import android.media.AudioManager; import android.os.Bundle; import android.provider.MediaStore; import androidx.fragment.app.Fragment; import androidx.fragment.app.FragmentPagerAdapter; import androidx.core.view.MenuItemCompat; import androidx.viewpager.widget.ViewPager; import androidx.appcompat.widget.SearchView; import androidx.appcompat.widget.Toolbar; import android.view.KeyEvent; import android.view.Menu; import android.view.MenuItem; import android.widget.Toast; import jp.blanktar.ruumusic.R; import jp.blanktar.ruumusic.client.preference.PreferenceActivity; import jp.blanktar.ruumusic.util.DynamicShortcuts; import jp.blanktar.ruumusic.util.PermissionManager; import jp.blanktar.ruumusic.util.Preference; import jp.blanktar.ruumusic.util.RuuClient; import jp.blanktar.ruumusic.util.RuuDirectory; import jp.blanktar.ruumusic.util.RuuFile; import jp.blanktar.ruumusic.util.RuuFileBase; @UiThread public class MainActivity extends PermissionManager.Activity { public final static String ACTION_OPEN_PLAYER = "jp.blanktar.ruumusic.OPEN_PLAYER"; public final static String ACTION_OPEN_PLAYLIST = "jp.blanktar.ruumusic.OPEN_PLAYLIST"; public final static String ACTION_START_PLAY = "jp.blanktar.ruumusic.START_PLAY_WITH_ACTIVITY"; private Preference preference; private RuuClient client; private ViewPager viewPager; private PlayerFragment player; private PlaylistFragment playlist; Menu menu; SearchView searchView; @Override protected void onCreate(@Nullable Bundle savedInstanceState){ super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); Toolbar toolbar = (Toolbar)findViewById(R.id.toolbar); setSupportActionBar(toolbar); preference = 
new Preference(getApplicationContext()); client = new RuuClient(getApplicationContext()); preference.RootDirectory.setOnChangeListener(new Preference.OnChangeListener(){ @Override public void onChange(){ if(player != null){ player.updateRoot(); } if(playlist != null){ playlist.updateRoot(); } } }); setVolumeControlStream(AudioManager.STREAM_MUSIC); viewPager = (ViewPager)findViewById(R.id.viewPager); viewPager.setAdapter(new FragmentPagerAdapter(getSupportFragmentManager()){ @Override @NonNull public Fragment getItem(@IntRange(from=0, to=1) int position){ if(position == 0){ if(player == null){ player = new PlayerFragment(); } return player; }else{ if(playlist == null){ playlist = new PlaylistFragment(); playlist.permissionManager = getPermissionManager(); } return playlist; } } @Override public int getCount(){ return 2; } }); viewPager.addOnPageChangeListener(new ViewPager.SimpleOnPageChangeListener(){ @Override public void onPageSelected(int position){ updateTitleAndMenu(); } }); if(getPermissionManager().getHasPermission()){ checkRootDirectory(); }else{ getPermissionManager().setOnResultListener(new PermissionManager.OnResultListener(){ @Override public void onGranted(){ checkRootDirectory(); if(playlist != null){ playlist.onPermissionGranted(); } } @Override public void onDenied(){ (new AlertDialog.Builder(MainActivity.this)) .setTitle(getString(R.string.permission_denied_title)) .setMessage(getString(R.string.permission_denied_message)) .setNegativeButton( getString(R.string.permission_denied_close), new DialogInterface.OnClickListener(){ @Override public void onClick(DialogInterface dialog, int which){ finish(); } } ) .setPositiveButton( getString(R.string.permission_denied_ok), new DialogInterface.OnClickListener(){ @Override public void onClick(DialogInterface dialog, int which){ getPermissionManager().request(); } } ) .create().show(); } }); getPermissionManager().request(); } onNewIntent(getIntent()); } private void checkRootDirectory(){ try{ 
RuuDirectory.rootDirectory(getApplicationContext()); }catch(RuuFileBase.NotFound e){ preference.RootDirectory.remove(); } try{ RuuDirectory.getInstance(getApplicationContext(), "/"); }catch(RuuFileBase.NotFound e){ (new AlertDialog.Builder(this)) .setTitle(getString(R.string.empty_device_title)) .setMessage(getString(R.string.empty_device_message)) .setPositiveButton( getString(R.string.empty_device_button), new DialogInterface.OnClickListener(){ @Override public void onClick(DialogInterface dialog, int which){ finish(); } } ) .create().show(); } } @Override public void onNewIntent(@NonNull Intent intent) { super.onNewIntent(intent); new DynamicShortcuts(getApplicationContext()).reportShortcutUsed(intent.getStringExtra(DynamicShortcuts.EXTRA_SHORTCUT_ID)); switch(intent.getAction()){ case ACTION_OPEN_PLAYER: moveToPlayer(); break; case ACTION_OPEN_PLAYLIST: if(intent.getData() != null){ String path = intent.getData().getPath(); try{ RuuDirectory dir = RuuDirectory.getInstance(getApplicationContext(), path); dir.getRuuPath(); if(playlist != null){ moveToPlaylist(dir); }else{ preference.CurrentViewPath.set(dir); moveToPlaylist(); } return; }catch(RuuFileBase.NotFound e){ Toast.makeText(getApplicationContext(), getString(R.string.cant_open_dir), Toast.LENGTH_LONG).show(); viewPager.setCurrentItem(preference.LastViewPage.get()); }catch(RuuFileBase.OutOfRootDirectory e){ Toast.makeText(getApplicationContext(), getString(R.string.out_of_root, path), Toast.LENGTH_LONG).show(); viewPager.setCurrentItem(preference.LastViewPage.get()); } } moveToPlaylist(); break; case Intent.ACTION_SEARCH: moveToPlaylist(); preference.CurrentViewPath.set(preference.RootDirectory.get()); preference.LastSearchQuery.set(intent.getStringExtra(SearchManager.QUERY)); break; case ACTION_START_PLAY: case Intent.ACTION_VIEW: if(intent.getData() != null){ try{ RuuFile file = RuuFile.getInstance(getApplicationContext(), intent.getData()); try{ file.getRuuPath(); }catch(RuuFileBase.OutOfRootDirectory 
e){ Toast.makeText(getApplicationContext(), getString(R.string.out_of_root, file.getRealPath()), Toast.LENGTH_LONG).show(); viewPager.setCurrentItem(preference.LastViewPage.get()); return; } client.play(file); moveToPlayer(); }catch(RuuFileBase.NotFound e){ Toast.makeText(getApplicationContext(), getString(R.string.music_not_found), Toast.LENGTH_LONG).show(); viewPager.setCurrentItem(preference.LastViewPage.get()); } break; } case MediaStore.INTENT_ACTION_MEDIA_PLAY_FROM_SEARCH: moveToPlayer(); String query = intent.getStringExtra(SearchManager.QUERY); if(query == null || "".equals(query)){ client.play(); }else{ try{ client.playSearch(RuuDirectory.rootDirectory(getApplicationContext()), query); }catch(RuuFileBase.NotFound e){ Toast.makeText(getApplicationContext(), getString(R.string.cant_open_dir, "/"), Toast.LENGTH_LONG).show(); } } break; default: viewPager.setCurrentItem(preference.LastViewPage.get()); } } @Override public void onDestroy(){ client.release(); super.onDestroy(); } @Override public void onSaveInstanceState(@NonNull Bundle state){ super.onSaveInstanceState(state); if(player != null){ getSupportFragmentManager().putFragment(state, "player_fragment", player); } if(playlist != null){ getSupportFragmentManager().putFragment(state, "playlist_fragment", playlist); } } @Override public void onRestoreInstanceState(@NonNull Bundle state){ super.onRestoreInstanceState(state); player = (PlayerFragment)getSupportFragmentManager().getFragment(state, "player_fragment"); playlist = (PlaylistFragment)getSupportFragmentManager().getFragment(state, "playlist_fragment"); if(playlist != null){ playlist.permissionManager = getPermissionManager(); playlist.resumeDirectory(); } updateTitleAndMenu(); } @Override public void onPause(){ super.onPause(); preference.LastViewPage.set(getCurrentPage().ordinal()); } @Override public void onWindowFocusChanged(boolean hasFocus){ playlist.updateStatus(); } @Override public boolean onCreateOptionsMenu(@NonNull Menu menu){ 
getMenuInflater().inflate(R.menu.menu_main, menu); this.menu = menu; updateTitleAndMenu(); MenuItem searchMenu = menu.findItem(R.id.menu_search); assert searchMenu != null; searchView = (SearchView)MenuItemCompat.getActionView(searchMenu); assert searchView != null; searchView.setOnQueryTextListener(playlist); searchView.setOnCloseListener(playlist); String query = preference.LastSearchQuery.get(); if(query != null){ searchView.setIconified(false); searchView.setQuery(query, true); menu.findItem(R.id.menu_search).setVisible(viewPager.getCurrentItem() == 1); } return super.onCreateOptionsMenu(menu); } @Override public boolean onOptionsItemSelected(@NonNull MenuItem item){ int id = item.getItemId(); if(id == R.id.action_recursive_play && playlist.current != null){ client.playRecursive(playlist.current.path); moveToPlayer(); return true; } if(id == R.id.action_search_play && playlist.current != null){ client.playSearch(playlist.current.path, playlist.searchQuery); moveToPlayer(); return true; } if(id == R.id.action_preference){ startActivity(new Intent(getApplicationContext(), PreferenceActivity.class).addFlags(Intent.FLAG_ACTIVITY_LAUNCH_ADJACENT).addFlags(Intent.FLAG_ACTIVITY_NEW_DOCUMENT)); return true; } return super.onOptionsItemSelected(item); } private void updateTitleAndMenu(){ Toolbar toolbar = (Toolbar)findViewById(R.id.toolbar); if(getCurrentPage() == Page.PLAYER){ toolbar.setTitle(R.string.app_name); toolbar.setSubtitle(""); if(menu != null){ menu.findItem(R.id.action_recursive_play).setVisible(false); menu.findItem(R.id.menu_search).setVisible(false); menu.findItem(R.id.action_search_play).setVisible(false); } }else if(playlist != null){ playlist.updateTitle(toolbar); playlist.updateMenu(this); } } public void moveToPlayer(){ viewPager.setCurrentItem(0); } private void moveToPlaylist(){ viewPager.setCurrentItem(1); } public void moveToPlaylist(@NonNull RuuDirectory path){ playlist.changeDir(path); moveToPlaylist(); } public void 
moveToPlaylistSearch(@NonNull RuuDirectory path, @NonNull String query){ playlist.setSearchQuery(path, query); moveToPlaylist(); } public Page getCurrentPage(){ return Page.values()[viewPager.getCurrentItem()]; } @Override public void onBackPressed(){ } @Override public boolean onKeyLongPress(int keyCode, @Nullable KeyEvent event){ if(keyCode == KeyEvent.KEYCODE_BACK){ finish(); return true; } return super.onKeyLongPress(keyCode, event); } @Override public boolean onKeyUp(int keyCode, @NonNull KeyEvent event){ if(keyCode == KeyEvent.KEYCODE_SEARCH){ moveToPlaylist(); searchView.setIconified(false); return true; }else if(keyCode == KeyEvent.KEYCODE_BACK && (event.isCanceled() || getCurrentPage() != Page.PLAYLIST || !playlist.onBackKey())){ super.onBackPressed(); return true; } return super.onKeyUp(keyCode, event); } enum Page{ PLAYER, PLAYLIST } }
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.regionserver; import static org.apache.hadoop.hbase.regionserver.ScanQueryMatcher.MatchCode.INCLUDE; import static org.apache.hadoop.hbase.regionserver.ScanQueryMatcher.MatchCode.SKIP; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.NavigableSet; import org.apache.hadoop.hbase.HBaseTestCase; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.regionserver.ScanQueryMatcher.MatchCode; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.junit.experimental.categories.Category; @Category(SmallTests.class) public class TestQueryMatcher extends HBaseTestCase { private static final boolean PRINT = false; private byte[] row1; private byte[] row2; private byte[] row3; private byte[] fam1; private byte[] fam2; private byte[] col1; private byte[] col2; private byte[] col3; private 
// Fixture fields. The matching row1..row3 / fam1..fam2 / col1..col3 fields are
// declared above this window; setUp() below assigns all of them.
byte[] col4;
private byte[] col5;
private byte[] data; // common cell value shared by every KeyValue built in these tests
private Get get; // selects all of fam1 plus the explicit columns fam2:{col2,col4,col5}
long ttl = Long.MAX_VALUE; // "never expires" default; the TTL tests use their own value
KVComparator rowComparator;
private Scan scan; // Scan derived from 'get'; input to each ScanQueryMatcher under test

/**
 * Initialises the shared fixtures: byte[] names for rows/families/columns,
 * a Get that explicitly requests fam2:{col2,col4,col5} (and all of fam1),
 * and the Scan built from that Get.
 */
public void setUp() throws Exception {
  super.setUp();
  row1 = Bytes.toBytes("row1");
  row2 = Bytes.toBytes("row2");
  row3 = Bytes.toBytes("row3");
  fam1 = Bytes.toBytes("fam1");
  fam2 = Bytes.toBytes("fam2");
  col1 = Bytes.toBytes("col1");
  col2 = Bytes.toBytes("col2");
  col3 = Bytes.toBytes("col3");
  col4 = Bytes.toBytes("col4");
  col5 = Bytes.toBytes("col5");
  data = Bytes.toBytes("data");
  //Create Get requesting all of fam1 and only cols 2/4/5 of fam2
  get = new Get(row1);
  get.addFamily(fam1);
  get.addColumn(fam2, col2);
  get.addColumn(fam2, col4);
  get.addColumn(fam2, col5);
  this.scan = new Scan(get);
  rowComparator = KeyValue.COMPARATOR;
}

/**
 * Drives a fixed memstore (row1 fam2 col1..col5, then one cell of row2)
 * through a ScanQueryMatcher built for the given scan and asserts that the
 * per-KeyValue match codes equal {@code expected}.
 */
private void _testMatch_ExplicitColumns(Scan scan, List<MatchCode> expected) throws IOException {
  // 2,4,5 — the explicitly requested qualifiers of fam2 (see setUp)
  ScanQueryMatcher qm = new ScanQueryMatcher(scan,
      new ScanInfo(fam2, 0, 1, ttl, false, 0, rowComparator),
      get.getFamilyMap().get(fam2),
      EnvironmentEdgeManager.currentTimeMillis() - ttl);
  List<KeyValue> memstore = new ArrayList<KeyValue>();
  memstore.add(new KeyValue(row1, fam2, col1, 1, data));
  memstore.add(new KeyValue(row1, fam2, col2, 1, data));
  memstore.add(new KeyValue(row1, fam2, col3, 1, data));
  memstore.add(new KeyValue(row1, fam2, col4, 1, data));
  memstore.add(new KeyValue(row1, fam2, col5, 1, data));
  // a cell on the NEXT row; the expected lists end with DONE for this one
  memstore.add(new KeyValue(row2, fam1, col1, data));
  List<ScanQueryMatcher.MatchCode> actual = new ArrayList<ScanQueryMatcher.MatchCode>();
  KeyValue k = memstore.get(0);
  qm.setRow(k.getRowArray(), k.getRowOffset(), k.getRowLength());
  for (KeyValue kv : memstore){
    actual.add(qm.match(kv));
  }
  assertEquals(expected.size(), actual.size());
  for(int i=0; i< expected.size(); i++){
    assertEquals(expected.get(i), actual.get(i));
    if(PRINT){
      System.out.println("expected "+expected.get(i)+ ", actual " +actual.get(i));
    }
  }
}

/**
 * Explicit-column matching: requested columns are included, non-requested
 * ones produce SEEK_NEXT_COL.
 */
public void testMatch_ExplicitColumns() throws IOException {
  //Moving up from the Tracker by using Gets and List<KeyValue> instead
  //of just byte []
  //Expected result
  List<MatchCode> expected = new ArrayList<ScanQueryMatcher.MatchCode>();
  expected.add(ScanQueryMatcher.MatchCode.SEEK_NEXT_COL);
  expected.add(ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_COL);
  expected.add(ScanQueryMatcher.MatchCode.SEEK_NEXT_COL);
  expected.add(ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_COL);
  expected.add(ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_ROW);
  expected.add(ScanQueryMatcher.MatchCode.DONE);
  _testMatch_ExplicitColumns(scan, expected);
}

/**
 * Same memstore as above but with Scan.HINT_LOOKAHEAD set: the
 * non-include SEEK_NEXT_COL codes become plain SKIPs.
 */
public void testMatch_ExplicitColumnsWithLookAhead() throws IOException {
  //Moving up from the Tracker by using Gets and List<KeyValue> instead
  //of just byte []
  //Expected result
  List<MatchCode> expected = new ArrayList<ScanQueryMatcher.MatchCode>();
  expected.add(ScanQueryMatcher.MatchCode.SKIP);
  expected.add(ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_COL);
  expected.add(ScanQueryMatcher.MatchCode.SKIP);
  expected.add(ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_COL);
  expected.add(ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_ROW);
  expected.add(ScanQueryMatcher.MatchCode.DONE);
  Scan s = new Scan(scan);
  s.setAttribute(Scan.HINT_LOOKAHEAD, Bytes.toBytes(2));
  _testMatch_ExplicitColumns(s, expected);
}

/**
 * Wildcard matching (null column set passed to the matcher): every cell of
 * the row is INCLUDEd, then DONE on the next row.
 */
public void testMatch_Wildcard() throws IOException {
  //Moving up from the Tracker by using Gets and List<KeyValue> instead
  //of just byte []
  //Expected result
  List<MatchCode> expected = new ArrayList<ScanQueryMatcher.MatchCode>();
  expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
  expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
  expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
  expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
  expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
  expected.add(ScanQueryMatcher.MatchCode.DONE);
  // null column set => wildcard match over the family
  ScanQueryMatcher qm = new ScanQueryMatcher(scan,
      new ScanInfo(fam2, 0, 1, ttl, false, 0, rowComparator), null,
      EnvironmentEdgeManager.currentTimeMillis() - ttl);
  List<KeyValue> memstore = new ArrayList<KeyValue>();
  memstore.add(new KeyValue(row1, fam2, col1, 1, data));
  memstore.add(new KeyValue(row1, fam2, col2, 1, data));
  memstore.add(new KeyValue(row1, fam2, col3, 1, data));
  memstore.add(new KeyValue(row1, fam2, col4, 1, data));
  memstore.add(new KeyValue(row1, fam2, col5, 1, data));
  memstore.add(new KeyValue(row2, fam1, col1, 1, data));
  List<ScanQueryMatcher.MatchCode> actual = new ArrayList<ScanQueryMatcher.MatchCode>();
  KeyValue k = memstore.get(0);
  qm.setRow(k.getRowArray(), k.getRowOffset(), k.getRowLength());
  for(KeyValue kv : memstore) {
    actual.add(qm.match(kv));
  }
  assertEquals(expected.size(), actual.size());
  for(int i=0; i< expected.size(); i++){
    assertEquals(expected.get(i), actual.get(i));
    if(PRINT){
      System.out.println("expected "+expected.get(i)+ ", actual " +actual.get(i));
    }
  }
}

/**
 * Verify that {@link ScanQueryMatcher} only skips expired KeyValue
 * instances and does not exit early from the row (skipping
 * later non-expired KeyValues). This version mimics a Get with
 * explicitly specified column qualifiers.
 *
 * @throws IOException
 */
public void testMatch_ExpiredExplicit() throws IOException {
  long testTTL = 1000;
  MatchCode [] expected = new MatchCode[] {
      ScanQueryMatcher.MatchCode.SEEK_NEXT_COL,
      ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_COL,
      ScanQueryMatcher.MatchCode.SEEK_NEXT_COL,
      ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_COL,
      ScanQueryMatcher.MatchCode.SEEK_NEXT_ROW,
      ScanQueryMatcher.MatchCode.DONE
  };
  long now = EnvironmentEdgeManager.currentTimeMillis();
  ScanQueryMatcher qm = new ScanQueryMatcher(scan,
      new ScanInfo(fam2, 0, 1, testTTL, false, 0, rowComparator),
      get.getFamilyMap().get(fam2), now - testTTL);
  KeyValue [] kvs = new KeyValue[] {
      new KeyValue(row1, fam2, col1, now-100, data),
      new KeyValue(row1, fam2, col2, now-50, data),
      new KeyValue(row1, fam2, col3, now-5000, data),  // older than testTTL -> expired
      new KeyValue(row1, fam2, col4, now-500, data),
      new KeyValue(row1, fam2, col5, now-10000, data), // older than testTTL -> expired
      new KeyValue(row2, fam1, col1, now-10, data)
  };
  KeyValue k = kvs[0];
  qm.setRow(k.getRowArray(), k.getRowOffset(), k.getRowLength());
  List<MatchCode> actual = new ArrayList<MatchCode>(kvs.length);
  for (KeyValue kv : kvs) {
    actual.add( qm.match(kv) );
  }
  assertEquals(expected.length, actual.size());
  for (int i=0; i<expected.length; i++) {
    if(PRINT){
      System.out.println("expected "+expected[i]+ ", actual " +actual.get(i));
    }
    assertEquals(expected[i], actual.get(i));
  }
}

/**
 * Verify that {@link ScanQueryMatcher} only skips expired KeyValue
 * instances and does not exit early from the row (skipping
 * later non-expired KeyValues). This version mimics a Get with
 * wildcard-inferred column qualifiers.
 *
 * @throws IOException
 */
public void testMatch_ExpiredWildcard() throws IOException {
  long testTTL = 1000;
  MatchCode [] expected = new MatchCode[] {
      ScanQueryMatcher.MatchCode.INCLUDE,
      ScanQueryMatcher.MatchCode.INCLUDE,
      ScanQueryMatcher.MatchCode.SEEK_NEXT_COL,
      ScanQueryMatcher.MatchCode.INCLUDE,
      ScanQueryMatcher.MatchCode.SEEK_NEXT_COL,
      ScanQueryMatcher.MatchCode.DONE
  };
  long now = EnvironmentEdgeManager.currentTimeMillis();
  ScanQueryMatcher qm = new ScanQueryMatcher(scan,
      new ScanInfo(fam2, 0, 1, testTTL, false, 0, rowComparator),
      null, now - testTTL);
  KeyValue [] kvs = new KeyValue[] {
      new KeyValue(row1, fam2, col1, now-100, data),
      new KeyValue(row1, fam2, col2, now-50, data),
      new KeyValue(row1, fam2, col3, now-5000, data),  // expired
      new KeyValue(row1, fam2, col4, now-500, data),
      new KeyValue(row1, fam2, col5, now-10000, data), // expired
      new KeyValue(row2, fam1, col1, now-10, data)
  };
  KeyValue k = kvs[0];
  qm.setRow(k.getRowArray(), k.getRowOffset(), k.getRowLength());
  List<ScanQueryMatcher.MatchCode> actual =
      new ArrayList<ScanQueryMatcher.MatchCode>(kvs.length);
  for (KeyValue kv : kvs) {
    actual.add( qm.match(kv) );
  }
  assertEquals(expected.length, actual.size());
  for (int i=0; i<expected.length; i++) {
    if(PRINT){
      System.out.println("expected "+expected[i]+ ", actual " +actual.get(i));
    }
    assertEquals(expected[i], actual.get(i));
  }
}

/**
 * Exercises drop-deletes over various [from, to) row ranges, including
 * open-ended ranges and ranges containing none of the rows.
 */
public void testMatch_PartialRangeDropDeletes() throws Exception {
  // Some ranges.
  testDropDeletes(
      row2, row3, new byte[][] { row1, row2, row2, row3 }, INCLUDE, SKIP, SKIP, INCLUDE);
  testDropDeletes(row2, row3, new byte[][] { row1, row1, row2 }, INCLUDE, INCLUDE, SKIP);
  testDropDeletes(row2, row3, new byte[][] { row2, row3, row3 }, SKIP, INCLUDE, INCLUDE);
  testDropDeletes(row1, row3, new byte[][] { row1, row2, row3 }, SKIP, SKIP, INCLUDE);
  // Open ranges.
  testDropDeletes(HConstants.EMPTY_START_ROW, row3,
      new byte[][] { row1, row2, row3 }, SKIP, SKIP, INCLUDE);
  testDropDeletes(row2, HConstants.EMPTY_END_ROW,
      new byte[][] { row1, row2, row3 }, INCLUDE, SKIP, SKIP);
  testDropDeletes(HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW,
      new byte[][] { row1, row2, row3, row3 }, SKIP, SKIP, SKIP, SKIP);
  // No KVs in range.
  testDropDeletes(row2, row3,
      new byte[][] { row1, row1, row3 }, INCLUDE, INCLUDE, INCLUDE);
  testDropDeletes(row2, row3, new byte[][] { row3, row3 }, INCLUDE, INCLUDE);
  testDropDeletes(row2, row3, new byte[][] { row1, row1 }, INCLUDE, INCLUDE);
}

/**
 * Builds a matcher configured to drop deletes in the row range
 * [{@code from}, {@code to}), feeds it one Delete KeyValue per entry of
 * {@code rows}, and asserts the resulting match codes.
 */
private void testDropDeletes(
    byte[] from, byte[] to, byte[][] rows, MatchCode... expected) throws IOException {
  long now = EnvironmentEdgeManager.currentTimeMillis();
  // Set time to purge deletes to negative value to avoid it ever happening.
  ScanInfo scanInfo = new ScanInfo(fam2, 0, 1, ttl, false, -1L, rowComparator);
  NavigableSet<byte[]> cols = get.getFamilyMap().get(fam2);
  ScanQueryMatcher qm = new ScanQueryMatcher(scan, scanInfo, cols, Long.MAX_VALUE,
      HConstants.OLDEST_TIMESTAMP, HConstants.OLDEST_TIMESTAMP, from, to, null);
  List<ScanQueryMatcher.MatchCode> actual =
      new ArrayList<ScanQueryMatcher.MatchCode>(rows.length);
  byte[] prevRow = null;
  for (byte[] row : rows) {
    // Only reset the matcher's row when we actually move to a new row.
    if (prevRow == null || !Bytes.equals(prevRow, row)) {
      qm.setRow(row, 0, (short)row.length);
      prevRow = row;
    }
    actual.add(qm.match(new KeyValue(row, fam2, null, now, Type.Delete)));
  }
  assertEquals(expected.length, actual.size());
  for (int i = 0; i < expected.length; i++) {
    if (PRINT) System.out.println("expected " + expected[i] + ", actual " + actual.get(i));
    assertEquals(expected[i], actual.get(i));
  }
}
}
/* * Copyright 2018 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.workbench.common.dmn.api.definition.model; import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.Set; import org.jboss.errai.common.client.api.annotations.Portable; import org.jboss.errai.databinding.client.api.Bindable; import org.kie.soup.commons.util.Sets; import org.kie.workbench.common.dmn.api.definition.HasText; import org.kie.workbench.common.dmn.api.definition.HasTypeRef; import org.kie.workbench.common.dmn.api.property.DMNPropertySet; import org.kie.workbench.common.dmn.api.property.dmn.Description; import org.kie.workbench.common.dmn.api.property.dmn.Id; import org.kie.workbench.common.dmn.api.property.dmn.QName; import org.kie.workbench.common.dmn.api.property.dmn.Text; import org.kie.workbench.common.dmn.api.resource.i18n.DMNAPIConstants; import org.kie.workbench.common.forms.adf.definitions.annotations.FieldParam; import org.kie.workbench.common.forms.adf.definitions.annotations.FormDefinition; import org.kie.workbench.common.forms.adf.definitions.annotations.FormField; import org.kie.workbench.common.forms.adf.definitions.annotations.i18n.I18nSettings; import org.kie.workbench.common.forms.adf.definitions.settings.FieldPolicy; import org.kie.workbench.common.stunner.core.definition.annotation.Definition; import org.kie.workbench.common.stunner.core.definition.annotation.Property; import 
org.kie.workbench.common.stunner.core.definition.annotation.definition.Category; import org.kie.workbench.common.stunner.core.definition.annotation.definition.Labels; import org.kie.workbench.common.stunner.core.domainobject.DomainObject; import org.kie.workbench.common.stunner.core.util.HashUtil; import static java.util.Collections.singletonList; import static org.kie.workbench.common.forms.adf.engine.shared.formGeneration.processing.fields.fieldInitializers.nestedForms.AbstractEmbeddedFormsInitializer.COLLAPSIBLE_CONTAINER; import static org.kie.workbench.common.forms.adf.engine.shared.formGeneration.processing.fields.fieldInitializers.nestedForms.AbstractEmbeddedFormsInitializer.FIELD_CONTAINER_PARAM; /** * This is in essence a clone of {@link LiteralExpression} specifically for {@link OutputClause} * to expose the {@link Text} as a Form Property to the Dynamic Forms Engine with a specific * label for "Default Output value". */ @Portable @Bindable @Definition @FormDefinition(policy = FieldPolicy.ONLY_MARKED, defaultFieldSettings = {@FieldParam(name = FIELD_CONTAINER_PARAM, value = COLLAPSIBLE_CONTAINER)}, i18n = @I18nSettings(keyPreffix = "org.kie.workbench.common.dmn.api.definition.model.OutputClauseLiteralExpression"), startElement = "id") public class OutputClauseLiteralExpression extends DMNModelInstrumentedBase implements IsLiteralExpression, HasText, HasTypeRef, DMNPropertySet, DomainObject { @Category private static final String stunnerCategory = Categories.DOMAIN_OBJECTS; @Labels private static final Set<String> stunnerLabels = new Sets.Builder<String>().build(); protected Id id; protected Description description; protected QName typeRef; @Property @FormField(afterElement = "description", labelKey = "text") protected Text text; protected ImportedValues importedValues; public OutputClauseLiteralExpression() { this(new Id(), new Description(), new QName(), new Text(), null); } public OutputClauseLiteralExpression(final Id id, final Description description, 
final QName typeRef, final Text text, final ImportedValues importedValues) { this.id = id; this.description = description; this.typeRef = typeRef; this.text = text; this.importedValues = importedValues; } public OutputClauseLiteralExpression copy() { return new OutputClauseLiteralExpression( new Id(), Optional.ofNullable(description).map(Description::copy).orElse(null), Optional.ofNullable(typeRef).map(QName::copy).orElse(null), Optional.ofNullable(text).map(Text::copy).orElse(null), Optional.ofNullable(importedValues).map(ImportedValues::copy).orElse(null) ); } @Override public List<HasTypeRef> getHasTypeRefs() { return new ArrayList<>(singletonList(this)); } // ----------------------- // Stunner core properties // ----------------------- public String getStunnerCategory() { return stunnerCategory; } public Set<String> getStunnerLabels() { return stunnerLabels; } // ----------------------- // DMN properties // ----------------------- @Override public Id getId() { return id; } @Override public Description getDescription() { return description; } public void setDescription(final Description description) { this.description = description; } @Override public QName getTypeRef() { return typeRef; } @Override public void setTypeRef(final QName typeRef) { this.typeRef = typeRef; } @Override public Text getText() { return text; } @Override public void setText(final Text text) { this.text = text; } @Override public ImportedValues getImportedValues() { return importedValues; } public void setImportedValues(final ImportedValues importedValues) { this.importedValues = importedValues; } @Override public DMNModelInstrumentedBase asDMNModelInstrumentedBase() { return this; } // ------------------------------------------------------ // DomainObject requirements - to use in Properties Panel // ------------------------------------------------------ @Override public String getDomainObjectUUID() { return getId().getValue(); } @Override public String getDomainObjectNameTranslationKey() 
{ return DMNAPIConstants.LiteralExpression_DomainObjectName; } @Override public boolean equals(final Object o) { if (this == o) { return true; } if (!(o instanceof OutputClauseLiteralExpression)) { return false; } final OutputClauseLiteralExpression that = (OutputClauseLiteralExpression) o; if (id != null ? !id.equals(that.id) : that.id != null) { return false; } if (description != null ? !description.equals(that.description) : that.description != null) { return false; } if (typeRef != null ? !typeRef.equals(that.typeRef) : that.typeRef != null) { return false; } if (text != null ? !text.equals(that.text) : that.text != null) { return false; } return importedValues != null ? importedValues.equals(that.importedValues) : that.importedValues == null; } @Override public int hashCode() { return HashUtil.combineHashCodes(id != null ? id.hashCode() : 0, description != null ? description.hashCode() : 0, typeRef != null ? typeRef.hashCode() : 0, text != null ? text.hashCode() : 0, importedValues != null ? importedValues.hashCode() : 0); } }
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
 * Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.elasticsearch;

import com.amazonaws.services.elasticsearch.model.*;
import com.amazonaws.annotation.ThreadSafe;

/**
 * Interface for accessing Amazon Elasticsearch Service asynchronously. Each
 * asynchronous method will return a Java Future object representing the
 * asynchronous operation; overloads which accept an {@code AsyncHandler} can be
 * used to receive notification when an asynchronous operation completes.
 * <p>
 * <fullname>Amazon Elasticsearch Configuration Service</fullname>
 * <p>
 * Use the Amazon Elasticsearch configuration API to create, configure, and
 * manage Elasticsearch domains.
 * </p>
 * <p>
 * The endpoint for configuration service requests is region-specific:
 * es.<i>region</i>.amazonaws.com. For example, es.us-east-1.amazonaws.com. For
 * a current list of supported regions and endpoints, see <a href=
 * "http://docs.aws.amazon.com/general/latest/gr/rande.html#cloudsearch_region"
 * target="_blank">Regions and Endpoints</a>.
 * </p>
 */
@ThreadSafe
public class AWSElasticsearchAsyncClient extends AWSElasticsearchClient
        implements AWSElasticsearchAsync {

    // Matches the SDK's default maximum number of concurrent HTTP connections.
    private static final int DEFAULT_THREAD_POOL_SIZE = 50;

    // Executes every *Async call; owned by this client and torn down in shutdown().
    private final java.util.concurrent.ExecutorService executorService;

    /**
     * Constructs a new asynchronous client to invoke service methods on Amazon
     * Elasticsearch Service. A credentials provider chain will be used that
     * searches for credentials in this order:
     * <ul>
     * <li>Environment Variables - AWS_ACCESS_KEY_ID and AWS_SECRET_KEY</li>
     * <li>Java System Properties - aws.accessKeyId and aws.secretKey</li>
     * <li>Credential profiles file at the default location (~/.aws/credentials)
     * shared by all AWS SDKs and the AWS CLI</li>
     * <li>Instance profile credentials delivered through the Amazon EC2
     * metadata service</li>
     * </ul>
     * <p>
     * Asynchronous methods are delegated to a fixed-size thread pool containing
     * 50 threads (to match the default maximum number of concurrent connections
     * to the service).
     *
     * @see com.amazonaws.auth.DefaultAWSCredentialsProviderChain
     * @see java.util.concurrent.Executors#newFixedThreadPool(int)
     */
    public AWSElasticsearchAsyncClient() {
        this(new com.amazonaws.auth.DefaultAWSCredentialsProviderChain());
    }

    /**
     * Constructs a new asynchronous client to invoke service methods on Amazon
     * Elasticsearch Service. A credentials provider chain will be used that
     * searches for credentials in this order:
     * <ul>
     * <li>Environment Variables - AWS_ACCESS_KEY_ID and AWS_SECRET_KEY</li>
     * <li>Java System Properties - aws.accessKeyId and aws.secretKey</li>
     * <li>Credential profiles file at the default location (~/.aws/credentials)
     * shared by all AWS SDKs and the AWS CLI</li>
     * <li>Instance profile credentials delivered through the Amazon EC2
     * metadata service</li>
     * </ul>
     * <p>
     * Asynchronous methods are delegated to a fixed-size thread pool containing
     * a number of threads equal to the maximum number of concurrent connections
     * configured via {@code ClientConfiguration.getMaxConnections()}.
     *
     * @param clientConfiguration
     *        The client configuration options controlling how this client
     *        connects to Amazon Elasticsearch Service (ex: proxy settings,
     *        retry counts, etc).
     *
     * @see com.amazonaws.auth.DefaultAWSCredentialsProviderChain
     * @see java.util.concurrent.Executors#newFixedThreadPool(int)
     */
    public AWSElasticsearchAsyncClient(
            com.amazonaws.ClientConfiguration clientConfiguration) {
        this(new com.amazonaws.auth.DefaultAWSCredentialsProviderChain(),
                clientConfiguration, java.util.concurrent.Executors
                        .newFixedThreadPool(clientConfiguration
                                .getMaxConnections()));
    }

    /**
     * Constructs a new asynchronous client to invoke service methods on Amazon
     * Elasticsearch Service using the specified AWS account credentials.
     * <p>
     * Asynchronous methods are delegated to a fixed-size thread pool containing
     * 50 threads (to match the default maximum number of concurrent connections
     * to the service).
     *
     * @param awsCredentials
     *        The AWS credentials (access key ID and secret key) to use when
     *        authenticating with AWS services.
     * @see java.util.concurrent.Executors#newFixedThreadPool(int)
     */
    public AWSElasticsearchAsyncClient(
            com.amazonaws.auth.AWSCredentials awsCredentials) {
        this(awsCredentials, java.util.concurrent.Executors
                .newFixedThreadPool(DEFAULT_THREAD_POOL_SIZE));
    }

    /**
     * Constructs a new asynchronous client to invoke service methods on Amazon
     * Elasticsearch Service using the specified AWS account credentials and
     * executor service. Default client settings will be used.
     *
     * @param awsCredentials
     *        The AWS credentials (access key ID and secret key) to use when
     *        authenticating with AWS services.
     * @param executorService
     *        The executor service by which all asynchronous requests will be
     *        executed.
     */
    public AWSElasticsearchAsyncClient(
            com.amazonaws.auth.AWSCredentials awsCredentials,
            java.util.concurrent.ExecutorService executorService) {

        this(awsCredentials, configFactory.getConfig(), executorService);
    }

    /**
     * Constructs a new asynchronous client to invoke service methods on Amazon
     * Elasticsearch Service using the specified AWS account credentials,
     * executor service, and client configuration options.
     *
     * @param awsCredentials
     *        The AWS credentials (access key ID and secret key) to use when
     *        authenticating with AWS services.
     * @param clientConfiguration
     *        Client configuration options (ex: max retry limit, proxy settings,
     *        etc).
     * @param executorService
     *        The executor service by which all asynchronous requests will be
     *        executed.
     */
    public AWSElasticsearchAsyncClient(
            com.amazonaws.auth.AWSCredentials awsCredentials,
            com.amazonaws.ClientConfiguration clientConfiguration,
            java.util.concurrent.ExecutorService executorService) {

        super(awsCredentials, clientConfiguration);
        this.executorService = executorService;
    }

    /**
     * Constructs a new asynchronous client to invoke service methods on Amazon
     * Elasticsearch Service using the specified AWS account credentials
     * provider. Default client settings will be used.
     * <p>
     * Asynchronous methods are delegated to a fixed-size thread pool containing
     * 50 threads (to match the default maximum number of concurrent connections
     * to the service).
     *
     * @param awsCredentialsProvider
     *        The AWS credentials provider which will provide credentials to
     *        authenticate requests with AWS services.
     * @see java.util.concurrent.Executors#newFixedThreadPool(int)
     */
    public AWSElasticsearchAsyncClient(
            com.amazonaws.auth.AWSCredentialsProvider awsCredentialsProvider) {
        this(awsCredentialsProvider, java.util.concurrent.Executors
                .newFixedThreadPool(DEFAULT_THREAD_POOL_SIZE));
    }

    /**
     * Constructs a new asynchronous client to invoke service methods on Amazon
     * Elasticsearch Service using the provided AWS account credentials provider
     * and client configuration options.
     * <p>
     * Asynchronous methods are delegated to a fixed-size thread pool containing
     * a number of threads equal to the maximum number of concurrent connections
     * configured via {@code ClientConfiguration.getMaxConnections()}.
     *
     * @param awsCredentialsProvider
     *        The AWS credentials provider which will provide credentials to
     *        authenticate requests with AWS services.
     * @param clientConfiguration
     *        Client configuration options (ex: max retry limit, proxy settings,
     *        etc).
     *
     * @see com.amazonaws.auth.DefaultAWSCredentialsProviderChain
     * @see java.util.concurrent.Executors#newFixedThreadPool(int)
     */
    public AWSElasticsearchAsyncClient(
            com.amazonaws.auth.AWSCredentialsProvider awsCredentialsProvider,
            com.amazonaws.ClientConfiguration clientConfiguration) {
        this(awsCredentialsProvider, clientConfiguration,
                java.util.concurrent.Executors
                        .newFixedThreadPool(clientConfiguration
                                .getMaxConnections()));
    }

    /**
     * Constructs a new asynchronous client to invoke service methods on Amazon
     * Elasticsearch Service using the specified AWS account credentials
     * provider and executor service. Default client settings will be used.
     *
     * @param awsCredentialsProvider
     *        The AWS credentials provider which will provide credentials to
     *        authenticate requests with AWS services.
     * @param executorService
     *        The executor service by which all asynchronous requests will be
     *        executed.
     */
    public AWSElasticsearchAsyncClient(
            com.amazonaws.auth.AWSCredentialsProvider awsCredentialsProvider,
            java.util.concurrent.ExecutorService executorService) {

        this(awsCredentialsProvider, configFactory.getConfig(),
                executorService);
    }

    /**
     * Constructs a new asynchronous client to invoke service methods on Amazon
     * Elasticsearch Service using the specified AWS account credentials
     * provider, executor service, and client configuration options.
     *
     * @param awsCredentialsProvider
     *        The AWS credentials provider which will provide credentials to
     *        authenticate requests with AWS services.
     * @param clientConfiguration
     *        Client configuration options (ex: max retry limit, proxy settings,
     *        etc).
     * @param executorService
     *        The executor service by which all asynchronous requests will be
     *        executed.
     */
    public AWSElasticsearchAsyncClient(
            com.amazonaws.auth.AWSCredentialsProvider awsCredentialsProvider,
            com.amazonaws.ClientConfiguration clientConfiguration,
            java.util.concurrent.ExecutorService executorService) {

        super(awsCredentialsProvider, clientConfiguration);
        this.executorService = executorService;
    }

    /**
     * Returns the executor service used by this client to execute async
     * requests.
     *
     * @return The executor service used by this client to execute async
     *         requests.
     */
    public java.util.concurrent.ExecutorService getExecutorService() {
        return executorService;
    }

    // ------------------------------------------------------------------
    // Async operations. Every method below follows the same generated
    // pattern: the single-argument overload delegates to the handler
    // overload with a null handler; the handler overload submits a
    // Callable that invokes the synchronous operation on the superclass,
    // notifies the AsyncHandler (onError before rethrowing, onSuccess on
    // completion), and returns the result.
    // ------------------------------------------------------------------

    @Override
    public java.util.concurrent.Future<Void> addTagsAsync(AddTagsRequest request) {

        return addTagsAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<Void> addTagsAsync(
            final AddTagsRequest request,
            final com.amazonaws.handlers.AsyncHandler<AddTagsRequest, Void> asyncHandler) {

        return executorService
                .submit(new java.util.concurrent.Callable<Void>() {
                    @Override
                    public Void call() throws Exception {
                        Void result;

                        try {
                            addTags(request);
                            result = null;
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }

                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }

    @Override
    public java.util.concurrent.Future<CreateElasticsearchDomainResult> createElasticsearchDomainAsync(
            CreateElasticsearchDomainRequest request) {

        return createElasticsearchDomainAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<CreateElasticsearchDomainResult> createElasticsearchDomainAsync(
            final CreateElasticsearchDomainRequest request,
            final com.amazonaws.handlers.AsyncHandler<CreateElasticsearchDomainRequest, CreateElasticsearchDomainResult> asyncHandler) {

        return executorService
                .submit(new java.util.concurrent.Callable<CreateElasticsearchDomainResult>() {
                    @Override
                    public CreateElasticsearchDomainResult call()
                            throws Exception {
                        CreateElasticsearchDomainResult result;

                        try {
                            result = createElasticsearchDomain(request);
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }

                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }

    @Override
    public java.util.concurrent.Future<DeleteElasticsearchDomainResult> deleteElasticsearchDomainAsync(
            DeleteElasticsearchDomainRequest request) {

        return deleteElasticsearchDomainAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DeleteElasticsearchDomainResult> deleteElasticsearchDomainAsync(
            final DeleteElasticsearchDomainRequest request,
            final com.amazonaws.handlers.AsyncHandler<DeleteElasticsearchDomainRequest, DeleteElasticsearchDomainResult> asyncHandler) {

        return executorService
                .submit(new java.util.concurrent.Callable<DeleteElasticsearchDomainResult>() {
                    @Override
                    public DeleteElasticsearchDomainResult call()
                            throws Exception {
                        DeleteElasticsearchDomainResult result;

                        try {
                            result = deleteElasticsearchDomain(request);
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }

                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }

    @Override
    public java.util.concurrent.Future<DescribeElasticsearchDomainResult> describeElasticsearchDomainAsync(
            DescribeElasticsearchDomainRequest request) {

        return describeElasticsearchDomainAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DescribeElasticsearchDomainResult> describeElasticsearchDomainAsync(
            final DescribeElasticsearchDomainRequest request,
            final com.amazonaws.handlers.AsyncHandler<DescribeElasticsearchDomainRequest, DescribeElasticsearchDomainResult> asyncHandler) {

        return executorService
                .submit(new java.util.concurrent.Callable<DescribeElasticsearchDomainResult>() {
                    @Override
                    public DescribeElasticsearchDomainResult call()
                            throws Exception {
                        DescribeElasticsearchDomainResult result;

                        try {
                            result = describeElasticsearchDomain(request);
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }

                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }

    @Override
    public java.util.concurrent.Future<DescribeElasticsearchDomainConfigResult> describeElasticsearchDomainConfigAsync(
            DescribeElasticsearchDomainConfigRequest request) {

        return describeElasticsearchDomainConfigAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DescribeElasticsearchDomainConfigResult> describeElasticsearchDomainConfigAsync(
            final DescribeElasticsearchDomainConfigRequest request,
            final com.amazonaws.handlers.AsyncHandler<DescribeElasticsearchDomainConfigRequest, DescribeElasticsearchDomainConfigResult> asyncHandler) {

        return executorService
                .submit(new java.util.concurrent.Callable<DescribeElasticsearchDomainConfigResult>() {
                    @Override
                    public DescribeElasticsearchDomainConfigResult call()
                            throws Exception {
                        DescribeElasticsearchDomainConfigResult result;

                        try {
                            result = describeElasticsearchDomainConfig(request);
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }

                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }

    @Override
    public java.util.concurrent.Future<DescribeElasticsearchDomainsResult> describeElasticsearchDomainsAsync(
            DescribeElasticsearchDomainsRequest request) {

        return describeElasticsearchDomainsAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DescribeElasticsearchDomainsResult> describeElasticsearchDomainsAsync(
            final DescribeElasticsearchDomainsRequest request,
            final com.amazonaws.handlers.AsyncHandler<DescribeElasticsearchDomainsRequest, DescribeElasticsearchDomainsResult> asyncHandler) {

        return executorService
                .submit(new java.util.concurrent.Callable<DescribeElasticsearchDomainsResult>() {
                    @Override
                    public DescribeElasticsearchDomainsResult call()
                            throws Exception {
                        DescribeElasticsearchDomainsResult result;

                        try {
                            result = describeElasticsearchDomains(request);
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }

                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }

    @Override
    public java.util.concurrent.Future<ListDomainNamesResult> listDomainNamesAsync(
            ListDomainNamesRequest request) {

        return listDomainNamesAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<ListDomainNamesResult> listDomainNamesAsync(
            final ListDomainNamesRequest request,
            final com.amazonaws.handlers.AsyncHandler<ListDomainNamesRequest, ListDomainNamesResult> asyncHandler) {

        return executorService
                .submit(new java.util.concurrent.Callable<ListDomainNamesResult>() {
                    @Override
                    public ListDomainNamesResult call() throws Exception {
                        ListDomainNamesResult result;

                        try {
                            result = listDomainNames(request);
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }

                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }

    @Override
    public java.util.concurrent.Future<ListTagsResult> listTagsAsync(
            ListTagsRequest request) {

        return listTagsAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<ListTagsResult> listTagsAsync(
            final ListTagsRequest request,
            final com.amazonaws.handlers.AsyncHandler<ListTagsRequest, ListTagsResult> asyncHandler) {

        return executorService
                .submit(new java.util.concurrent.Callable<ListTagsResult>() {
                    @Override
                    public ListTagsResult call() throws Exception {
                        ListTagsResult result;

                        try {
                            result = listTags(request);
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }

                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }

    @Override
    public java.util.concurrent.Future<Void> removeTagsAsync(
            RemoveTagsRequest request) {

        return removeTagsAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<Void> removeTagsAsync(
            final RemoveTagsRequest request,
            final com.amazonaws.handlers.AsyncHandler<RemoveTagsRequest, Void> asyncHandler) {

        return executorService
                .submit(new java.util.concurrent.Callable<Void>() {
                    @Override
                    public Void call() throws Exception {
                        Void result;

                        try {
                            removeTags(request);
                            result = null;
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }

                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }

    @Override
    public java.util.concurrent.Future<UpdateElasticsearchDomainConfigResult> updateElasticsearchDomainConfigAsync(
            UpdateElasticsearchDomainConfigRequest request) {

        return updateElasticsearchDomainConfigAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<UpdateElasticsearchDomainConfigResult> updateElasticsearchDomainConfigAsync(
            final UpdateElasticsearchDomainConfigRequest request,
            final com.amazonaws.handlers.AsyncHandler<UpdateElasticsearchDomainConfigRequest, UpdateElasticsearchDomainConfigResult> asyncHandler) {

        return executorService
                .submit(new java.util.concurrent.Callable<UpdateElasticsearchDomainConfigResult>() {
                    @Override
                    public UpdateElasticsearchDomainConfigResult call()
                            throws Exception {
                        UpdateElasticsearchDomainConfigResult result;

                        try {
                            result = updateElasticsearchDomainConfig(request);
                        } catch (Exception ex) {
                            if (asyncHandler != null) {
                                asyncHandler.onError(ex);
                            }
                            throw ex;
                        }

                        if (asyncHandler != null) {
                            asyncHandler.onSuccess(request, result);
                        }
                        return result;
                    }
                });
    }

    /**
     * Shuts down the client, releasing all managed resources. This includes
     * forcibly terminating all pending asynchronous service calls. Clients who
     * wish to give pending asynchronous service calls time to complete should
     * call {@code getExecutorService().shutdown()} followed by
     * {@code getExecutorService().awaitTermination()} prior to calling this
     * method.
     */
    @Override
    public void shutdown() {
        super.shutdown();
        // shutdownNow() interrupts in-flight tasks and drops queued ones.
        executorService.shutdownNow();
    }
}
package org.apache.lucene.index;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.File;
import java.io.IOException;
import java.util.*;

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.DocumentStoredFieldVisitor;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.BaseDirectory;
import org.apache.lucene.store.BufferedIndexInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;

/**
 * Tests reading of stored fields, both via the plain {@code IndexReader.document(int)}
 * path and via a {@link DocumentStoredFieldVisitor}, plus behavior when the underlying
 * directory throws intermittent I/O errors (LUCENE-1262).
 */
public class TestFieldsReader extends LuceneTestCase {
  // Shared single-document index built once in beforeClass() and torn down in afterClass().
  private static Directory dir;
  private static Document testDoc;
  private static FieldInfos.Builder fieldInfos = null;

  /**
   * Builds a one-document index from DocHelper's canonical test document.
   * Compound files are disabled (setNoCFSRatio(0.0)) so FaultyFSDirectory-style
   * per-file interception in other tests sees the individual index files.
   */
  @BeforeClass
  public static void beforeClass() throws Exception {
    testDoc = new Document();
    fieldInfos = new FieldInfos.Builder();
    DocHelper.setupDoc(testDoc);
    for (IndexableField field : testDoc) {
      fieldInfos.addOrUpdate(field.name(), field.fieldType());
    }
    dir = newDirectory();
    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy());
    conf.getMergePolicy().setNoCFSRatio(0.0);
    IndexWriter writer = new IndexWriter(dir, conf);
    writer.addDocument(testDoc);
    writer.close();
    // Make sure the fault flag from any prior test run does not leak into this class.
    FaultyIndexInput.doFail = false;
  }

  /** Releases the static fixtures so the test framework's leak checks pass. */
  @AfterClass
  public static void afterClass() throws Exception {
    dir.close();
    dir = null;
    fieldInfos = null;
    testDoc = null;
  }

  /**
   * Verifies that stored-field metadata (term vectors, norms, index options) round-trips
   * for several representative fields, and that a DocumentStoredFieldVisitor restricted
   * to one field loads exactly that field.
   */
  public void test() throws IOException {
    assertTrue(dir != null);
    assertTrue(fieldInfos != null);
    IndexReader reader = DirectoryReader.open(dir);
    Document doc = reader.document(0);
    assertTrue(doc != null);
    assertTrue(doc.getField(DocHelper.TEXT_FIELD_1_KEY) != null);

    // Field 2: term vectors on, norms on, full postings.
    Field field = (Field) doc.getField(DocHelper.TEXT_FIELD_2_KEY);
    assertTrue(field != null);
    assertTrue(field.fieldType().storeTermVectors());
    assertFalse(field.fieldType().omitNorms());
    assertTrue(field.fieldType().indexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);

    // Field 3: term vectors off, norms omitted.
    field = (Field) doc.getField(DocHelper.TEXT_FIELD_3_KEY);
    assertTrue(field != null);
    assertFalse(field.fieldType().storeTermVectors());
    assertTrue(field.fieldType().omitNorms());
    assertTrue(field.fieldType().indexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);

    // NO_TF field: frequencies/positions omitted, only doc ids indexed.
    field = (Field) doc.getField(DocHelper.NO_TF_KEY);
    assertTrue(field != null);
    assertFalse(field.fieldType().storeTermVectors());
    assertFalse(field.fieldType().omitNorms());
    assertTrue(field.fieldType().indexOptions() == IndexOptions.DOCS_ONLY);

    // A visitor scoped to TEXT_FIELD_3_KEY must load exactly that one field.
    DocumentStoredFieldVisitor visitor = new DocumentStoredFieldVisitor(DocHelper.TEXT_FIELD_3_KEY);
    reader.document(0, visitor);
    final List<IndexableField> fields = visitor.getDocument().getFields();
    assertEquals(1, fields.size());
    assertEquals(DocHelper.TEXT_FIELD_3_KEY, fields.get(0).name());
    reader.close();
  }

  /**
   * A Directory wrapper that delegates everything to a real FSDirectory but wraps
   * every opened input in a FaultyIndexInput so reads can be made to fail on demand.
   */
  public static class FaultyFSDirectory extends BaseDirectory {
    Directory fsDir;

    public FaultyFSDirectory(File dir) {
      fsDir = newFSDirectory(dir);
      lockFactory = fsDir.getLockFactory();
    }

    @Override
    public IndexInput openInput(String name, IOContext context) throws IOException {
      return new FaultyIndexInput(fsDir.openInput(name, context));
    }

    @Override
    public String[] listAll() throws IOException {
      return fsDir.listAll();
    }

    @Override
    public boolean fileExists(String name) throws IOException {
      return fsDir.fileExists(name);
    }

    @Override
    public void deleteFile(String name) throws IOException {
      fsDir.deleteFile(name);
    }

    @Override
    public long fileLength(String name) throws IOException {
      return fsDir.fileLength(name);
    }

    @Override
    public IndexOutput createOutput(String name, IOContext context) throws IOException {
      return fsDir.createOutput(name, context);
    }

    @Override
    public void sync(Collection<String> names) throws IOException {
      fsDir.sync(names);
    }

    @Override
    public void close() throws IOException {
      fsDir.close();
    }
  }

  /**
   * An IndexInput that simulates an intermittent outage: when {@link #doFail} is set,
   * every second read throws an IOException. State (the shared static flag and the
   * per-instance counter) is intentionally simple; this is test-only scaffolding.
   */
  private static class FaultyIndexInput extends BufferedIndexInput {
    IndexInput delegate;
    // Global switch flipped by tests to start injecting failures.
    static boolean doFail;
    // Per-instance read counter; odd-numbered reads fail while doFail is set.
    int count;

    private FaultyIndexInput(IndexInput delegate) {
      super("FaultyIndexInput(" + delegate + ")", BufferedIndexInput.BUFFER_SIZE);
      this.delegate = delegate;
    }

    private void simOutage() throws IOException {
      if (doFail && count++ % 2 == 1) {
        throw new IOException("Simulated network outage");
      }
    }

    @Override
    public void readInternal(byte[] b, int offset, int length) throws IOException {
      simOutage();
      // Re-seek the delegate each time because seekInternal() below is a no-op;
      // the buffered position lives in this wrapper, not in the delegate.
      delegate.seek(getFilePointer());
      delegate.readBytes(b, offset, length);
    }

    @Override
    public void seekInternal(long pos) throws IOException {
    }

    @Override
    public long length() {
      return delegate.length();
    }

    @Override
    public void close() throws IOException {
      delegate.close();
    }

    @Override
    public FaultyIndexInput clone() {
      FaultyIndexInput i = new FaultyIndexInput(delegate.clone());
      // seek the clone to our current position
      try {
        i.seek(getFilePointer());
      } catch (IOException e) {
        throw new RuntimeException();
      }
      return i;
    }

    @Override
    public IndexInput slice(String sliceDescription, long offset, long length) throws IOException {
      IndexInput slice = delegate.slice(sliceDescription, offset, length);
      return new FaultyIndexInput(slice);
    }
  }

  // LUCENE-1262
  /**
   * Writes a two-document index through the faulty directory, then turns failures on
   * and verifies that document loads surface IOException rather than corrupting state.
   */
  public void testExceptions() throws Throwable {
    File indexDir = createTempDir("testfieldswriterexceptions");

    try {
      Directory dir = new FaultyFSDirectory(indexDir);
      IndexWriterConfig iwc = newIndexWriterConfig(
          TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE);
      IndexWriter writer = new IndexWriter(dir, iwc);
      for(int i=0;i<2;i++)
        writer.addDocument(testDoc);
      writer.forceMerge(1);
      writer.close();

      IndexReader reader = DirectoryReader.open(dir);

      FaultyIndexInput.doFail = true;

      boolean exc = false;

      // Each document is read twice; only every second low-level read fails,
      // so at least one of these attempts is expected to throw.
      for(int i=0;i<2;i++) {
        try {
          reader.document(i);
        } catch (IOException ioe) {
          // expected
          exc = true;
        }
        try {
          reader.document(i);
        } catch (IOException ioe) {
          // expected
          exc = true;
        }
      }
      assertTrue(exc);
      reader.close();
      dir.close();
    } finally {
      TestUtil.rm(indexDir);
    }
  }
}
/*
 * This file is part of the DITA Open Toolkit project.
 *
 * Copyright 2004, 2005 IBM Corporation
 *
 * See the accompanying LICENSE file for applicable license.
 */
package org.dita.dost.reader;

import static org.dita.dost.util.Constants.*;
import static org.dita.dost.util.URLUtils.*;

import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

import org.dita.dost.log.MessageUtils;
import org.dita.dost.util.KeyDef;
import org.dita.dost.util.StringUtils;
import org.dita.dost.writer.AbstractXMLFilter;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;

/**
 * This class extends AbstractReader, used to parse relevant dita topics and
 * ditamap files for GenMapAndTopicListModule.
 *
 * <p>
 * <strong>Not thread-safe</strong>. Instances can be reused by calling
 * {@link #reset()} between calls to {@link org.xml.sax.XMLReader#parse(InputSource)}.
 * </p>
 */
public final class KeydefFilter extends AbstractXMLFilter {

    /** Basedir of the current parsing file; must be set via setCurrentDir() before parsing
     *  or relative key targets cannot be resolved (NOTE(review): handleKeysAttr dereferences
     *  it without a null check — confirm callers always set it). */
    private URI currentDir = null;
    /** Map of key definitions, keyed by key name. */
    private final Map<String, KeyDef> keysDefMap;
    /** Map to store multi-level keyrefs: key name -> the key it references. */
    private final Map<String, String> keysRefMap;

    /**
     * Constructor.
     */
    public KeydefFilter() {
        keysDefMap = new HashMap<>();
        keysRefMap = new HashMap<>();
    }

    /**
     * Get the Key definitions.
     *
     * @return Key definitions map
     */
    public Map<String, KeyDef> getKeysDMap() {
        return keysDefMap;
    }

    /**
     * Set the relative directory of current file.
     *
     * @param dir dir
     */
    public void setCurrentDir(final URI dir) {
        currentDir = dir;
    }

    /**
     * Reset the internal variables so the filter can be reused for another parse.
     */
    public void reset() {
        currentDir = null;
        keysDefMap.clear();
        keysRefMap.clear();
    }

    /**
     * Collects key definitions from the element's attributes, then forwards the
     * event unchanged down the filter chain.
     */
    @Override
    public void startElement(final String uri, final String localName, final String qName, final Attributes atts)
            throws SAXException {
        handleKeysAttr(atts);

        getContentHandler().startElement(uri, localName, qName, atts);
    }

    /**
     * Clean up: resolve multi-level key references collected during the parse,
     * then forward the end-of-document event.
     */
    @Override
    public void endDocument() throws SAXException {
        checkMultiLevelKeys(keysDefMap, keysRefMap);

        getContentHandler().endDocument();
    }

    /**
     * Parse the keys attributes.
     *
     * <p>The first definition of a key wins; duplicates are logged (DOTJ045I) and
     * ignored. Local targets are made absolute against {@link #currentDir}, with any
     * fragment stripped before resolution and re-attached afterwards.</p>
     *
     * @param atts all attributes
     */
    private void handleKeysAttr(final Attributes atts) {
        final String attrValue = atts.getValue(ATTRIBUTE_NAME_KEYS);
        if (attrValue != null) {
            URI target = toURI(atts.getValue(ATTRIBUTE_NAME_HREF));
            // copy-to overrides href as the effective key target.
            final URI copyTo = toURI(atts.getValue(ATTRIBUTE_NAME_COPY_TO));
            if (copyTo != null) {
                target = copyTo;
            }
            final String keyRef = atts.getValue(ATTRIBUTE_NAME_KEYREF);
            // Many keys can be defined in a single definition, like
            // keys="a b c"; a, b and c are separated by whitespace.
            for (final String key : attrValue.trim().split("\\s+")) {
                if (!keysDefMap.containsKey(key)) {
                    if (target != null && !target.toString().isEmpty()) {
                        final String attrScope = atts.getValue(ATTRIBUTE_NAME_SCOPE);
                        final String attrFormat = atts.getValue(ATTRIBUTE_NAME_FORMAT);
                        if (attrScope != null && (attrScope.equals(ATTR_SCOPE_VALUE_EXTERNAL) || attrScope.equals(ATTR_SCOPE_VALUE_PEER))) {
                            // External/peer targets are stored as-is, without resolution.
                            keysDefMap.put(key, new KeyDef(key, target, attrScope, attrFormat, null, null));
                        } else {
                            // Local scope: strip the fragment, absolutize, then restore the fragment.
                            String tail = null;
                            if (target.getFragment() != null) {
                                tail = target.getFragment();
                                target = stripFragment(target);
                            }
                            if (!target.isAbsolute()) {
                                target = currentDir.resolve(target);
                            }
                            keysDefMap.put(key, new KeyDef(key, setFragment(target, tail), ATTR_SCOPE_VALUE_LOCAL, attrFormat, null, null));
                        }
                    } else if (!StringUtils.isEmptyString(keyRef)) {
                        // store multi-level keys (key -> key indirection), resolved in endDocument().
                        keysRefMap.put(key, keyRef);
                    } else {
                        // target is null or empty; kept as a placeholder definition, useful
                        // in the future when the content of the key definition is considered.
                        keysDefMap.put(key, new KeyDef(key, null, null, null, null, null));
                    }
                } else {
                    // Duplicate key definition: first one wins, report and skip.
                    logger.info(MessageUtils.getMessage("DOTJ045I", key).toString());
                }
            }
        }
    }

    /**
     * Get multi-level keys list: all keys that (transitively) reference {@code key}.
     */
    private List<String> getKeysList(final String key, final Map<String, String> keysRefMap) {
        final List<String> list = new ArrayList<>();
        // Iterate the map to look for multi-level keys
        for (Entry<String, String> entry : keysRefMap.entrySet()) {
            // Multi-level key found
            if (entry.getValue().equals(key)) {
                // add key into the list
                final String entryKey = entry.getKey();
                list.add(entryKey);
                // still have multi-level keys
                if (keysRefMap.containsValue(entryKey)) {
                    // recursion point: follow the chain one level deeper.
                    // NOTE(review): a cyclic keyref chain would recurse without bound — confirm
                    // upstream validation rules out cycles.
                    final List<String> tempList = getKeysList(entryKey, keysRefMap);
                    list.addAll(tempList);
                }
            }
        }
        return list;
    }

    /**
     * Update keysDefMap for multi-level keys: every key that transitively references a
     * defined key inherits that key's definition.
     */
    private void checkMultiLevelKeys(final Map<String, KeyDef> keysDefMap, final Map<String, String> keysRefMap) {
        String key;
        KeyDef value;
        // tempMap storing values to avoid ConcurrentModificationException
        final Map<String, KeyDef> tempMap = new HashMap<>();

        for (Entry<String, KeyDef> entry : keysDefMap.entrySet()) {
            key = entry.getKey();
            value = entry.getValue();
            // there are multi-level keys referencing this defined key.
            if (keysRefMap.containsValue(key)) {
                // get multi-level keys
                final List<String> keysList = getKeysList(key, keysRefMap);
                for (final String multikey : keysList) {
                    // update tempMap
                    tempMap.put(multikey, value);
                }
            }
        }
        // update keysDefMap.
        keysDefMap.putAll(tempMap);
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.formats.avro.typeutils;

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.typeutils.RowTypeInfo;
import org.apache.flink.formats.avro.AvroRowDeserializationSchema;
import org.apache.flink.formats.avro.AvroRowSerializationSchema;
import org.apache.flink.table.types.logical.ArrayType;
import org.apache.flink.table.types.logical.DecimalType;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.LogicalTypeFamily;
import org.apache.flink.table.types.logical.MapType;
import org.apache.flink.table.types.logical.MultisetType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.TimestampType;
import org.apache.flink.table.types.logical.utils.LogicalTypeChecks;
import org.apache.flink.types.Row;
import org.apache.flink.util.Preconditions;

import org.apache.avro.LogicalTypes;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.SchemaParseException;
import org.apache.avro.specific.SpecificData;
import org.apache.avro.specific.SpecificRecord;

import java.util.List;

/**
 * Converts an Avro schema into Flink's type information. It uses {@link RowTypeInfo} for representing
 * objects and converts Avro types into types that are compatible with Flink's Table & SQL API.
 *
 * <p>Note: Changes in this class need to be kept in sync with the corresponding runtime
 * classes {@link AvroRowDeserializationSchema} and {@link AvroRowSerializationSchema}.
 */
public class AvroSchemaConverter {

    private AvroSchemaConverter() {
        // private
    }

    /**
     * Converts an Avro class into a nested row structure with deterministic field order and data
     * types that are compatible with Flink's Table & SQL API.
     *
     * @param avroClass Avro specific record that contains schema information
     * @return type information matching the schema
     */
    @SuppressWarnings("unchecked")
    public static <T extends SpecificRecord> TypeInformation<Row> convertToTypeInfo(Class<T> avroClass) {
        Preconditions.checkNotNull(avroClass, "Avro specific record class must not be null.");
        // determine schema to retrieve deterministic field order
        final Schema schema = SpecificData.get().getSchema(avroClass);
        return (TypeInformation<Row>) convertToTypeInfo(schema);
    }

    /**
     * Converts an Avro schema string into a nested row structure with deterministic field order and data
     * types that are compatible with Flink's Table & SQL API.
     *
     * @param avroSchemaString Avro schema definition string
     * @return type information matching the schema
     */
    @SuppressWarnings("unchecked")
    public static <T> TypeInformation<T> convertToTypeInfo(String avroSchemaString) {
        Preconditions.checkNotNull(avroSchemaString, "Avro schema must not be null.");
        final Schema schema;
        try {
            schema = new Schema.Parser().parse(avroSchemaString);
        } catch (SchemaParseException e) {
            throw new IllegalArgumentException("Could not parse Avro schema string.", e);
        }
        return (TypeInformation<T>) convertToTypeInfo(schema);
    }

    /**
     * Recursively maps an Avro {@link Schema} to Flink type information.
     * Nullable unions (exactly [null, T] or [T, null]) are unwrapped to T; any
     * other multi-type union falls back to a generic (Kryo-serialized) type.
     */
    private static TypeInformation<?> convertToTypeInfo(Schema schema) {
        switch (schema.getType()) {
            case RECORD:
                final List<Schema.Field> fields = schema.getFields();

                final TypeInformation<?>[] types = new TypeInformation<?>[fields.size()];
                final String[] names = new String[fields.size()];
                for (int i = 0; i < fields.size(); i++) {
                    final Schema.Field field = fields.get(i);
                    types[i] = convertToTypeInfo(field.schema());
                    names[i] = field.name();
                }
                return Types.ROW_NAMED(names, types);
            case ENUM:
                return Types.STRING;
            case ARRAY:
                // result type might either be ObjectArrayTypeInfo or BasicArrayTypeInfo for Strings
                return Types.OBJECT_ARRAY(convertToTypeInfo(schema.getElementType()));
            case MAP:
                return Types.MAP(Types.STRING, convertToTypeInfo(schema.getValueType()));
            case UNION:
                final Schema actualSchema;
                if (schema.getTypes().size() == 2 && schema.getTypes().get(0).getType() == Schema.Type.NULL) {
                    actualSchema = schema.getTypes().get(1);
                } else if (schema.getTypes().size() == 2 && schema.getTypes().get(1).getType() == Schema.Type.NULL) {
                    actualSchema = schema.getTypes().get(0);
                } else if (schema.getTypes().size() == 1) {
                    actualSchema = schema.getTypes().get(0);
                } else {
                    // use Kryo for serialization
                    return Types.GENERIC(Object.class);
                }
                return convertToTypeInfo(actualSchema);
            case FIXED:
                // logical decimal type
                if (schema.getLogicalType() instanceof LogicalTypes.Decimal) {
                    return Types.BIG_DEC;
                }
                // convert fixed size binary data to primitive byte arrays
                return Types.PRIMITIVE_ARRAY(Types.BYTE);
            case STRING:
                // convert Avro's Utf8/CharSequence to String
                return Types.STRING;
            case BYTES:
                // logical decimal type
                if (schema.getLogicalType() instanceof LogicalTypes.Decimal) {
                    return Types.BIG_DEC;
                }
                return Types.PRIMITIVE_ARRAY(Types.BYTE);
            case INT:
                // logical date and time type
                // NOTE(review): relies on reference equality with the LogicalTypes singletons
                // (date()/timeMillis() return cached instances) — confirm this holds for the
                // Avro version in use.
                final org.apache.avro.LogicalType logicalType = schema.getLogicalType();
                if (logicalType == LogicalTypes.date()) {
                    return Types.SQL_DATE;
                } else if (logicalType == LogicalTypes.timeMillis()) {
                    return Types.SQL_TIME;
                }
                return Types.INT;
            case LONG:
                // logical timestamp type
                if (schema.getLogicalType() == LogicalTypes.timestampMillis()) {
                    return Types.SQL_TIMESTAMP;
                }
                return Types.LONG;
            case FLOAT:
                return Types.FLOAT;
            case DOUBLE:
                return Types.DOUBLE;
            case BOOLEAN:
                return Types.BOOLEAN;
            case NULL:
                return Types.VOID;
        }
        throw new IllegalArgumentException("Unsupported Avro type '" + schema.getType() + "'.");
    }

    /**
     * Converts Flink SQL {@link LogicalType} (can be nested) into an Avro schema.
     *
     * @param logicalType logical type
     * @return Avro's {@link Schema} matching this logical type.
     */
    public static Schema convertToSchema(LogicalType logicalType) {
        return convertToSchema(logicalType, 0);
    }

    /**
     * Converts Flink SQL {@link LogicalType} into an Avro schema, using
     * {@code rowTypeCounter} to derive unique record names ("row_N") for nested rows.
     *
     * <p>NOTE(review): the counter is passed by value, so sibling ROW fields at the same
     * nesting depth receive the same incremented counter — verify record names cannot
     * collide for schemas with multiple nested rows.</p>
     *
     * @param logicalType    logical type to convert
     * @param rowTypeCounter counter used to name generated record types
     * @return Avro schema matching the logical type
     */
    public static Schema convertToSchema(LogicalType logicalType, int rowTypeCounter) {
        switch (logicalType.getTypeRoot()) {
            case NULL:
                return SchemaBuilder.builder().nullType();
            case BOOLEAN:
                return getNullableBuilder(logicalType).booleanType();
            case INTEGER:
                return getNullableBuilder(logicalType).intType();
            case BIGINT:
                return getNullableBuilder(logicalType).longType();
            case FLOAT:
                return getNullableBuilder(logicalType).floatType();
            case DOUBLE:
                return getNullableBuilder(logicalType).doubleType();
            case CHAR:
            case VARCHAR:
                return getNullableBuilder(logicalType).stringType();
            case BINARY:
            case VARBINARY:
                return getNullableBuilder(logicalType).bytesType();
            case TIMESTAMP_WITHOUT_TIME_ZONE:
                // use long to represents Timestamp
                final TimestampType timestampType = (TimestampType) logicalType;
                int precision = timestampType.getPrecision();
                org.apache.avro.LogicalType avroLogicalType;
                if (precision <= 3) {
                    avroLogicalType = LogicalTypes.timestampMillis();
                } else {
                    throw new IllegalArgumentException("Avro Timestamp does not support Timestamp with precision: " +
                        precision +
                        ", it only supports precision of 3 or 9.");
                }
                return avroLogicalType.addToSchema(SchemaBuilder.builder().longType());
            case DATE:
                // use int to represents Date
                return LogicalTypes.date().addToSchema(SchemaBuilder.builder().intType());
            case TIME_WITHOUT_TIME_ZONE:
                // use int to represents Time, we only support millisecond when deserialization
                return LogicalTypes.timeMillis().addToSchema(SchemaBuilder.builder().intType());
            case DECIMAL:
                DecimalType decimalType = (DecimalType) logicalType;
                // store BigDecimal as byte[]
                return LogicalTypes
                    .decimal(decimalType.getPrecision(), decimalType.getScale())
                    .addToSchema(SchemaBuilder.builder().bytesType());
            case ROW:
                RowType rowType = (RowType) logicalType;
                List<String> fieldNames = rowType.getFieldNames();
                // we have to make sure the record name is different in a Schema
                SchemaBuilder.FieldAssembler<Schema> builder = SchemaBuilder
                    .builder()
                    .record("row_" + rowTypeCounter)
                    .fields();
                rowTypeCounter++;
                for (int i = 0; i < rowType.getFieldCount(); i++) {
                    builder = builder
                        .name(fieldNames.get(i))
                        .type(convertToSchema(rowType.getTypeAt(i), rowTypeCounter))
                        .noDefault();
                }
                return builder.endRecord();
            case MULTISET:
            case MAP:
                // Multisets are encoded as maps from element to count (see extractValueTypeToAvroMap).
                return SchemaBuilder
                    .builder()
                    .nullable()
                    .map()
                    .values(convertToSchema(extractValueTypeToAvroMap(logicalType), rowTypeCounter));
            case ARRAY:
                ArrayType arrayType = (ArrayType) logicalType;
                return SchemaBuilder
                    .builder()
                    .nullable()
                    .array()
                    .items(convertToSchema(arrayType.getElementType(), rowTypeCounter));
            case RAW:
                // if the union type has more than 2 types, it will be recognized a generic type
                // see AvroRowDeserializationSchema#convertAvroType and AvroRowSerializationSchema#convertFlinkType
                return SchemaBuilder.builder().unionOf()
                    .nullType().and()
                    .booleanType().and()
                    .longType().and()
                    .doubleType()
                    .endUnion();
            case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            default:
                throw new UnsupportedOperationException("Unsupported to derive Schema for type: " + logicalType);
        }
    }

    /**
     * Extracts the value type that a MAP or MULTISET maps to in Avro.
     * Multisets become maps from element to {@code int} count. Avro maps only
     * support string keys, so a non-character-string key type is rejected.
     *
     * @param type a {@link MapType} or {@link MultisetType}
     * @return the Avro map value type
     */
    public static LogicalType extractValueTypeToAvroMap(LogicalType type) {
        LogicalType keyType;
        LogicalType valueType;
        if (type instanceof MapType) {
            MapType mapType = (MapType) type;
            keyType = mapType.getKeyType();
            valueType = mapType.getValueType();
        } else {
            MultisetType multisetType = (MultisetType) type;
            keyType = multisetType.getElementType();
            valueType = new IntType();
        }
        if (!LogicalTypeChecks.hasFamily(keyType, LogicalTypeFamily.CHARACTER_STRING)) {
            throw new UnsupportedOperationException(
                "Avro format doesn't support non-string as key type of map. " +
                    "The key type is: " + keyType.asSummaryString());
        }
        return valueType;
    }

    /**
     * Returns a type builder that wraps the resulting schema in a nullable union
     * when the Flink logical type is nullable, otherwise a plain builder.
     */
    private static SchemaBuilder.BaseTypeBuilder<Schema> getNullableBuilder(LogicalType logicalType) {
        SchemaBuilder.TypeBuilder<Schema> builder = SchemaBuilder.builder();
        if (logicalType.isNullable()) {
            return builder.nullable();
        }
        return builder;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.jms.client;

import javax.jms.BytesMessage;
import javax.jms.CompletionListener;
import javax.jms.DeliveryMode;
import javax.jms.Destination;
import javax.jms.IllegalStateException;
import javax.jms.InvalidDestinationException;
import javax.jms.JMSException;
import javax.jms.MapMessage;
import javax.jms.Message;
import javax.jms.MessageProducer;
import javax.jms.ObjectMessage;
import javax.jms.Queue;
import javax.jms.QueueSender;
import javax.jms.StreamMessage;
import javax.jms.TextMessage;
import javax.jms.Topic;
import javax.jms.TopicPublisher;

import org.apache.activemq.artemis.api.core.ActiveMQException;
import org.apache.activemq.artemis.api.core.ActiveMQInterruptedException;
import org.apache.activemq.artemis.api.core.ActiveMQQueueExistsException;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.api.core.client.ClientMessage;
import org.apache.activemq.artemis.api.core.client.ClientProducer;
import org.apache.activemq.artemis.api.core.client.ClientSession;
import org.apache.activemq.artemis.api.core.client.SendAcknowledgementHandler;
import org.apache.activemq.artemis.api.core.RoutingType;
import org.apache.activemq.artemis.utils.UUID;
import org.apache.activemq.artemis.utils.UUIDGenerator;

/**
 * ActiveMQ Artemis implementation of a JMS MessageProducer.
 *
 * <p>Also implements {@link QueueSender} and {@link TopicPublisher} so the same
 * instance can be used through either legacy domain-specific API. All sends funnel
 * through the internal {@code doSendx(...)} method. Per-producer defaults
 * (delivery mode, priority, TTL, delivery delay) follow the JMS contract.</p>
 */
public class ActiveMQMessageProducer implements MessageProducer, QueueSender, TopicPublisher {

    private final ConnectionFactoryOptions options;

    private final ActiveMQConnection connection;

    // Identifier stamped on every sent message (CONNECTION_ID_PROPERTY_NAME):
    // the connection's client ID when set, otherwise its internal UID.
    private final SimpleString connID;

    private final ClientProducer clientProducer;

    private final ClientSession clientSession;

    private boolean disableMessageID = false;

    private boolean disableMessageTimestamp = false;

    private int defaultPriority = Message.DEFAULT_PRIORITY;

    private long defaultTimeToLive = Message.DEFAULT_TIME_TO_LIVE;

    private int defaultDeliveryMode = Message.DEFAULT_DELIVERY_MODE;

    private long defaultDeliveryDelay = Message.DEFAULT_DELIVERY_DELAY;

    // Default destination; null for an anonymous producer, in which case every
    // send must name a destination explicitly.
    private final ActiveMQDestination defaultDestination;

    // Constructors --------------------------------------------------

    protected ActiveMQMessageProducer(final ActiveMQConnection connection,
                                      final ClientProducer producer,
                                      final ActiveMQDestination defaultDestination,
                                      final ClientSession clientSession,
                                      final ConnectionFactoryOptions options) throws JMSException {
        this.options = options;
        this.connection = connection;
        connID = connection.getClientID() != null ? new SimpleString(connection.getClientID()) : connection.getUID();
        this.clientProducer = producer;
        this.defaultDestination = defaultDestination;
        this.clientSession = clientSession;
    }

    // MessageProducer implementation --------------------------------

    @Override
    public void setDisableMessageID(final boolean value) throws JMSException {
        checkClosed();

        disableMessageID = value;
    }

    @Override
    public boolean getDisableMessageID() throws JMSException {
        checkClosed();

        return disableMessageID;
    }

    @Override
    public void setDisableMessageTimestamp(final boolean value) throws JMSException {
        checkClosed();

        disableMessageTimestamp = value;
    }

    @Override
    public boolean getDisableMessageTimestamp() throws JMSException {
        checkClosed();

        return disableMessageTimestamp;
    }

    /**
     * Sets the default delivery mode; only the two JMS-defined modes are accepted.
     *
     * @throws JMSException if the mode is neither PERSISTENT nor NON_PERSISTENT
     */
    @Override
    public void setDeliveryMode(final int deliveryMode) throws JMSException {
        checkClosed();
        if (deliveryMode != DeliveryMode.NON_PERSISTENT && deliveryMode != DeliveryMode.PERSISTENT) {
            throw ActiveMQJMSClientBundle.BUNDLE.illegalDeliveryMode(deliveryMode);
        }

        defaultDeliveryMode = deliveryMode;
    }

    @Override
    public int getDeliveryMode() throws JMSException {
        checkClosed();

        return defaultDeliveryMode;
    }

    /**
     * Sets the default priority; JMS priorities range from 0 to 9 inclusive.
     *
     * @throws JMSException if the value is outside [0, 9]
     */
    @Override
    public void setPriority(final int defaultPriority) throws JMSException {
        checkClosed();

        if (defaultPriority < 0 || defaultPriority > 9) {
            throw new JMSException("Illegal priority value: " + defaultPriority);
        }

        this.defaultPriority = defaultPriority;
    }

    @Override
    public int getPriority() throws JMSException {
        checkClosed();

        return defaultPriority;
    }

    @Override
    public void setTimeToLive(final long timeToLive) throws JMSException {
        checkClosed();

        defaultTimeToLive = timeToLive;
    }

    @Override
    public long getTimeToLive() throws JMSException {
        checkClosed();

        return defaultTimeToLive;
    }

    @Override
    public Destination getDestination() throws JMSException {
        checkClosed();

        return defaultDestination;
    }

    /**
     * Closes the underlying core producer. Must not be invoked from a
     * CompletionListener callback thread (JMS 2.0 restriction), which is
     * asserted before closing.
     */
    @Override
    public void close() throws JMSException {
        connection.getThreadAwareContext().assertNotCompletionListenerThread();
        try {
            clientProducer.close();
        } catch (ActiveMQException e) {
            throw JMSExceptionHelper.convertFromActiveMQException(e);
        }
    }

    /** Sends to the default destination with this producer's default QoS settings. */
    @Override
    public void send(final Message message) throws JMSException {
        checkDefaultDestination();

        doSendx(defaultDestination, message, defaultDeliveryMode, defaultPriority, defaultTimeToLive, null);
    }

    /** Sends to the default destination with explicit QoS settings. */
    @Override
    public void send(final Message message,
                     final int deliveryMode,
                     final int priority,
                     final long timeToLive) throws JMSException {
        checkDefaultDestination();

        doSendx(defaultDestination, message, deliveryMode, priority, timeToLive, null);
    }

    /** Sends to an explicit destination (anonymous-producer form) with default QoS. */
    @Override
    public void send(final Destination destination, final Message message) throws JMSException {
        send(destination, message, defaultDeliveryMode, defaultPriority, defaultTimeToLive);
    }

    /** Sends to an explicit destination with explicit QoS settings. */
    @Override
    public void send(final Destination destination,
                     final Message message,
                     final int deliveryMode,
                     final int priority,
                     final long timeToLive) throws JMSException {
        checkClosed();

        checkDestination(destination);

        doSendx((ActiveMQDestination) destination, message, deliveryMode, priority, timeToLive, null);
    }

    @Override
    public void setDeliveryDelay(long deliveryDelay) throws JMSException {
        this.defaultDeliveryDelay = deliveryDelay;
    }

    @Override
    public long getDeliveryDelay() throws JMSException {
        return defaultDeliveryDelay;
    }

    /** Asynchronous send (JMS 2.0) to the default destination with default QoS. */
    @Override
    public void send(Message message, CompletionListener completionListener) throws JMSException {
        send(message, defaultDeliveryMode, defaultPriority, defaultTimeToLive, completionListener);
    }

    /** Asynchronous send (JMS 2.0) to the default destination with explicit QoS. */
    @Override
    public void send(Message message,
                     int deliveryMode,
                     int priority,
                     long timeToLive,
                     CompletionListener completionListener) throws JMSException {
        checkCompletionListener(completionListener);
        checkDefaultDestination();
        doSendx(defaultDestination, message, deliveryMode, priority, timeToLive, completionListener);
    }

    /** Asynchronous send (JMS 2.0) to an explicit destination with default QoS. */
    @Override
    public void send(Destination destination,
                     Message message,
                     CompletionListener completionListener) throws JMSException {
        send(destination, message, defaultDeliveryMode, defaultPriority, defaultTimeToLive, completionListener);
    }

    /** Asynchronous send (JMS 2.0) to an explicit destination with explicit QoS. */
    @Override
    public void send(Destination destination,
                     Message message,
                     int deliveryMode,
                     int priority,
                     long timeToLive,
                     CompletionListener completionListener) throws JMSException {
        checkClosed();
        checkCompletionListener(completionListener);
        checkDestination(destination);
        doSendx((ActiveMQDestination) destination, message, deliveryMode, priority, timeToLive, completionListener);
    }

    // TopicPublisher Implementation ---------------------------------

    @Override
    public Topic getTopic() throws JMSException {
        return (Topic) getDestination();
    }

    @Override
    public void publish(final Message message) throws JMSException {
        send(message);
    }

    @Override
    public void publish(final Topic topic, final Message message) throws JMSException {
        send(topic, message);
    }

    @Override
    public void publish(final Message message,
                        final int deliveryMode,
                        final int priority,
                        final long timeToLive) throws JMSException {
        send(message, deliveryMode, priority, timeToLive);
    }

    @Override
    public void publish(final Topic topic,
                        final Message message,
                        final int deliveryMode,
                        final int priority,
                        final long timeToLive) throws JMSException {
        checkDestination(topic);
        doSendx((ActiveMQDestination) topic, message, deliveryMode, priority, timeToLive, null);
    }

    // QueueSender Implementation ------------------------------------

    @Override
    public void send(final Queue queue, final Message message) throws JMSException {
        send((Destination) queue, message);
    }

    @Override
    public void send(final Queue queue,
                     final Message message,
                     final int deliveryMode,
                     final int priority,
                     final long timeToLive) throws JMSException {
        checkDestination(queue);
        doSendx((ActiveMQDestination) queue, message, deliveryMode, priority, timeToLive, null);
    }

    @Override
    public Queue getQueue() throws JMSException {
        return (Queue) getDestination();
    }

    // Public --------------------------------------------------------

    @Override
    public String toString() {
        return "ActiveMQMessageProducer->" + clientProducer;
    }
// Private -------------------------------------------------------

   /**
    * Check if the default destination has been set
    */
   private void checkDefaultDestination() {
      if (defaultDestination == null) {
         throw new UnsupportedOperationException("Producer does not have a default destination");
      }
   }

   /**
    * Check if the destination is sent correctly.
    * Rejects non-ActiveMQ destinations, an explicit destination on a producer
    * that already has a default one, and a null destination.
    */
   private void checkDestination(Destination destination) throws InvalidDestinationException {
      if (destination != null && !(destination instanceof ActiveMQDestination)) {
         throw new InvalidDestinationException("Foreign destination:" + destination);
      }
      if (destination != null && defaultDestination != null) {
         throw new UnsupportedOperationException("Cannot specify destination if producer has a default destination");
      }
      if (destination == null) {
         throw ActiveMQJMSClientBundle.BUNDLE.nullTopic();
      }
   }

   // Async sends require a non-null listener (JMS 2.0).
   private void checkCompletionListener(CompletionListener completionListener) {
      if (completionListener == null) {
         throw ActiveMQJMSClientBundle.BUNDLE.nullArgumentNotAllowed("CompletionListener");
      }
   }

   /**
    * Common send path for every send/publish variant.
    * Stamps JMS headers (delivery mode, priority, expiration, timestamp),
    * resolves the target destination (explicit vs. producer default),
    * auto-creates the address/queue on the broker when allowed, converts
    * foreign (non-ActiveMQ) messages, then hands the core message to the
    * client producer — asynchronously when a CompletionListener is given.
    */
   private void doSendx(ActiveMQDestination destination,
                        final Message jmsMessage,
                        final int deliveryMode,
                        final int priority,
                        final long timeToLive,
                        CompletionListener completionListener) throws JMSException {
      jmsMessage.setJMSDeliveryMode(deliveryMode);
      jmsMessage.setJMSPriority(priority);

      // timeToLive == 0 means "never expires" per the JMS contract.
      if (timeToLive == 0) {
         jmsMessage.setJMSExpiration(0);
      } else {
         jmsMessage.setJMSExpiration(System.currentTimeMillis() + timeToLive);
      }

      if (!disableMessageTimestamp) {
         jmsMessage.setJMSTimestamp(System.currentTimeMillis());
      } else {
         jmsMessage.setJMSTimestamp(0);
      }

      // Stays null when sending to the producer's default destination: the
      // underlying clientProducer is already bound to that address.
      SimpleString address = null;

      if (destination == null) {
         if (defaultDestination == null) {
            throw new UnsupportedOperationException("Destination must be specified on send with an anonymous producer");
         }
         destination = defaultDestination;
      } else {
         if (defaultDestination != null) {
            if (!destination.equals(defaultDestination)) {
               throw new UnsupportedOperationException("Where a default destination is specified " + "for the sender and a destination is " + "specified in the arguments to the send, " + "these destinations must be equal");
            }
         }

         address = destination.getSimpleAddress();

         // Only probe/auto-create the destination the first time we see it on
         // this connection; afterwards it is cached as "known".
         if (!connection.containsKnownDestination(address)) {
            try {
               ClientSession.AddressQuery query = clientSession.addressQuery(address);

               if (!query.isExists()) {
                  if (destination.isQueue() && query.isAutoCreateQueues()) {
                     clientSession.createAddress(address, RoutingType.ANYCAST, true);
                     if (destination.isTemporary()) {
                        // TODO is it right to use the address for the queue name here?
                        clientSession.createTemporaryQueue(address, RoutingType.ANYCAST, address);
                     } else {
                        clientSession.createQueue(address, RoutingType.ANYCAST, address, null, true, true, query.getDefaultMaxConsumers(), query.isDefaultPurgeOnNoConsumers());
                     }
                  } else if (!destination.isQueue() && query.isAutoCreateAddresses()) {
                     clientSession.createAddress(address, RoutingType.MULTICAST, true);
                  } else if ((destination.isQueue() && !query.isAutoCreateQueues()) || (!destination.isQueue() && !query.isAutoCreateAddresses())) {
                     throw new InvalidDestinationException("Destination " + address + " does not exist");
                  }
               } else {
                  // Address exists; the backing queue may still be missing.
                  ClientSession.QueueQuery queueQuery = clientSession.queueQuery(address);
                  if (queueQuery.isExists()) {
                     connection.addKnownDestination(address);
                  } else if (destination.isQueue() && query.isAutoCreateQueues()) {
                     if (destination.isTemporary()) {
                        clientSession.createTemporaryQueue(address, RoutingType.ANYCAST, address);
                     } else {
                        clientSession.createQueue(address, RoutingType.ANYCAST, address, null, true, true, query.getDefaultMaxConsumers(), query.isDefaultPurgeOnNoConsumers());
                     }
                  }
               }
            } catch (ActiveMQQueueExistsException e) {
               // The queue was created by another client/admin between the query check and send create queue packet
            } catch (ActiveMQException e) {
               throw JMSExceptionHelper.convertFromActiveMQException(e);
            }
         }
      }

      ActiveMQMessage activeMQJmsMessage;

      boolean foreign = false;

      // First convert from foreign message if appropriate
      if (!(jmsMessage instanceof ActiveMQMessage)) {
         // JMS 1.1 Sect. 3.11.4: A provider must be prepared to accept, from a client,
         // a message whose implementation is not one of its own.
         if (jmsMessage instanceof BytesMessage) {
            activeMQJmsMessage = new ActiveMQBytesMessage((BytesMessage) jmsMessage, clientSession);
         } else if (jmsMessage instanceof MapMessage) {
            activeMQJmsMessage = new ActiveMQMapMessage((MapMessage) jmsMessage, clientSession);
         } else if (jmsMessage instanceof ObjectMessage) {
            activeMQJmsMessage = new ActiveMQObjectMessage((ObjectMessage) jmsMessage, clientSession, options);
         } else if (jmsMessage instanceof StreamMessage) {
            activeMQJmsMessage = new ActiveMQStreamMessage((StreamMessage) jmsMessage, clientSession);
         } else if (jmsMessage instanceof TextMessage) {
            activeMQJmsMessage = new ActiveMQTextMessage((TextMessage) jmsMessage, clientSession);
         } else {
            activeMQJmsMessage = new ActiveMQMessage(jmsMessage, clientSession);
         }

         // Set the destination on the original message
         jmsMessage.setJMSDestination(destination);

         foreign = true;
      } else {
         activeMQJmsMessage = (ActiveMQMessage) jmsMessage;
      }

      if (!disableMessageID) {
         // Generate a JMS id
         UUID uid = UUIDGenerator.getInstance().generateUUID();
         activeMQJmsMessage.getCoreMessage().setUserID(uid);
         activeMQJmsMessage.resetMessageID(null);
      }

      // Reflect the provider-assigned message id back onto the foreign message.
      if (foreign) {
         jmsMessage.setJMSMessageID(activeMQJmsMessage.getJMSMessageID());
      }

      activeMQJmsMessage.setJMSDestination(destination);

      try {
         activeMQJmsMessage.doBeforeSend();
      } catch (Exception e) {
         JMSException je = new JMSException(e.getMessage());
         je.initCause(e);
         throw je;
      }

      if (defaultDeliveryDelay > 0) {
         activeMQJmsMessage.setJMSDeliveryTime(System.currentTimeMillis() + defaultDeliveryDelay);
      }

      ClientMessage coreMessage = activeMQJmsMessage.getCoreMessage();
      coreMessage.putStringProperty(ActiveMQConnection.CONNECTION_ID_PROPERTY_NAME, connID);
      coreMessage.setRoutingType(destination.isQueue() ? RoutingType.ANYCAST : RoutingType.MULTICAST);

      try {
         /**
          * Using a completionListener requires wrapping using a {@link CompletionListenerWrapper},
          * so we avoid it if we can.
          */
         if (completionListener != null) {
            clientProducer.send(address, coreMessage, new CompletionListenerWrapper(completionListener, jmsMessage, this));
         } else {
            clientProducer.send(address, coreMessage);
         }
      } catch (ActiveMQInterruptedException e) {
         JMSException jmsException = new JMSException(e.getMessage());
         jmsException.initCause(e);
         throw jmsException;
      } catch (ActiveMQException e) {
         throw JMSExceptionHelper.convertFromActiveMQException(e);
      } catch (java.lang.IllegalStateException e) {
         // Translate the core IllegalStateException into the JMS flavour,
         // preserving both stack trace and cause.
         JMSException je = new IllegalStateException(e.getMessage());
         je.setStackTrace(e.getStackTrace());
         je.initCause(e);
         throw je;
      }
   }

   // Guard used by every public operation: fail fast once closed.
   private void checkClosed() throws JMSException {
      if (clientProducer.isClosed() || clientSession.isClosed()) {
         throw new IllegalStateException("Producer is closed");
      }
   }

   /**
    * Adapts a JMS 2.0 {@link CompletionListener} to the core
    * {@link SendAcknowledgementHandler} callback used by the client producer.
    * Resets Stream/Bytes messages before delivering the callback so the
    * application sees them in readable state.
    */
   private static final class CompletionListenerWrapper implements SendAcknowledgementHandler {

      private final CompletionListener completionListener;
      private final Message jmsMessage;
      private final ActiveMQMessageProducer producer;

      /**
       * @param jmsMessage
       * @param producer
       */
      private CompletionListenerWrapper(CompletionListener listener, Message jmsMessage, ActiveMQMessageProducer producer) {
         this.completionListener = listener;
         this.jmsMessage = jmsMessage;
         this.producer = producer;
      }

      @Override
      public void sendAcknowledged(org.apache.activemq.artemis.api.core.Message clientMessage) {
         if (jmsMessage instanceof StreamMessage) {
            try {
               ((StreamMessage) jmsMessage).reset();
            } catch (JMSException e) {
               // HORNETQ-1209 XXX ignore?
            }
         }
         if (jmsMessage instanceof BytesMessage) {
            try {
               ((BytesMessage) jmsMessage).reset();
            } catch (JMSException e) {
               // HORNETQ-1209 XXX ignore?
            }
         }

         try {
            // Mark the callback thread so re-entrant close() calls can be detected.
            producer.connection.getThreadAwareContext().setCurrentThread(true);
            completionListener.onCompletion(jmsMessage);
         } finally {
            producer.connection.getThreadAwareContext().clearCurrentThread(true);
         }
      }

      @Override
      public String toString() {
         return CompletionListenerWrapper.class.getSimpleName() + "( completionListener=" + completionListener + ")";
      }
   }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.percolator;

import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.DefaultShardOperationFailedException;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.AggregatorParsers;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * Implements the percolate API by rewriting a {@code PercolateRequest} into a
 * regular search request (using a {@code PercolateQueryBuilder}) and then
 * translating the resulting {@code SearchResponse} back into a
 * {@code PercolateResponse}.
 */
public class TransportPercolateAction extends HandledTransportAction<PercolateRequest, PercolateResponse> {

    private final Client client;
    private final ParseFieldMatcher parseFieldMatcher;
    private final IndicesQueriesRegistry queryRegistry;
    private final AggregatorParsers aggParsers;

    @Inject
    public TransportPercolateAction(Settings settings, ThreadPool threadPool, TransportService transportService,
                                    ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                                    Client client, IndicesQueriesRegistry indicesQueriesRegistry, AggregatorParsers aggParsers) {
        super(settings, PercolateAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver,
                PercolateRequest::new);
        this.client = client;
        this.aggParsers = aggParsers;
        this.parseFieldMatcher = new ParseFieldMatcher(settings);
        this.queryRegistry = indicesQueriesRegistry;
    }

    /**
     * Entry point. When the request references an existing document
     * ({@code getRequest}), fetch its source first and percolate that;
     * otherwise percolate the document embedded in the request source.
     */
    @Override
    protected void doExecute(PercolateRequest request, ActionListener<PercolateResponse> listener) {
        if (request.getRequest() != null) {
            client.get(request.getRequest(), new ActionListener<GetResponse>() {
                @Override
                public void onResponse(GetResponse getResponse) {
                    if (getResponse.isExists()) {
                        innerDoExecute(request, getResponse.getSourceAsBytesRef(), listener);
                    } else {
                        onFailure(new ResourceNotFoundException("percolate document [{}/{}/{}] doesn't exist", request.getRequest().index(), request.getRequest().type(), request.getRequest().id()));
                    }
                }

                @Override
                public void onFailure(Throwable e) {
                    listener.onFailure(e);
                }
            });
        } else {
            innerDoExecute(request, null, listener);
        }
    }

    // Builds the search request (may throw IOException during parsing),
    // executes it, and converts the response for the caller.
    private void innerDoExecute(PercolateRequest request, BytesReference docSource, ActionListener<PercolateResponse> listener) {
        SearchRequest searchRequest;
        try {
            searchRequest = createSearchRequest(request, docSource, queryRegistry, aggParsers, parseFieldMatcher);
        } catch (IOException e) {
            listener.onFailure(e);
            return;
        }
        client.search(searchRequest, new ActionListener<SearchResponse>() {
            @Override
            public void onResponse(SearchResponse searchResponse) {
                try {
                    listener.onResponse(createPercolateResponse(searchResponse, request.onlyCount()));
                } catch (Exception e) {
                    onFailure(e);
                }
            }

            @Override
            public void onFailure(Throwable e) {
                listener.onFailure(e);
            }
        });
    }

    /**
     * Rewrites a percolate request into an equivalent search request.
     * The percolate source is scanned token-by-token: {@code doc} and
     * {@code query}/{@code filter} objects are captured as raw bytes, while
     * {@code sort}/{@code aggregations}/{@code highlight}/{@code size}/
     * {@code track_scores} are copied straight into the generated search
     * source. Unknown fields or tokens are rejected.
     */
    public static SearchRequest createSearchRequest(PercolateRequest percolateRequest, BytesReference documentSource, IndicesQueriesRegistry queryRegistry, AggregatorParsers aggParsers, ParseFieldMatcher parseFieldMatcher) throws IOException {
        SearchRequest searchRequest = new SearchRequest();
        if (percolateRequest.indices() != null) {
            searchRequest.indices(percolateRequest.indices());
        }
        searchRequest.indicesOptions(percolateRequest.indicesOptions());
        searchRequest.routing(percolateRequest.routing());
        searchRequest.preference(percolateRequest.preference());

        BytesReference querySource = null;
        XContentBuilder searchSource = XContentFactory.jsonBuilder().startObject();
        if (percolateRequest.source() != null && percolateRequest.source().length() > 0) {
            try (XContentParser parser = XContentHelper.createParser(percolateRequest.source())) {
                String currentFieldName = null;
                XContentParser.Token token = parser.nextToken();
                if (token != XContentParser.Token.START_OBJECT) {
                    throw new IllegalArgumentException("Unknown token [" + token+ "]");
                }
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    if (token == XContentParser.Token.FIELD_NAME) {
                        currentFieldName = parser.currentName();
                    } else if (token == XContentParser.Token.START_OBJECT) {
                        if ("doc".equals(currentFieldName)) {
                            // Inline document takes the place of a fetched one.
                            XContentBuilder builder = XContentFactory.jsonBuilder();
                            builder.copyCurrentStructure(parser);
                            builder.flush();
                            documentSource = builder.bytes();
                        } else if ("query".equals(currentFieldName) || "filter".equals(currentFieldName)) {
                            // Captured raw; parsed into a QueryBuilder further below.
                            XContentBuilder builder = XContentFactory.jsonBuilder();
                            builder.copyCurrentStructure(parser);
                            builder.flush();
                            querySource = builder.bytes();
                        } else if ("sort".equals(currentFieldName)) {
                            searchSource.field("sort");
                            searchSource.copyCurrentStructure(parser);
                        } else if ("aggregations".equals(currentFieldName)) {
                            searchSource.field("aggregations");
                            searchSource.copyCurrentStructure(parser);
                        } else if ("highlight".equals(currentFieldName)) {
                            searchSource.field("highlight");
                            searchSource.copyCurrentStructure(parser);
                        } else {
                            throw new IllegalArgumentException("Unknown field [" + currentFieldName+ "]");
                        }
                    } else if (token == XContentParser.Token.START_ARRAY) {
                        if ("sort".equals(currentFieldName)) {
                            searchSource.field("sort");
                            searchSource.copyCurrentStructure(parser);
                        } else {
                            throw new IllegalArgumentException("Unknown field [" + currentFieldName+ "]");
                        }
                    } else if (token.isValue()) {
                        if ("size".equals(currentFieldName)) {
                            searchSource.field("size", parser.intValue());
                        } else if ("sort".equals(currentFieldName)) {
                            searchSource.field("sort", parser.text());
                        } else if ("track_scores".equals(currentFieldName) || "trackScores".equals(currentFieldName)) {
                            searchSource.field("track_scores", parser.booleanValue());
                        } else {
                            throw new IllegalArgumentException("Unknown field [" + currentFieldName+ "]");
                        }
                    } else {
                        throw new IllegalArgumentException("Unknown token [" + token + "]");
                    }
                }
            }
        }

        // Count-only requests need no hits back.
        if (percolateRequest.onlyCount()) {
            searchSource.field("size", 0);
        }

        PercolateQueryBuilder percolateQueryBuilder = new PercolateQueryBuilder("query", percolateRequest.documentType(), documentSource);
        if (querySource != null) {
            // Combine the user query (scoring) with the percolate query (filtering).
            try (XContentParser parser = XContentHelper.createParser(querySource)) {
                QueryParseContext queryParseContext = new QueryParseContext(queryRegistry, parser, parseFieldMatcher);
                QueryBuilder queryBuilder = queryParseContext.parseInnerQueryBuilder();
                BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
                boolQueryBuilder.must(queryBuilder);
                boolQueryBuilder.filter(percolateQueryBuilder);
                searchSource.field("query", boolQueryBuilder);
            }
        } else {
            // wrapping in a constant score query with boost 0 for bwc reason.
            // percolator api didn't emit scores before and never included scores
            // for how well percolator queries matched with the document being percolated
            searchSource.field("query", new ConstantScoreQueryBuilder(percolateQueryBuilder).boost(0f));
        }

        searchSource.endObject();
        searchSource.flush();
        BytesReference source = searchSource.bytes();
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
        // Re-parse the assembled JSON so the SearchSourceBuilder validates it.
        try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(source)) {
            QueryParseContext context = new QueryParseContext(queryRegistry, parser, parseFieldMatcher);
            searchSourceBuilder.parseXContent(context, aggParsers, null);
            searchRequest.source(searchSourceBuilder);
            return searchRequest;
        }
    }

    /**
     * Maps a search response back onto the legacy percolate response shape:
     * hits become matches (unless {@code onlyCount}), shard failures are
     * re-wrapped, totals/took/aggregations are carried over.
     */
    public static PercolateResponse createPercolateResponse(SearchResponse searchResponse, boolean onlyCount) {
        SearchHits hits = searchResponse.getHits();
        PercolateResponse.Match[] matches;
        if (onlyCount) {
            matches = null;
        } else {
            matches = new PercolateResponse.Match[hits.getHits().length];
            for (int i = 0; i < hits.getHits().length; i++) {
                SearchHit hit = hits.getHits()[i];
                matches[i] = new PercolateResponse.Match(new Text(hit.getIndex()), new Text(hit.getId()), hit.getScore(), hit.getHighlightFields());
            }
        }

        List<ShardOperationFailedException> shardFailures = new ArrayList<>(searchResponse.getShardFailures().length);
        for (ShardSearchFailure shardSearchFailure : searchResponse.getShardFailures()) {
            shardFailures.add(new DefaultShardOperationFailedException(shardSearchFailure.index(), shardSearchFailure.shardId(), shardSearchFailure.getCause()));
        }

        return new PercolateResponse(
                searchResponse.getTotalShards(), searchResponse.getSuccessfulShards(), searchResponse.getFailedShards(),
                shardFailures, matches, hits.getTotalHits(), searchResponse.getTookInMillis(), (InternalAggregations) searchResponse.getAggregations()
        );
    }

}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

package org.elasticsearch.xpack.ccr;

import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.action.support.ContextPreservingActionListener;
import org.elasticsearch.action.admin.indices.stats.IndexShardStats;
import org.elasticsearch.action.admin.indices.stats.IndexStats;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.admin.indices.stats.ShardStats;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.FilterClient;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.engine.CommitStats;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.license.RemoteClusterLicenseChecker;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xpack.ccr.action.ShardFollowTask;
import org.elasticsearch.xpack.ccr.action.ShardChangesAction;
import org.elasticsearch.xpack.core.XPackPlugin;
import org.elasticsearch.xpack.core.security.SecurityContext;
import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction;
import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest;
import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse;
import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
import org.elasticsearch.xpack.core.security.support.Exceptions;

import java.util.Arrays;
import java.util.Collections;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.function.BiConsumer;
import java.util.function.BooleanSupplier;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collectors;

/**
 * Encapsulates licensing checking for CCR.
 */
public final class CcrLicenseChecker {

    // Suppliers so license state is re-evaluated on every check rather than cached.
    private final BooleanSupplier isCcrAllowed;
    private final BooleanSupplier isAuthAllowed;

    /**
     * Constructs a CCR license checker with the default rule based on the license state for checking if CCR is allowed.
     */
    CcrLicenseChecker() {
        this(XPackPlugin.getSharedLicenseState()::isCcrAllowed, XPackPlugin.getSharedLicenseState()::isAuthAllowed);
    }

    /**
     * Constructs a CCR license checker with the specified boolean suppliers.
     *
     * @param isCcrAllowed  a boolean supplier that should return true if CCR is allowed and false otherwise
     * @param isAuthAllowed a boolean supplier that should return true if security, authentication, and authorization is allowed
     */
    public CcrLicenseChecker(final BooleanSupplier isCcrAllowed, final BooleanSupplier isAuthAllowed) {
        this.isCcrAllowed = Objects.requireNonNull(isCcrAllowed, "isCcrAllowed");
        this.isAuthAllowed = Objects.requireNonNull(isAuthAllowed, "isAuthAllowed");
    }

    /**
     * Returns whether or not CCR is allowed.
     *
     * @return true if CCR is allowed, otherwise false
     */
    public boolean isCcrAllowed() {
        return isCcrAllowed.getAsBoolean();
    }

    /**
     * Fetches the leader index metadata and history UUIDs for leader index shards from the remote cluster.
     * Before fetching the index metadata, the remote cluster is checked for license compatibility with CCR.
     * If the remote cluster is not licensed for CCR, the {@code onFailure} consumer is invoked. Otherwise,
     * the specified consumer is invoked with the leader index metadata fetched from the remote cluster.
     *
     * @param client        the client
     * @param clusterAlias  the remote cluster alias
     * @param leaderIndex   the name of the leader index
     * @param onFailure     the failure consumer
     * @param consumer      the consumer for supplying the leader index metadata and historyUUIDs of all leader shards
     * @param <T>           the type of response the listener is waiting for
     */
    public <T> void checkRemoteClusterLicenseAndFetchLeaderIndexMetadataAndHistoryUUIDs(
            final Client client,
            final String clusterAlias,
            final String leaderIndex,
            final Consumer<Exception> onFailure,
            final BiConsumer<String[], IndexMetaData> consumer) {

        // Only the metadata of the single leader index is needed.
        final ClusterStateRequest request = new ClusterStateRequest();
        request.clear();
        request.metaData(true);
        request.indices(leaderIndex);
        checkRemoteClusterLicenseAndFetchClusterState(
                client,
                Collections.emptyMap(),
                clusterAlias,
                request,
                onFailure,
                leaderClusterState -> {
                    IndexMetaData leaderIndexMetaData = leaderClusterState.getMetaData().index(leaderIndex);
                    if (leaderIndexMetaData == null) {
                        onFailure.accept(new IndexNotFoundException(leaderIndex));
                        return;
                    }

                    // Privilege check first, then per-shard history UUIDs.
                    final Client leaderClient = client.getRemoteClusterClient(clusterAlias);
                    hasPrivilegesToFollowIndices(leaderClient, new String[] {leaderIndex}, e -> {
                        if (e == null) {
                            fetchLeaderHistoryUUIDs(leaderClient, leaderIndexMetaData, onFailure, historyUUIDs ->
                                consumer.accept(historyUUIDs, leaderIndexMetaData));
                        } else {
                            onFailure.accept(e);
                        }
                    });
                },
                licenseCheck -> indexMetadataNonCompliantRemoteLicense(leaderIndex, licenseCheck),
                e -> indexMetadataUnknownRemoteLicense(leaderIndex, clusterAlias, e));
    }

    /**
     * Fetches the leader cluster state from the remote cluster by the specified cluster state request. Before fetching the cluster state,
     * the remote cluster is checked for license compliance with CCR. If the remote cluster is not licensed for CCR,
     * the {@code onFailure} consumer is invoked. Otherwise, the specified consumer is invoked with the leader cluster state fetched from
     * the remote cluster.
     *
     * @param client                     the client
     * @param headers                    the headers to use for leader client
     * @param clusterAlias               the remote cluster alias
     * @param request                    the cluster state request
     * @param onFailure                  the failure consumer
     * @param leaderClusterStateConsumer the leader cluster state consumer
     */
    public void checkRemoteClusterLicenseAndFetchClusterState(
            final Client client,
            final Map<String, String> headers,
            final String clusterAlias,
            final ClusterStateRequest request,
            final Consumer<Exception> onFailure,
            final Consumer<ClusterState> leaderClusterStateConsumer) {
        checkRemoteClusterLicenseAndFetchClusterState(
                client,
                headers,
                clusterAlias,
                request,
                onFailure,
                leaderClusterStateConsumer,
                CcrLicenseChecker::clusterStateNonCompliantRemoteLicense,
                e -> clusterStateUnknownRemoteLicense(clusterAlias, e));
    }

    /**
     * Fetches the leader cluster state from the remote cluster by the specified cluster state request. Before fetching the cluster state,
     * the remote cluster is checked for license compliance with CCR. If the remote cluster is not licensed for CCR,
     * the {@code onFailure} consumer is invoked. Otherwise, the specified consumer is invoked with the leader cluster state fetched from
     * the remote cluster.
     *
     * @param client                     the client
     * @param headers                    the headers to use for leader client
     * @param clusterAlias               the remote cluster alias
     * @param request                    the cluster state request
     * @param onFailure                  the failure consumer
     * @param leaderClusterStateConsumer the leader cluster state consumer
     * @param nonCompliantLicense        the supplier for when the license state of the remote cluster is non-compliant
     * @param unknownLicense             the supplier for when the license state of the remote cluster is unknown due to failure
     * @param <T>                        the type of response the listener is waiting for
     */
    private <T> void checkRemoteClusterLicenseAndFetchClusterState(
            final Client client,
            final Map<String, String> headers,
            final String clusterAlias,
            final ClusterStateRequest request,
            final Consumer<Exception> onFailure,
            final Consumer<ClusterState> leaderClusterStateConsumer,
            final Function<RemoteClusterLicenseChecker.LicenseCheck, ElasticsearchStatusException> nonCompliantLicense,
            final Function<Exception, ElasticsearchStatusException> unknownLicense) {
        // we have to check the license on the remote cluster
        new RemoteClusterLicenseChecker(client, XPackLicenseState::isCcrAllowedForOperationMode).checkRemoteClusterLicenses(
                Collections.singletonList(clusterAlias),
                new ActionListener<RemoteClusterLicenseChecker.LicenseCheck>() {

                    @Override
                    public void onResponse(final RemoteClusterLicenseChecker.LicenseCheck licenseCheck) {
                        if (licenseCheck.isSuccess()) {
                            final Client leaderClient = wrapClient(client.getRemoteClusterClient(clusterAlias), headers);
                            final ActionListener<ClusterStateResponse> clusterStateListener =
                                    ActionListener.wrap(s -> leaderClusterStateConsumer.accept(s.getState()), onFailure);
                            // following an index in remote cluster, so use remote client to fetch leader index metadata
                            leaderClient.admin().cluster().state(request, clusterStateListener);
                        } else {
                            onFailure.accept(nonCompliantLicense.apply(licenseCheck));
                        }
                    }

                    @Override
                    public void onFailure(final Exception e) {
                        onFailure.accept(unknownLicense.apply(e));
                    }

                });
    }

    /**
     * Fetches the history UUIDs for leader index on per shard basis using the specified leaderClient.
     *
     * @param leaderClient        the leader client
     * @param leaderIndexMetaData the leader index metadata
     * @param onFailure           the failure consumer
     * @param historyUUIDConsumer the leader index history uuid and consumer
     */
    // NOTE: Placed this method here; in order to avoid duplication of logic for fetching history UUIDs
    // in case of following a local or a remote cluster.
    public void fetchLeaderHistoryUUIDs(
            final Client leaderClient,
            final IndexMetaData leaderIndexMetaData,
            final Consumer<Exception> onFailure,
            final Consumer<String[]> historyUUIDConsumer) {

        String leaderIndex = leaderIndexMetaData.getIndex().getName();
        CheckedConsumer<IndicesStatsResponse, Exception> indicesStatsHandler = indicesStatsResponse -> {
            IndexStats indexStats = indicesStatsResponse.getIndices().get(leaderIndex);
            // One slot per shard; filled from primary shard commit stats.
            String[] historyUUIDs = new String[leaderIndexMetaData.getNumberOfShards()];
            for (IndexShardStats indexShardStats : indexStats) {
                for (ShardStats shardStats : indexShardStats) {
                    // Ignore replica shards as they may not have yet started and
                    // we just end up overwriting slots in historyUUIDs
                    if (shardStats.getShardRouting().primary() == false) {
                        continue;
                    }
                    CommitStats commitStats = shardStats.getCommitStats();
                    if (commitStats == null) {
                        onFailure.accept(new IllegalArgumentException("leader index's commit stats are missing"));
                        return;
                    }
                    String historyUUID = commitStats.getUserData().get(Engine.HISTORY_UUID_KEY);
                    ShardId shardId = shardStats.getShardRouting().shardId();
                    historyUUIDs[shardId.id()] = historyUUID;
                }
            }
            // Every shard must have reported a history UUID, otherwise fail.
            for (int i = 0; i < historyUUIDs.length; i++) {
                if (historyUUIDs[i] == null) {
                    onFailure.accept(new IllegalArgumentException("no history uuid for [" + leaderIndex + "][" + i + "]"));
                    return;
                }
            }
            historyUUIDConsumer.accept(historyUUIDs);
        };
        IndicesStatsRequest request = new IndicesStatsRequest();
        request.clear();
        request.indices(leaderIndex);
        leaderClient.admin().indices().stats(request, ActionListener.wrap(indicesStatsHandler, onFailure));
    }

    /**
     * Check if the user executing the current action has privileges to follow the specified indices on the cluster specified by the leader
     * client. The specified callback will be invoked with null if the user has the necessary privileges to follow the specified indices,
     * otherwise the callback will be invoked with an exception outlining the authorization error.
     *
     * @param leaderClient the leader client
     * @param indices      the indices
     * @param handler      the callback
     */
    public void hasPrivilegesToFollowIndices(final Client leaderClient, final String[] indices, final Consumer<Exception> handler) {
        Objects.requireNonNull(leaderClient, "leaderClient");
        Objects.requireNonNull(indices, "indices");
        if (indices.length == 0) {
            throw new IllegalArgumentException("indices must not be empty");
        }
        Objects.requireNonNull(handler, "handler");
        // When security is disabled, everything is permitted.
        if (isAuthAllowed.getAsBoolean() == false) {
            handler.accept(null);
            return;
        }

        ThreadContext threadContext = leaderClient.threadPool().getThreadContext();
        SecurityContext securityContext = new SecurityContext(Settings.EMPTY, threadContext);
        String username = securityContext.getUser().principal();

        // Following requires stats and shard-changes access on the leader indices.
        RoleDescriptor.IndicesPrivileges privileges = RoleDescriptor.IndicesPrivileges.builder()
            .indices(indices)
            .privileges(IndicesStatsAction.NAME, ShardChangesAction.NAME)
            .build();

        HasPrivilegesRequest request = new HasPrivilegesRequest();
        request.username(username);
        request.clusterPrivileges(Strings.EMPTY_ARRAY);
        request.indexPrivileges(privileges);
        request.applicationPrivileges(new RoleDescriptor.ApplicationResourcePrivileges[0]);
        CheckedConsumer<HasPrivilegesResponse, Exception> responseHandler = response -> {
            if (response.isCompleteMatch()) {
                handler.accept(null);
            } else {
                // Build a message naming each missing privilege.
                StringBuilder message = new StringBuilder("insufficient privileges to follow");
                message.append(indices.length == 1 ? " index " : " indices ");
                message.append(Arrays.toString(indices));

                HasPrivilegesResponse.ResourcePrivileges resourcePrivileges = response.getIndexPrivileges().get(0);
                for (Map.Entry<String, Boolean> entry : resourcePrivileges.getPrivileges().entrySet()) {
                    if (entry.getValue() == false) {
                        message.append(", privilege for action [");
                        message.append(entry.getKey());
                        message.append("] is missing");
                    }
                }
                handler.accept(Exceptions.authorizationError(message.toString()));
            }
        };
        leaderClient.execute(HasPrivilegesAction.INSTANCE, request, ActionListener.wrap(responseHandler, handler));
    }

    /**
     * Wraps a client so every request runs with the given (filtered) headers
     * stashed into the thread context, preserving the caller's context on the
     * response path. Returns the client unchanged when there are no headers.
     */
    public static Client wrapClient(Client client, Map<String, String> headers) {
        if (headers.isEmpty()) {
            return client;
        } else {
            final ThreadContext threadContext = client.threadPool().getThreadContext();
            // Only whitelisted headers are propagated to the remote cluster.
            Map<String, String> filteredHeaders = headers.entrySet().stream()
                .filter(e -> ShardFollowTask.HEADER_FILTERS.contains(e.getKey()))
                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
            return new FilterClient(client) {
                @Override
                protected <Request extends ActionRequest, Response extends ActionResponse>
                void doExecute(Action<Response> action, Request request, ActionListener<Response> listener) {
                    final Supplier<ThreadContext.StoredContext> supplier = threadContext.newRestorableContext(false);
                    try (ThreadContext.StoredContext ignore = stashWithHeaders(threadContext, filteredHeaders)) {
                        super.doExecute(action, request, new ContextPreservingActionListener<>(supplier, listener));
                    }
                }
            };
        }
    }

    // Stash the current thread context and overlay the given headers; the
    // returned StoredContext restores the original context when closed.
    private static ThreadContext.StoredContext stashWithHeaders(ThreadContext threadContext, Map<String, String> headers) {
        final ThreadContext.StoredContext storedContext = threadContext.stashContext();
        threadContext.copyHeaders(headers.entrySet());
        return storedContext;
    }

    // Error builders below produce the user-facing license failure messages.

    private static ElasticsearchStatusException indexMetadataNonCompliantRemoteLicense(
            final String leaderIndex, final RemoteClusterLicenseChecker.LicenseCheck licenseCheck) {
        final String clusterAlias = licenseCheck.remoteClusterLicenseInfo().clusterAlias();
        final String message = String.format(
                Locale.ROOT,
                "can not fetch remote index [%s:%s] metadata as the remote cluster [%s] is not licensed for [ccr]; %s",
                clusterAlias,
                leaderIndex,
                clusterAlias,
                RemoteClusterLicenseChecker.buildErrorMessage(
                        "ccr",
                        licenseCheck.remoteClusterLicenseInfo(),
                        RemoteClusterLicenseChecker::isLicensePlatinumOrTrial));
        return new ElasticsearchStatusException(message, RestStatus.BAD_REQUEST);
    }

    private static ElasticsearchStatusException clusterStateNonCompliantRemoteLicense(
            final RemoteClusterLicenseChecker.LicenseCheck licenseCheck) {
        final String clusterAlias = licenseCheck.remoteClusterLicenseInfo().clusterAlias();
        final String message = String.format(
                Locale.ROOT,
                "can not fetch remote cluster state as the remote cluster [%s] is not licensed for [ccr]; %s",
                clusterAlias,
                RemoteClusterLicenseChecker.buildErrorMessage(
                        "ccr",
                        licenseCheck.remoteClusterLicenseInfo(),
                        RemoteClusterLicenseChecker::isLicensePlatinumOrTrial));
        return new ElasticsearchStatusException(message, RestStatus.BAD_REQUEST);
    }

    private static ElasticsearchStatusException indexMetadataUnknownRemoteLicense(
            final String leaderIndex, final String clusterAlias, final Exception cause) {
        final String message = String.format(
                Locale.ROOT,
                "can not fetch remote index [%s:%s] metadata as the license state of the remote cluster [%s] could not be determined",
                clusterAlias,
                leaderIndex,
                clusterAlias);
        return new ElasticsearchStatusException(message, RestStatus.BAD_REQUEST, cause);
    }

    private static ElasticsearchStatusException clusterStateUnknownRemoteLicense(final String clusterAlias, final Exception cause) {
        final String message = String.format(
                Locale.ROOT,
                "can not fetch remote cluster state as the license state of the remote cluster [%s] could not be determined", clusterAlias);
        return new ElasticsearchStatusException(message, RestStatus.BAD_REQUEST, cause);
    }

}
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package vista;

import Controlador.detalleVenta;
import Controlador.producto;
import java.awt.print.PrinterException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.JOptionPane;
import javax.swing.JTable;
import javax.swing.table.DefaultTableModel;

/**
 * Search window for sale-detail rows ("detalleventa"). Offers filters by
 * price, quantity, sale number and date, an unfiltered listing, table
 * clearing and printing of the current result set.
 *
 * <p>Review fixes applied to the original:
 * <ul>
 *   <li>All five query handlers shared one copy-pasted body that leaked the
 *       {@code Connection}/{@code Statement}/{@code ResultSet}; they now
 *       delegate to {@link #ejecutarConsulta(String, Object...)}, which uses
 *       try-with-resources.</li>
 *   <li>User input was concatenated directly into SQL (injection risk on the
 *       date filter); queries are now parameterized with
 *       {@link PreparedStatement}.</li>
 *   <li>{@code mostrartable} read a connection field that was never
 *       initialized on its code path (guaranteed NPE); it now opens its own
 *       connection via the shared helper.</li>
 *   <li>The print handler logged against an unrelated class
 *       ({@code frmTablaEjemplo}); it now logs against this class.</li>
 * </ul>
 *
 * @author claudio
 */
public class FrmDetalleVentaBus extends javax.swing.JFrame {

    // Data-access helper from the Controlador layer; conectar() is invoked for
    // parity with the original before user lookups (see mostrartable).
    producto P = new producto();

    // Connection settings used by every query in this form.
    private static final String URL_CONEXION = "jdbc:odbc:venta";
    private static final String USUARIO_BD = "root";
    private static final String CLAVE_BD = "1234";

    // Projection shared by all detalleventa searches; filters append a
    // parameterized WHERE clause to it.
    private static final String CONSULTA_BASE =
            "select VENTAID as CORRELATIVO,FECHADETALLE,CANTIDAD,PRECIO from detalleventa";

    /**
     * Creates new form FrmDetalleVentaBus, centered on screen.
     */
    public FrmDetalleVentaBus() {
        initComponents();
        setLocationRelativeTo(null);
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        jScrollPane1 = new javax.swing.JScrollPane();
        jt1 = new javax.swing.JTable();
        jLabel2 = new javax.swing.JLabel();
        txtCodigo = new javax.swing.JTextField();
        jLabel3 = new javax.swing.JLabel();
        jButton5 = new javax.swing.JButton();
        btnTodos = new javax.swing.JButton();
        jLabel1 = new javax.swing.JLabel();
        btnPrecioBus = new javax.swing.JButton();
        jButton1 = new javax.swing.JButton();
        jButton4 = new javax.swing.JButton();
        btnMarca = new javax.swing.JButton();
        btnCantidad = new javax.swing.JButton();
        BtnNumeroVenta = new javax.swing.JButton();
        jLabel5 = new javax.swing.JLabel();
        jDateChooser2 = new com.toedter.calendar.JDateChooser();
        jLabel4 = new javax.swing.JLabel();
        jLabel6 = new javax.swing.JLabel();
        jDateChooser1 = new com.toedter.calendar.JDateChooser();

        setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);

        jt1.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
        jt1.setForeground(java.awt.Color.blue);
        jt1.setModel(new javax.swing.table.DefaultTableModel(
            new Object [][] {
                {},
                {},
                {},
                {}
            },
            new String [] {

            }
        ));
        jScrollPane1.setViewportView(jt1);

        jLabel2.setFont(new java.awt.Font("Tahoma", 1, 10)); // NOI18N
        jLabel2.setText("Ingrese parametros de Busqueda");

        txtCodigo.setFont(new java.awt.Font("Tahoma", 0, 14)); // NOI18N
        txtCodigo.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                txtCodigoActionPerformed(evt);
            }
        });

        jLabel3.setFont(new java.awt.Font("Tahoma", 1, 10)); // NOI18N
        jLabel3.setText("Detalles de Consulta Actual");

        jButton5.setBackground(java.awt.Color.green);
        jButton5.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
        jButton5.setIcon(new javax.swing.ImageIcon(getClass().getResource("/modelo/impresora32.png"))); // NOI18N
        jButton5.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jButton5ActionPerformed(evt);
            }
        });

        btnTodos.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
        btnTodos.setText("TODOS");
        btnTodos.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btnTodosActionPerformed(evt);
            }
        });

        jLabel1.setFont(new java.awt.Font("Tahoma", 1, 18)); // NOI18N
        jLabel1.setForeground(java.awt.Color.red);
        jLabel1.setText("BUSQUEDA DETALLE DE VENTA");

        btnPrecioBus.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
        btnPrecioBus.setText("PRECIO");
        btnPrecioBus.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btnPrecioBusActionPerformed(evt);
            }
        });

        jButton1.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
        jButton1.setText("FECHA");
        jButton1.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jButton1ActionPerformed(evt);
            }
        });

        jButton4.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N
        jButton4.setIcon(new javax.swing.ImageIcon(getClass().getResource("/modelo/atras3.png"))); // NOI18N
        jButton4.setText("Volver");
        jButton4.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                jButton4MouseClicked(evt);
            }
        });
        jButton4.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jButton4ActionPerformed(evt);
            }
        });

        btnMarca.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
        btnMarca.setText("LIMPIAR");
        btnMarca.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btnMarcaActionPerformed(evt);
            }
        });

        btnCantidad.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
        btnCantidad.setText("VTA-CANTIDAD");
        btnCantidad.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btnCantidadActionPerformed(evt);
            }
        });

        BtnNumeroVenta.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
        BtnNumeroVenta.setText("NRO-VENTA");
        BtnNumeroVenta.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                BtnNumeroVentaActionPerformed(evt);
            }
        });

        jLabel5.setIcon(new javax.swing.ImageIcon(getClass().getResource("/modelo/producto48.png"))); // NOI18N

        jLabel4.setFont(new java.awt.Font("Tahoma", 1, 10)); // NOI18N
        jLabel4.setText("Desde ");

        jLabel6.setFont(new java.awt.Font("Tahoma", 1, 10)); // NOI18N
        jLabel6.setText("Hasta");

        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(layout.createSequentialGroup()
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
                            .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
                                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
                                    .addComponent(txtCodigo, javax.swing.GroupLayout.PREFERRED_SIZE, 166, javax.swing.GroupLayout.PREFERRED_SIZE)
                                    .addGroup(layout.createSequentialGroup()
                                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
                                            .addComponent(jLabel6, javax.swing.GroupLayout.PREFERRED_SIZE, 37, javax.swing.GroupLayout.PREFERRED_SIZE)
                                            .addComponent(jLabel4))
                                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                                            .addComponent(jDateChooser2, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                                            .addComponent(jDateChooser1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))))
                                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                                    .addComponent(btnCantidad, javax.swing.GroupLayout.PREFERRED_SIZE, 123, javax.swing.GroupLayout.PREFERRED_SIZE)
                                    .addComponent(BtnNumeroVenta, javax.swing.GroupLayout.PREFERRED_SIZE, 123, javax.swing.GroupLayout.PREFERRED_SIZE)))
                            .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
                                .addComponent(jLabel2)
                                .addGap(18, 18, 18)
                                .addComponent(btnTodos, javax.swing.GroupLayout.PREFERRED_SIZE, 123, javax.swing.GroupLayout.PREFERRED_SIZE)))
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                            .addComponent(btnMarca, javax.swing.GroupLayout.PREFERRED_SIZE, 139, javax.swing.GroupLayout.PREFERRED_SIZE)
                            .addComponent(btnPrecioBus, javax.swing.GroupLayout.PREFERRED_SIZE, 139, javax.swing.GroupLayout.PREFERRED_SIZE)
                            .addComponent(jButton1, javax.swing.GroupLayout.PREFERRED_SIZE, 139, javax.swing.GroupLayout.PREFERRED_SIZE)))
                    .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
                        .addComponent(jButton4, javax.swing.GroupLayout.PREFERRED_SIZE, 115, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addGroup(javax.swing.GroupLayout.Alignment.LEADING, layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
                            .addGroup(layout.createSequentialGroup()
                                .addComponent(jLabel3)
                                .addGap(250, 250, 250)
                                .addComponent(jButton5))
                            .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)))
                    .addGroup(layout.createSequentialGroup()
                        .addComponent(jLabel1, javax.swing.GroupLayout.PREFERRED_SIZE, 370, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(jLabel5)))
                .addContainerGap(23, Short.MAX_VALUE))
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(jLabel1, javax.swing.GroupLayout.PREFERRED_SIZE, 34, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addComponent(jLabel5))
                .addGap(18, 18, 18)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(layout.createSequentialGroup()
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                            .addComponent(btnTodos, javax.swing.GroupLayout.PREFERRED_SIZE, 35, javax.swing.GroupLayout.PREFERRED_SIZE)
                            .addComponent(btnMarca, javax.swing.GroupLayout.PREFERRED_SIZE, 35, javax.swing.GroupLayout.PREFERRED_SIZE))
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                            .addComponent(btnCantidad, javax.swing.GroupLayout.PREFERRED_SIZE, 32, javax.swing.GroupLayout.PREFERRED_SIZE)
                            .addComponent(btnPrecioBus, javax.swing.GroupLayout.PREFERRED_SIZE, 32, javax.swing.GroupLayout.PREFERRED_SIZE)))
                    .addGroup(layout.createSequentialGroup()
                        .addComponent(jLabel2)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(txtCodigo, javax.swing.GroupLayout.PREFERRED_SIZE, 35, javax.swing.GroupLayout.PREFERRED_SIZE)))
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(layout.createSequentialGroup()
                        .addGap(24, 24, 24)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                            .addComponent(jDateChooser1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                            .addComponent(jLabel4)))
                    .addGroup(layout.createSequentialGroup()
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                            .addComponent(BtnNumeroVenta, javax.swing.GroupLayout.PREFERRED_SIZE, 38, javax.swing.GroupLayout.PREFERRED_SIZE)
                            .addComponent(jButton1, javax.swing.GroupLayout.PREFERRED_SIZE, 38, javax.swing.GroupLayout.PREFERRED_SIZE))))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(jLabel6)
                    .addComponent(jDateChooser2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 12, Short.MAX_VALUE)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(layout.createSequentialGroup()
                        .addGap(25, 25, 25)
                        .addComponent(jButton5, javax.swing.GroupLayout.PREFERRED_SIZE, 38, javax.swing.GroupLayout.PREFERRED_SIZE))
                    .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(jLabel3, javax.swing.GroupLayout.PREFERRED_SIZE, 24, javax.swing.GroupLayout.PREFERRED_SIZE)))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 325, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                .addComponent(jButton4, javax.swing.GroupLayout.PREFERRED_SIZE, 35, javax.swing.GroupLayout.PREFERRED_SIZE))
        );

        pack();
    }// </editor-fold>//GEN-END:initComponents

    private void txtCodigoActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_txtCodigoActionPerformed
        // Pressing Enter in the search field performs no action.
    }//GEN-LAST:event_txtCodigoActionPerformed

    private void jButton5ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton5ActionPerformed
        // Print the table showing the current query results.
        try {
            jt1.print(JTable.PrintMode.NORMAL);
        } catch (PrinterException ex) {
            // Fixed: the original logged against frmTablaEjemplo, an unrelated class.
            Logger.getLogger(FrmDetalleVentaBus.class.getName()).log(Level.SEVERE, null, ex);
        }
    }//GEN-LAST:event_jButton5ActionPerformed

    private void btnTodosActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnTodosActionPerformed
        // Unfiltered listing of every sale-detail row.
        ejecutarConsulta(CONSULTA_BASE);
    }//GEN-LAST:event_btnTodosActionPerformed

    private void btnPrecioBusActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnPrecioBusActionPerformed
        // Filter by exact price (numeric input required).
        buscarPorNumero("precio");
    }//GEN-LAST:event_btnPrecioBusActionPerformed

    private void jButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton1ActionPerformed
        // Filter by date. Fixed: the original concatenated the raw, unquoted text
        // into the SQL string (injection risk and a broken literal); the value is
        // now bound as a parameter instead.
        ejecutarConsulta(CONSULTA_BASE + " where FECHADETALLE=?", txtCodigo.getText().trim());
        txtCodigo.setText("");
    }//GEN-LAST:event_jButton1ActionPerformed

    private void jButton4MouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_jButton4MouseClicked
        // Return to the main window and dispose of this one.
        framePrincipal formInicial = new framePrincipal();
        formInicial.setVisible(true);
        this.dispose();
    }//GEN-LAST:event_jButton4MouseClicked

    private void jButton4ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton4ActionPerformed
        // Navigation is handled by the mouse-click listener above.
    }//GEN-LAST:event_jButton4ActionPerformed

    private void btnMarcaActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnMarcaActionPerformed
        // "LIMPIAR": clear all rows from the table and reset the search field.
        DefaultTableModel modelo = (DefaultTableModel) jt1.getModel();
        modelo.setRowCount(0);
        jt1.updateUI();
        this.txtCodigo.setText("");
    }//GEN-LAST:event_btnMarcaActionPerformed

    /**
     * Fills the table with users whose name matches {@code nombre}.
     *
     * <p>Fixed: the original read a connection field that was never initialized on
     * this code path (guaranteed NullPointerException) and concatenated
     * {@code nombre} straight into the SQL string. The lookup now goes through the
     * shared, parameterized helper. {@code P.conectar()} is kept for parity with
     * the original flow.
     *
     * @param nombre user name to search for (bound as a SQL parameter)
     */
    public void mostrartable(String nombre) {
        P.conectar();
        ejecutarConsulta("select ID,NOMBRE,APELLIDO,USUARIO from usuario where nombre=?", nombre);
    }

    private void btnCantidadActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnCantidadActionPerformed
        // Filter by exact quantity (numeric input required).
        buscarPorNumero("CANTIDAD");
    }//GEN-LAST:event_btnCantidadActionPerformed

    private void BtnNumeroVentaActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_BtnNumeroVentaActionPerformed
        // Filter by sale number (numeric input required).
        buscarPorNumero("ventaid");
    }//GEN-LAST:event_BtnNumeroVentaActionPerformed

    /**
     * Runs a detalleventa search filtered by one numeric column. The column name
     * comes only from compile-time constants in this class, never from user input;
     * the user-supplied value is bound as a parameter. Shows the original
     * "SE esperaba Numero" dialog when the search field is not a number, and
     * always clears the field afterwards, matching the original handlers.
     *
     * @param columna name of the numeric column to filter on
     */
    private void buscarPorNumero(String columna) {
        try {
            int valor = Integer.parseInt(this.txtCodigo.getText().trim());
            ejecutarConsulta(CONSULTA_BASE + " where " + columna + "=?", valor);
        } catch (NumberFormatException ex) {
            JOptionPane.showMessageDialog(this, "SE esperaba Numero");
        }
        this.txtCodigo.setText("");
    }

    /**
     * Executes a parameterized query and loads the full result set into a fresh
     * table model on {@code jt1} (column labels from the result metadata, one row
     * per record). Replaces the copy-pasted bodies of the original handlers.
     *
     * <p>Connection, statement and result set are closed via try-with-resources;
     * the originals leaked all three. On any failure the original "no conecto"
     * dialog is shown and the stack trace printed.
     *
     * @param sql        the query, with {@code ?} placeholders
     * @param parametros values bound to the placeholders, in order
     */
    private void ejecutarConsulta(String sql, Object... parametros) {
        DefaultTableModel modelo = new DefaultTableModel();
        this.jt1.setModel(modelo);
        System.out.println(sql); // query trace kept from the original handlers
        try (Connection conexion = DriverManager.getConnection(URL_CONEXION, USUARIO_BD, CLAVE_BD);
             PreparedStatement sentencia = conexion.prepareStatement(sql)) {
            for (int i = 0; i < parametros.length; i++) {
                sentencia.setObject(i + 1, parametros[i]);
            }
            try (ResultSet resultado = sentencia.executeQuery()) {
                ResultSetMetaData meta = resultado.getMetaData();
                int cantidadColumnas = meta.getColumnCount();
                for (int i = 1; i <= cantidadColumnas; i++) {
                    modelo.addColumn(meta.getColumnLabel(i));
                }
                while (resultado.next()) {
                    Object[] fila = new Object[cantidadColumnas];
                    for (int i = 0; i < cantidadColumnas; i++) {
                        fila[i] = resultado.getObject(i + 1);
                    }
                    modelo.addRow(fila);
                }
            }
        } catch (Exception ex) {
            JOptionPane.showMessageDialog(null, "no conecto");
            ex.printStackTrace();
        }
    }

    /**
     * @param args the command line arguments
     */
    public static void main(String args[]) {
        /* Set the Nimbus look and feel */
        //<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
        /* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel.
         * For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
         */
        try {
            for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
                if ("Nimbus".equals(info.getName())) {
                    javax.swing.UIManager.setLookAndFeel(info.getClassName());
                    break;
                }
            }
        } catch (ClassNotFoundException ex) {
            java.util.logging.Logger.getLogger(FrmDetalleVentaBus.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (InstantiationException ex) {
            java.util.logging.Logger.getLogger(FrmDetalleVentaBus.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (IllegalAccessException ex) {
            java.util.logging.Logger.getLogger(FrmDetalleVentaBus.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (javax.swing.UnsupportedLookAndFeelException ex) {
            java.util.logging.Logger.getLogger(FrmDetalleVentaBus.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        }
        //</editor-fold>

        /* Create and display the form */
        java.awt.EventQueue.invokeLater(new Runnable() {
            public void run() {
                new FrmDetalleVentaBus().setVisible(true);
            }
        });
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton BtnNumeroVenta;
    private javax.swing.JButton btnCantidad;
    private javax.swing.JButton btnMarca;
    private javax.swing.JButton btnPrecioBus;
    private javax.swing.JButton btnTodos;
    private javax.swing.JButton jButton1;
    private javax.swing.JButton jButton4;
    private javax.swing.JButton jButton5;
    private com.toedter.calendar.JDateChooser jDateChooser1;
    private com.toedter.calendar.JDateChooser jDateChooser2;
    private javax.swing.JLabel jLabel1;
    private javax.swing.JLabel jLabel2;
    private javax.swing.JLabel jLabel3;
    private javax.swing.JLabel jLabel4;
    private javax.swing.JLabel jLabel5;
    private javax.swing.JLabel jLabel6;
    private javax.swing.JScrollPane jScrollPane1;
    private javax.swing.JTable jt1;
    private javax.swing.JTextField txtCodigo;
    // End of variables declaration//GEN-END:variables
}
/*
 * Copyright 2008, Unitils.org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.unitils.core;

import org.apache.commons.logging.Log;
import org.junit.After;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.Test;
import org.unitils.UnitilsJUnit4;
import static org.unitils.core.ConfigurationLoader.*;
import org.unitils.core.util.PropertiesReader;
import org.unitils.inject.annotation.InjectIntoByType;
import org.unitils.inject.annotation.InjectIntoStaticByType;
import org.unitils.inject.annotation.TestedObject;
import org.unitils.mock.Mock;

import java.util.Properties;

/**
 * Test for {@link ConfigurationLoader}.
 *
 * <p>The loader merges three property sources in increasing priority:
 * the bundled defaults (unitils.properties on the classpath), an optional
 * custom configuration file, and an optional local configuration file
 * (looked up first in the user home, then on the classpath). The file
 * names of the latter two can be overridden via system properties.
 *
 * @author Fabian Krueger
 * @author Tim Ducheyne
 * @author Filip Neven
 */
public class ConfigurationLoaderTest extends UnitilsJUnit4 {

    /* System under Test */
    @TestedObject
    private ConfigurationLoader configurationLoader;

    /* PropertiesReader used by sut (mocked so no real files are read) */
    @InjectIntoByType
    private Mock<PropertiesReader> propertiesReader;

    /* Logger used by sut (mocked to assert on warn/info output) */
    @InjectIntoStaticByType(target = ConfigurationLoader.class)
    private Mock<Log> usedLogger;

    /* Faked default Properties (unitils.properties) */
    private Properties unitilsDefaultProperties;

    /* Faked custom Properties */
    private Properties customProperties;

    /* Faked local Properties from user.home */
    private Properties localProperties;

    /* The default custom property filename */
    private final String CUSTOM_PROPERTIES_FILE_NAME = "unitils.properties";

    /* The default local property filename */
    private final String LOCAL_PROPERTIES_FILE_NAME = "unitils-local.properties";


    /**
     * Prepares one marker property per source so the merge result reveals
     * which sources were loaded; the defaults also point the loader at the
     * custom and local file names used by the tests.
     */
    @Before
    public void setUp() {
        configurationLoader = new ConfigurationLoader();
        localProperties = new Properties();
        localProperties.put("local", "value");
        customProperties = new Properties();
        customProperties.put("custom", "value");
        unitilsDefaultProperties = new Properties();
        unitilsDefaultProperties.put("default", "value");
        unitilsDefaultProperties.put(PROPKEY_CUSTOM_CONFIGURATION, CUSTOM_PROPERTIES_FILE_NAME);
        unitilsDefaultProperties.put(PROPKEY_LOCAL_CONFIGURATION, LOCAL_PROPERTIES_FILE_NAME);
    }

    /**
     * Removes the system-property overrides set by some tests so they
     * cannot leak into other tests.
     */
    @After
    public void cleanup() {
        System.clearProperty(PROPKEY_CUSTOM_CONFIGURATION);
        System.clearProperty(PROPKEY_LOCAL_CONFIGURATION);
    }


    /**
     * Test scenario:
     * <ul>
     * <li>unitils.properties file not found</li>
     * <li>Exception thrown</li>
     * </ul>
     */
    @Test
    public void noDefaultConfigurationFound() {
        String expectedMessage = "Configuration file: " + DEFAULT_PROPERTIES_FILE_NAME + " not found in classpath.";
        try {
            propertiesReader.returns(null).loadPropertiesFileFromClasspath(null);
            configurationLoader.loadConfiguration();
            fail("Exception expected.");
        } catch (UnitilsException ue) {
            assertEquals(expectedMessage, ue.getMessage());
        }
    }

    /**
     * Test scenario:
     * <ul>
     * <li>unitils.properties file found in classpath</li>
     * <li>custom configuration file not found</li>
     * <li>local configuration file not found</li>
     * </ul>
     */
    @Test
    public void onlyDefaultConfigurationFound() {
        propertiesReader.returns(unitilsDefaultProperties).loadPropertiesFileFromClasspath(DEFAULT_PROPERTIES_FILE_NAME);
        propertiesReader.returns(null).loadPropertiesFileFromClasspath(CUSTOM_PROPERTIES_FILE_NAME);
        propertiesReader.returns(null).loadPropertiesFileFromUserHome(LOCAL_PROPERTIES_FILE_NAME);
        // NOTE(review): this stub duplicates the CUSTOM line two statements up;
        // it presumably was meant to stub the classpath fallback for
        // LOCAL_PROPERTIES_FILE_NAME instead — confirm (harmless either way,
        // since unstubbed mock calls return null by default).
        propertiesReader.returns(null).loadPropertiesFileFromClasspath(CUSTOM_PROPERTIES_FILE_NAME);

        Properties returnedProperties = configurationLoader.loadConfiguration();

        assertDefaultPropertiesLoaded(returnedProperties);
        assertNoCustomConfigurationFound(CUSTOM_PROPERTIES_FILE_NAME);
        assertNoLocalConfigurationFound(LOCAL_PROPERTIES_FILE_NAME);
    }

    /**
     * Test scenario:
     * <ul>
     * <li>unitils.properties file found in classpath</li>
     * <li>custom configuration file found</li>
     * <li>local configuration file not found</li>
     * <li>returns properties from unitils.properties overwritten with properties from custom configuration</li>
     * </ul>
     */
    @Test
    public void defaultAndCustomConfigurationFound() {
        propertiesReader.returns(unitilsDefaultProperties).loadPropertiesFileFromClasspath(DEFAULT_PROPERTIES_FILE_NAME);
        propertiesReader.returns(customProperties).loadPropertiesFileFromClasspath(CUSTOM_PROPERTIES_FILE_NAME);
        propertiesReader.returns(null).loadPropertiesFileFromUserHome(LOCAL_PROPERTIES_FILE_NAME);

        Properties returnedProperties = configurationLoader.loadConfiguration();

        assertDefaultPropertiesLoaded(returnedProperties);
        assertCustomPropertiesLoaded(returnedProperties);
        assertNoLocalConfigurationFound(LOCAL_PROPERTIES_FILE_NAME);
    }

    /**
     * Test scenario:
     * <ul>
     * <li>no filename given</li>
     * <li>unitils.properties file found in classpath</li>
     * <li>custom configuration file found</li>
     * <li>local configuration file found in user home directory</li>
     * <li>returns properties from unitils.properties first overwritten with custom properties then with user properties</li>
     * </ul>
     */
    @Test
    public void allConfigurationsFoundWithUserConfigurationFromHomeDir() {
        propertiesReader.returns(unitilsDefaultProperties).loadPropertiesFileFromClasspath(DEFAULT_PROPERTIES_FILE_NAME);
        propertiesReader.returns(customProperties).loadPropertiesFileFromClasspath(CUSTOM_PROPERTIES_FILE_NAME);
        propertiesReader.returns(localProperties).loadPropertiesFileFromUserHome(LOCAL_PROPERTIES_FILE_NAME);

        Properties returnedProperties = configurationLoader.loadConfiguration();

        assertDefaultPropertiesLoaded(returnedProperties);
        assertCustomPropertiesLoaded(returnedProperties);
        assertLocalPropertiesLoaded(returnedProperties);
    }

    /**
     * Test scenario:
     * <ul>
     * <li>no filename given</li>
     * <li>unitils.properties file found in classpath</li>
     * <li>custom configuration file found</li>
     * <li>local configuration file not found in user home directory</li>
     * <li>local configuration file found in classpath</li>
     * <li>returns properties from unitils.properties first overwritten with custom properties then with user properties</li>
     * </ul>
     */
    @Test
    public void allConfigurationsFoundWithUserConfigurationFromClasspath() {
        propertiesReader.returns(unitilsDefaultProperties).loadPropertiesFileFromClasspath(DEFAULT_PROPERTIES_FILE_NAME);
        propertiesReader.returns(customProperties).loadPropertiesFileFromClasspath(CUSTOM_PROPERTIES_FILE_NAME);
        // User-home lookup is left unstubbed (returns null), so the loader
        // falls back to the classpath for the local configuration.
        propertiesReader.returns(localProperties).loadPropertiesFileFromClasspath(LOCAL_PROPERTIES_FILE_NAME);

        Properties returnedProperties = configurationLoader.loadConfiguration();

        assertDefaultPropertiesLoaded(returnedProperties);
        assertCustomPropertiesLoaded(returnedProperties);
        assertLocalPropertiesLoaded(returnedProperties);
    }

    /**
     * The custom configuration file name from the defaults must be overridable
     * via the corresponding system property; only the reader invocation is
     * asserted, not the merge result.
     */
    @Test
    public void customConfigurationFileNameOverriddenBySystemProperty() {
        System.setProperty(PROPKEY_CUSTOM_CONFIGURATION, "custom-filename.properties");
        propertiesReader.returns(unitilsDefaultProperties).loadPropertiesFileFromClasspath(DEFAULT_PROPERTIES_FILE_NAME);

        Properties returnedProperties = configurationLoader.loadConfiguration();

        propertiesReader.assertInvoked().loadPropertiesFileFromClasspath("custom-filename.properties");
    }

    /**
     * The local configuration file name from the defaults must be overridable
     * via the corresponding system property; only the reader invocation is
     * asserted, not the merge result.
     */
    @Test
    public void localConfigurationFileNameOverriddenBySystemProperty() {
        System.setProperty(PROPKEY_LOCAL_CONFIGURATION, "custom-local-filename.properties");
        propertiesReader.returns(unitilsDefaultProperties).loadPropertiesFileFromClasspath(DEFAULT_PROPERTIES_FILE_NAME);

        Properties returnedProperties = configurationLoader.loadConfiguration();

        propertiesReader.assertInvoked().loadPropertiesFileFromClasspath("custom-local-filename.properties");
    }


    /** Asserts that the loader warned about a missing custom configuration file. */
    private void assertNoCustomConfigurationFound(String fileName) {
        usedLogger.assertInvoked().warn("No custom configuration file " + fileName + " found.");
    }

    /** Asserts that the loader logged (info) a missing local configuration file. */
    private void assertNoLocalConfigurationFound(String fileName) {
        usedLogger.assertInvoked().info("No local configuration file " + fileName + " found.");
    }

    /** Asserts that the marker property from the default source survived the merge. */
    private void assertDefaultPropertiesLoaded(Properties properties) {
        assertTrue("Expected default properties to be loaded.", properties.containsKey("default"));
    }

    /** Asserts that the marker property from the custom source survived the merge. */
    private void assertCustomPropertiesLoaded(Properties properties) {
        assertTrue("Expected custom properties to be loaded.", properties.containsKey("custom"));
    }

    /** Asserts that the marker property from the local source survived the merge. */
    private void assertLocalPropertiesLoaded(Properties properties) {
        assertTrue("Expected local properties to be loaded.", properties.containsKey("local"));
    }
}
/*
 * Copyright 2014 - 2020 Michael Rapp
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package de.mrapp.android.dialog;

import android.content.res.ColorStateList;
import android.graphics.Bitmap;
import android.graphics.PorterDuff;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import androidx.annotation.CallSuper;
import androidx.annotation.ColorInt;
import androidx.annotation.DrawableRes;
import androidx.annotation.LayoutRes;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import android.view.View;
import de.mrapp.android.dialog.animation.BackgroundAnimation;
import de.mrapp.android.dialog.animation.DrawableAnimation;
import de.mrapp.android.dialog.decorator.HeaderDialogDecorator;
import de.mrapp.android.dialog.model.HeaderDialog;

/**
 * An abstract base class for all dialogs, which are designed according to Android 5's Material
 * Design guidelines even on pre-Lollipop devices, are able to show fragments and may contain a
 * header.
 *
 * This class contains no header logic of its own: every {@link HeaderDialog} method is a
 * one-line delegation to a {@link HeaderDialogDecorator}, which owns the header state and views.
 *
 * @author Michael Rapp
 * @since 3.2.0
 */
public abstract class AbstractHeaderDialogFragment extends AbstractMaterialDialogFragment
        implements HeaderDialog {

    /**
     * The decorator, which is used by the dialog. All {@link HeaderDialog} calls are forwarded to
     * this object.
     */
    private final HeaderDialogDecorator decorator;

    /**
     * Creates a dialog, which is designed according to Android 5's Material Design guidelines even
     * on pre-Lollipop devices, is able to show fragments and may contain a header.
     */
    public AbstractHeaderDialogFragment() {
        decorator = new HeaderDialogDecorator(this);
        // Registering the decorator lets the base class dispatch lifecycle events to it.
        addDecorator(decorator);
    }

    // ------------------------------------------------------------------
    // HeaderDialog implementation: pure delegation to the decorator.
    // ------------------------------------------------------------------

    @Override
    public final boolean isHeaderShown() {
        return decorator.isHeaderShown();
    }

    @Override
    public final void showHeader(final boolean show) {
        decorator.showHeader(show);
    }

    @Override
    public final boolean isCustomHeaderUsed() {
        return decorator.isCustomHeaderUsed();
    }

    @Override
    public final void setCustomHeader(@Nullable final View view) {
        decorator.setCustomHeader(view);
    }

    @Override
    public final void setCustomHeader(@LayoutRes final int resourceId) {
        decorator.setCustomHeader(resourceId);
    }

    @Override
    public final int getHeaderHeight() {
        return decorator.getHeaderHeight();
    }

    @Override
    public final void setHeaderHeight(final int height) {
        decorator.setHeaderHeight(height);
    }

    @Override
    public final Drawable getHeaderBackground() {
        return decorator.getHeaderBackground();
    }

    @Override
    public final void setHeaderBackgroundColor(@ColorInt final int color) {
        decorator.setHeaderBackgroundColor(color);
    }

    @Override
    public final void setHeaderBackgroundColor(@ColorInt final int color,
                                               @Nullable final BackgroundAnimation animation) {
        decorator.setHeaderBackgroundColor(color, animation);
    }

    @Override
    public final void setHeaderBackground(@DrawableRes final int resourceId) {
        decorator.setHeaderBackground(resourceId);
    }

    @Override
    public final void setHeaderBackground(@DrawableRes final int resourceId,
                                          @Nullable final BackgroundAnimation animation) {
        decorator.setHeaderBackground(resourceId, animation);
    }

    @Override
    public final void setHeaderBackground(@Nullable final Bitmap background) {
        decorator.setHeaderBackground(background);
    }

    @Override
    public final void setHeaderBackground(@Nullable final Bitmap background,
                                          @Nullable final BackgroundAnimation animation) {
        decorator.setHeaderBackground(background, animation);
    }

    @Override
    public final Drawable getHeaderIcon() {
        return decorator.getHeaderIcon();
    }

    @Override
    public final void setHeaderIcon(@DrawableRes final int resourceId) {
        decorator.setHeaderIcon(resourceId);
    }

    @Override
    public final void setHeaderIcon(@DrawableRes final int resourceId,
                                    @Nullable final DrawableAnimation animation) {
        decorator.setHeaderIcon(resourceId, animation);
    }

    @Override
    public final void setHeaderIcon(@Nullable final Bitmap icon) {
        decorator.setHeaderIcon(icon);
    }

    @Override
    public final void setHeaderIcon(@Nullable final Bitmap icon,
                                    @Nullable final DrawableAnimation animation) {
        decorator.setHeaderIcon(icon, animation);
    }

    @Override
    public final ColorStateList getHeaderIconTintList() {
        return decorator.getHeaderIconTintList();
    }

    @Override
    public final void setHeaderIconTint(@ColorInt final int color) {
        decorator.setHeaderIconTint(color);
    }

    @Override
    public final void setHeaderIconTintList(@Nullable final ColorStateList tintList) {
        decorator.setHeaderIconTintList(tintList);
    }

    @NonNull
    @Override
    public final PorterDuff.Mode getHeaderIconTintMode() {
        return decorator.getHeaderIconTintMode();
    }

    @Override
    public final void setHeaderIconTintMode(@NonNull final PorterDuff.Mode mode) {
        decorator.setHeaderIconTintMode(mode);
    }

    @Override
    public final int getHeaderDividerColor() {
        return decorator.getHeaderDividerColor();
    }

    @Override
    public final void setHeaderDividerColor(@ColorInt final int color) {
        decorator.setHeaderDividerColor(color);
    }

    @Override
    public final boolean isHeaderDividerShown() {
        return decorator.isHeaderDividerShown();
    }

    @Override
    public final void showHeaderDivider(final boolean show) {
        decorator.showHeaderDivider(show);
    }

    // The decorator's header state must survive configuration changes, so it is persisted and
    // restored alongside the fragment's own state.

    @CallSuper
    @Override
    public void onSaveInstanceState(final Bundle outState) {
        super.onSaveInstanceState(outState);
        decorator.onSaveInstanceState(outState);
    }

    @CallSuper
    @Override
    protected void onRestoreInstanceState(@NonNull final Bundle savedInstanceState) {
        super.onRestoreInstanceState(savedInstanceState);
        decorator.onRestoreInstanceState(savedInstanceState);
    }

}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.raid; import java.io.IOException; import java.text.SimpleDateFormat; import java.util.Collections; import java.util.Date; import java.util.Iterator; import java.util.List; import java.util.LinkedList; import java.util.HashMap; import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.mapred.Counters; import org.apache.hadoop.mapred.Counters.Group; import org.apache.hadoop.mapreduce.CounterGroup; import org.apache.hadoop.metrics.util.MetricsRegistry; import org.apache.hadoop.metrics.util.MetricsTimeVaryingLong; import org.apache.hadoop.raid.DistRaid.Counter; /** * Periodically monitors the status of jobs registered with it. * * Jobs that are submitted for the same policy name are kept in the same list, * and the list itself is kept in a map that has the policy name as the key and * the list as value. 
*/ public class JobMonitor implements Runnable { public static final Log LOG = LogFactory.getLog( "org.apache.hadoop.raid.JobMonitor"); volatile boolean running = true; private Map<String, List<DistRaid>> jobs; private Map<String, List<DistRaid>> history; private Map<String, Counters> raidProgress; private long jobMonitorInterval; private long maximumRunningTime; private volatile long jobsMonitored = 0; private volatile long jobsSucceeded = 0; private static final SimpleDateFormat dateForm = new SimpleDateFormat("yyyy-MM-dd"); private static final Counter[] INT_CTRS = {Counter.FILES_SUCCEEDED, Counter.PROCESSED_SIZE, Counter.SAVING_SIZE}; public enum STATUS { RUNNING, FINISHED, RAIDED } public static final String JOBMONITOR_INTERVAL_KEY = "raid.jobmonitor.interval"; public static final String JOBMONITOR_MAXIMUM_RUNNINGTIME_KEY = "raid.jobmonitor.max.runningtime"; public static final long DEFAULT_MAXIMUM_RUNNING_TIME = 24L * 3600L * 1000L; public JobMonitor(Configuration conf) { jobMonitorInterval = conf.getLong(JOBMONITOR_INTERVAL_KEY, 60000); maximumRunningTime = conf.getLong(JOBMONITOR_MAXIMUM_RUNNINGTIME_KEY, DEFAULT_MAXIMUM_RUNNING_TIME); jobs = new java.util.HashMap<String, List<DistRaid>>(); history = new java.util.HashMap<String, List<DistRaid>>(); raidProgress = new java.util.HashMap<String, Counters>(); } public void run() { while (running) { try { LOG.info("JobMonitor thread continuing to run..."); doMonitor(); } catch (Throwable e) { LOG.error("JobMonitor encountered exception " + StringUtils.stringifyException(e)); // All expected exceptions are caught by doMonitor(). It is better // to exit now, this will prevent RaidNode from submitting more jobs // since the number of running jobs will never decrease. return; } } } /** * Periodically checks status of running map-reduce jobs. */ public void doMonitor() { while (running) { String[] keys = null; // Make a copy of the names of the current jobs. 
synchronized(jobs) { keys = jobs.keySet().toArray(new String[0]); } // Check all the jobs. We do not want to block access to `jobs` // because that will prevent new jobs from being added. // This is safe because JobMonitor.run is the only code that can // remove a job from `jobs`. Thus all elements in `keys` will have // valid values. Map<String, List<DistRaid>> finishedJobs = new HashMap<String, List<DistRaid>>(); for (String key: keys) { // For each policy being monitored, get the list of jobs running. DistRaid[] jobListCopy = null; synchronized(jobs) { List<DistRaid> jobList = jobs.get(key); synchronized(jobList) { jobListCopy = jobList.toArray(new DistRaid[jobList.size()]); } } // The code that actually contacts the JobTracker is not synchronized, // it uses copies of the list of jobs. for (DistRaid job: jobListCopy) { // Check each running job. try { boolean complete = job.checkComplete(); if (complete) { addJob(finishedJobs, key, job); if (job.successful()) { jobsSucceeded++; } } else if (System.currentTimeMillis() - job.getStartTime() > maximumRunningTime){ // If the job is running for more than one day throw new Exception("Job " + job.getJobID() + " is hanging more than " + maximumRunningTime/1000 + " seconds. Kill it"); } } catch (Exception e) { // If there was an error, consider the job finished. addJob(finishedJobs, key, job); try { job.killJob(); } catch (Exception ee) { LOG.error(ee); } } } } if (finishedJobs.size() > 0) { for (String key: finishedJobs.keySet()) { List<DistRaid> finishedJobList = finishedJobs.get(key); // Iterate through finished jobs and remove from jobs. // removeJob takes care of locking. 
for (DistRaid job: finishedJobList) { addCounter(raidProgress, job, INT_CTRS); removeJob(jobs, key, job); addJob(history, key, job); // delete the temp directory job.cleanUp(); } } } try { Thread.sleep(jobMonitorInterval); } catch (InterruptedException ie) { } } } // For test code int runningJobsCount() { int total = 0; synchronized(jobs) { for (String key: jobs.keySet()) { total += jobs.get(key).size(); } } return total; } public int runningJobsCount(String key) { int count = 0; synchronized(jobs) { if (jobs.containsKey(key)) { List<DistRaid> jobList = jobs.get(key); synchronized(jobList) { count = jobList.size(); } } } return count; } // for test public List<DistRaid> getRunningJobs() { List<DistRaid> list = new LinkedList<DistRaid>(); synchronized(jobs) { for (List<DistRaid> jobList : jobs.values()) { synchronized(jobList) { list.addAll(jobList); } } } return list; } // for test public Map<String, Counters> getRaidProgress() { synchronized (raidProgress) { return Collections.unmodifiableMap(this.raidProgress); } } public void monitorJob(String key, DistRaid job) { addJob(jobs, key, job); jobsMonitored++; } public long jobsMonitored() { return this.jobsMonitored; } public long jobsSucceeded() { return this.jobsSucceeded; } private static void addJob(Map<String, List<DistRaid>> jobsMap, String jobName, DistRaid job) { synchronized(jobsMap) { List<DistRaid> list = null; if (jobsMap.containsKey(jobName)) { list = jobsMap.get(jobName); } else { list = new LinkedList<DistRaid>(); jobsMap.put(jobName, list); } synchronized(list) { list.add(job); } } } private static void addCounter(Map<String, Counters> countersMap, DistRaid job, Counter[] ctrNames) { Counters total_ctrs = null; Counters ctrs = null; try { ctrs = job.getCounters(); if (ctrs == null) { LOG.warn("No counters for " + job.getJobID()); return; } } catch (Exception e) { LOG.error(e); return; } //Adding to logMetrics Group counterGroup = ctrs.getGroup(LogUtils.LOG_COUNTER_GROUP_NAME); MetricsRegistry registry 
= RaidNodeMetrics.getInstance( RaidNodeMetrics.DEFAULT_NAMESPACE_ID).getMetricsRegistry(); Map<String, MetricsTimeVaryingLong> logMetrics = RaidNodeMetrics.getInstance( RaidNodeMetrics.DEFAULT_NAMESPACE_ID).logMetrics; synchronized(logMetrics) { for (Counters.Counter ctr: counterGroup) { if (!logMetrics.containsKey(ctr.getName())) { logMetrics.put(ctr.getName(), new MetricsTimeVaryingLong(ctr.getName(), registry)); } ((MetricsTimeVaryingLong)logMetrics.get(ctr.getName())).inc(ctr.getValue()); } } String currDate = dateForm.format(new Date(RaidNode.now())); synchronized(countersMap) { if (countersMap.containsKey(currDate)) { total_ctrs = countersMap.get(currDate); } else { total_ctrs = new Counters(); countersMap.put(currDate, total_ctrs); } for (Counter ctrName : ctrNames) { Counters.Counter ctr = ctrs.findCounter(ctrName); if (ctr != null) { total_ctrs.incrCounter(ctrName, ctr.getValue()); LOG.info(ctrName + " " + ctr.getValue() + ": " + total_ctrs.getCounter(ctrName)); } } } } private static void removeJob(Map<String, List<DistRaid>> jobsMap, String jobName, DistRaid job) { synchronized(jobsMap) { if (jobsMap.containsKey(jobName)) { List<DistRaid> list = jobsMap.get(jobName); synchronized(list) { for (Iterator<DistRaid> it = list.iterator(); it.hasNext(); ) { DistRaid val = it.next(); if (val == job) { it.remove(); } } if (list.size() == 0) { jobsMap.remove(jobName); } } } } } public String toHtml(STATUS st) { StringBuilder sb = new StringBuilder(); if (st == STATUS.RUNNING) { sb.append(DistRaid.htmlRowHeader()); synchronized(jobs) { for (List<DistRaid> jobList: jobs.values()) { for (DistRaid job: jobList) { sb.append(job.toHtmlRow()); } } } } else if (st == STATUS.FINISHED){ sb.append(DistRaid.htmlRowHeader()); synchronized(history) { for (List<DistRaid> jobList: history.values()) { for (DistRaid job: jobList) { sb.append(job.toHtmlRow()); } } } } else if (st == STATUS.RAIDED) { sb.append(raidProgressRowHeader()); synchronized(raidProgress) { for (String 
dateStr: raidProgress.keySet()) { sb.append(toRaidProgressHtmlRow(dateStr, raidProgress.get(dateStr))); } } } return JspUtils.table(sb.toString()); } private static String raidProgressRowHeader() { return JspUtils.tr( JspUtils.td("Date") + JspUtils.td("File Processed") + JspUtils.td("Size Processed") + JspUtils.td("Saved")); } private String toRaidProgressHtmlRow(String dateStr, Counters ctrs) { StringBuilder sb = new StringBuilder(); sb.append(td(dateStr)); sb.append(td(Long.toString(ctrs.getCounter(Counter.FILES_SUCCEEDED)))); sb.append(td(StringUtils.humanReadableInt(ctrs.getCounter(Counter.PROCESSED_SIZE)))); sb.append(td(StringUtils.humanReadableInt(ctrs.getCounter(Counter.SAVING_SIZE)))); return tr(sb.toString()); } private static String td(String s) { return JspUtils.td(s); } private static String tr(String s) { return JspUtils.tr(s); } }
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.groupon.mesos.state;

import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static org.iq80.leveldb.impl.Iq80DBFactory.asString;
import static org.iq80.leveldb.impl.Iq80DBFactory.bytes;

import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

import com.google.common.collect.AbstractIterator;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.groupon.mesos.util.Log;

import org.apache.mesos.state.State;
import org.apache.mesos.state.Variable;
import org.iq80.leveldb.DB;
import org.iq80.leveldb.DBIterator;
import org.iq80.leveldb.Options;
import org.iq80.leveldb.WriteBatch;
import org.iq80.leveldb.WriteOptions;
import org.iq80.leveldb.impl.Iq80DBFactory;

import mesos.internal.state.State.Entry;

/**
 * LevelDB-backed implementation of the Mesos {@link State} interface.
 *
 * Each operation runs asynchronously on a fixed pool of 10 daemon threads and
 * returns a {@link Future}. Store/expunge use the variable's UUID as a
 * compare-and-swap token: a write only succeeds if the stored entry's UUID
 * still matches the one the caller read.
 */
public class JLevelDBState implements State, Closeable
{
    private static final Log LOG = Log.getLog(JLevelDBState.class);

    /** Underlying LevelDB handle; closed exactly once via {@link #close()}. */
    private final DB db;
    /** Ensures close() work runs at most once. */
    private final AtomicBoolean closed = new AtomicBoolean(false);

    private final ExecutorService executor = Executors.newFixedThreadPool(10, new ThreadFactoryBuilder().setDaemon(true).setNameFormat("JLevelDB-State-%d").build());

    /**
     * Opens (creating if missing) a LevelDB database at the given path.
     *
     * @param path directory path of the database, must not be null
     * @throws IOException if the database cannot be opened
     */
    public JLevelDBState(final String path) throws IOException
    {
        checkNotNull(path, "path is null");
        final Options options = new Options();
        options.createIfMissing(true);

        this.db = Iq80DBFactory.factory.open(new File(path), options);
    }

    @Override
    public void close() throws IOException
    {
        if (!closed.getAndSet(true)) {
            executor.shutdown();
            try {
                // NOTE(review): the boolean result of awaitTermination is
                // ignored; after a full day the DB is closed regardless.
                executor.awaitTermination(1, TimeUnit.DAYS);
            }
            catch (final InterruptedException e) {
                Thread.currentThread().interrupt();
            }

            db.close();
        }
    }

    @Override
    public Future<Variable> fetch(final String name)
    {
        checkNotNull(name, "name is null");
        checkState(!closed.get(), "already closed");

        return executor.submit(new Callable<Variable>() {

            @Override
            public Variable call() throws Exception
            {
                // Interning the string will make sure that all
                // synchronized blocks use the same java object monitor
                // for the same string value, which in turn serves
                // as poor man's row lock.
                // NOTE(review): interned-string monitors are JVM-global;
                // any unrelated code that synchronizes on an equal interned
                // string shares this lock. Confirm this is acceptable.
                final String internedName = name.intern();
                synchronized (internedName) {
                    final JVariable var = load(name);
                    if (var == null) {
                        // Absent names yield an empty variable, not null.
                        return new JVariable(name, JVariable.EMPTY_BYTES);
                    }
                    else {
                        return var;
                    }
                }
            }
        });
    }

    @Override
    public Future<Variable> store(final Variable variable)
    {
        checkNotNull(variable, "variable is null");
        checkState(!closed.get(), "already closed");
        checkState(variable instanceof JVariable, "can not process native variable, use JVariable");

        final JVariable v = (JVariable) variable;

        return executor.submit(new Callable<Variable>() {

            @Override
            public Variable call() throws Exception
            {
                final WriteOptions writeOptions = new WriteOptions();
                writeOptions.sync(true);

                // Same interned-string row-lock trick as in fetch().
                final String internedName = v.getName().intern();
                synchronized (internedName) {
                    final JVariable current = load(internedName);
                    // CAS semantics: write only if the key is absent or the
                    // caller holds the current version (matching UUID).
                    if (current == null || current.getUuid().equals(v.getUuid())) {
                        final JVariable update = new JVariable(internedName, v.value());
                        final WriteBatch writeBatch = db.createWriteBatch();
                        // delete + put of the same key in one atomic batch.
                        writeBatch.delete(bytes(internedName));
                        writeBatch.put(bytes(internedName), update.getEntry().toByteArray());
                        db.write(writeBatch, writeOptions);
                        return update;
                    }
                    else {
                        // Conflict: someone stored a newer version; signal
                        // failure by returning null.
                        return null;
                    }
                }
            }
        });
    }

    @Override
    public Future<Boolean> expunge(final Variable variable)
    {
        checkNotNull(variable, "variable is null");
        checkState(!closed.get(), "already closed");
        checkState(variable instanceof JVariable, "can not process native variable, use JVariable");

        final JVariable v = (JVariable) variable;

        return executor.submit(new Callable<Boolean>() {

            @Override
            public Boolean call() throws Exception
            {
                final WriteOptions writeOptions = new WriteOptions();
                writeOptions.sync(true);

                final String internedName = v.getName().intern();
                synchronized (internedName) {
                    final JVariable current = load(internedName);
                    // Delete only if the caller holds the current version.
                    if (current != null && current.getUuid().equals(v.getUuid())) {
                        db.delete(bytes(internedName));
                        return Boolean.TRUE;
                    }
                    else {
                        return Boolean.FALSE;
                    }
                }
            }
        });
    }

    @Override
    public Future<Iterator<String>> names()
    {
        checkState(!closed.get(), "already closed");

        return executor.submit(new Callable<Iterator<String>>() {

            @Override
            public Iterator<String> call() throws Exception
            {
                // The returned iterator holds an open DBIterator; it closes
                // itself on exhaustion, but callers that stop early should
                // call close() to release it.
                return new ClosingIterator(db.iterator());
            }
        });
    }

    /**
     * Reads and parses the stored protobuf entry for the given name.
     *
     * @return the variable, or null if the key is absent
     */
    private JVariable load(final String name) throws IOException
    {
        final byte[] value = db.get(bytes(name));
        if (value == null) {
            return null;
        }
        else {
            final Entry entry = Entry.parseFrom(value);
            return new JVariable(entry);
        }
    }

    /**
     * Iterator over all keys that closes the underlying {@link DBIterator}
     * when exhausted or when {@link #close()} is called, whichever first.
     */
    private static class ClosingIterator extends AbstractIterator<String> implements Iterator<String>, Closeable
    {
        private final DBIterator dbIterator;
        private final AtomicBoolean closed = new AtomicBoolean();

        private ClosingIterator(final DBIterator dbIterator)
        {
            this.dbIterator = checkNotNull(dbIterator, "dbIterator is null");
            this.dbIterator.seekToFirst();
        }

        @Override
        protected String computeNext()
        {
            if (!closed.get() && dbIterator.hasNext()) {
                final Map.Entry<byte[], byte[]> value = dbIterator.next();
                return asString(value.getKey());
            }
            else {
                // Either explicitly closed or exhausted: release the
                // DBIterator (once) and end the iteration.
                if (!closed.getAndSet(true)) {
                    try {
                        dbIterator.close();
                    }
                    catch (final IOException ioe) {
                        LOG.warn(ioe, "while closing iterator");
                    }
                }
                return endOfData();
            }
        }

        @Override
        public void close() throws IOException
        {
            if (!closed.getAndSet(true)) {
                dbIterator.close();
            }
        }
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.mapper; import org.apache.lucene.document.Field; import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.seqno.SequenceNumbers; import java.io.IOException; import java.util.List; import java.util.Map; import java.util.Objects; /** * Mapper for the {@code _seq_no} field. 
* * We expect to use the seq# for sorting, during collision checking and for * doing range searches. Therefore the {@code _seq_no} field is stored both * as a numeric doc value and as numeric indexed field. * * This mapper also manages the primary term field, which has no ES named * equivalent. The primary term is only used during collision after receiving * identical seq# values for two document copies. The primary term is stored as * a doc value field without being indexed, since it is only intended for use * as a key-value lookup. */ public class SeqNoFieldMapper extends MetadataFieldMapper { /** * A sequence ID, which is made up of a sequence number (both the searchable * and doc_value version of the field) and the primary term. */ public static class SequenceIDFields { public final Field seqNo; public final Field seqNoDocValue; public final Field primaryTerm; public final Field tombstoneField; public SequenceIDFields(Field seqNo, Field seqNoDocValue, Field primaryTerm, Field tombstoneField) { Objects.requireNonNull(seqNo, "sequence number field cannot be null"); Objects.requireNonNull(seqNoDocValue, "sequence number dv field cannot be null"); Objects.requireNonNull(primaryTerm, "primary term field cannot be null"); this.seqNo = seqNo; this.seqNoDocValue = seqNoDocValue; this.primaryTerm = primaryTerm; this.tombstoneField = tombstoneField; } public static SequenceIDFields emptySeqID() { return new SequenceIDFields(new LongPoint(NAME, SequenceNumbers.UNASSIGNED_SEQ_NO), new NumericDocValuesField(NAME, SequenceNumbers.UNASSIGNED_SEQ_NO), new NumericDocValuesField(PRIMARY_TERM_NAME, 0), new NumericDocValuesField(TOMBSTONE_NAME, 0)); } } public static final String NAME = "_seq_no"; public static final String CONTENT_TYPE = "_seq_no"; public static final String PRIMARY_TERM_NAME = "_primary_term"; public static final String TOMBSTONE_NAME = "_tombstone"; public static class SeqNoDefaults { public static final String NAME = SeqNoFieldMapper.NAME; public static final 
MappedFieldType FIELD_TYPE = new SeqNoFieldType(); static { FIELD_TYPE.setName(NAME); FIELD_TYPE.setDocValuesType(DocValuesType.SORTED); FIELD_TYPE.setHasDocValues(true); FIELD_TYPE.freeze(); } } public static class Builder extends MetadataFieldMapper.Builder<Builder, SeqNoFieldMapper> { public Builder() { super(SeqNoDefaults.NAME, SeqNoDefaults.FIELD_TYPE, SeqNoDefaults.FIELD_TYPE); } @Override public SeqNoFieldMapper build(BuilderContext context) { return new SeqNoFieldMapper(context.indexSettings()); } } public static class TypeParser implements MetadataFieldMapper.TypeParser { @Override public MetadataFieldMapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException { throw new MapperParsingException(NAME + " is not configurable"); } @Override public MetadataFieldMapper getDefault(MappedFieldType fieldType, ParserContext context) { final Settings indexSettings = context.mapperService().getIndexSettings().getSettings(); return new SeqNoFieldMapper(indexSettings); } } static final class SeqNoFieldType extends SimpleMappedFieldType { SeqNoFieldType() { } protected SeqNoFieldType(SeqNoFieldType ref) { super(ref); } @Override public MappedFieldType clone() { return new SeqNoFieldType(this); } @Override public String typeName() { return CONTENT_TYPE; } private long parse(Object value) { if (value instanceof Number) { double doubleValue = ((Number) value).doubleValue(); if (doubleValue < Long.MIN_VALUE || doubleValue > Long.MAX_VALUE) { throw new IllegalArgumentException("Value [" + value + "] is out of range for a long"); } if (doubleValue % 1 != 0) { throw new IllegalArgumentException("Value [" + value + "] has a decimal part"); } return ((Number) value).longValue(); } if (value instanceof BytesRef) { value = ((BytesRef) value).utf8ToString(); } return Long.parseLong(value.toString()); } @Override public Query existsQuery(QueryShardContext context) { return new DocValuesFieldExistsQuery(name()); } 
@Override public Query termQuery(Object value, @Nullable QueryShardContext context) { long v = parse(value); return LongPoint.newExactQuery(name(), v); } @Override public Query termsQuery(List<?> values, @Nullable QueryShardContext context) { long[] v = new long[values.size()]; for (int i = 0; i < values.size(); ++i) { v[i] = parse(values.get(i)); } return LongPoint.newSetQuery(name(), v); } @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) { long l = Long.MIN_VALUE; long u = Long.MAX_VALUE; if (lowerTerm != null) { l = parse(lowerTerm); if (includeLower == false) { if (l == Long.MAX_VALUE) { return new MatchNoDocsQuery(); } ++l; } } if (upperTerm != null) { u = parse(upperTerm); if (includeUpper == false) { if (u == Long.MIN_VALUE) { return new MatchNoDocsQuery(); } --u; } } return LongPoint.newRangeQuery(name(), l, u); } @Override public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName) { failIfNoDocValues(); return new DocValuesIndexFieldData.Builder().numericType(NumericType.LONG); } } public SeqNoFieldMapper(Settings indexSettings) { super(NAME, SeqNoDefaults.FIELD_TYPE, SeqNoDefaults.FIELD_TYPE, indexSettings); } @Override public void preParse(ParseContext context) throws IOException { super.parse(context); } @Override protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException { // see InternalEngine.innerIndex to see where the real version value is set // also see ParsedDocument.updateSeqID (called by innerIndex) SequenceIDFields seqID = SequenceIDFields.emptySeqID(); context.seqID(seqID); fields.add(seqID.seqNo); fields.add(seqID.seqNoDocValue); fields.add(seqID.primaryTerm); } @Override public void parse(ParseContext context) throws IOException { // fields are added in parseCreateField } @Override public void postParse(ParseContext context) throws IOException { // In the case of nested docs, let's fill 
nested docs with the original // so that Lucene doesn't write a Bitset for documents that // don't have the field. This is consistent with the default value // for efficiency. // we share the parent docs fields to ensure good compression SequenceIDFields seqID = context.seqID(); assert seqID != null; for (Document doc : context.nonRootDocuments()) { doc.add(seqID.seqNo); doc.add(seqID.seqNoDocValue); } } @Override protected String contentType() { return CONTENT_TYPE; } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { return builder; } @Override protected void doMerge(Mapper mergeWith) { // nothing to do } }
/**
 * Copyright (c) 2011 Terracotta, Inc.
 * Copyright (c) 2011 Oracle and/or its affiliates.
 *
 * All rights reserved. Use is subject to license terms.
 */
package javax.cache;

import org.wso2.carbon.caching.impl.DataHolder;

import javax.cache.spi.AnnotationProvider;
import javax.cache.spi.CachingProvider;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;

/**
 * A factory for creating CacheManagers using the SPI conventions in the JDK's {@link java.util.ServiceLoader}
 * <p/>
 * For a provider to be discovered, its jar must contain a resource called:
 * <pre>
 *   META-INF/services/javax.cache.spi.CachingProvider
 * </pre>
 * containing the class name implementing {@link javax.cache.spi.CachingProvider}
 * <p/>
 * For example, in the reference implementation the contents are:
 * <p/>
 * "javax.cache.implementation.RIServiceFactory"
 * <p/>
 * If more than one CachingProvider is found, getCacheManagerFactory will throw an exception
 * <p/>
 * Also keeps track of all CacheManagers created by the factory. Subsequent calls
 * to {@link #getCacheManager()} return the same CacheManager.
 *
 * @author Yannis Cosmadopoulos
 * @see java.util.ServiceLoader
 * @see javax.cache.spi.CachingProvider
 * @since 1.0
 */
public final class Caching {

    /**
     * The name of the default cache manager.
     * This is the name of the CacheManager returned when {@link #getCacheManager()} is invoked.
     * The default CacheManager is always created.
     */
    public static final String DEFAULT_CACHE_MANAGER_NAME = "__default__";

    /**
     * No public constructor as all methods are static.
     */
    private Caching() {
    }

    /**
     * Get the singleton CacheManagerFactory.
     *
     * @return the cache manager factory
     * @throws IllegalStateException if no CachingProvider is found or if more than one CachingProvider is found
     */
    public static CacheManagerFactory getCacheManagerFactory() {
        return ServiceFactoryHolder.INSTANCE.getCachingProvider().getCacheManagerFactory();
    }

    /**
     * Get the default cache manager with the default classloader.
     * The default cache manager is named {@link #DEFAULT_CACHE_MANAGER_NAME}.
     *
     * @return the default cache manager
     * @throws IllegalStateException if no CachingProvider is found or if more than one CachingProvider is found
     */
    public static CacheManager getCacheManager() {
        return getCacheManager(DEFAULT_CACHE_MANAGER_NAME);
    }

    /**
     * Get the default cache manager.
     * The default cache manager is named {@link #DEFAULT_CACHE_MANAGER_NAME}.
     *
     * @param classLoader the ClassLoader that should be used in converting values into Java Objects. May be null.
     * @return the default cache manager
     * @throws IllegalStateException if no CachingProvider is found or if more than one CachingProvider is found
     */
    public static CacheManager getCacheManager(ClassLoader classLoader) {
        return getCacheManager(classLoader, DEFAULT_CACHE_MANAGER_NAME);
    }

    /**
     * Get a named cache manager using the default cache loader as specified by
     * the implementation.
     *
     * @param name the name of the cache manager
     * @return the named cache manager
     * @throws NullPointerException  if name is null
     * @throws IllegalStateException if no CachingProvider is found or if more than one CachingProvider is found
     */
    public static CacheManager getCacheManager(String name) {
        return getCacheManagerFactory().getCacheManager(name);
    }

    /**
     * Get a named cache manager.
     * <p/>
     * The first time a name is used, a new CacheManager is created.
     * Subsequent calls will return the same cache manager.
     * <p/>
     * During creation, the name of the CacheManager is passed through to the
     * {@link javax.cache.spi.CachingProvider} so that a concrete implementation may use it to point to a
     * specific configuration used to configure the CacheManager. This allows CacheManagers to have different
     * configurations. For example, one CacheManager might be configured for standalone operation and another
     * might be configured to participate in a cluster.
     * <p/>
     * Generally, it makes sense that a CacheManager is associated with a ClassLoader. I.e. all caches emanating
     * from the CacheManager, all code including key and value classes must be present in that ClassLoader.
     * <p/>
     * Secondly, the Caching may be in a different ClassLoader than the
     * CacheManager (i.e. the Caching may be shared in an application server setting).
     * <p/>
     * For this purpose a ClassLoader may be specified. If specified it will be used for all conversion between
     * values and Java Objects. While Java's in-built serialization may be used other schemes may also be used.
     * Either way the specified ClassLoader will be used.
     * <p/>
     * The name parameter may be used to associate a configuration with this CacheManager instance.
     *
     * @param classLoader the ClassLoader that should be used in converting values into Java Objects.
     * @param name        the name of this cache manager
     * @return the new cache manager
     * @throws NullPointerException  if classLoader or name is null
     * @throws IllegalStateException if no CachingProvider is found or if more than one CachingProvider is found
     */
    public static CacheManager getCacheManager(ClassLoader classLoader, String name) {
        return getCacheManagerFactory().getCacheManager(classLoader, name);
    }

    /**
     * Reclaims all resources obtained from this factory.
     * <p/>
     * All cache managers obtained from the factory are shutdown.
     * <p/>
     * Subsequent requests from this factory will return different cache managers than would have been obtained before
     * shutdown. So for example
     * <pre>
     *  CacheManager cacheManager = CacheFactory.getCacheManager();
     *  assertSame(cacheManager, CacheFactory.getCacheManager());
     *  CacheFactory.close();
     *  assertNotSame(cacheManager, CacheFactory.getCacheManager());
     * </pre>
     *
     * @throws javax.cache.CachingShutdownException
     *                               if any of the individual shutdowns failed
     * @throws IllegalStateException if no CachingProvider is found or if more than one CachingProvider is found
     */
    public static void close() throws CachingShutdownException {
        getCacheManagerFactory().close();
    }

    /**
     * Reclaims all resources for a ClassLoader from this factory.
     * <p/>
     * All cache managers linked to the specified CacheLoader obtained from the factory are shutdown.
     *
     * @param classLoader the class loader for which managers will be shut down
     * @return true if found, false otherwise
     * @throws javax.cache.CachingShutdownException
     *                               if any of the individual shutdowns failed
     * @throws IllegalStateException if no CachingProvider is found or if more than one CachingProvider is found
     */
    public static boolean close(ClassLoader classLoader) throws CachingShutdownException {
        return getCacheManagerFactory().close(classLoader);
    }

    /**
     * Reclaims all resources for a ClassLoader from this factory.
     * <p/>
     * The named cache manager obtained from the factory is closed.
     *
     * @param classLoader the class loader for which managers will be shut down
     * @param name        the name of the cache manager
     * @return true if found, false otherwise
     * @throws javax.cache.CachingShutdownException
     *                               if any of the individual shutdowns failed
     * @throws IllegalStateException if no CachingProvider is found or if more than one CachingProvider is found
     */
    public static boolean close(ClassLoader classLoader, String name) throws CachingShutdownException {
        return getCacheManagerFactory().close(classLoader, name);
    }

    /**
     * Indicates whether an optional feature is supported by this implementation.
     *
     * @param optionalFeature the feature to check for
     * @return true if the feature is supported
     * @throws IllegalStateException if no CachingProvider is found or if more than one CachingProvider is found
     */
    public static boolean isSupported(OptionalFeature optionalFeature) {
        return ServiceFactoryHolder.INSTANCE.getCachingProvider().isSupported(optionalFeature);
    }

    /**
     * Indicates whether annotations are supported.
     *
     * @return true if annotations are supported
     */
    public static boolean isAnnotationsSupported() {
        final AnnotationProvider annotationProvider = ServiceFactoryHolder.INSTANCE.getAnnotationProvider();
        return annotationProvider != null && annotationProvider.isSupported();
    }

    /**
     * Holds the ServiceFactory. The enum-singleton idiom guarantees a single,
     * lazily-created, thread-safe instance.
     */
    private enum ServiceFactoryHolder {

        /**
         * The singleton.
         */
        INSTANCE;

        private List<CachingProvider> cachingProviders;
        private List<AnnotationProvider> annotationProviders;

        private ServiceFactoryHolder() {
            init();
        }

        /**
         * (Re)load the provider lists. Unlike stock JSR-107, this WSO2 variant
         * resolves providers through {@link DataHolder} rather than ServiceLoader.
         */
        private void init() {
            cachingProviders = AccessController.doPrivileged(new PrivilegedAction<List<CachingProvider>>() {
                @Override
                public List<CachingProvider> run() {
                    List<CachingProvider> result = new ArrayList<CachingProvider>();
                    result.add(DataHolder.getInstance().getCachingProvider());
                    return result;
                }
            });
            annotationProviders = AccessController.doPrivileged(new PrivilegedAction<List<AnnotationProvider>>() {
                @Override
                public List<AnnotationProvider> run() {
                    List<AnnotationProvider> result = new ArrayList<AnnotationProvider>();
                    result.add(DataHolder.getInstance().getAnnotationProvider());
                    return result;
                }
            });
        }

        //todo support multiple providers
        public CachingProvider getCachingProvider() {
            switch (cachingProviders.size()) {
                case 0:
                    // Retry once: the provider may not have been registered yet
                    // when the enum constant was first initialized.
                    init();
                    if (cachingProviders.size() == 0) {
                        throw new IllegalStateException("No CachingProviders found in classpath.");
                    } else {
                        return cachingProviders.get(0);
                    }
                case 1:
                    return cachingProviders.get(0);
                default:
                    throw new IllegalStateException("Multiple CachingProviders found in classpath." +
                            " There should only be one. CachingProviders found were: " +
                            createListOfClassNames(cachingProviders));
            }
        }

        //todo support multiple providers
        public AnnotationProvider getAnnotationProvider() {
            switch (annotationProviders.size()) {
                case 0:
                    // Annotations support is optional, so absence is not an error.
                    return null;
                case 1:
                    return annotationProviders.get(0);
                default:
                    // Fixed: previous message wrongly said "CachingProviders found were:".
                    throw new IllegalStateException("Multiple AnnotationProviders found in classpath." +
                            " There should only be one. AnnotationProviders found were: " +
                            createListOfClassNames(annotationProviders));
            }
        }

        /**
         * Renders the class names of the given objects as a comma-separated
         * string for error messages; "&lt;none&gt;" when the collection is empty.
         */
        private static String createListOfClassNames(Collection<?> names) {
            if (names.isEmpty()) {
                return "<none>";
            } else {
                StringBuilder sb = new StringBuilder();
                for (Iterator<?> it = names.iterator(); it.hasNext(); ) {
                    Object o = it.next();
                    sb.append(o.getClass().getName());
                    if (it.hasNext()) {
                        sb.append(", ");
                    }
                }
                return sb.toString();
            }
        }
    }
}
/***
 * ASM: a very small and fast Java bytecode manipulation framework
 * Copyright (c) 2000-2011 INRIA, France Telecom
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of the copyright holders nor the names of its
 *    contributors may be used to endorse or promote products derived from
 *    this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */
package bboss.org.objectweb.asm.tree;

import java.util.ListIterator;
import java.util.NoSuchElementException;

import bboss.org.objectweb.asm.MethodVisitor;

/**
 * A doubly linked list of {@link AbstractInsnNode} objects. <i>This
 * implementation is not thread safe</i>.
 */
public class InsnList {

    /**
     * The number of instructions in this list.
     */
    private int size;

    /**
     * The first instruction in this list. May be <tt>null</tt>.
     */
    private AbstractInsnNode first;

    /**
     * The last instruction in this list. May be <tt>null</tt>.
     */
    private AbstractInsnNode last;

    /**
     * A cache of the instructions of this list. This cache is used to improve
     * the performance of the {@link #get} method. It is invalidated (set to
     * <tt>null</tt>) by every method that modifies the list.
     */
    AbstractInsnNode[] cache;

    /**
     * Returns the number of instructions in this list.
     *
     * @return the number of instructions in this list.
     */
    public int size() {
        return size;
    }

    /**
     * Returns the first instruction in this list.
     *
     * @return the first instruction in this list, or <tt>null</tt> if the list
     *         is empty.
     */
    public AbstractInsnNode getFirst() {
        return first;
    }

    /**
     * Returns the last instruction in this list.
     *
     * @return the last instruction in this list, or <tt>null</tt> if the list
     *         is empty.
     */
    public AbstractInsnNode getLast() {
        return last;
    }

    /**
     * Returns the instruction whose index is given. This method builds a cache
     * of the instructions in this list to avoid scanning the whole list each
     * time it is called. Once the cache is built, this method runs in constant
     * time. This cache is invalidated by all the methods that modify the list.
     *
     * @param index
     *            the index of the instruction that must be returned.
     * @return the instruction whose index is given.
     * @throws IndexOutOfBoundsException
     *             if (index &lt; 0 || index &gt;= size()).
     */
    public AbstractInsnNode get(final int index) {
        if (index < 0 || index >= size) {
            throw new IndexOutOfBoundsException();
        }
        if (cache == null) {
            cache = toArray();
        }
        return cache[index];
    }

    /**
     * Returns <tt>true</tt> if the given instruction belongs to this list. This
     * method always scans the instructions of this list until it finds the
     * given instruction or reaches the end of the list.
     *
     * @param insn
     *            an instruction.
     * @return <tt>true</tt> if the given instruction belongs to this list.
     */
    public boolean contains(final AbstractInsnNode insn) {
        AbstractInsnNode i = first;
        while (i != null && i != insn) {
            i = i.next;
        }
        return i != null;
    }

    /**
     * Returns the index of the given instruction in this list. This method
     * builds a cache of the instruction indexes to avoid scanning the whole
     * list each time it is called. Once the cache is built, this method runs in
     * constant time. The cache is invalidated by all the methods that modify
     * the list.
     *
     * @param insn
     *            an instruction <i>of this list</i>.
     * @return the index of the given instruction in this list. <i>The result of
     *         this method is undefined if the given instruction does not belong
     *         to this list</i>. Use {@link #contains contains} to test if an
     *         instruction belongs to an instruction list or not.
     */
    public int indexOf(final AbstractInsnNode insn) {
        if (cache == null) {
            cache = toArray(); // side effect: assigns every node its index
        }
        return insn.index;
    }

    /**
     * Makes the given visitor visit all of the instructions in this list.
     *
     * @param mv
     *            the method visitor that must visit the instructions.
     */
    public void accept(final MethodVisitor mv) {
        AbstractInsnNode insn = first;
        while (insn != null) {
            insn.accept(mv);
            insn = insn.next;
        }
    }

    /**
     * Returns an iterator over the instructions in this list.
     *
     * @return an iterator over the instructions in this list.
     */
    public ListIterator<AbstractInsnNode> iterator() {
        return iterator(0);
    }

    /**
     * Returns an iterator over the instructions in this list, starting at the
     * given index.
     *
     * @param index
     *            index of the first instruction returned by {@code next()}.
     * @return an iterator over the instructions in this list.
     */
    @SuppressWarnings("unchecked")
    public ListIterator<AbstractInsnNode> iterator(int index) {
        return new InsnListIterator(index);
    }

    /**
     * Returns an array containing all of the instructions in this list. As a
     * side effect, assigns each node its current index (used by the cache).
     *
     * @return an array containing all of the instructions in this list.
     */
    public AbstractInsnNode[] toArray() {
        int i = 0;
        AbstractInsnNode elem = first;
        AbstractInsnNode[] insns = new AbstractInsnNode[size];
        while (elem != null) {
            insns[i] = elem;
            elem.index = i++;
            elem = elem.next;
        }
        return insns;
    }

    /**
     * Replaces an instruction of this list with another instruction.
     *
     * @param location
     *            an instruction <i>of this list</i>.
     * @param insn
     *            another instruction, <i>which must not belong to any
     *            {@link InsnList}</i>.
     */
    public void set(final AbstractInsnNode location, final AbstractInsnNode insn) {
        AbstractInsnNode next = location.next;
        insn.next = next;
        if (next != null) {
            next.prev = insn;
        } else {
            last = insn;
        }
        AbstractInsnNode prev = location.prev;
        insn.prev = prev;
        if (prev != null) {
            prev.next = insn;
        } else {
            first = insn;
        }
        if (cache != null) {
            // Replacement keeps the cache valid: patch the slot in place.
            int index = location.index;
            cache[index] = insn;
            insn.index = index;
        } else {
            insn.index = 0; // insn now belongs to an InsnList
        }
        location.index = -1; // i no longer belongs to an InsnList
        location.prev = null;
        location.next = null;
    }

    /**
     * Adds the given instruction to the end of this list.
     *
     * @param insn
     *            an instruction, <i>which must not belong to any
     *            {@link InsnList}</i>.
     */
    public void add(final AbstractInsnNode insn) {
        ++size;
        if (last == null) {
            first = insn;
            last = insn;
        } else {
            last.next = insn;
            insn.prev = last;
        }
        last = insn;
        cache = null;
        insn.index = 0; // insn now belongs to an InsnList
    }

    /**
     * Adds the given instructions to the end of this list.
     *
     * @param insns
     *            an instruction list, which is cleared during the process. This
     *            list must be different from 'this'.
     */
    public void add(final InsnList insns) {
        if (insns.size == 0) {
            return;
        }
        size += insns.size;
        if (last == null) {
            first = insns.first;
            last = insns.last;
        } else {
            AbstractInsnNode elem = insns.first;
            last.next = elem;
            elem.prev = last;
            last = insns.last;
        }
        cache = null;
        insns.removeAll(false);
    }

    /**
     * Inserts the given instruction at the begining of this list.
     *
     * @param insn
     *            an instruction, <i>which must not belong to any
     *            {@link InsnList}</i>.
     */
    public void insert(final AbstractInsnNode insn) {
        ++size;
        if (first == null) {
            first = insn;
            last = insn;
        } else {
            first.prev = insn;
            insn.next = first;
        }
        first = insn;
        cache = null;
        insn.index = 0; // insn now belongs to an InsnList
    }

    /**
     * Inserts the given instructions at the begining of this list.
     *
     * @param insns
     *            an instruction list, which is cleared during the process. This
     *            list must be different from 'this'.
     */
    public void insert(final InsnList insns) {
        if (insns.size == 0) {
            return;
        }
        size += insns.size;
        if (first == null) {
            first = insns.first;
            last = insns.last;
        } else {
            AbstractInsnNode elem = insns.last;
            first.prev = elem;
            elem.next = first;
            first = insns.first;
        }
        cache = null;
        insns.removeAll(false);
    }

    /**
     * Inserts the given instruction after the specified instruction.
     *
     * @param location
     *            an instruction <i>of this list</i> after which insn must be
     *            inserted.
     * @param insn
     *            the instruction to be inserted, <i>which must not belong to
     *            any {@link InsnList}</i>.
     */
    public void insert(final AbstractInsnNode location, final AbstractInsnNode insn) {
        ++size;
        AbstractInsnNode next = location.next;
        if (next == null) {
            last = insn;
        } else {
            next.prev = insn;
        }
        location.next = insn;
        insn.next = next;
        insn.prev = location;
        cache = null;
        insn.index = 0; // insn now belongs to an InsnList
    }

    /**
     * Inserts the given instructions after the specified instruction.
     *
     * @param location
     *            an instruction <i>of this list</i> after which the
     *            instructions must be inserted.
     * @param insns
     *            the instruction list to be inserted, which is cleared during
     *            the process. This list must be different from 'this'.
     */
    public void insert(final AbstractInsnNode location, final InsnList insns) {
        if (insns.size == 0) {
            return;
        }
        size += insns.size;
        AbstractInsnNode ifirst = insns.first;
        AbstractInsnNode ilast = insns.last;
        AbstractInsnNode next = location.next;
        if (next == null) {
            last = ilast;
        } else {
            next.prev = ilast;
        }
        location.next = ifirst;
        ilast.next = next;
        ifirst.prev = location;
        cache = null;
        insns.removeAll(false);
    }

    /**
     * Inserts the given instruction before the specified instruction.
     *
     * @param location
     *            an instruction <i>of this list</i> before which insn must be
     *            inserted.
     * @param insn
     *            the instruction to be inserted, <i>which must not belong to
     *            any {@link InsnList}</i>.
     */
    public void insertBefore(final AbstractInsnNode location, final AbstractInsnNode insn) {
        ++size;
        AbstractInsnNode prev = location.prev;
        if (prev == null) {
            first = insn;
        } else {
            prev.next = insn;
        }
        location.prev = insn;
        insn.next = location;
        insn.prev = prev;
        cache = null;
        insn.index = 0; // insn now belongs to an InsnList
    }

    /**
     * Inserts the given instructions before the specified instruction.
     *
     * @param location
     *            an instruction <i>of this list</i> before which the
     *            instructions must be inserted.
     * @param insns
     *            the instruction list to be inserted, which is cleared during
     *            the process. This list must be different from 'this'.
     */
    public void insertBefore(final AbstractInsnNode location, final InsnList insns) {
        if (insns.size == 0) {
            return;
        }
        size += insns.size;
        AbstractInsnNode ifirst = insns.first;
        AbstractInsnNode ilast = insns.last;
        AbstractInsnNode prev = location.prev;
        if (prev == null) {
            first = ifirst;
        } else {
            prev.next = ifirst;
        }
        location.prev = ilast;
        ilast.next = location;
        ifirst.prev = prev;
        cache = null;
        insns.removeAll(false);
    }

    /**
     * Removes the given instruction from this list.
     *
     * @param insn
     *            the instruction <i>of this list</i> that must be removed.
     */
    public void remove(final AbstractInsnNode insn) {
        --size;
        AbstractInsnNode next = insn.next;
        AbstractInsnNode prev = insn.prev;
        if (next == null) {
            if (prev == null) {
                first = null;
                last = null;
            } else {
                prev.next = null;
                last = prev;
            }
        } else {
            if (prev == null) {
                first = next;
                next.prev = null;
            } else {
                prev.next = next;
                next.prev = prev;
            }
        }
        cache = null;
        insn.index = -1; // insn no longer belongs to an InsnList
        insn.prev = null;
        insn.next = null;
    }

    /**
     * Removes all of the instructions of this list.
     *
     * @param mark
     *            if the instructions must be marked as no longer belonging to
     *            any {@link InsnList}.
     */
    void removeAll(final boolean mark) {
        if (mark) {
            AbstractInsnNode insn = first;
            while (insn != null) {
                AbstractInsnNode next = insn.next;
                insn.index = -1; // insn no longer belongs to an InsnList
                insn.prev = null;
                insn.next = null;
                insn = next;
            }
        }
        size = 0;
        first = null;
        last = null;
        cache = null;
    }

    /**
     * Removes all of the instructions of this list.
     */
    public void clear() {
        removeAll(false);
    }

    /**
     * Reset all labels in the instruction list. This method should be called
     * before reusing same instructions list between several
     * <code>ClassWriter</code>s.
     */
    public void resetLabels() {
        AbstractInsnNode insn = first;
        while (insn != null) {
            if (insn instanceof LabelNode) {
                ((LabelNode) insn).resetLabel();
            }
            insn = insn.next;
        }
    }

    // this class is not generified because it will create bridges
    private final class InsnListIterator implements ListIterator {

        AbstractInsnNode next;

        AbstractInsnNode prev;

        // The last node returned by next()/previous(); null when remove()/add()
        // has been called since, per the ListIterator contract.
        AbstractInsnNode remove;

        InsnListIterator(int index) {
            if (index == size()) {
                next = null;
                prev = getLast();
            } else {
                next = get(index);
                prev = next.prev;
            }
        }

        public boolean hasNext() {
            return next != null;
        }

        public Object next() {
            if (next == null) {
                throw new NoSuchElementException();
            }
            AbstractInsnNode result = next;
            prev = result;
            next = result.next;
            remove = result;
            return result;
        }

        public void remove() {
            if (remove != null) {
                if (remove == next) {
                    next = next.next;
                } else {
                    prev = prev.prev;
                }
                InsnList.this.remove(remove);
                remove = null;
            } else {
                throw new IllegalStateException();
            }
        }

        public boolean hasPrevious() {
            return prev != null;
        }

        public Object previous() {
            // Fixed: previously dereferenced prev without checking, producing a
            // NullPointerException instead of the NoSuchElementException
            // required by the ListIterator contract.
            if (prev == null) {
                throw new NoSuchElementException();
            }
            AbstractInsnNode result = prev;
            next = result;
            prev = result.prev;
            remove = result;
            return result;
        }

        public int nextIndex() {
            if (next == null) {
                return size();
            }
            if (cache == null) {
                cache = toArray();
            }
            return next.index;
        }

        public int previousIndex() {
            if (prev == null) {
                return -1;
            }
            if (cache == null) {
                cache = toArray();
            }
            return prev.index;
        }

        public void add(Object o) {
            // Fixed: previously always called insertBefore(next, ...), which
            // threw a NullPointerException when the iterator was at the end of
            // the list (next == null). Handle end-of-list and empty list.
            if (next != null) {
                InsnList.this.insertBefore(next, (AbstractInsnNode) o);
            } else if (prev != null) {
                InsnList.this.insert(prev, (AbstractInsnNode) o);
            } else {
                InsnList.this.add((AbstractInsnNode) o);
            }
            prev = (AbstractInsnNode) o;
            remove = null;
        }

        public void set(Object o) {
            // Fixed: previously replaced next.prev, which is the wrong node
            // after previous() and throws a NullPointerException at the end of
            // the list. Per the ListIterator contract, set() replaces the last
            // node returned by next()/previous().
            if (remove != null) {
                InsnList.this.set(remove, (AbstractInsnNode) o);
                if (remove == prev) {
                    prev = (AbstractInsnNode) o;
                } else {
                    next = (AbstractInsnNode) o;
                }
            } else {
                throw new IllegalStateException();
            }
        }
    }
}
package ch.hsr.whitespace.javapilot.akka;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.stream.Collectors;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.zuehlke.carrera.relayapi.messages.PenaltyMessage;
import com.zuehlke.carrera.relayapi.messages.VelocityMessage;

import akka.actor.ActorRef;
import akka.actor.Props;
import akka.actor.UntypedActor;
import ch.hsr.whitespace.javapilot.akka.messages.ChainTrackPartActorsMessage;
import ch.hsr.whitespace.javapilot.akka.messages.DirectionChangedMessage;
import ch.hsr.whitespace.javapilot.akka.messages.InitializePositionDetection;
import ch.hsr.whitespace.javapilot.akka.messages.LostPositionMessage;
import ch.hsr.whitespace.javapilot.akka.messages.PrintTrackPositionMessage;
import ch.hsr.whitespace.javapilot.akka.messages.RestartWithTrackRecognitionMessage;
import ch.hsr.whitespace.javapilot.akka.messages.SpeedupFinishedMessage;
import ch.hsr.whitespace.javapilot.akka.messages.SpeedupMessage;
import ch.hsr.whitespace.javapilot.akka.messages.TrackPartEnteredMessage;
import ch.hsr.whitespace.javapilot.algorithms.speedup_strategy.SpeedupOrderStrategy;
import ch.hsr.whitespace.javapilot.algorithms.speedup_strategy.SpeedupOrderStrategyFactory;
import ch.hsr.whitespace.javapilot.algorithms.speedup_strategy.SpeedupOrderStrategyFactory.SpeedupOrderStrategyType;
import ch.hsr.whitespace.javapilot.model.track.Direction;
import ch.hsr.whitespace.javapilot.model.track.TrackPart;
import ch.hsr.whitespace.javapilot.model.track.VelocityBarrier;
import ch.hsr.whitespace.javapilot.util.MessageUtil;
import ch.hsr.whitespace.javapilot.util.TrackPartUtil;

/**
 * Coordinates one driving actor per recognized track part: chains the actors
 * into a ring, dispatches incoming telemetry to them, tracks the car's
 * position via light barriers, and orchestrates the speed-up phases
 * (straights first, then curves). Restarts track recognition when the
 * position is lost too often or an unknown direction is detected.
 */
public class DrivingCoordinatorActor extends UntypedActor {

	private final Logger LOGGER = LoggerFactory.getLogger(DrivingCoordinatorActor.class);

	// More than this many lost-position events within 10 s triggers a restart.
	private static final int MAX_LOSTS_WITHIN_10_SECS = 4;
	private static final SpeedupOrderStrategyType SPEEDUP_STRATEGY = SpeedupOrderStrategyType.ALL_TOGETHER;

	private ActorRef whitespacePilot;
	// Track parts and their driving actors, keyed by 1-based track-part id.
	private Map<Integer, TrackPart> trackParts;
	private Map<Integer, ActorRef> trackPartActors;
	private List<VelocityBarrier> barriers;
	private Map<Integer, TrackPart> barrierIndexToTrackPartMap;
	private int lastBarrierIndex = 0;
	private boolean lostPosition = false;
	private int initialPower;
	private List<TrackPart> straights;
	private List<TrackPart> curves;
	private List<LostPositionMessage> lostMessages;
	private SpeedupOrderStrategy speedupStrategy;
	private boolean straightSpeedupFinished = false;
	private boolean curveSpeedupFinished = false;

	public static Props props(ActorRef pilot, int initialPower) {
		return Props.create(DrivingCoordinatorActor.class, () -> new DrivingCoordinatorActor(pilot, initialPower));
	}

	public DrivingCoordinatorActor(ActorRef whitespacePilot, int initialPower) {
		this.whitespacePilot = whitespacePilot;
		this.initialPower = initialPower;
		this.trackParts = new TreeMap<>();
		this.trackPartActors = new TreeMap<>();
		this.lostMessages = new ArrayList<>();
	}

	@Override
	public void onReceive(Object message) throws Exception {
		// Some messages are relevant to every track-part actor; forward first.
		forwardMessagesToDriverActors(message);
		if (message instanceof InitializePositionDetection) {
			initializeTrackPartMap(((InitializePositionDetection) message).getTrackParts());
			initializeBarriers();
			// The car starts in track part 1 at time offset 0.
			trackPartActors.get(1).tell(new TrackPartEnteredMessage(0, trackParts.get(1).getDirection()), getSelf());
			startSpeedingUpStraightParts();
		} else if (message instanceof VelocityMessage) {
			handleVelocityMessage((VelocityMessage) message);
		} else if (message instanceof PrintTrackPositionMessage) {
			printCurrentPosition(((PrintTrackPositionMessage) message).getCurrentTrackPartId());
		} else if (message instanceof LostPositionMessage) {
			handleLostPosition((LostPositionMessage) message);
		} else if (message instanceof SpeedupFinishedMessage) {
			speedupStrategy.speedupFinished(((SpeedupFinishedMessage) message).getTrackPart());
		}
	}

	/** Tells the actor of the given track part to start speeding up. */
	public void speedupTrackPartById(int trackPartId) {
		trackPartActors.get(trackPartId).tell(new SpeedupMessage(true), getSelf());
	}

	/**
	 * Callback from the speed-up strategy: advances from the straights phase
	 * to the curves phase, then marks speed-up as complete.
	 */
	public void speedupStrategyFinishedPhase() {
		if (!straightSpeedupFinished) {
			straightSpeedupFinished = true;
			startSpeedingUpCurveParts();
		} else if (!curveSpeedupFinished) {
			curveSpeedupFinished = true;
		}
	}

	/**
	 * Handles a lost-position event: notifies the pilot, and restarts track
	 * recognition if the detected direction is unknown or losses pile up.
	 */
	private void handleLostPosition(LostPositionMessage message) {
		lostPosition = true;
		this.whitespacePilot.tell(message, getSender());
		if (!isDetectedDirectionPartOfTrackPart(message.getDetectedDirection())) {
			LOGGER.warn("The direction '" + message.getDetectedDirection() + "' is not part of our pattern. :-/ Start over...");
			restart();
			// Fixed: return so a single lost-position event cannot trigger
			// restart() twice (stopping already-stopped actors and sending two
			// RestartWithTrackRecognitionMessages).
			return;
		}
		if (doWeHaveTooManyLosts(message)) {
			LOGGER.warn("We have too many losts :-/ Start over...");
			restart();
		}
	}

	/**
	 * Records the lost-position event and reports whether more than
	 * {@link #MAX_LOSTS_WITHIN_10_SECS} such events occurred in the last 10 s.
	 */
	private boolean doWeHaveTooManyLosts(LostPositionMessage message) {
		lostMessages.add(message);
		long currentTimeStamp = message.getTimeStamp();
		long currentMinus10SecsTimeStamp = currentTimeStamp - 10000;
		List<LostPositionMessage> lostsInLast10Seconds = lostMessages.stream()
				.filter(l -> l.getTimeStamp() > currentMinus10SecsTimeStamp && l.getTimeStamp() <= currentTimeStamp)
				.collect(Collectors.toList());
		return lostsInLast10Seconds.size() > MAX_LOSTS_WITHIN_10_SECS;
	}

	/** Stops all track-part actors and asks the pilot to redo track recognition. */
	private void restart() {
		for (ActorRef actor : trackPartActors.values()) {
			getContext().stop(actor);
		}
		whitespacePilot.tell(new RestartWithTrackRecognitionMessage(), getSelf());
	}

	private boolean isDetectedDirectionPartOfTrackPart(Direction detectedDirection) {
		for (TrackPart trackPart : trackParts.values()) {
			if (trackPart.getDirection() == detectedDirection)
				return true;
		}
		return false;
	}

	/** Phase 1: speed up the straight parts, longest-duration ordering. */
	private void startSpeedingUpStraightParts() {
		this.straights = TrackPartUtil.getStraightPartsByDuration(trackParts.values());
		speedupStrategy = new SpeedupOrderStrategyFactory(straights, this).createStrategy(SPEEDUP_STRATEGY);
		speedupStrategy.startSpeedup();
	}

	/** Phase 2: speed up the curve parts. */
	private void startSpeedingUpCurveParts() {
		this.curves = TrackPartUtil.getCurveParts(trackParts.values());
		speedupStrategy = new SpeedupOrderStrategyFactory(curves, this).createStrategy(SPEEDUP_STRATEGY);
		speedupStrategy.startSpeedup();
	}

	/** Forwards direction-change and penalty messages to every driving actor. */
	private void forwardMessagesToDriverActors(Object message) {
		if (!MessageUtil.isMessageForwardNeeded(message, new Class[] { DirectionChangedMessage.class, PenaltyMessage.class }))
			return;
		for (ActorRef actor : trackPartActors.values()) {
			actor.tell(message, getSender());
		}
	}

	/**
	 * A velocity message means the car passed a light barrier: use the
	 * barrier's known track part to re-anchor the position if it was lost.
	 */
	private void handleVelocityMessage(VelocityMessage message) {
		int trackPartId = barrierIndexToTrackPartMap.get(lastBarrierIndex).getId();
		LOGGER.info("Passed barrier in trackpart '" + trackPartId + "'");
		if (lostPosition)
			correctPositionWithLightBarrier(trackPartId);
		incrementBarrierIndex();
	}

	private void correctPositionWithLightBarrier(int currentTrackPartId) {
		trackPartActors.get(currentTrackPartId)
				.tell(new TrackPartEnteredMessage(0, trackParts.get(currentTrackPartId).getDirection(), true), getSelf());
		lostPosition = false;
	}

	/** Advances to the next barrier, wrapping around at the end of the lap. */
	private void incrementBarrierIndex() {
		if (lastBarrierIndex == (barriers.size() - 1)) {
			lastBarrierIndex = 0;
		} else {
			lastBarrierIndex++;
		}
	}

	private void printCurrentPosition(int currentTrackPartId) {
		LOGGER.info("Position: " + getCurrentPositionString(currentTrackPartId));
	}

	/**
	 * Renders the track as "-S(1)-L(2)-…", highlighting the current part with
	 * ANSI color codes (ESC[35m … ESC[0m).
	 */
	private String getCurrentPositionString(int currentTrackPartId) {
		// StringBuilder: local, single-threaded use — no need for StringBuffer.
		StringBuilder sb = new StringBuilder();
		sb.append("-");
		for (int i = 1; i <= trackParts.size(); i++) {
			if (i == currentTrackPartId)
				sb.append((char) 27 + "[35m");
			sb.append(trackParts.get(i).getDirection().toShortString());
			sb.append("(" + i + ")");
			if (i == currentTrackPartId)
				sb.append((char) 27 + "[0m");
			sb.append("-");
		}
		return sb.toString();
	}

	/** Assigns 1-based ids to the track parts and spawns one actor per part. */
	private void initializeTrackPartMap(List<TrackPart> trackParts) {
		int idCounter = 1;
		for (TrackPart trackPart : trackParts) {
			trackPart.setId(idCounter);
			this.trackParts.put(trackPart.getId(), trackPart);
			createTrackPartActor(idCounter, trackPart);
			idCounter++;
		}
		initializeTrackPartActorList();
	}

	/** Chains the track-part actors into a ring (last wraps to first). */
	private void initializeTrackPartActorList() {
		for (int i = 1; i <= trackPartActors.size(); i++) {
			int previousId = i - 1;
			int nextId = i + 1;
			if (i == 1) {
				previousId = trackPartActors.size();
			} else if (i == trackPartActors.size()) {
				nextId = 1;
			}
			LOGGER.info("Chain trackpart-actor with id '" + i + "': previous='" + previousId + "', next='" + nextId + "'");
			trackPartActors.get(i).tell(
					new ChainTrackPartActorsMessage(trackParts.get(previousId), trackPartActors.get(previousId),
							trackParts.get(nextId), trackPartActors.get(nextId)),
					getSelf());
		}
	}

	private void createTrackPartActor(int idCounter, TrackPart trackPart) {
		ActorRef actor = getContext().actorOf(Props.create(
				AbstractTrackPartDrivingActor.getDrivingActorClass(trackPart.getDirection()), whitespacePilot, trackPart,
				initialPower));
		trackPartActors.put(idCounter, actor);
	}

	/** Builds the lap-ordered barrier list and the barrier→track-part lookup. */
	private void initializeBarriers() {
		barriers = new ArrayList<>();
		barrierIndexToTrackPartMap = new TreeMap<>();
		int index = 0;
		for (TrackPart trackPart : trackParts.values()) {
			for (VelocityBarrier barrier : trackPart.getVelocityBarriers()) {
				barriers.add(barrier);
				barrierIndexToTrackPartMap.put(index, trackPart);
				index++;
			}
		}
	}
}
package liquibase.servicelocator; import liquibase.logging.Logger; import liquibase.logging.core.DefaultLogger; import liquibase.util.StringUtils; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.lang.reflect.Modifier; import java.net.*; import java.util.*; import java.util.jar.JarEntry; import java.util.jar.JarInputStream; /** * Default implement of {@link PackageScanClassResolver} */ public class DefaultPackageScanClassResolver implements PackageScanClassResolver { protected final transient Logger log = new DefaultLogger(); private Set<ClassLoader> classLoaders; private Set<PackageScanFilter> scanFilters; private Map<String, Set<Class>> allClassesByPackage = new HashMap<String, Set<Class>>(); private Set<String> loadedPackages = new HashSet<String>(); @Override public void addClassLoader(ClassLoader classLoader) { try { getClassLoaders().add(classLoader); } catch (UnsupportedOperationException ex) { // Ignore this exception as the PackageScanClassResolver // don't want use any other classloader } } @Override public void addFilter(PackageScanFilter filter) { if (scanFilters == null) { scanFilters = new LinkedHashSet<PackageScanFilter>(); } scanFilters.add(filter); } @Override public void removeFilter(PackageScanFilter filter) { if (scanFilters != null) { scanFilters.remove(filter); } } @Override public Set<ClassLoader> getClassLoaders() { if (classLoaders == null) { classLoaders = new HashSet<ClassLoader>(); ClassLoader ccl = Thread.currentThread().getContextClassLoader(); if (ccl != null) { log.debug("The thread context class loader: " + ccl + " is used to load the class"); classLoaders.add(ccl); } classLoaders.add(DefaultPackageScanClassResolver.class.getClassLoader()); } return classLoaders; } @Override public void setClassLoaders(Set<ClassLoader> classLoaders) { this.classLoaders = classLoaders; } @Override @SuppressWarnings("unchecked") public Set<Class<?>> findImplementations(Class 
parent, String... packageNames) { if (packageNames == null) { return Collections.EMPTY_SET; } log.debug("Searching for implementations of " + parent.getName() + " in packages: " + Arrays.asList(packageNames)); PackageScanFilter test = getCompositeFilter(new AssignableToPackageScanFilter(parent)); Set<Class<?>> classes = new LinkedHashSet<Class<?>>(); for (String pkg : packageNames) { find(test, pkg, classes); } log.debug("Found: " + classes); return classes; } @Override @SuppressWarnings("unchecked") public Set<Class<?>> findByFilter(PackageScanFilter filter, String... packageNames) { if (packageNames == null) { return Collections.EMPTY_SET; } Set<Class<?>> classes = new LinkedHashSet<Class<?>>(); for (String pkg : packageNames) { find(filter, pkg, classes); } log.debug("Found: " + classes); return classes; } protected void find(PackageScanFilter test, String packageName, Set<Class<?>> classes) { packageName = packageName.replace('.', '/'); Set<ClassLoader> set = getClassLoaders(); if (!loadedPackages.contains(packageName)) { for (ClassLoader classLoader : set) { this.findAllClasses(packageName, classLoader); } loadedPackages.add(packageName); } findInAllClasses(test, packageName, classes); } protected void findAllClasses(String packageName, ClassLoader loader) { log.debug("Searching for all classes in package: " + packageName + " using classloader: " + loader.getClass().getName()); Enumeration<URL> urls; try { urls = getResources(loader, packageName); if (!urls.hasMoreElements()) { log.debug("No URLs returned by classloader"); } } catch (IOException ioe) { log.warning("Cannot read package: " + packageName, ioe); return; } while (urls.hasMoreElements()) { URL url = null; try { url = urls.nextElement(); log.debug("URL from classloader: " + url); url = customResourceLocator(url); String urlPath = url.getFile(); String host = null; urlPath = URLDecoder.decode(urlPath, "UTF-8"); if (url.getProtocol().equals("vfs") && !urlPath.startsWith("vfs")) { urlPath = 
"vfs:"+urlPath; } if (url.getProtocol().equals("vfszip") && !urlPath.startsWith("vfszip")) { urlPath = "vfszip:"+urlPath; } log.debug("Decoded urlPath: " + urlPath + " with protocol: " + url.getProtocol()); // If it's a file in a directory, trim the stupid file: spec if (urlPath.startsWith("file:")) { // file path can be temporary folder which uses characters that the URLDecoder decodes wrong // for example + being decoded to something else (+ can be used in temp folders on Mac OS) // to remedy this then create new path without using the URLDecoder try { URI uri = new URI(url.getFile()); host = uri.getHost(); urlPath = uri.getPath(); } catch (URISyntaxException e) { // fallback to use as it was given from the URLDecoder // this allows us to work on Windows if users have spaces in paths } if (urlPath.startsWith("file:")) { urlPath = urlPath.substring(5); } } // osgi bundles should be skipped if (url.toString().startsWith("bundle:") || urlPath.startsWith("bundle:")) { log.debug("It's a virtual osgi bundle, skipping"); continue; } // Else it's in a JAR, grab the path to the jar if (urlPath.contains(".jar/") && !urlPath.contains(".jar!/")) { urlPath = urlPath.replace(".jar/", ".jar!/"); } if (urlPath.indexOf('!') > 0) { urlPath = urlPath.substring(0, urlPath.indexOf('!')); } // If a host component was given prepend it to the decoded path. // This still has its problems as we silently skip user and password // information etc. but it fixes UNC urls on windows. 
if (host != null) { if (urlPath.startsWith("/")) { urlPath = "//" + host + urlPath; } else { urlPath = "//" + host + "/" + urlPath; } } File file = new File(urlPath); if (file.isDirectory()) { log.debug("Loading from directory using file: " + file); loadImplementationsInDirectory(packageName, file, loader); } else { InputStream stream; if (urlPath.startsWith("http:") || urlPath.startsWith("https:") || urlPath.startsWith("sonicfs:") || urlPath.startsWith("vfs:") || urlPath.startsWith("vfszip:")) { // load resources using http/https // sonic ESB requires to be loaded using a regular URLConnection URL urlStream = new URL(urlPath); log.debug("Loading from jar using "+urlStream.getProtocol()+": " + urlPath); URLConnection con = urlStream.openConnection(); // disable cache mainly to avoid jar file locking on Windows con.setUseCaches(false); stream = con.getInputStream(); } else { log.debug("Loading from jar using file: " + file); stream = new FileInputStream(file); } try { loadImplementationsInJar(packageName, stream, loader); } catch (IOException ioe) { log.warning("Cannot search jar file '" + urlPath + "' for classes due to an IOException: " + ioe.getMessage(), ioe); } finally { stream.close(); } } } catch (IOException e) { // use debug logging to avoid being to noisy in logs log.debug("Cannot read entries in url: " + url, e); } } } protected void findInAllClasses(PackageScanFilter test, String packageName, Set<Class<?>> classes) { log.debug("Searching for: " + test + " in package: " + packageName ); Set<Class> packageClasses = this.allClassesByPackage.get(packageName); if (packageClasses == null) { log.debug("No classes found in package: " + packageName ); return; } for (Class type : packageClasses) { if (test.matches(type)) { classes.add(type); } } } // We can override this method to support the custom ResourceLocator protected URL customResourceLocator(URL url) throws IOException { // Do nothing here return url; } /** * Strategy to get the resources by the given 
classloader. * <p/> * Notice that in WebSphere platforms there is a {@link WebSpherePackageScanClassResolver} * to take care of WebSphere's odditiy of resource loading. * * @param loader the classloader * @param packageName the packagename for the package to load * @return URL's for the given package * @throws IOException is thrown by the classloader */ protected Enumeration<URL> getResources(ClassLoader loader, String packageName) throws IOException { log.debug("Getting resource URL for package: " + packageName + " with classloader: " + loader); // If the URL is a jar, the URLClassloader.getResources() seems to require a trailing slash. The // trailing slash is harmless for other URLs if (!packageName.endsWith("/")) { packageName = packageName + "/"; } return loader.getResources(packageName); } private PackageScanFilter getCompositeFilter(PackageScanFilter filter) { if (scanFilters != null) { CompositePackageScanFilter composite = new CompositePackageScanFilter(scanFilters); composite.addFilter(filter); return composite; } return filter; } /** * Finds matches in a physical directory on a filesystem. Examines all files * within a directory - if the File object is not a directory, and ends with * <i>.class</i> the file is loaded. Operates recursively to find classes within a * folder structure matching the package structure. * * @param parent the package name up to this directory in the package * hierarchy. E.g. 
if /classes is in the classpath and we wish to * examine files in /classes/org/apache then the values of * <i>parent</i> would be <i>org/apache</i> * @param location a File object representing a directory */ private void loadImplementationsInDirectory(String parent, File location, ClassLoader classLoader) { File[] files = location.listFiles(); StringBuilder builder = null; for (File file : files) { builder = new StringBuilder(100); String name = file.getName(); if (name != null) { name = name.trim(); builder.append(parent).append("/").append(name); String packageOrClass = parent == null ? name : builder.toString(); if (file.isDirectory()) { loadImplementationsInDirectory(packageOrClass, file, classLoader); } else if (name.endsWith(".class")) { this.loadClass(packageOrClass, classLoader); } } } } private void loadClass(String className, ClassLoader classLoader) { try { String externalName = className.substring(0, className.indexOf('.')).replace('/', '.'); Class<?> type = classLoader.loadClass(externalName); log.debug("Loaded the class: " + type + " in classloader: " + classLoader); if (Modifier.isAbstract(type.getModifiers()) || Modifier.isInterface(type.getModifiers())) { return; } String packageName = type.getPackage().getName(); List<String> packageNameParts = Arrays.asList(packageName.split("\\.")); for (int i=0; i<packageNameParts.size(); i++) { String thisPackage = StringUtils.join(packageNameParts.subList(0, i+1), "/"); if (!this.allClassesByPackage.containsKey(thisPackage)) { this.allClassesByPackage.put(thisPackage, new HashSet<Class>()); } this.allClassesByPackage.get(thisPackage).add(type); } } catch (ClassNotFoundException e) { log.debug("Cannot find class '" + className + "' in classloader: " + classLoader + ". Reason: " + e, e); } catch (NoClassDefFoundError e) { log.debug("Cannot find the class definition '" + className + "' in classloader: " + classLoader + ". 
Reason: " + e, e); } catch (LinkageError e) { log.debug("Cannot find the class definition '" + className + "' in classloader: " + classLoader + ". Reason: " + e, e); } catch (Throwable e) { log.severe("Cannot load class '"+className+"' in classloader: "+classLoader+". Reason: "+e, e); } } /** * Finds matching classes within a jar files that contains a folder * structure matching the package structure. If the File is not a JarFile or * does not exist a warning will be logged, but no error will be raised. * * @param parent the parent package under which classes must be in order to * be considered * @param stream the inputstream of the jar file to be examined for classes */ protected void loadImplementationsInJar(String parent, InputStream stream, ClassLoader loader) throws IOException { JarInputStream jarStream = null; if (stream instanceof JarInputStream) { jarStream = (JarInputStream) stream; } else { jarStream = new JarInputStream(stream); } JarEntry entry; while ((entry = jarStream.getNextJarEntry()) != null) { String name = entry.getName(); if (name != null) { name = name.trim(); if (!entry.isDirectory() && name.endsWith(".class")) { loadClass(name, loader); } } } } /** * Add the class designated by the fully qualified class name provided to * the set of resolved classes if and only if it is approved by the Test * supplied. 
* * @param test the test used to determine if the class matches * @param fqn the fully qualified name of a class */ protected void addIfMatching(PackageScanFilter test, String fqn, Set<Class<?>> classes) { try { String externalName = fqn.substring(0, fqn.indexOf('.')).replace('/', '.'); Set<ClassLoader> set = getClassLoaders(); boolean found = false; for (ClassLoader classLoader : set) { log.debug("Testing that class " + externalName + " matches criteria [" + test + "] using classloader:" + classLoader); try { Class<?> type = classLoader.loadClass(externalName); log.debug("Loaded the class: " + type + " in classloader: " + classLoader); if (test.matches(type)) { log.debug("Found class: " + type + " which matches the filter in classloader: " + classLoader); classes.add(type); } found = true; break; } catch (ClassNotFoundException e) { log.debug("Cannot find class '" + fqn + "' in classloader: " + classLoader + ". Reason: " + e, e); } catch (NoClassDefFoundError e) { log.debug("Cannot find the class definition '" + fqn + "' in classloader: " + classLoader + ". Reason: " + e, e); } catch (LinkageError e) { log.debug("Cannot find the class definition '" + fqn + "' in classloader: " + classLoader + ". Reason: " + e, e); } catch (Throwable e) { log.severe("Cannot load class '"+fqn+"' in classloader: "+classLoader+". Reason: "+e, e); } } if (!found) { // use debug to avoid being noisy in logs log.debug("Cannot find class '" + fqn + "' in any classloaders: " + set); } } catch (Exception e) { log.warning("Cannot examine class '" + fqn + "' due to a " + e.getClass().getName() + " with message: " + e.getMessage(), e); } } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.crypto;

import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.EnumSet;
import java.util.Random;

import org.apache.hadoop.fs.ByteBufferReadable;
import org.apache.hadoop.fs.CanUnbuffer;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FSExceptionMessages;
import org.apache.hadoop.fs.HasEnhancedByteBufferAccess;
import org.apache.hadoop.fs.PositionedReadable;
import org.apache.hadoop.fs.ReadOption;
import org.apache.hadoop.fs.Seekable;
import org.apache.hadoop.fs.Syncable;
import org.apache.hadoop.io.ByteBufferPool;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.RandomDatum;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Shared test base for crypto stream implementations: subclasses supply the
 * concrete encrypting output stream and decrypting input stream via the two
 * abstract factory methods; the tests here write random data through the
 * output stream and verify every read mode (plain, positioned, byte-buffer,
 * seek, skip, unbuffer, ...) recovers it byte-for-byte.
 */
public abstract class CryptoStreamsTestBase {
  protected static final Logger LOG = LoggerFactory.getLogger(
      CryptoStreamsTestBase.class);

  // Codec under test; set by the concrete subclass.
  protected static CryptoCodec codec;
  // Fixed 16-byte key and IV shared by all tests (AES-128-sized).
  protected static final byte[] key = {0x01, 0x02, 0x03, 0x04, 0x05, 0x06,
          0x07, 0x08, 0x09, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16};
  protected static final byte[] iv = {0x01, 0x02, 0x03, 0x04, 0x05, 0x06,
          0x07, 0x08, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08};
  // Number of random key/value records used to build the test payload.
  protected static final int count = 10000;
  protected static int defaultBufferSize = 8192;
  protected static int smallBufferSize = 1024;
  // Payload bytes and their length; regenerated with a fresh random seed per test.
  private byte[] data;
  private int dataLen;

  @Before
  public void setUp() throws IOException {
    // Generate data
    final int seed = new Random().nextInt();
    final DataOutputBuffer dataBuf = new DataOutputBuffer();
    final RandomDatum.Generator generator = new RandomDatum.Generator(seed);
    for(int i = 0; i < count; ++i) {
      generator.next();
      final RandomDatum key = generator.getKey();
      final RandomDatum value = generator.getValue();

      key.write(dataBuf);
      value.write(dataBuf);
    }
    LOG.info("Generated " + count + " records");
    data = dataBuf.getData();
    dataLen = dataBuf.getLength();
  }

  /** Writes the whole payload to {@code out} and closes it (flushes the final crypto block). */
  protected void writeData(OutputStream out) throws Exception {
    out.write(data, 0, dataLen);
    out.close();
  }

  protected int getDataLen() {
    return dataLen;
  }

  /** Reads until len bytes are collected or EOF; returns the number actually read. */
  private int readAll(InputStream in, byte[] b, int off, int len)
      throws IOException {
    int n = 0;
    int total = 0;
    while (n != -1) {
      total += n;
      if (total >= len) {
        break;
      }
      n = in.read(b, off + total, len - total);
    }

    return total;
  }

  /** Positioned variant of readAll: reads from absolute position {@code total} onward. */
  private int preadAll(PositionedReadable in, byte[] b, int off, int len)
      throws IOException {
    int n = 0;
    int total = 0;
    while (n != -1) {
      total += n;
      if (total >= len) {
        break;
      }
      n = in.read(total, b, off + total, len - total);
    }

    return total;
  }

  /** Positioned-reads the full payload and asserts it matches the plaintext. */
  private void preadCheck(PositionedReadable in) throws Exception {
    byte[] result = new byte[dataLen];
    int n = preadAll(in, result, 0, dataLen);

    Assert.assertEquals(dataLen, n);
    byte[] expectedData = new byte[n];
    System.arraycopy(data, 0, expectedData, 0, n);
    Assert.assertArrayEquals(result, expectedData);
  }

  protected OutputStream getOutputStream(int bufferSize) throws IOException {
    return getOutputStream(bufferSize, key, iv);
  }

  /** Subclass hook: encrypting output stream for the given buffer size, key and IV. */
  protected abstract OutputStream getOutputStream(int bufferSize, byte[] key,
      byte[] iv) throws IOException;

  protected InputStream getInputStream(int bufferSize) throws IOException {
    return getInputStream(bufferSize, key, iv);
  }

  /** Subclass hook: decrypting input stream for the given buffer size, key and IV. */
  protected abstract InputStream getInputStream(int bufferSize, byte[] key,
      byte[] iv) throws IOException;

  /** Test crypto reading with different buffer size. */
  @Test(timeout=120000)
  public void testRead() throws Exception {
    OutputStream out = getOutputStream(defaultBufferSize);
    writeData(out);

    // Default buffer size
    InputStream in = getInputStream(defaultBufferSize);
    readCheck(in);
    in.close();

    // Small buffer size
    in = getInputStream(smallBufferSize);
    readCheck(in);
    in.close();
  }

  /** Reads the whole stream, asserts content matches, then asserts EOF returns -1. */
  private void readCheck(InputStream in) throws Exception {
    byte[] result = new byte[dataLen];
    int n = readAll(in, result, 0, dataLen);

    Assert.assertEquals(dataLen, n);
    byte[] expectedData = new byte[n];
    System.arraycopy(data, 0, expectedData, 0, n);
    Assert.assertArrayEquals(result, expectedData);

    // EOF
    n = in.read(result, 0, dataLen);
    Assert.assertEquals(n, -1);
  }

  /** Test crypto writing with different buffer size. */
  @Test(timeout = 120000)
  public void testWrite() throws Exception {
    // Default buffer size
    writeCheck(defaultBufferSize);

    // Small buffer size
    writeCheck(smallBufferSize);
  }

  /** Writes the payload and, for FSDataOutputStream, checks getPos() equals bytes written. */
  private void writeCheck(int bufferSize) throws Exception {
    OutputStream out = getOutputStream(bufferSize);
    writeData(out);

    if (out instanceof FSDataOutputStream) {
      Assert.assertEquals(((FSDataOutputStream) out).getPos(), getDataLen());
    }
  }

  /** Test crypto with different IV. */
  @Test(timeout=120000)
  public void testCryptoIV() throws Exception {
    byte[] iv1 = iv.clone();

    // Counter base: Long.MAX_VALUE
    setCounterBaseForIV(iv1, Long.MAX_VALUE);
    cryptoCheck(iv1);

    // Counter base: Long.MAX_VALUE - 1
    setCounterBaseForIV(iv1, Long.MAX_VALUE - 1);
    cryptoCheck(iv1);

    // Counter base: Integer.MAX_VALUE
    setCounterBaseForIV(iv1, Integer.MAX_VALUE);
    cryptoCheck(iv1);

    // Counter base: 0
    setCounterBaseForIV(iv1, 0);
    cryptoCheck(iv1);

    // Counter base: -1
    setCounterBaseForIV(iv1, -1);
    cryptoCheck(iv1);
  }

  /** Round-trips the payload through write/read with the given IV. */
  private void cryptoCheck(byte[] iv) throws Exception {
    OutputStream out = getOutputStream(defaultBufferSize, key, iv);
    writeData(out);

    InputStream in = getInputStream(defaultBufferSize, key, iv);
    readCheck(in);
    in.close();
  }

  // Writes counterBase into the last 8 bytes of the IV, big-endian — this is
  // the CTR-mode counter portion, so extreme values exercise counter overflow.
  private void setCounterBaseForIV(byte[] iv, long counterBase) {
    ByteBuffer buf = ByteBuffer.wrap(iv);
    buf.order(ByteOrder.BIG_ENDIAN);
    buf.putLong(iv.length - 8, counterBase);
  }

  /**
   * Test hflush/hsync of crypto output stream, and with different buffer size.
   */
  @Test(timeout=120000)
  public void testSyncable() throws IOException {
    syncableCheck();
  }

  /** Writes a third of the data, hflushes, verifies; writes the rest, hsyncs, verifies all. */
  private void syncableCheck() throws IOException {
    OutputStream out = getOutputStream(smallBufferSize);
    try {
      int bytesWritten = dataLen / 3;
      out.write(data, 0, bytesWritten);
      ((Syncable) out).hflush();

      InputStream in = getInputStream(defaultBufferSize);
      verify(in, bytesWritten, data);
      in.close();

      out.write(data, bytesWritten, dataLen - bytesWritten);
      ((Syncable) out).hsync();

      in = getInputStream(defaultBufferSize);
      verify(in, dataLen, data);
      in.close();
    } finally {
      out.close();
    }
  }

  /** Asserts the first bytesToVerify bytes of {@code in} equal expectedBytes. */
  private void verify(InputStream in, int bytesToVerify,
      byte[] expectedBytes) throws IOException {
    final byte[] readBuf = new byte[bytesToVerify];
    readAll(in, readBuf, 0, bytesToVerify);
    for (int i = 0; i < bytesToVerify; i++) {
      Assert.assertEquals(expectedBytes[i], readBuf[i]);
    }
  }

  /** Positioned readAll starting at absolute offset {@code pos}; does not move the stream cursor. */
  private int readAll(InputStream in, long pos, byte[] b, int off, int len)
      throws IOException {
    int n = 0;
    int total = 0;
    while (n != -1) {
      total += n;
      if (total >= len) {
        break;
      }
      n = ((PositionedReadable) in).read(pos + total, b, off + total,
          len - total);
    }

    return total;
  }

  /** Test positioned read. */
  @Test(timeout=120000)
  public void testPositionedRead() throws Exception {
    OutputStream out = getOutputStream(defaultBufferSize);
    writeData(out);

    InputStream in = getInputStream(defaultBufferSize);
    // Pos: 1/3 dataLen
    positionedReadCheck(in , dataLen / 3);

    // Pos: 1/2 dataLen
    positionedReadCheck(in, dataLen / 2);
    in.close();
  }

  /** Positioned-reads from {@code pos} to EOF and asserts content matches the plaintext tail. */
  private void positionedReadCheck(InputStream in, int pos) throws Exception {
    byte[] result = new byte[dataLen];
    int n = readAll(in, pos, result, 0, dataLen);

    Assert.assertEquals(dataLen, n + pos);
    byte[] readData = new byte[n];
    System.arraycopy(result, 0, readData, 0, n);
    byte[] expectedData = new byte[n];
    System.arraycopy(data, pos, expectedData, 0, n);
    Assert.assertArrayEquals(readData, expectedData);
  }

  /** Test read fully */
  @Test(timeout=120000)
  public void testReadFully() throws Exception {
    OutputStream out = getOutputStream(defaultBufferSize);
    writeData(out);

    InputStream in = getInputStream(defaultBufferSize);
    final int len1 = dataLen / 4;
    // Read len1 bytes
    byte[] readData = new byte[len1];
    readAll(in, readData, 0, len1);
    byte[] expectedData = new byte[len1];
    System.arraycopy(data, 0, expectedData, 0, len1);
    Assert.assertArrayEquals(readData, expectedData);

    // Pos: 1/3 dataLen
    readFullyCheck(in, dataLen / 3);

    // Read len1 bytes — sequential position must be unaffected by readFully
    readData = new byte[len1];
    readAll(in, readData, 0, len1);
    expectedData = new byte[len1];
    System.arraycopy(data, len1, expectedData, 0, len1);
    Assert.assertArrayEquals(readData, expectedData);

    // Pos: 1/2 dataLen
    readFullyCheck(in, dataLen / 2);

    // Read len1 bytes
    readData = new byte[len1];
    readAll(in, readData, 0, len1);
    expectedData = new byte[len1];
    System.arraycopy(data, 2 * len1, expectedData, 0, len1);
    Assert.assertArrayEquals(readData, expectedData);

    in.close();
  }

  /** readFully from pos to EOF must succeed; reading past EOF must raise EOFException. */
  private void readFullyCheck(InputStream in, int pos) throws Exception {
    byte[] result = new byte[dataLen - pos];
    ((PositionedReadable) in).readFully(pos, result);

    byte[] expectedData = new byte[dataLen - pos];
    System.arraycopy(data, pos, expectedData, 0, dataLen - pos);
    Assert.assertArrayEquals(result, expectedData);

    result = new byte[dataLen]; // Exceeds maximum length
    try {
      ((PositionedReadable) in).readFully(pos, result);
      Assert.fail("Read fully exceeds maximum length should fail.");
    } catch (EOFException e) {
      // expected: not enough bytes remain to fill the buffer
    }
  }

  /** Test seek to different position. */
  @Test(timeout=120000)
  public void testSeek() throws Exception {
    OutputStream out = getOutputStream(defaultBufferSize);
    writeData(out);

    InputStream in = getInputStream(defaultBufferSize);
    // Pos: 1/3 dataLen
    seekCheck(in, dataLen / 3);

    // Pos: 0
    seekCheck(in, 0);

    // Pos: 1/2 dataLen
    seekCheck(in, dataLen / 2);

    final long pos = ((Seekable) in).getPos();

    // Pos: -3 — must fail and must not move the position
    try {
      seekCheck(in, -3);
      Assert.fail("Seek to negative offset should fail.");
    } catch (EOFException e) {
      GenericTestUtils.assertExceptionContains(
          FSExceptionMessages.NEGATIVE_SEEK, e);
    }
    Assert.assertEquals(pos, ((Seekable) in).getPos());

    // Pos: dataLen + 3 — past EOF must fail and must not move the position
    try {
      seekCheck(in, dataLen + 3);
      Assert.fail("Seek after EOF should fail.");
    } catch (IOException e) {
      GenericTestUtils.assertExceptionContains("Cannot seek after EOF", e);
    }
    Assert.assertEquals(pos, ((Seekable) in).getPos());

    in.close();
  }

  /** Seeks to pos, reads to EOF, asserts the tail matches the plaintext. */
  private void seekCheck(InputStream in, int pos) throws Exception {
    byte[] result = new byte[dataLen];
    ((Seekable) in).seek(pos);
    int n = readAll(in, result, 0, dataLen);

    Assert.assertEquals(dataLen, n + pos);
    byte[] readData = new byte[n];
    System.arraycopy(result, 0, readData, 0, n);
    byte[] expectedData = new byte[n];
    System.arraycopy(data, pos, expectedData, 0, n);
    Assert.assertArrayEquals(readData, expectedData);
  }

  /** Test get position. */
  @Test(timeout=120000)
  public void testGetPos() throws Exception {
    OutputStream out = getOutputStream(defaultBufferSize);
    writeData(out);

    // Default buffer size
    InputStream in = getInputStream(defaultBufferSize);
    byte[] result = new byte[dataLen];
    int n1 = readAll(in, result, 0, dataLen / 3);
    Assert.assertEquals(n1, ((Seekable) in).getPos());

    int n2 = readAll(in, result, n1, dataLen - n1);
    Assert.assertEquals(n1 + n2, ((Seekable) in).getPos());
    in.close();
  }

  @Test(timeout=120000)
  public void testAvailable() throws Exception {
    OutputStream out = getOutputStream(defaultBufferSize);
    writeData(out);

    // available() must always report the bytes remaining to EOF
    InputStream in = getInputStream(defaultBufferSize);
    byte[] result = new byte[dataLen];
    int n1 = readAll(in, result, 0, dataLen / 3);
    Assert.assertEquals(in.available(), dataLen - n1);

    int n2 = readAll(in, result, n1, dataLen - n1);
    Assert.assertEquals(in.available(), dataLen - n1 - n2);
    in.close();
  }

  /** Test skip. */
  @Test(timeout=120000)
  public void testSkip() throws Exception {
    OutputStream out = getOutputStream(defaultBufferSize);
    writeData(out);

    // Default buffer size
    InputStream in = getInputStream(defaultBufferSize);
    byte[] result = new byte[dataLen];
    int n1 = readAll(in, result, 0, dataLen / 3);
    Assert.assertEquals(n1, ((Seekable) in).getPos());

    long skipped = in.skip(dataLen / 3);
    int n2 = readAll(in, result, 0, dataLen);
    Assert.assertEquals(dataLen, n1 + skipped + n2);
    byte[] readData = new byte[n2];
    System.arraycopy(result, 0, readData, 0, n2);
    byte[] expectedData = new byte[n2];
    System.arraycopy(data, dataLen - n2, expectedData, 0, n2);
    Assert.assertArrayEquals(readData, expectedData);

    try {
      skipped = in.skip(-3);
      Assert.fail("Skip Negative length should fail.");
    } catch (IllegalArgumentException e) {
      GenericTestUtils.assertExceptionContains("Negative skip length", e);
    }

    // Skip after EOF
    skipped = in.skip(3);
    Assert.assertEquals(skipped, 0);

    in.close();
  }

  /** Fills buf from bufPos via ByteBufferReadable.read and asserts content and position. */
  private void byteBufferReadCheck(InputStream in, ByteBuffer buf,
      int bufPos) throws Exception {
    buf.position(bufPos);
    int n = ((ByteBufferReadable) in).read(buf);
    Assert.assertEquals(bufPos + n, buf.position());
    byte[] readData = new byte[n];
    buf.rewind();
    buf.position(bufPos);
    buf.get(readData);
    byte[] expectedData = new byte[n];
    System.arraycopy(data, 0, expectedData, 0, n);
    Assert.assertArrayEquals(readData, expectedData);
  }

  /** Test byte buffer read with different buffer size. */
  @Test(timeout=120000)
  public void testByteBufferRead() throws Exception {
    OutputStream out = getOutputStream(defaultBufferSize);
    writeData(out);

    // Default buffer size, initial buffer position is 0
    InputStream in = getInputStream(defaultBufferSize);
    ByteBuffer buf = ByteBuffer.allocate(dataLen + 100);
    byteBufferReadCheck(in, buf, 0);
    in.close();

    // Default buffer size, initial buffer position is not 0
    in = getInputStream(defaultBufferSize);
    buf.clear();
    byteBufferReadCheck(in, buf, 11);
    in.close();

    // Small buffer size, initial buffer position is 0
    in = getInputStream(smallBufferSize);
    buf.clear();
    byteBufferReadCheck(in, buf, 0);
    in.close();

    // Small buffer size, initial buffer position is not 0
    in = getInputStream(smallBufferSize);
    buf.clear();
    byteBufferReadCheck(in, buf, 11);
    in.close();

    // Direct buffer, default buffer size, initial buffer position is 0
    in = getInputStream(defaultBufferSize);
    buf = ByteBuffer.allocateDirect(dataLen + 100);
    byteBufferReadCheck(in, buf, 0);
    in.close();

    // Direct buffer, default buffer size, initial buffer position is not 0
    in = getInputStream(defaultBufferSize);
    buf.clear();
    byteBufferReadCheck(in, buf, 11);
    in.close();

    // Direct buffer, small buffer size, initial buffer position is 0
    in = getInputStream(smallBufferSize);
    buf.clear();
    byteBufferReadCheck(in, buf, 0);
    in.close();

    // Direct buffer, small buffer size, initial buffer position is not 0
    in = getInputStream(smallBufferSize);
    buf.clear();
    byteBufferReadCheck(in, buf, 11);
    in.close();
  }

  // Interleaves sequential reads, seek, skip, positioned reads and
  // byte-buffer reads, checking getPos() stays consistent throughout.
  @Test(timeout=120000)
  public void testCombinedOp() throws Exception {
    OutputStream out = getOutputStream(defaultBufferSize);
    writeData(out);

    final int len1 = dataLen / 8;
    final int len2 = dataLen / 10;

    InputStream in = getInputStream(defaultBufferSize);
    // Read len1 data.
    byte[] readData = new byte[len1];
    readAll(in, readData, 0, len1);
    byte[] expectedData = new byte[len1];
    System.arraycopy(data, 0, expectedData, 0, len1);
    Assert.assertArrayEquals(readData, expectedData);

    long pos = ((Seekable) in).getPos();
    Assert.assertEquals(len1, pos);

    // Seek forward len2
    ((Seekable) in).seek(pos + len2);
    // Skip forward len2
    long n = in.skip(len2);
    Assert.assertEquals(len2, n);

    // Pos: 1/4 dataLen
    positionedReadCheck(in , dataLen / 4);

    // Pos should be len1 + len2 + len2
    pos = ((Seekable) in).getPos();
    Assert.assertEquals(len1 + len2 + len2, pos);

    // Read forward len1
    ByteBuffer buf = ByteBuffer.allocate(len1);
    int nRead = ((ByteBufferReadable) in).read(buf);
    Assert.assertEquals(nRead, buf.position());
    readData = new byte[nRead];
    buf.rewind();
    buf.get(readData);
    expectedData = new byte[nRead];
    System.arraycopy(data, (int)pos, expectedData, 0, nRead);
    Assert.assertArrayEquals(readData, expectedData);

    long lastPos = pos;
    // Pos should be lastPos + nRead
    pos = ((Seekable) in).getPos();
    Assert.assertEquals(lastPos + nRead, pos);

    // Pos: 1/3 dataLen
    positionedReadCheck(in , dataLen / 3);

    // Read forward len1
    readData = new byte[len1];
    readAll(in, readData, 0, len1);
    expectedData = new byte[len1];
    System.arraycopy(data, (int)pos, expectedData, 0, len1);
    Assert.assertArrayEquals(readData, expectedData);

    lastPos = pos;
    // Pos should be lastPos + len1
    pos = ((Seekable) in).getPos();
    Assert.assertEquals(lastPos + len1, pos);

    // Read forward len1
    buf = ByteBuffer.allocate(len1);
    nRead = ((ByteBufferReadable) in).read(buf);
    Assert.assertEquals(nRead, buf.position());
    readData = new byte[nRead];
    buf.rewind();
    buf.get(readData);
    expectedData = new byte[nRead];
    System.arraycopy(data, (int)pos, expectedData, 0, nRead);
    Assert.assertArrayEquals(readData, expectedData);

    lastPos = pos;
    // Pos should be lastPos + nRead
    pos = ((Seekable) in).getPos();
    Assert.assertEquals(lastPos + nRead, pos);

    // ByteBuffer read after EOF
    ((Seekable) in).seek(dataLen);
    buf.clear();
    n = ((ByteBufferReadable) in).read(buf);
    Assert.assertEquals(n, -1);

    in.close();
  }

  @Test(timeout=120000)
  public void testSeekToNewSource() throws Exception {
    OutputStream out = getOutputStream(defaultBufferSize);
    writeData(out);

    InputStream in = getInputStream(defaultBufferSize);

    final int len1 = dataLen / 8;
    byte[] readData = new byte[len1];
    readAll(in, readData, 0, len1);

    // Pos: 1/3 dataLen
    seekToNewSourceCheck(in, dataLen / 3);

    // Pos: 0
    seekToNewSourceCheck(in, 0);

    // Pos: 1/2 dataLen
    seekToNewSourceCheck(in, dataLen / 2);

    // Pos: -3
    try {
      seekToNewSourceCheck(in, -3);
      Assert.fail("Seek to negative offset should fail.");
    } catch (IllegalArgumentException e) {
      GenericTestUtils.assertExceptionContains("Cannot seek to negative " +
          "offset", e);
    }

    // Pos: dataLen + 3
    try {
      seekToNewSourceCheck(in, dataLen + 3);
      Assert.fail("Seek after EOF should fail.");
    } catch (IOException e) {
      GenericTestUtils.assertExceptionContains("Attempted to read past " +
          "end of file", e);
    }

    in.close();
  }

  /** seekToNewSource to targetPos, then read to EOF and assert the tail matches. */
  private void seekToNewSourceCheck(InputStream in, int targetPos)
      throws Exception {
    byte[] result = new byte[dataLen];
    ((Seekable) in).seekToNewSource(targetPos);
    int n = readAll(in, result, 0, dataLen);

    Assert.assertEquals(dataLen, n + targetPos);
    byte[] readData = new byte[n];
    System.arraycopy(result, 0, readData, 0, n);
    byte[] expectedData = new byte[n];
    System.arraycopy(data, targetPos, expectedData, 0, n);
    Assert.assertArrayEquals(readData, expectedData);
  }

  /** Minimal pool for the enhanced byte-buffer API: always allocates a fresh direct buffer. */
  private ByteBufferPool getBufferPool() {
    return new ByteBufferPool() {
      @Override
      public ByteBuffer getBuffer(boolean direct, int length) {
        return ByteBuffer.allocateDirect(length);
      }

      @Override
      public void putBuffer(ByteBuffer buffer) {
      }
    };
  }

  @Test(timeout=120000)
  public void testHasEnhancedByteBufferAccess() throws Exception {
    OutputStream out = getOutputStream(defaultBufferSize);
    writeData(out);

    InputStream in = getInputStream(defaultBufferSize);
    final int len1 = dataLen / 8;
    // ByteBuffer size is len1
    ByteBuffer buffer = ((HasEnhancedByteBufferAccess) in).read(
        getBufferPool(), len1, EnumSet.of(ReadOption.SKIP_CHECKSUMS));
    int n1 = buffer.remaining();
    byte[] readData = new byte[n1];
    buffer.get(readData);
    byte[] expectedData = new byte[n1];
    System.arraycopy(data, 0, expectedData, 0, n1);
    Assert.assertArrayEquals(readData, expectedData);
    ((HasEnhancedByteBufferAccess) in).releaseBuffer(buffer);

    // Read len1 bytes
    readData = new byte[len1];
    readAll(in, readData, 0, len1);
    expectedData = new byte[len1];
    System.arraycopy(data, n1, expectedData, 0, len1);
    Assert.assertArrayEquals(readData, expectedData);

    // ByteBuffer size is len1
    buffer = ((HasEnhancedByteBufferAccess) in).read(
        getBufferPool(), len1, EnumSet.of(ReadOption.SKIP_CHECKSUMS));
    int n2 = buffer.remaining();
    readData = new byte[n2];
    buffer.get(readData);
    expectedData = new byte[n2];
    System.arraycopy(data, n1 + len1, expectedData, 0, n2);
    Assert.assertArrayEquals(readData, expectedData);
    ((HasEnhancedByteBufferAccess) in).releaseBuffer(buffer);

    in.close();
  }

  /** Test unbuffer. */
  @Test(timeout=120000)
  public void testUnbuffer() throws Exception {
    OutputStream out = getOutputStream(smallBufferSize);
    writeData(out);

    // Test buffered read
    try (InputStream in = getInputStream(smallBufferSize)) {
      // Test unbuffer after buffered read
      readCheck(in);
      ((CanUnbuffer) in).unbuffer();

      if (in instanceof Seekable) {
        // Test buffered read again after unbuffer
        // Must seek to the beginning first
        ((Seekable) in).seek(0);
        readCheck(in);
      }

      // Test close after unbuffer
      ((CanUnbuffer) in).unbuffer();
      // The close will be called when exiting this try-with-resource block
    }

    // Test pread
    try (InputStream in = getInputStream(smallBufferSize)) {
      if (in instanceof PositionedReadable) {
        PositionedReadable pin = (PositionedReadable) in;

        // Test unbuffer after pread
        preadCheck(pin);
        ((CanUnbuffer) in).unbuffer();

        // Test pread again after unbuffer
        preadCheck(pin);

        // Test close after unbuffer
        ((CanUnbuffer) in).unbuffer();
        // The close will be called when exiting this try-with-resource block
      }
    }
  }
}
package mat.server.service.impl; import java.io.File; import java.io.IOException; import java.io.StringReader; import java.sql.Timestamp; import java.text.DecimalFormat; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.zip.ZipException; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import javax.xml.xpath.XPathConstants; import javax.xml.xpath.XPathExpressionException; import javax.xml.xpath.XPathFactory; import org.apache.commons.io.output.ByteArrayOutputStream; import org.apache.commons.lang3.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.poi.hssf.usermodel.HSSFWorkbook; import org.apache.tools.zip.ZipOutputStream; import org.exolab.castor.mapping.MappingException; import org.exolab.castor.xml.MarshalException; import org.exolab.castor.xml.ValidationException; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; import org.springframework.stereotype.Service; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.InputSource; import mat.CQLFormatter; import mat.client.measure.ManageCompositeMeasureDetailModel; import mat.client.measure.ManageMeasureSearchModel.Result; import mat.dao.ListObjectDAO; import mat.dao.QualityDataSetDAO; import mat.dao.clause.CQLLibraryDAO; import mat.dao.clause.CQLLibraryExportDAO; import mat.dao.clause.MeasureDAO; import mat.dao.clause.MeasureExportDAO; import mat.dao.clause.MeasureXMLDAO; import mat.model.ListObject; import mat.model.MatValueSet; import 
mat.model.QualityDataSetDTO; import mat.model.clause.CQLLibrary; import mat.model.clause.CQLLibraryExport; import mat.model.clause.ComponentMeasure; import mat.model.clause.MeasureExport; import mat.model.clause.MeasureXML; import mat.model.cql.CQLModel; import mat.server.CQLUtilityClass; import mat.server.bonnie.BonnieServiceImpl; import mat.server.bonnie.api.result.BonnieCalculatedResult; import mat.server.export.ExportResult; import mat.server.hqmf.Generator; import mat.server.hqmf.HQMFGeneratorFactory; import mat.server.humanreadable.HumanReadableGenerator; import mat.server.service.MeasurePackageService; import mat.server.service.SimpleEMeasureService; import mat.server.util.CQLUtil; import mat.server.util.CompositeMeasureDetailUtil; import mat.server.util.XmlProcessor; import mat.shared.ConstantMessages; import mat.shared.DateUtility; import mat.shared.FileNameUtility; import mat.shared.SaveUpdateCQLResult; import mat.shared.StringUtility; import mat.shared.bonnie.error.BonnieBadParameterException; import mat.shared.bonnie.error.BonnieDoesNotExistException; import mat.shared.bonnie.error.BonnieNotFoundException; import mat.shared.bonnie.error.BonnieServerException; import mat.shared.bonnie.error.BonnieUnauthorizedException; @Service public class SimpleEMeasureServiceImpl implements SimpleEMeasureService { private static final String conversionFile1 = "xsl/New_HQMF.xsl"; private static final String conversionFile2 = "xsl/mat_narrGen.xsl"; private static final String conversionFileHtml = "xsl/eMeasure.xsl"; private static String userDefinedOID = ConstantMessages.USER_DEFINED_QDM_OID; private static final String XPATH_ELEMENTLOOKUP_QDM = "/measure/elementLookUp/qdm[not(@oid='" + userDefinedOID +"')]"; private static final String XPATH_SUPPLEMENTDATA_ELEMENTREF = "/measure/supplementalDataElements/elementRef/@id"; private static final String XPATH_ALL_GROUPED_ELEMENTREF_ID = "/measure/measureGrouping/group/clause//elementRef[not(@id = preceding:: 
clause//elementRef/@id)]/@id"; private static final String XPATH_ALL_GROUPED_ATTRIBUTES_UUID = "/measure/measureGrouping/group/clause//attribute[not(@qdmUUID = preceding:: clause//attribute/@qdmUUID)]/@qdmUUID"; private static final String XPATH_ALL_SUBTREE_ELEMENTREF_ID = "/measure/subTreeLookUp/subTree//elementRef[not(@id = preceding:: subTree//elementRef/@id)]/@id"; private static final String XPATH_ALL_SUBTREE_ATTRIBUTES_UUID = "/measure/subTreeLookUp/subTree//attribute[not(@qdmUUID = preceding:: subTree//attribute/@qdmUUID)]/@qdmUUID"; private static final Log LOGGER = LogFactory.getLog(SimpleEMeasureServiceImpl.class); @Autowired private MeasureDAO measureDAO; @Autowired private MeasureXMLDAO measureXMLDAO; @Autowired private MeasureExportDAO measureExportDAO; @Autowired private CQLLibraryDAO cqlLibraryDAO; @Autowired private CQLLibraryExportDAO cqlLibraryExportDAO; @Autowired private ApplicationContext context; @Autowired private QualityDataSetDAO qualityDataSetDAO; @Autowired private ListObjectDAO listObjectDAO; @Autowired private HQMFGeneratorFactory hqmfGeneratoryFactory; @Autowired private BonnieServiceImpl bonnieServiceImpl; @Autowired private CompositeMeasureDetailUtil compositeMeasureDetailUtil; /** MeasureExportDAO. 
**/ private HSSFWorkbook wkbk = null; @Autowired private HumanReadableGenerator humanReadableGenerator; @Override public final ExportResult exportMeasureIntoSimpleXML(final String measureId, final String xmlString, final List<MatValueSet> matValueSets) throws Exception { ExportResult result = new ExportResult(); DocumentBuilderFactory documentBuilderFactory = XMLUtility.getInstance().buildDocumentBuilderFactory(); DocumentBuilder docBuilder = documentBuilderFactory.newDocumentBuilder(); InputSource oldXmlstream = new InputSource(new StringReader(xmlString)); Document originalDoc = docBuilder.parse(oldXmlstream); javax.xml.xpath.XPath xPath = XPathFactory.newInstance().newXPath(); List<String> qdmRefID = new ArrayList<String>(); List<String> supplRefID = new ArrayList<String>(); List<QualityDataSetDTO> masterRefID = new ArrayList<QualityDataSetDTO>(); transform(originalDoc); NodeList allGroupedElementRefIDs = (NodeList) xPath.evaluate(XPATH_ALL_GROUPED_ELEMENTREF_ID, originalDoc.getDocumentElement(), XPathConstants.NODESET); NodeList allGroupedAttributesUUIDs = (NodeList) xPath.evaluate(XPATH_ALL_GROUPED_ATTRIBUTES_UUID, originalDoc.getDocumentElement(), XPathConstants.NODESET); NodeList allQDMRefIDs = (NodeList) xPath.evaluate(XPATH_ELEMENTLOOKUP_QDM, originalDoc.getDocumentElement(), XPathConstants.NODESET); NodeList allSupplementIDs = (NodeList) xPath.evaluate(XPATH_SUPPLEMENTDATA_ELEMENTREF, originalDoc.getDocumentElement(), XPathConstants.NODESET); NodeList allSubTreeElementRefIDs = (NodeList) xPath.evaluate(XPATH_ALL_SUBTREE_ELEMENTREF_ID, originalDoc.getDocumentElement(), XPathConstants.NODESET); NodeList allSubTreeAttributeIDs = (NodeList) xPath.evaluate(XPATH_ALL_SUBTREE_ATTRIBUTES_UUID, originalDoc.getDocumentElement(), XPathConstants.NODESET); for (int i = 0; i < allQDMRefIDs.getLength(); i++) { Node newNode = allQDMRefIDs.item(i); QualityDataSetDTO dataSetDTO = new QualityDataSetDTO(); 
dataSetDTO.setId(newNode.getAttributes().getNamedItem("id").getNodeValue().toString()); dataSetDTO.setUuid(newNode.getAttributes().getNamedItem("uuid").getNodeValue().toString()); masterRefID.add(dataSetDTO); } findAndAddDTO(allGroupedElementRefIDs, masterRefID, qdmRefID); findAndAddDTO(allGroupedAttributesUUIDs, masterRefID, qdmRefID); findAndAddDTO(allSubTreeElementRefIDs, masterRefID, qdmRefID); findAndAddDTO(allSubTreeAttributeIDs, masterRefID, qdmRefID); Set<String> uniqueRefIds = new HashSet<String>(qdmRefID); qdmRefID = new ArrayList<String>(uniqueRefIds); findAndAddDTO(allSupplementIDs, masterRefID, supplRefID); wkbk = createEMeasureXLS(measureId, qdmRefID, supplRefID, matValueSets); result.wkbkbarr = getHSSFWorkbookBytes(wkbk); wkbk = null; return result; } private static String transform(Node node) { ByteArrayOutputStream arrayOutputStream = new ByteArrayOutputStream(); TransformerFactory transformerFactory = XMLUtility.getInstance().buildTransformerFactory(); DOMSource source = new DOMSource(node); StreamResult result = new StreamResult(arrayOutputStream); try { transformerFactory.newTransformer().transform(source, result); } catch (TransformerException e) { e.printStackTrace(); } return arrayOutputStream.toString(); } /** * Find and add dto. * * @param nodeList * - NodeList. * @param masterRefID * - List of QualityDataSetDTO. * @param finalIdList * - List of String. * */ private void findAndAddDTO(final NodeList nodeList, final List<QualityDataSetDTO> masterRefID, final List<String> finalIdList) { for (int i = 0; i < nodeList.getLength(); i++) { Node idNode = nodeList.item(i); String idNodeValue = idNode.getNodeValue(); for (QualityDataSetDTO dataSetDTO : masterRefID) { if (dataSetDTO.getUuid().equalsIgnoreCase(idNodeValue)) { finalIdList.add(dataSetDTO.getId()); } } } } /** * Gets the measure name. * * @param measureId * - String. * @return Measure. 
* */ private mat.model.clause.Measure getMeasureName(final String measureId) { MeasurePackageService measureService = (MeasurePackageService) context.getBean("measurePackageService"); mat.model.clause.Measure measure = measureService.getById(measureId); return measure; } /* * (non-Javadoc) * * @see mat.server.service.SimpleEMeasureService#getSimpleXML(java.lang.String) */ @Override public final ExportResult getSimpleXML(final String measureId) throws Exception { mat.model.clause.Measure measure = measureDAO.find(measureId); MeasureExport measureExport = getMeasureExport(measureId); if (measureExport == null) { return null; } ExportResult result = new ExportResult(); result.measureName = measure.getaBBRName(); result.export = measureExport.getSimpleXML(); return result; } @Override public final ExportResult getCQLLibraryFile(final String measureId) throws Exception { MeasureExport measureExport = getMeasureExport(measureId); String simpleXML = measureExport.getSimpleXML(); CQLModel cqlModel = CQLUtilityClass.getCQLModelFromXML(simpleXML); // get the name from the simple xml String xPathName = "/measure/cqlLookUp[1]/library[1]"; XmlProcessor xmlProcessor = new XmlProcessor(simpleXML); Node cqlFileName = xmlProcessor.findNode(xmlProcessor.getOriginalDoc(), xPathName); String cqlFileString = CQLUtilityClass.getCqlString(cqlModel, "").toString(); if (cqlFileString != null && !cqlFileString.isEmpty()) { CQLFormatter formatter = new CQLFormatter(); cqlFileString = formatter.format(cqlFileString); } ExportResult result = new ExportResult(); result.measureName = measureExport.getMeasure().getaBBRName(); result.export = cqlFileString; // if the cql file name is blank(before 4.5 measures), then we'll give the file // name as // the measure name. 
if (cqlFileName == null) { result.setCqlLibraryName(result.measureName); } else { result.setCqlLibraryName(cqlModel.getLibraryName() + "-" + cqlModel.getVersionUsed()); } getIncludedCQLLibs(result, xmlProcessor); return result; } private void getIncludedCQLLibs(ExportResult result, XmlProcessor xmlProcessor) throws XPathExpressionException { String xPathForIncludedLibs = "//allUsedCQLLibs/lib[not( preceding::lib/@id =@id)]"; NodeList includedCQLLibNodes = xmlProcessor.findNodeList(xmlProcessor.getOriginalDoc(), xPathForIncludedLibs); for (int i = 0; i < includedCQLLibNodes.getLength(); i++) { Node libNode = includedCQLLibNodes.item(i); if(!isComposite(libNode)) { String libId = libNode.getAttributes().getNamedItem("id").getNodeValue(); CQLLibrary cqlLibrary = this.cqlLibraryDAO.find(libId); CQLLibraryExport cqlLibraryExport = cqlLibraryExportDAO.findByLibraryId(cqlLibrary.getId()); String includeCqlXMLString = new String(cqlLibrary.getCQLByteArray()); if(cqlLibraryExport == null) { cqlLibraryExport = new CQLLibraryExport(); cqlLibraryExport.setCqlLibrary(cqlLibrary); } if(cqlLibraryExport.getCql() == null) { String cqlFileString = CQLUtilityClass.getCqlString(CQLUtilityClass.getCQLModelFromXML(includeCqlXMLString), ""); try { CQLFormatter formatter = new CQLFormatter(); cqlFileString = formatter.format(cqlFileString); } catch (IOException e) { e.printStackTrace(); } cqlLibraryExport.setCql(cqlFileString); cqlLibraryExportDAO.save(cqlLibraryExport); } ExportResult includeResult = new ExportResult(); includeResult.export = cqlLibraryExport.getCql(); String libName = libNode.getAttributes().getNamedItem("name").getNodeValue(); String libVersion = libNode.getAttributes().getNamedItem("version").getNodeValue(); includeResult.setCqlLibraryName(libName + "-" + libVersion); result.includedCQLExports.add(includeResult); } } } @Override public final ExportResult getJSONFile(final String measureId) throws Exception { MeasureExport measureExport = getMeasureExport(measureId); 
String measureSimpleXML = measureExport.getSimpleXML(); XmlProcessor xmlProcessor = new XmlProcessor(measureSimpleXML); CQLModel cqlModel = CQLUtilityClass.getCQLModelFromXML(measureSimpleXML); String cqlFileString = CQLUtilityClass.getCqlString(cqlModel, ""); ExportResult result = new ExportResult(); result.measureName = measureExport.getMeasure().getaBBRName(); // if the cqlFile String is blank, don't even parse it. if (!cqlFileString.isEmpty()) { SaveUpdateCQLResult jsonResult = CQLUtil.generateELM(cqlModel, cqlLibraryDAO); result.export = jsonResult.getJsonString(); result.setCqlLibraryName(cqlModel.getLibraryName() + "-" + cqlModel.getVersionUsed()); } else { result.export = ""; result.measureName = measureExport.getMeasure().getaBBRName(); result.setCqlLibraryName(result.measureName); } getIncludedCQLJSONs(result, xmlProcessor); return result; } private void getIncludedCQLJSONs(ExportResult result, XmlProcessor xmlProcessor) throws XPathExpressionException { String xPathForIncludedLibs = "//allUsedCQLLibs/lib[not( preceding::lib/@id =@id)]"; NodeList includedCQLLibNodes = xmlProcessor.findNodeList(xmlProcessor.getOriginalDoc(), xPathForIncludedLibs); for (int i = 0; i < includedCQLLibNodes.getLength(); i++) { Node libNode = includedCQLLibNodes.item(i); if(!isComposite(libNode)) { String libId = libNode.getAttributes().getNamedItem("id").getNodeValue(); CQLLibrary cqlLibrary = this.cqlLibraryDAO.find(libId); CQLLibraryExport cqlLibraryExport = cqlLibraryExportDAO.findByLibraryId(cqlLibrary.getId()); String includeCqlXMLString = new String(cqlLibrary.getCQLByteArray()); CQLModel cqlModel = CQLUtilityClass.getCQLModelFromXML(includeCqlXMLString); if(cqlLibraryExport == null) { cqlLibraryExport = new CQLLibraryExport(); cqlLibraryExport.setCqlLibrary(cqlLibrary); } if(cqlLibraryExport.getJson() == null) { SaveUpdateCQLResult jsonResult = CQLUtil.generateELM(cqlModel, cqlLibraryDAO); cqlLibraryExport.setJson(jsonResult.getJsonString()); 
cqlLibraryExportDAO.save(cqlLibraryExport); } ExportResult includeResult = new ExportResult(); includeResult.export = cqlLibraryExport.getJson(); String libName = libNode.getAttributes().getNamedItem("name").getNodeValue(); String libVersion = libNode.getAttributes().getNamedItem("version").getNodeValue(); includeResult.setCqlLibraryName(libName + "-" + libVersion); result.includedCQLExports.add(includeResult); } } } @Override public final ExportResult getELMFile(final String measureId) throws Exception { MeasureExport measureExport = getMeasureExport(measureId); String measureSimpleXML = measureExport.getSimpleXML(); XmlProcessor xmlProcessor = new XmlProcessor(measureSimpleXML); CQLModel cqlModel = CQLUtilityClass.getCQLModelFromXML(measureSimpleXML); String cqlFileString = CQLUtilityClass.getCqlString(cqlModel, ""); ExportResult result = new ExportResult(); result.measureName = measureExport.getMeasure().getaBBRName(); // if the cqlFile String is blank, don't even parse it. if (!cqlFileString.isEmpty()) { SaveUpdateCQLResult elmResult = CQLUtil.generateELM(cqlModel, cqlLibraryDAO); result.export = elmResult.getElmString(); result.setCqlLibraryName(cqlModel.getLibraryName() + "-" + cqlModel.getVersionUsed()); } else { result.export = ""; result.measureName = measureExport.getMeasure().getaBBRName(); result.setCqlLibraryName(result.measureName); } getIncludedCQLELMs(result, xmlProcessor); return result; } private void getIncludedCQLELMs(ExportResult result, XmlProcessor xmlProcessor) throws XPathExpressionException { String xPathForIncludedLibs = "//allUsedCQLLibs/lib[not( preceding::lib/@id =@id)]"; NodeList includedCQLLibNodes = xmlProcessor.findNodeList(xmlProcessor.getOriginalDoc(), xPathForIncludedLibs); for (int i = 0; i < includedCQLLibNodes.getLength(); i++) { Node libNode = includedCQLLibNodes.item(i); if(!isComposite(libNode)) { String libId = libNode.getAttributes().getNamedItem("id").getNodeValue(); CQLLibrary cqlLibrary = 
this.cqlLibraryDAO.find(libId); CQLLibraryExport cqlLibraryExport = cqlLibraryExportDAO.findByLibraryId(cqlLibrary.getId()); String includeCqlXMLString = new String(cqlLibrary.getCQLByteArray()); CQLModel cqlModel = CQLUtilityClass.getCQLModelFromXML(includeCqlXMLString); if(cqlLibraryExport == null) { cqlLibraryExport = new CQLLibraryExport(); cqlLibraryExport.setCqlLibrary(cqlLibrary); } if(cqlLibraryExport.getElm() == null) { SaveUpdateCQLResult elmResult = CQLUtil.generateELM(cqlModel, cqlLibraryDAO); cqlLibraryExport.setElm(elmResult.getElmString()); cqlLibraryExportDAO.save(cqlLibraryExport); } ExportResult includeResult = new ExportResult(); includeResult.export = cqlLibraryExport.getElm(); String libName = libNode.getAttributes().getNamedItem("name").getNodeValue(); String libVersion = libNode.getAttributes().getNamedItem("version").getNodeValue(); includeResult.setCqlLibraryName(libName + "-" + libVersion); result.includedCQLExports.add(includeResult); } } } private boolean isComposite(Node libNode) { return libNode.getAttributes().getNamedItem("isComponent") != null && ("true").equals(libNode.getAttributes().getNamedItem("isComponent").getNodeValue()); } /** * *. * * @param measureId * - String. * @return ExportResult. * @throws Exception * - Exception. 
*/
@Override
public final ExportResult getHQMFForv3Measure(final String measureId) throws Exception {
    MeasureExport measureExport = getMeasureExport(measureId);
    ExportResult result = new ExportResult();
    result.measureName = measureExport.getMeasure().getaBBRName();
    result.export = getHQMFForv3MeasureString(measureId, measureExport);
    return result;
}

/**
 * Returns the v3 HQMF for the measure, generating and persisting it on the
 * export row the first time it is requested.
 *
 * @param measureId
 *            - String.
 * @return ExportResult with the (possibly cached) HQMF.
 */
public final ExportResult createOrGetHQMFForv3Measure(final String measureId) {
    MeasureExport measureExport = getMeasureExport(measureId);
    ExportResult result = new ExportResult();
    result.measureName = measureExport.getMeasure().getaBBRName();
    if(measureExport.getHqmf() == null) {
        measureExport.setHqmf(getHQMFForv3MeasureString(measureId, measureExport));
        measureExportDAO.save(measureExport);
    }
    result.export = measureExport.getHqmf();
    return result;
}

/**
 * Produces the v3 HQMF XML by applying the two conversion stylesheets
 * (New_HQMF.xsl, then mat_narrGen.xsl) to the simple XML.
 */
private String getHQMFForv3MeasureString(final String measureId, final MeasureExport measureExport) {
    String tempXML = XMLUtility.getInstance().applyXSL(measureExport.getSimpleXML(),
            XMLUtility.getInstance().getXMLResource(conversionFile1));
    String eMeasureXML = XMLUtility.getInstance().applyXSL(tempXML,
            XMLUtility.getInstance().getXMLResource(conversionFile2));
    return eMeasureXML;
}

/*
 * (non-Javadoc)
 *
 * @see
 * mat.server.service.SimpleEMeasureService#getEMeasureHTML(java.lang.String)
 */
@Override
public final ExportResult getEMeasureHTML(final String measureId) throws Exception {
    ExportResult result = getHQMFForv3Measure(measureId);
    String html = emeasureXMLToEmeasureHTML(result.export, getMeasureExport(measureId));
    result.export = html;
    return result;
}

/**
 * Returns the human-readable HTML for a v3 measure, generating and persisting
 * it on the export row the first time it is requested.
 */
@Override
public final ExportResult createOrGetEMeasureHTML(final String measureId) throws Exception {
    MeasureExport measureExport = getMeasureExport(measureId);
    ExportResult result = createOrGetHQMFForv3Measure(measureId);
    if(measureExport.getHumanReadable() == null) {
        // NOTE(review): getMeasureExport(measureId) is fetched a second time here
        // and the callee also saves it — presumably the same persistence-context
        // instance, but confirm; otherwise this double save touches two entities.
        measureExport.setHumanReadable(createOrGetEmeasureXMLToEmeasureHTML(result.export, getMeasureExport(measureId)));
        measureExportDAO.save(measureExport);
    }
    String html = measureExport.getHumanReadable();
    result.export = html;
    return result;
}

/**
 * Generates the human-readable HTML for a measure without caching it.
 *
 * @param measureId
 *            - String.
 * @param measureVersionNumber
 *            version string shown in the rendered document.
 * @return ExportResult with the HTML.
 */
@Override
public final ExportResult getHumanReadable(final String measureId, final String measureVersionNumber)
        throws Exception {
    MeasureExport measureExport = getMeasureExport(measureId);
    String emeasureHTMLStr = getHumanReadableForMeasure(measureId, measureExport.getSimpleXML(),
            measureVersionNumber, measureExport);
    ExportResult exportResult = new ExportResult();
    exportResult.export = emeasureHTMLStr;
    exportResult.measureName = measureExport.getMeasure().getaBBRName();
    return exportResult;
}

/**
 * Returns the human-readable HTML for a measure, generating and persisting it
 * on the export row the first time it is requested.
 */
@Override
public final ExportResult createOrGetHumanReadable(final String measureId, final String measureVersionNumber)
        throws Exception {
    MeasureExport measureExport = getMeasureExport(measureId);
    if(measureExport.getHumanReadable() == null) {
        measureExport.setHumanReadable(getHumanReadableForMeasure(measureId, measureExport.getSimpleXML(),
                measureVersionNumber, measureExport));
        measureExportDAO.save(measureExport);
    }
    ExportResult exportResult = new ExportResult();
    exportResult.export = measureExport.getHumanReadable();
    exportResult.measureName = measureExport.getMeasure().getaBBRName();
    return exportResult;
}

/**
 * Emeasure xml to emeasure html.
 *
 * @param emeasureXMLStr
 *            - String.
 * @return String.
 * */
private String createOrGetEmeasureXMLToEmeasureHTML(final String emeasureXMLStr, final MeasureExport measureExport) {
    if(measureExport.getHumanReadable() == null) {
        String html = XMLUtility.getInstance().applyXSL(emeasureXMLStr,
                XMLUtility.getInstance().getXMLResource(conversionFileHtml));
        measureExport.setHumanReadable(html);
        measureExportDAO.save(measureExport);
    }
    return measureExport.getHumanReadable();
}

// NOTE(review): the measureExport parameter is unused here; kept for signature
// symmetry with createOrGetEmeasureXMLToEmeasureHTML.
private String emeasureXMLToEmeasureHTML(final String emeasureXMLStr, final MeasureExport measureExport) {
    return XMLUtility.getInstance().applyXSL(emeasureXMLStr,
            XMLUtility.getInstance().getXMLResource(conversionFileHtml));
}

/*
 * (non-Javadoc)
 *
 * @see
 * mat.server.service.SimpleEMeasureService#getHumanReadableForNode(java.lang.
 * String, java.lang.String)
 */
@Override
public ExportResult getHumanReadableForNode(final String measureId, final String populationSubXML)
        throws Exception {
    ExportResult result = new ExportResult();
    MeasureXML measureExport = measureXMLDAO.findForMeasure(measureId);
    String measureXML = measureExport.getMeasureXMLAsString();
    String html = humanReadableGenerator.generateHTMLForPopulationOrSubtree(measureId, populationSubXML,
            measureXML, cqlLibraryDAO);
    result.export = html;
    return result;
}

/**
 * Injects the Spring application context (used to look up the measure package
 * service bean).
 *
 * @param ctx
 *            - ApplicationContext.
 */
public void setApplicationContext(final ApplicationContext ctx) {
    this.context = ctx;
}

/**
 * Creates the e measure xls.
 *
 * @param measureId
 *            - String.
 * @param allQDMs
 *            - List.
 * @param supplementalQDMS
 *            - List.
 * @param matValueSets
 *            - List.
 * @return HSSFWorkbook
 * @throws Exception
 *             - Exception.
** */ public final HSSFWorkbook createEMeasureXLS(final String measureId, final List<String> allQDMs, final List<String> supplementalQDMS, final List<MatValueSet> matValueSets) throws Exception { CodeListXLSGenerator clgen = new CodeListXLSGenerator(); return clgen.getXLS(getMeasureName(measureId), allQDMs, qualityDataSetDAO, listObjectDAO, supplementalQDMS, matValueSets); } /** * Creates the error e measure xls. * * @return HSSFWorkbook * @throws Exception * - Exception. * */ public final HSSFWorkbook createErrorEMeasureXLS() throws Exception { CodeListXLSGenerator clgen = new CodeListXLSGenerator(); return clgen.getErrorXLS(); } /* * (non-Javadoc) * * @see * mat.server.service.SimpleEMeasureService#getEMeasureXLS(java.lang.String) */ @Override public final ExportResult getEMeasureXLS(final String measureId) throws Exception { ExportResult result = new ExportResult(); result.measureName = getMeasureName(measureId).getaBBRName(); result.packageDate = DateUtility.convertDateToStringNoTime2(getMeasureName(measureId).getValueSetDate()); MeasureExport me = getMeasureExport(measureId); if (me.getCodeList() == null) { byte[] codes = getHSSFWorkbookBytes(createErrorEMeasureXLS()); me.setCodeListBarr(codes); measureExportDAO.save(me); result.wkbkbarr = codes; } else { result.wkbkbarr = me.getCodeListBarr(); } return result; } /* * (non-Javadoc) * * @see * mat.server.service.SimpleEMeasureService#getValueSetXLS(java.lang.String) */ @Override public final ExportResult getValueSetXLS(final String valueSetId) throws Exception { ExportResult result = new ExportResult(); ListObject lo = listObjectDAO.find(valueSetId); ValueSetXLSGenerator vsxg = new ValueSetXLSGenerator(); HSSFWorkbook workBook = null; try { workBook = vsxg.getXLS(valueSetId, lo); } catch (Exception e) { LOGGER.error(e.getMessage()); workBook = vsxg.getErrorXLS(); } result.wkbkbarr = vsxg.getHSSFWorkbookBytes(workBook); result.valueSetName = lo.getName(); result.lastModifiedDate = lo.getLastModified() != null ? 
DateUtility.convertDateToStringNoTime2(lo.getLastModified()) : null; return result; } /** * Gets the hSSF workbook bytes. * * @param hssfwkbk * - HSSFWorkbook. * @return byte[]. * @throws IOException * - IOException. * */ private byte[] getHSSFWorkbookBytes(final HSSFWorkbook hssfwkbk) throws IOException { CodeListXLSGenerator clgen = new CodeListXLSGenerator(); return clgen.getHSSFWorkbookBytes(hssfwkbk); } /* * (non-Javadoc) * * @see * mat.server.service.SimpleEMeasureService#getEMeasureZIP(java.lang.String) */ @Override public final ExportResult getEMeasureZIP(final String measureId, final Date exportDate) throws Exception { ExportResult result = new ExportResult(); result.measureName = getMeasureName(measureId).getaBBRName(); MeasureExport me = getMeasureExport(measureId); if (me.getMeasure().getReleaseVersion().equals("v3")) { result.zipbarr = getZipBarr(measureId, exportDate, me, me.getMeasure().getReleaseVersion()); } else { String currentReleaseVersion = getFormatedReleaseVersion(me.getMeasure().getReleaseVersion()); FileNameUtility fnu = new FileNameUtility(); String parentPath = fnu.getParentPath(me.getMeasure().getaBBRName() +"_" + currentReleaseVersion); ByteArrayOutputStream baos = new ByteArrayOutputStream(); ZipOutputStream zip = new ZipOutputStream(baos); getZipBarr(measureId, me, parentPath, zip); zip.close(); result.zipbarr = baos.toByteArray(); } return result; } public final ExportResult getCompositeExportResult(final String compositeMeasureId, List<ComponentMeasure> componentMeasures) throws Exception { MeasureExport compositeMeasureExport = getMeasureExport(compositeMeasureId); ExportResult result = new ExportResult(); result.measureName = getMeasureName(compositeMeasureId).getaBBRName(); result.zipbarr = getCompositeZipBarr(compositeMeasureId, compositeMeasureExport, componentMeasures); return result; } /** * Gets the zip barr. 
* * @param measureId * the measure id * @param me * the me * @return the zip barr * @throws Exception * the exception */ public final void getZipBarr(final String measureId, final MeasureExport me, final String parentPath, ZipOutputStream zip) throws Exception { String simpleXmlStr = me.getSimpleXML(); String emeasureHTMLStr = createOrGetHumanReadableFile(measureId, me, simpleXmlStr); ExportResult emeasureExportResult = createOrGetHQMF(measureId); String emeasureXML = emeasureExportResult.export; MeasureExport measureExport = getMeasureExport(measureId); ExportResult cqlExportResult= createOrGetCQLLibraryFile(measureId, measureExport); ExportResult elmExportResult = createOrGetELMLibraryFile(measureId, measureExport); ExportResult jsonExportResult = createOrGetJSONLibraryFile(measureId, measureExport); ZipPackager zp = new ZipPackager(); zp.getZipBarr(me.getMeasure().getaBBRName(), zip, (new Date()).toString(), emeasureHTMLStr, simpleXmlStr, emeasureXML, cqlExportResult, elmExportResult, jsonExportResult, me.getMeasure().getReleaseVersion(), parentPath); } private String getFormatedReleaseVersion(String currentReleaseVersion) { return StringUtils.replace(currentReleaseVersion, ".", "_"); } /** * Gets the zip barr. 
* * @param measureId * the measure id * @param me * the me * @param componentMeasures * a list of component measures for the composite measure * @return the zip barr * @throws Exception * the exception */ public final byte[] getCompositeZipBarr(final String measureId, final MeasureExport me, List<ComponentMeasure> componentMeasures) throws Exception { String currentReleaseVersion = me.getMeasure().getReleaseVersion(); currentReleaseVersion = getFormatedReleaseVersion(currentReleaseVersion); ByteArrayOutputStream baos = new ByteArrayOutputStream(); ZipOutputStream zip = new ZipOutputStream(baos); String parentSimpleXML = me.getSimpleXML(); FileNameUtility fnu = new FileNameUtility(); //get composite file String parentPath = fnu.getParentPath(me.getMeasure().getaBBRName() +"_" + currentReleaseVersion); getZipBarr(measureId, me, parentPath, zip); //get component files for(ComponentMeasure measure : componentMeasures) { String componentMeasureId = measure.getComponentMeasure().getId(); if(checkIfComponentMeasureIsUsed(parentSimpleXML, componentMeasureId)) { MeasureExport componentMeasureExport = getMeasureExport(componentMeasureId); String componentParentPath = parentPath + File.separator + fnu.getParentPath(componentMeasureExport.getMeasure().getaBBRName() +"_" + currentReleaseVersion); getZipBarr(componentMeasureId, componentMeasureExport, componentParentPath, zip); } } zip.close(); return baos.toByteArray(); } private boolean checkIfComponentMeasureIsUsed(String parentSimpleXML, String componentMeasureId) throws MarshalException, ValidationException, IOException, MappingException, XPathExpressionException { ManageCompositeMeasureDetailModel usedCompositeModel = compositeMeasureDetailUtil.convertXMLIntoCompositeMeasureDetailModel(parentSimpleXML); List<Result> usedCompositeMeasures = usedCompositeModel.getAppliedComponentMeasures(); for (Result usedCompositeMeasure: usedCompositeMeasures) { String measureId = String.valueOf(usedCompositeMeasure.getId()); measureId = 
StringUtils.replace(measureId, "-", ""); if(measureId.equals(componentMeasureId)) { return true; } } return false; } private String getHumanReadableForMeasure(String measureId, String simpleXmlStr, String measureVersionNumber, MeasureExport measureExport) { return humanReadableGenerator.generateHTMLForMeasure(measureId, simpleXmlStr, measureVersionNumber, cqlLibraryDAO); } public ExportResult getHQMF(String measureId) { MeasureExport measureExport = getMeasureExport(measureId); ExportResult result = new ExportResult(); result.measureName = measureExport.getMeasure().getaBBRName(); result.export = getHQMFString(measureExport); return result; } public ExportResult createOrGetHQMF(String measureId) { MeasureExport measureExport = getMeasureExport(measureId); if(measureExport.getHqmf() == null) { measureExport.setHqmf(getHQMFString(measureExport)); measureExportDAO.save(measureExport); } ExportResult result = new ExportResult(); result.measureName = measureExport.getMeasure().getaBBRName(); result.export = measureExport.getHqmf(); return result; } private String getHQMFString(MeasureExport measureExport) { Generator hqmfGenerator = hqmfGeneratoryFactory.getHQMFGenerator(measureExport.getMeasure().getReleaseVersion()); String hqmf = ""; try { hqmf = hqmfGenerator.generate(measureExport); } catch (Exception e) { e.printStackTrace(); } return hqmf; } /** * Gets the zip barr. * * @param measureId * - String. * @param exportDate * the export date * @param releaseDate * the release date * @param me * - MeasureExport. * @return byte[]. * @throws Exception * - Exception. 
* */ public final byte[] getZipBarr(final String measureId, Date exportDate, final MeasureExport me, String releaseVersion) throws Exception { byte[] wkbkbarr = null; StringUtility su = new StringUtility(); ExportResult emeasureXMLResult = createOrGetHQMFForv3Measure(measureId); String emeasureName = emeasureXMLResult.measureName; String emeasureXMLStr = emeasureXMLResult.export; String repee = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"; String repor = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + su.nl + "<?xml-stylesheet type=\"text/xsl\" href=\"xslt/eMeasure.xsl\"?>"; emeasureXMLStr = repor + emeasureXMLStr.substring(repee.length()); if(me.getHumanReadable() == null) { me.setHumanReadable(createOrGetEmeasureXMLToEmeasureHTML(emeasureXMLStr, me)); measureExportDAO.save(me); } String emeasureHTMLStr = me.getHumanReadable(); String simpleXmlStr = me.getSimpleXML(); String emeasureXSLUrl = XMLUtility.getInstance().getXMLResource(conversionFileHtml); ZipPackager zp = new ZipPackager(); return zp.getZipBarr(emeasureName, exportDate, releaseVersion, wkbkbarr, emeasureXMLStr, emeasureHTMLStr, emeasureXSLUrl, (new Date()).toString(), simpleXmlStr); } /** * Gets the measure export. * * @param measureId * - String. * @return MeasureExport. 
 */
// Loads the export record and, when the measure has a finalized date that is
// missing from the simple XML, injects a <finalizedDate/> element into the
// in-memory copy. The patched XML is set on the entity but NOT saved here.
public MeasureExport getMeasureExport(final String measureId) {
    MeasureExport measureExport = measureExportDAO.findByMeasureId(measureId);
    if (measureExport == null) {
        return null; // callers must tolerate a missing export record
    }
    String emeasureXMLStr = measureExport.getSimpleXML();
    mat.model.clause.Measure measure = measureDAO.find(measureId);
    Timestamp fdts = measure.getFinalizedDate();
    StringUtility su = new StringUtility();
    // 1 add finalizedDate field (only when finalized and not already present)
    if ((fdts != null) && !emeasureXMLStr.contains("<finalizedDate")) {
        String fdstr = convertTimestampToString(fdts);
        String repee = "</measureDetails>";
        String repor = su.nl + "<finalizedDate value=\"" + fdstr + "\"/>" + su.nl
                + "</measureDetails>";
        // Insert the element just before the closing </measureDetails> tag.
        int offset = emeasureXMLStr.indexOf(repee);
        emeasureXMLStr = emeasureXMLStr.substring(0, offset) + repor
                + emeasureXMLStr.substring(offset + repee.length());
        measureExport.setSimpleXML(emeasureXMLStr);
    }
    return measureExport;
}

/**
 * Convert timestamp to string.
 *
 * NOTE(review): despite the "-zzzz" in the format note below, the zone suffix
 * is always built with a leading "-"; for zones east of GMT (negative
 * getTimezoneOffset()) this produces a doubled sign — confirm intended output.
 *
 * @param ts - Time Stamp.
 * @return yyyymmddhhmm-zzzz style string (year month day hours minutes zone)
 */
@SuppressWarnings("deprecation") // deliberately uses legacy Date accessors
private String convertTimestampToString(final Timestamp ts) {
    String hours = getTwoDigitString(ts.getHours());
    String mins = getTwoDigitString(ts.getMinutes());
    String month = getTwoDigitString(ts.getMonth() + 1); // getMonth() is 0-based
    String day = getTwoDigitString(ts.getDate());
    String timeZone = "-" + getTwoDigitString(ts.getTimezoneOffset() / 60) + "00";
    String tsStr = (ts.getYear() + 1900) + month + day + hours + mins + timeZone;
    return tsStr;
}

/**
 * Gets the two digit string: left-pads a single-digit value with '0'.
 *
 * @param i -Integer.
 * @return String.
 */
private String getTwoDigitString(final int i) {
    String ret = i + "";
    if (ret.length() == 1) {
        ret = "0" + ret;
    }
    return ret;
}

/**
 * Getter for wkbk.
 *
 * @return HSSFWorkbook.
 */
public final HSSFWorkbook getWkbk() {
    return wkbk;
}

/*
 * (non-Javadoc)
 *
 * @see
 * mat.server.service.SimpleEMeasureService#getBulkExportZIP(java.lang.String[])
 */
// Builds one zip containing the export files of every requested measure.
// Routing per measure: v3 measures use the legacy date-based packager,
// composite measures recurse into their components, everything else gets a
// sequence-numbered parent folder.
// NOTE(review): if measureIds is empty, `result` stays null and the final
// dereference throws NPE — confirm callers never pass an empty array.
@Override
public final ExportResult getBulkExportZIP(final String[] measureIds, final Date[] exportDates)
        throws Exception {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ExportResult result = null;
    try (ZipOutputStream zip = new ZipOutputStream(baos);) {
        Map<String, byte[]> filesMap = new HashMap<>();
        int fileNameCounter = 1;
        DecimalFormat format = new DecimalFormat("#00"); // 2-digit sequence numbers
        Date exportDate;
        FileNameUtility fnu = new FileNameUtility();
        for (String measureId : measureIds) {
            result = new ExportResult();
            result.measureName = getMeasureName(measureId).getaBBRName();
            exportDate = getMeasureName(measureId).getExportedDate();
            MeasureExport me = getMeasureExport(measureId);
            String currentReleaseVersion = getFormatedReleaseVersion(me.getMeasure().getReleaseVersion());
            String sequence = format.format(fileNameCounter++);
            if (me.getMeasure().getReleaseVersion().equals("v3")) {
                createFilesInBulkZip(measureId, exportDate, me, filesMap, sequence);
            } else if(me.getMeasure().getIsCompositeMeasure()) {
                createCompositeFilesInBuildZip(measureId, me, filesMap, sequence,
                        currentReleaseVersion, sequence);
            } else {
                String parentPath = fnu.getParentPath(sequence +"_"+ result.measureName + "_"
                        + currentReleaseVersion);
                createFilesInBulkZip(measureId, me, filesMap, sequence, parentPath);
            }
        }
        ZipPackager zp = new ZipPackager();
        double size = 1024 * 1024 * 100; // 100 MB cap on the assembled zip
        Set<Entry<String, byte[]>> set = filesMap.entrySet();
        for (Entry<String, byte[]> fileArr : set) {
            zp.addBytesToZip(fileArr.getKey(), fileArr.getValue(), zip);
        }
        // NOTE(review): the limit is only checked after everything has been
        // buffered in memory, so the full zip is built before rejection.
        if (baos.size() > size) {
            throw new ZipException("Exceeded Limit :" + baos.size());
        }
    }
    LOGGER.debug(baos.size());
    result.zipbarr = baos.toByteArray();
    return result;
}

/**
 * Creates the files in bulk zip.
 *
 * Non-v3 path: collects the measure's human readable, HQMF, CQL, ELM and JSON
 * artifacts (each created lazily and persisted if missing) and hands them to
 * the ZipPackager under the given parent folder.
 *
 * @param measureId the measure id
 * @param me the measure export record
 * @param filesMap the files map accumulating zip entries (entry path -> bytes)
 * @param seqNum the sequence number used in generated file names
 * @param parentPath folder inside the zip under which this measure's files go
 * @throws Exception the exception
 */
public void createFilesInBulkZip(final String measureId, final MeasureExport me,
        final Map<String, byte[]> filesMap, final String seqNum, String parentPath)
        throws Exception {
    byte[] wkbkbarr = null; // no workbook on this path
    String simpleXmlStr = me.getSimpleXML();
    String emeasureHTMLStr = createOrGetHumanReadableFile(measureId, me, simpleXmlStr);
    ExportResult emeasureExportResult = createOrGetHQMF(measureId);
    String emeasureXMLStr = emeasureExportResult.export;
    String emeasureName = me.getMeasure().getaBBRName();
    String currentReleaseVersion = me.getMeasure().getReleaseVersion();
    MeasureExport measureExport = getMeasureExport(measureId);
    ExportResult cqlExportResult = createOrGetCQLLibraryFile(measureId, measureExport);
    ExportResult elmExportResult = createOrGetELMLibraryFile(measureId, measureExport);
    ExportResult jsonExportResult = createOrGetJSONLibraryFile(measureId, measureExport);
    ZipPackager zp = new ZipPackager();
    zp.createBulkExportZip(emeasureName, wkbkbarr, emeasureXMLStr, emeasureHTMLStr,
            (new Date()).toString(), simpleXmlStr, filesMap, seqNum, currentReleaseVersion,
            cqlExportResult, elmExportResult, jsonExportResult, parentPath);
}

// Composite-measure packaging: exports the composite itself, then every
// component measure referenced by the parent's simple XML into a nested folder.
private void createCompositeFilesInBuildZip(String measureId, MeasureExport me,
        Map<String, byte[]> filesMap, String format, String currentReleaseVersion,
        String sequance) throws Exception {
    List<ComponentMeasure> componentMeasures = me.getMeasure().getComponentMeasures();
    FileNameUtility fnu = new FileNameUtility();
    String parentSimpleXML = me.getSimpleXML();
    String parentPath = fnu.getParentPath(sequance +"_"+ me.getMeasure().getaBBRName() + "_"
            + currentReleaseVersion);
    //get composite file
    createFilesInBulkZip(measureId, me, filesMap, format, parentPath);
    //get component files (only those actually used by the parent simple XML)
    for(ComponentMeasure measure : componentMeasures) {
        String componentMeasureId =
measure.getComponentMeasure().getId();
        if(checkIfComponentMeasureIsUsed(parentSimpleXML, componentMeasureId)) {
            MeasureExport componentMeasureExport = getMeasureExport(componentMeasureId);
            // Components are nested under the composite's folder in the zip.
            String componentParentPath = parentPath + File.separator
                    + fnu.getParentPath(componentMeasureExport.getMeasure().getaBBRName() +"_"
                    + currentReleaseVersion);
            createFilesInBulkZip(componentMeasureId, componentMeasureExport, filesMap, format,
                    componentParentPath);
        }
    }
}

/**
 * Creates the files in bulk zip (legacy v3-measure path).
 *
 * Regenerates the HQMF via the v3 generator, injects the eMeasure stylesheet
 * processing instruction, builds the human readable HTML, and packages
 * everything (plus CQL/ELM/JSON artifacts) with the date-based packager.
 *
 * @param measureId - String.
 * @param exportDate the export date
 * @param me - MeasureExport.
 * @param filesMap - Map accumulating zip entries.
 * @param seqNum - String sequence number used in file names.
 * @throws Exception - Exception.
 */
public final void createFilesInBulkZip(final String measureId, final Date exportDate,
        final MeasureExport me, final Map<String, byte[]> filesMap, final String seqNum)
        throws Exception {
    byte[] wkbkbarr = null; // no workbook on this path
    StringUtility su = new StringUtility();
    ExportResult emeasureXMLResult = createOrGetHQMFForv3Measure(measureId);
    String emeasureName = emeasureXMLResult.measureName;
    String emeasureXMLStr = emeasureXMLResult.export;
    // Same stylesheet splice as getZipBarr: assumes the XML declaration
    // matches `repee` exactly.
    String repee = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>";
    String repor = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + su.nl
            + "<?xml-stylesheet type=\"text/xsl\" href=\"xslt/eMeasure.xsl\"?>";
    emeasureXMLStr = repor + emeasureXMLStr.substring(repee.length());
    String emeasureHTMLStr = createOrGetEmeasureXMLToEmeasureHTML(emeasureXMLStr, me);
    String simpleXmlStr = me.getSimpleXML();
    String emeasureXSLUrl = XMLUtility.getInstance().getXMLResource(conversionFileHtml);
    MeasureExport measureExport = getMeasureExport(measureId);
    ExportResult cqlExportResult= createOrGetCQLLibraryFile(measureId, measureExport);
    ExportResult elmExportResult = createOrGetELMLibraryFile(measureId, measureExport);
    ExportResult jsonExportResult = createOrGetJSONLibraryFile(measureId, measureExport);
    ZipPackager zp = new ZipPackager();
zp.createBulkExportZip(emeasureName, exportDate, wkbkbarr, emeasureXMLStr, emeasureHTMLStr,
            emeasureXSLUrl, (new Date()).toString(), simpleXmlStr, filesMap, seqNum,
            me.getMeasure().getReleaseVersion(), cqlExportResult, elmExportResult,
            jsonExportResult);
}

/**
 * Returns the measure's CQL artifact, generating and persisting it on first
 * request; otherwise rebuilds an ExportResult from the stored CQL plus the
 * included libraries found in the simple XML.
 *
 * @param measureId the measure id
 * @param measureExport the measure's export record
 * @return the CQL export result (includes included-library results)
 * @throws Exception on generation or XML-processing failure
 */
public ExportResult createOrGetCQLLibraryFile(String measureId, MeasureExport measureExport)
        throws Exception {
    ExportResult cqlExportResult = null;
    //if measure export is null then create the file
    if(measureExport.getCql() == null) {
        cqlExportResult = getCQLLibraryFile(measureId);
        measureExport.setCql(cqlExportResult.export);
        measureExportDAO.save(measureExport);
    } else {
        // else create the export result from the cql in the model and return that;
        CQLModel cqlModel = CQLUtilityClass.getCQLModelFromXML(measureExport.getSimpleXML());
        cqlExportResult = createExportResultForFile(measureExport, measureExport.getCql(), cqlModel);
        String simpleXML = measureExport.getSimpleXML();
        XmlProcessor xmlProcessor = new XmlProcessor(simpleXML);
        // find included CQL libraries and add them to result
        getIncludedCQLLibs(cqlExportResult, xmlProcessor);
    }
    return cqlExportResult;
}

/**
 * Returns the measure's human readable HTML, generating and persisting it on
 * first request.
 *
 * @param measureId the measure id
 * @param measureExport the measure's export record
 * @param simpleXmlStr the measure's simple XML used for generation
 * @return the human readable HTML
 * @throws Exception on generation failure
 */
public String createOrGetHumanReadableFile(String measureId, MeasureExport measureExport,
        String simpleXmlStr) throws Exception {
    //if measure export is null then create the file
    if(measureExport.getHumanReadable() == null) {
        measureExport.setHumanReadable(getHumanReadableForMeasure(measureId, simpleXmlStr,
                measureExport.getMeasure().getReleaseVersion(), measureExport));
        measureExportDAO.save(measureExport);
    }
    return measureExport.getHumanReadable();
}

/**
 * Returns the measure's ELM artifact, generating and persisting it on first
 * request; otherwise rebuilds an ExportResult from the stored ELM plus the
 * included libraries found in the simple XML.
 *
 * @param measureId the measure id
 * @param measureExport the measure's export record
 * @return the ELM export result
 * @throws Exception on generation or XML-processing failure
 */
public ExportResult createOrGetELMLibraryFile(String measureId, MeasureExport measureExport)
        throws Exception {
    ExportResult elmExportResult = null;
    //if measure export is null then create the file
    if(measureExport.getElm() == null) {
        elmExportResult = getELMFile(measureId);
        measureExport.setElm(elmExportResult.export);
        measureExportDAO.save(measureExport);
    } else {
        // else create the export result from the elm in the model and return that;
        CQLModel cqlModel = CQLUtilityClass.getCQLModelFromXML(measureExport.getSimpleXML());
        elmExportResult = createExportResultForFile(measureExport, measureExport.getElm(), cqlModel);
        String simpleXML = measureExport.getSimpleXML();
        XmlProcessor xmlProcessor = new XmlProcessor(simpleXML);
        getIncludedCQLELMs(elmExportResult, xmlProcessor);
    }
    return elmExportResult;
}

/**
 * Returns the measure's JSON artifact, generating and persisting it on first
 * request; otherwise rebuilds an ExportResult from the stored JSON plus the
 * included libraries found in the simple XML.
 *
 * @param measureId the measure id
 * @param measureExport the measure's export record
 * @return the JSON export result
 * @throws Exception on generation or XML-processing failure
 */
public ExportResult createOrGetJSONLibraryFile(String measureId, MeasureExport measureExport)
        throws Exception {
    ExportResult jsonExportResult = null;
    //if measure export is null then create the file
    if(measureExport.getJson() == null) {
        jsonExportResult = getJSONFile(measureId);
        measureExport.setJson(jsonExportResult.export);
        measureExportDAO.save(measureExport);
    } else {
        // else create the export result from the json in the model and return that;
        CQLModel cqlModel = CQLUtilityClass.getCQLModelFromXML(measureExport.getSimpleXML());
        jsonExportResult = createExportResultForFile(measureExport, measureExport.getJson(), cqlModel);
        String simpleXML = measureExport.getSimpleXML();
        XmlProcessor xmlProcessor = new XmlProcessor(simpleXML);
        getIncludedCQLJSONs(jsonExportResult, xmlProcessor);
    }
    return jsonExportResult;
}

// Wraps a stored artifact string in an ExportResult. The CQL library name is
// derived from the CQL model ("<library>-<version>") unless the artifact is
// null, in which case the measure name is used as a fallback.
private ExportResult createExportResultForFile(MeasureExport measureExport, String fileString,
        CQLModel cqlModel) {
    ExportResult result = new ExportResult();
    result.measureName = measureExport.getMeasure().getaBBRName();
    result.export = fileString;
    result.setCqlLibraryName(fileString == null ? result.measureName
            : cqlModel.getLibraryName() + "-" + cqlModel.getVersionUsed());
    return result;
}

// Delegates the Bonnie calculation request for a measure to the Bonnie service.
@Override
public BonnieCalculatedResult getBonnieExportCalculation(String measureId, String userId)
        throws IOException, BonnieUnauthorizedException, BonnieNotFoundException,
        BonnieServerException, BonnieBadParameterException, BonnieDoesNotExistException {
    BonnieCalculatedResult results = bonnieServiceImpl.getBonnieExportForMeasure(userId, measureId);
    return results;
}
}
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.authorize;

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.event.Event;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;

/**
 * AuthorizeManager handles all authorization checks for DSpace. For better
 * security, DSpace assumes that you do not have the right to do something
 * unless that permission is spelled out somewhere. That "somewhere" is the
 * ResourcePolicy table. The AuthorizeManager is given a user, an object, and an
 * action, and it then does a lookup in the ResourcePolicy table to see if there
 * are any policies giving the user permission to do that action.
 * <p>
 * ResourcePolicies now apply to single objects (such as submit (ADD) permission
 * to a collection.)
 * <p>
 * Note: If an eperson is a member of the administrator group (id 1), then they
 * are automatically given permission for all requests. Another special group is
 * group 0, which is anonymous - all EPeople are members of group 0.
 */
public class AuthorizeManager
{
    /**
     * Utility method: checks that the current user of the given context can
     * perform at least one of the specified actions on the given object.
     * Throws an <code>AuthorizeException</code> only if all the
     * authorizations fail.
* * @param c * context with the current user * @param o * DSpace object user is attempting to perform action on * @param actions * array of action IDs from * <code>org.dspace.core.Constants</code> * @throws AuthorizeException * if any one of the specified actions cannot be performed by * the current user on the given object. * @throws SQLException * if there's a database problem */ public static void authorizeAnyOf(Context c, DSpaceObject o, int[] actions) throws AuthorizeException, SQLException { AuthorizeException ex = null; for (int i = 0; i < actions.length; i++) { try { authorizeAction(c, o, actions[i]); return; } catch (AuthorizeException e) { if (ex == null) { ex = e; } } } throw ex; } /** * Checks that the context's current user can perform the given action on * the given object. Throws an exception if the user is not authorized, * otherwise the method call does nothing. * * @param c * context * @param o * a DSpaceObject * @param action * action to perform from <code>org.dspace.core.Constants</code> * * @throws AuthorizeException * if the user is denied */ public static void authorizeAction(Context c, DSpaceObject o, int action) throws AuthorizeException, SQLException { authorizeAction(c, o, action, true); } /** * Checks that the context's current user can perform the given action on * the given object. Throws an exception if the user is not authorized, * otherwise the method call does nothing. 
 *
 * @param c
 *            context
 * @param o
 *            a DSpaceObject
 * @param useInheritance
 *            flag to say if ADMIN action on the current object or parent
 *            object can be used
 * @param action
 *            action to perform from <code>org.dspace.core.Constants</code>
 *
 * @throws AuthorizeException
 *             if the user is denied
 */
public static void authorizeAction(Context c, DSpaceObject o, int action, boolean useInheritance)
        throws AuthorizeException, SQLException
{
    if (o == null)
    {
        // A null target is always a denial; build a descriptive message first.
        // action can be -1 due to a null entry
        String actionText;
        if (action == -1)
        {
            actionText = "null";
        }
        else
        {
            actionText = Constants.actionText[action];
        }
        EPerson e = c.getCurrentUser();
        int userid;
        if (e == null)
        {
            userid = 0; // anonymous
        }
        else
        {
            userid = e.getID();
        }
        throw new AuthorizeException(
                "Authorization attempted on null DSpace object "
                        + actionText + " by user " + userid);
    }

    if (!authorize(c, o, action, c.getCurrentUser(), useInheritance))
    {
        // denied, assemble and throw exception
        int otype = o.getType();
        int oid = o.getID();
        int userid;
        EPerson e = c.getCurrentUser();
        if (e == null)
        {
            userid = 0; // anonymous
        }
        else
        {
            userid = e.getID();
        }
        // AuthorizeException j = new AuthorizeException("Denied");
        // j.printStackTrace();
        // action can be -1 due to a null entry
        String actionText;
        if (action == -1)
        {
            actionText = "null";
        }
        else
        {
            actionText = Constants.actionText[action];
        }
        throw new AuthorizeException("Authorization denied for action "
                + actionText + " on " + Constants.typeText[otype] + ":"
                + oid + " by user " + userid, o, action);
    }
}

/**
 * same authorize, returns boolean for those who don't want to deal with
 * catching exceptions.
* * @param c * DSpace context, containing current user * @param o * DSpaceObject * @param a * action being attempted, from * <code>org.dspace.core.Constants</code> * * @return <code>true</code> if the current user in the context is * authorized to perform the given action on the given object */ public static boolean authorizeActionBoolean(Context c, DSpaceObject o, int a) throws SQLException { return authorizeActionBoolean(c, o, a, true); } /** * same authorize, returns boolean for those who don't want to deal with * catching exceptions. * * @param c * DSpace context, containing current user * @param o * DSpaceObject * @param a * action being attempted, from * <code>org.dspace.core.Constants</code> * @param useInheritance * flag to say if ADMIN action on the current object or parent * object can be used * * @return <code>true</code> if the current user in the context is * authorized to perform the given action on the given object */ public static boolean authorizeActionBoolean(Context c, DSpaceObject o, int a, boolean useInheritance) throws SQLException { boolean isAuthorized = true; if (o == null) { return false; } try { authorizeAction(c, o, a, useInheritance); } catch (AuthorizeException e) { isAuthorized = false; } return isAuthorized; } /** * Check to see if the given user can perform the given action on the given * object. Always returns true if the ignore authorization flat is set in * the current context. * * @param c * current context. 
User is irrelevant; "ignore authorization"
 *            flag is relevant
 * @param o
 *            object action is being attempted on
 * @param action
 *            ID of action being attempted, from
 *            <code>org.dspace.core.Constants</code>
 * @param e
 *            user attempting action
 * @param useInheritance
 *            flag to say if ADMIN action on the current object or parent
 *            object can be used
 * @return <code>true</code> if user is authorized to perform the given
 *         action, <code>false</code> otherwise
 * @throws SQLException
 */
private static boolean authorize(Context c, DSpaceObject o, int action, EPerson e,
        boolean useInheritance) throws SQLException
{
    // return FALSE if there is no DSpaceObject
    if (o == null)
    {
        return false;
    }

    // is authorization disabled for this context?
    if (c.ignoreAuthorization())
    {
        return true;
    }

    // is eperson set? if not, userid = 0 (anonymous)
    int userid = 0;
    if (e != null)
    {
        userid = e.getID();

        // perform isAdmin check to see
        // if user is an Admin on this object
        // (only when inheritance is allowed; otherwise skip the admin shortcut)
        DSpaceObject testObject = useInheritance?o.getAdminObject(action):null;
        if (isAdmin(c, testObject))
        {
            return true;
        }
    }

    // Scan the object's policies for this action: a policy matches when it is
    // currently date-valid and names either this eperson or a group the
    // eperson belongs to (anonymous users match group 0 policies).
    for (ResourcePolicy rp : getPoliciesActionFilter(c, o, action))
    {
        // check policies for date validity
        if (rp.isDateValid())
        {
            if ((rp.getEPersonID() != -1) && (rp.getEPersonID() == userid))
            {
                return true; // match
            }

            if ((rp.getGroupID() != -1) && (Group.isMember(c, rp.getGroupID())))
            {
                // group was set, and eperson is a member
                // of that group
                return true;
            }
        }
    }

    // default authorization is denial
    return false;
}

///////////////////////////////////////////////
// admin check methods
///////////////////////////////////////////////

/**
 * Check to see if the current user is an Administrator of a given object
 * within DSpace.
Always return <code>true</code> if the user is a System
 * Admin
 *
 * @param c
 *            current context
 * @param o
 *            current DSpace Object, if <code>null</code> the call will be
 *            equivalent to a call to the <code>isAdmin(Context c)</code>
 *            method
 *
 * @return <code>true</code> if user has administrative privileges on the
 *         given DSpace object
 */
public static boolean isAdmin(Context c, DSpaceObject o) throws SQLException
{
    // return true if user is an Administrator
    if (isAdmin(c))
    {
        return true;
    }

    if (o == null)
    {
        return false;
    }

    // is eperson set? if not, userid = 0 (anonymous)
    int userid = 0;
    EPerson e = c.getCurrentUser();
    if(e != null)
    {
        userid = e.getID();
    }

    //
    // First, check all Resource Policies directly on this object
    //
    List<ResourcePolicy> policies = getPoliciesActionFilter(c, o, Constants.ADMIN);

    for (ResourcePolicy rp : policies)
    {
        // check policies for date validity
        if (rp.isDateValid())
        {
            if ((rp.getEPersonID() != -1) && (rp.getEPersonID() == userid))
            {
                return true; // match
            }

            if ((rp.getGroupID() != -1) && (Group.isMember(c, rp.getGroupID())))
            {
                // group was set, and eperson is a member
                // of that group
                return true;
            }
        }
    }

    // If user doesn't have specific Admin permissions on this object,
    // check the *parent* objects of this object.  This allows Admin
    // permissions to be inherited automatically (e.g. Admin on Community
    // is also an Admin of all Collections/Items in that Community)
    DSpaceObject parent = o.getParentObject();
    if (parent != null)
    {
        return isAdmin(c, parent); // recurse up the object hierarchy
    }

    return false;
}

/**
 * Check to see if the current user is a System Admin. Always return
 * <code>true</code> if c.ignoreAuthorization is set.
Anonymous users * can't be Admins (EPerson set to NULL) * * @param c * current context * * @return <code>true</code> if user is an admin or ignore authorization * flag set */ public static boolean isAdmin(Context c) throws SQLException { // if we're ignoring authorization, user is member of admin if (c.ignoreAuthorization()) { return true; } EPerson e = c.getCurrentUser(); if (e == null) { return false; // anonymous users can't be admins.... } else { return Group.isMember(c, 1); } } public static boolean isSeniorCurator(Context c) throws SQLException { // if we're ignoring authorization, user is member of admin if (c.ignoreAuthorization()) { return true; } EPerson e = c.getCurrentUser(); if (e == null) { return false; // anonymous users can't be admins.... } else { Group seniorCurator = Group.findByName(c, ConfigurationManager.getProperty("core.authorization.site-admin.group")); if(seniorCurator==null) { return false; } return Group.isMember(c, seniorCurator.getID()); } } public static boolean isCuratorOrAdmin(Context context){ try{ boolean isSystemAdmin = isAdmin(context); if(isSystemAdmin) { return true; } else { boolean isSeniorCurator = isSeniorCurator(context); if(isSeniorCurator) { return true; } else { return false; } } }catch (Exception e) { } return false; } /////////////////////////////////////////////// // policy manipulation methods /////////////////////////////////////////////// /** * Add a policy for an individual eperson * * @param c * context. 
Current user irrelevant
 * @param o
 *            DSpaceObject to add policy to
 * @param actionID
 *            ID of action from <code>org.dspace.core.Constants</code>
 * @param e
 *            eperson who can perform the action
 *
 * @throws AuthorizeException
 *             if current user in context is not authorized to add policies
 */
public static void addPolicy(Context c, DSpaceObject o, int actionID, EPerson e)
        throws SQLException, AuthorizeException
{
    ResourcePolicy rp = ResourcePolicy.create(c);

    rp.setResource(o);
    rp.setAction(actionID);
    rp.setEPerson(e);

    rp.update();

    //In case the policies are changing for an item fire an item modify event
    if(o instanceof Item){
        c.addEvent(new Event(Event.MODIFY, Constants.ITEM, o.getID(), null));
    }
}

/**
 * Add a policy for a group
 *
 * @param c
 *            current context
 * @param o
 *            object to add policy for
 * @param actionID
 *            ID of action from <code>org.dspace.core.Constants</code>
 * @param g
 *            group to add policy for
 * @throws SQLException
 *             if there's a database problem
 * @throws AuthorizeException
 *             if the current user is not authorized to add this policy
 */
public static void addPolicy(Context c, DSpaceObject o, int actionID, Group g)
        throws SQLException, AuthorizeException
{
    ResourcePolicy rp = ResourcePolicy.create(c);

    rp.setResource(o);
    rp.setAction(actionID);
    rp.setGroup(g);

    rp.update();

    //In case the policies are changing for an item fire an item modify event
    if(o instanceof Item){
        c.addEvent(new Event(Event.MODIFY, Constants.ITEM, o.getID(), null));
    }
}

/**
 * Return a List of the policies for an object
 *
 * @param c  current context
 * @param o  object to retrieve policies for
 *
 * @return List of <code>ResourcePolicy</code> objects
 */
public static List<ResourcePolicy> getPolicies(Context c, DSpaceObject o)
        throws SQLException
{
    TableRowIterator tri = DatabaseManager.queryTable(c, "resourcepolicy",
            "SELECT * FROM resourcepolicy WHERE resource_type_id= ? AND resource_id= ? ",
            o.getType(),o.getID());

    List<ResourcePolicy> policies = new ArrayList<ResourcePolicy>();
    try
    {
        while (tri.hasNext())
        {
            TableRow row = tri.next();

            // first check the cache (FIXME: is this right?)
            ResourcePolicy cachepolicy = (ResourcePolicy) c.fromCache(
                    ResourcePolicy.class, row.getIntColumn("policy_id"));

            if (cachepolicy != null)
            {
                policies.add(cachepolicy);
            }
            else
            {
                policies.add(new ResourcePolicy(c, row));
            }
        }
    }
    finally
    {
        // Always close the row iterator to release the underlying resources.
        if (tri != null)
        {
            tri.close();
        }
    }

    return policies;
}

/**
 * Return a List of the policies for a group
 *
 * @param c  current context
 * @param g  group to retrieve policies for
 *
 * @return List of <code>ResourcePolicy</code> objects
 */
public static List<ResourcePolicy> getPoliciesForGroup(Context c, Group g)
        throws SQLException
{
    TableRowIterator tri = DatabaseManager.queryTable(c, "resourcepolicy",
            "SELECT * FROM resourcepolicy WHERE epersongroup_id= ? ", g.getID());

    List<ResourcePolicy> policies = new ArrayList<ResourcePolicy>();
    try
    {
        while (tri.hasNext())
        {
            TableRow row = tri.next();

            // first check the cache (FIXME: is this right?)
            ResourcePolicy cachepolicy = (ResourcePolicy) c.fromCache(
                    ResourcePolicy.class, row.getIntColumn("policy_id"));

            if (cachepolicy != null)
            {
                policies.add(cachepolicy);
            }
            else
            {
                policies.add(new ResourcePolicy(c, row));
            }
        }
    }
    finally
    {
        if (tri != null)
        {
            tri.close();
        }
    }

    return policies;
}

/**
 * Return a list of policies for an object that match the action
 *
 * @param c
 *            context
 * @param o
 *            DSpaceObject policies relate to
 * @param actionID
 *            action (defined in class Constants)
 * @throws SQLException
 *             if there's a database problem
 */
public static List<ResourcePolicy> getPoliciesActionFilter(Context c, DSpaceObject o,
        int actionID) throws SQLException
{
    TableRowIterator tri = DatabaseManager.queryTable(c, "resourcepolicy",
            "SELECT * FROM resourcepolicy WHERE resource_type_id= ? "+
            "AND resource_id= ? AND action_id= ? ",
            o.getType(), o.getID(),actionID);

    List<ResourcePolicy> policies = new ArrayList<ResourcePolicy>();
    try
    {
        while (tri.hasNext())
        {
            TableRow row = tri.next();

            // first check the cache (FIXME: is this right?)
            ResourcePolicy cachepolicy = (ResourcePolicy) c.fromCache(
                    ResourcePolicy.class, row.getIntColumn("policy_id"));

            if (cachepolicy != null)
            {
                policies.add(cachepolicy);
            }
            else
            {
                policies.add(new ResourcePolicy(c, row));
            }
        }
    }
    finally
    {
        if (tri != null)
        {
            tri.close();
        }
    }

    return policies;
}

/**
 * Add policies to an object to match those from a previous object
 *
 * @param c  context
 * @param src
 *            source of policies
 * @param dest
 *            destination of inherited policies
 * @throws SQLException
 *             if there's a database problem
 * @throws AuthorizeException
 *             if the current user is not authorized to add these policies
 */
public static void inheritPolicies(Context c, DSpaceObject src, DSpaceObject dest)
        throws SQLException, AuthorizeException
{
    // find all policies for the source object
    List<ResourcePolicy> policies = getPolicies(c, src);

    //Only inherit non-ADMIN policies (since ADMIN policies are automatically inherited)
    List<ResourcePolicy> nonAdminPolicies = new ArrayList<ResourcePolicy>();
    for (ResourcePolicy rp : policies)
    {
        if (rp.getAction() != Constants.ADMIN)
        {
            nonAdminPolicies.add(rp);
        }
    }

    addPolicies(c, nonAdminPolicies, dest);
}

/**
 * Copies policies from a list of resource policies to a given DSpaceObject
 *
 * @param c
 *            DSpace context
 * @param policies
 *            List of ResourcePolicy objects
 * @param dest
 *            object to have policies added
 * @throws SQLException
 *             if there's a database problem
 * @throws AuthorizeException
 *             if the current user is not authorized to add these policies
 */
public static void addPolicies(Context c, List<ResourcePolicy> policies, DSpaceObject dest)
        throws SQLException, AuthorizeException
{
    // now add them to the destination object
    for (ResourcePolicy srp : policies)
    {
        ResourcePolicy drp = ResourcePolicy.create(c);

        // copy over values
drp.setResource(dest);
        drp.setAction(srp.getAction());
        drp.setEPerson(srp.getEPerson());
        drp.setGroup(srp.getGroup());
        drp.setStartDate(srp.getStartDate());
        drp.setEndDate(srp.getEndDate());

        // and write out new policy
        drp.update();
    }

    //In case the policies are changing for an item fire an item modify event
    if(dest instanceof Item){
        c.addEvent(new Event(Event.MODIFY, Constants.ITEM, dest.getID(), null));
    }
}

/**
 * removes ALL policies for an object.  FIXME doesn't check authorization
 *
 * @param c
 *            DSpace context
 * @param o
 *            object to remove policies for
 * @throws SQLException
 *             if there's a database problem
 */
public static void removeAllPolicies(Context c, DSpaceObject o)
        throws SQLException
{
    // FIXME: authorization check?
    DatabaseManager.updateQuery(c, "DELETE FROM resourcepolicy WHERE "
            + "resource_type_id= ? AND resource_id= ? ",
            o.getType(), o.getID());
    //In case the policies are changing for an item fire an item modify event
    if(o instanceof Item){
        c.addEvent(new Event(Event.MODIFY, Constants.ITEM, o.getID(), null));
    }
}

/**
 * Remove all policies from an object that match a given action. FIXME
 * doesn't check authorization
 *
 * @param context
 *            current context
 * @param dso
 *            object to remove policies from
 * @param actionID
 *            ID of action to match from
 *            <code>org.dspace.core.Constants</code>, or -1=all
 * @throws SQLException
 *             if there's a database problem
 */
public static void removePoliciesActionFilter(Context context, DSpaceObject dso, int actionID)
        throws SQLException
{
    if (actionID == -1)
    {
        // remove all policies from object
        removeAllPolicies(context, dso);
    }
    else
    {
        DatabaseManager.updateQuery(context,
                "DELETE FROM resourcepolicy WHERE resource_type_id= ? AND "+
                "resource_id= ? AND action_id= ? ",
                dso.getType(), dso.getID(), actionID);
    }
    //In case the policies are changing for an item fire an item modify event
    if(dso instanceof Item){
        context.addEvent(new Event(Event.MODIFY, Constants.ITEM, dso.getID(), null));
    }
}

/**
 * Removes all policies relating to a particular group. FIXME doesn't check
 * authorization
 *
 * @param c
 *            current context
 * @param groupID
 *            ID of the group
 * @throws SQLException
 *             if there's a database problem
 */
public static void removeGroupPolicies(Context c, int groupID)
        throws SQLException
{
    DatabaseManager.updateQuery(c, "DELETE FROM resourcepolicy WHERE "
            + "epersongroup_id= ? ", groupID);
}

/**
 * Removes all policies from a group for a particular object that belong to
 * a Group. FIXME doesn't check authorization
 *
 * @param c
 *            current context
 * @param o
 *            the object
 * @param g
 *            the group
 * @throws SQLException
 *             if there's a database problem
 */
public static void removeGroupPolicies(Context c, DSpaceObject o, Group g)
        throws SQLException
{
    DatabaseManager.updateQuery(c, "DELETE FROM resourcepolicy WHERE "
            + "resource_type_id= ? AND resource_id= ? AND epersongroup_id= ? ",
            o.getType(), o.getID(), g.getID());
}

/**
 * Returns all groups authorized to perform an action on an object. Returns
 * empty array if no matches.
 *
 * @param c
 *            current context
 * @param o
 *            object
 * @param actionID
 *            ID of action frm <code>org.dspace.core.Constants</code>
 * @return array of <code>Group</code>s that can perform the specified
 *         action on the specified object
 * @throws java.sql.SQLException
 *             if there's a database problem
 */
public static Group[] getAuthorizedGroups(Context c, DSpaceObject o, int actionID)
        throws java.sql.SQLException
{
    // do query matching groups, actions, and objects
    TableRowIterator tri = DatabaseManager.queryTable(c, "resourcepolicy",
            "SELECT * FROM resourcepolicy WHERE resource_type_id= ? "+
            "AND resource_id= ? AND action_id= ? ",
            o.getType(),o.getID(),actionID);

    List<Group> groups = new ArrayList<Group>();
    try
    {
        while (tri.hasNext())
        {
            TableRow row = tri.next();

            // first check the cache (FIXME: is this right?)
            ResourcePolicy cachepolicy = (ResourcePolicy) c.fromCache(
                    ResourcePolicy.class, row.getIntColumn("policy_id"));

            ResourcePolicy myPolicy = null;

            if (cachepolicy != null)
            {
                myPolicy = cachepolicy;
            }
            else
            {
                myPolicy = new ResourcePolicy(c, row);
            }

            // now do we have a group?
            Group myGroup = myPolicy.getGroup();

            if (myGroup != null)
            {
                groups.add(myGroup);
            }
        }
    }
    finally
    {
        // Always release the row iterator's resources.
        if (tri != null)
        {
            tri.close();
        }
    }

    Group[] groupArray = new Group[groups.size()];
    groupArray = groups.toArray(groupArray);

    return groupArray;
}
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.cdi.internal;

import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.enterprise.context.spi.CreationalContext;
import javax.enterprise.event.Observes;
import javax.enterprise.inject.spi.AfterBeanDiscovery;
import javax.enterprise.inject.spi.AfterDeploymentValidation;
import javax.enterprise.inject.spi.Annotated;
import javax.enterprise.inject.spi.AnnotatedType;
import javax.enterprise.inject.spi.Bean;
import javax.enterprise.inject.spi.BeanManager;
import javax.enterprise.inject.spi.Extension;
import javax.enterprise.inject.spi.InjectionTarget;
import javax.enterprise.inject.spi.ProcessAnnotatedType;
import javax.enterprise.inject.spi.ProcessBean;
import javax.enterprise.inject.spi.ProcessInjectionTarget;
import javax.enterprise.inject.spi.ProcessProducerMethod;
import javax.enterprise.util.AnnotationLiteral;
import javax.inject.Inject;

import org.apache.camel.CamelContext;
import org.apache.camel.CamelContextAware;
import org.apache.camel.Consume;
import org.apache.camel.EndpointInject;
import org.apache.camel.Produce;
import org.apache.camel.RoutesBuilder;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.cdi.CdiCamelContext;
import org.apache.camel.cdi.ContextName;
import org.apache.camel.impl.DefaultCamelBeanPostProcessor;
import org.apache.camel.model.RouteContainer;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.ReflectionHelper;
import org.apache.deltaspike.core.api.provider.BeanProvider;
import org.apache.deltaspike.core.util.metadata.builder.AnnotatedTypeBuilder;

/**
 * Set of camel specific hooks for CDI.
 * <p>
 * This CDI portable extension observes the container's bean-discovery and
 * deployment lifecycle events in order to:
 * <ul>
 * <li>make {@link CamelContextAware} beans receive their CamelContext via CDI injection,
 * <li>collect {@link RoutesBuilder}/{@link RouteContainer} beans annotated with
 *     {@link ContextName} so they are registered on the matching context,
 * <li>register one managed {@link CamelContext} bean per discovered context name
 *     (or a single default one when no name was seen), and
 * <li>post-process beans that use the Camel {@code @Consume}, {@code @Produce}
 *     and {@code @EndpointInject} annotations.
 * </ul>
 */
public class CamelExtension implements Extension {

    /** Literal used to programmatically add {@code @Inject} to a discovered method. */
    private static class InjectLiteral extends AnnotationLiteral<Inject> {
        private static final long serialVersionUID = 1L;
    }

    // Lazily resolved via BeanProvider in getCamelContext(); maps context name -> CamelContext.
    CamelContextMap camelContextMap;

    // Beans with at least one @Consume method; instantiated eagerly in startConsumeBeans()
    // so their consumers start at deployment time.
    private final Set<Bean<?>> eagerBeans = new HashSet<Bean<?>>();
    // Per-context-name configuration (route builder beans/types to register on that context).
    private final Map<String, CamelContextConfig> camelContextConfigMap = new HashMap<String, CamelContextConfig>();
    // The CamelContext beans this extension registered, so they can be configured after deployment.
    private final List<CamelContextBean> camelContextBeans = new ArrayList<CamelContextBean>();

    public CamelExtension() {
    }

    /**
     * If no context name is specified then default it to the value from
     * the {@link org.apache.camel.cdi.ContextName} annotation
     */
    public static String getCamelContextName(String context, ContextName annotation) {
        if (ObjectHelper.isEmpty(context) && annotation != null) {
            return annotation.value();
        }
        return context;
    }

    /**
     * Process camel context aware bean definitions.
     * <p>
     * Rewrites the annotated type so that {@code setCamelContext(CamelContext)}
     * carries {@code @Inject}, turning it into a CDI initializer method.
     *
     * @param process Annotated type.
     * @throws Exception In case of exceptions.
     */
    protected void contextAwareness(@Observes ProcessAnnotatedType<CamelContextAware> process)
        throws Exception {
        AnnotatedType<CamelContextAware> at = process.getAnnotatedType();
        Method method = at.getJavaClass().getMethod("setCamelContext", CamelContext.class);
        AnnotatedTypeBuilder<CamelContextAware> builder = new AnnotatedTypeBuilder<CamelContextAware>()
            .readFromType(at)
            .addToMethod(method, new InjectLiteral());
        process.setAnnotatedType(builder.create());
    }

    /**
     * Collects types annotated with {@link ContextName} that build routes, so they
     * can later be registered on the named CamelContext.
     */
    protected void detectRouteBuilders(@Observes ProcessAnnotatedType<?> process) throws Exception {
        AnnotatedType<?> annotatedType = process.getAnnotatedType();
        ContextName annotation = annotatedType.getAnnotation(ContextName.class);
        Class<?> javaClass = annotatedType.getJavaClass();
        if (annotation != null && isRoutesBean(javaClass)) {
            addRouteBuilderBean(annotatedType, annotation);
        }
    }

    // Records the annotated type on the configuration of the context named by the annotation.
    private void addRouteBuilderBean(final AnnotatedType<?> process, ContextName annotation) {
        final CamelContextConfig config = getCamelConfig(annotation.value());
        config.addRouteBuilderBean(process);
    }

    /**
     * Disable creation of default CamelContext bean and rely on context created
     * and managed by extension.
     *
     * @param process Annotated type.
     */
    protected void disableDefaultContext(@Observes ProcessAnnotatedType<? extends CamelContext> process) {
        process.veto();
    }

    /**
     * Registers managed camel bean.
     * <p>
     * One {@link CamelContextBean} is added per discovered context name; when no
     * {@link ContextName} annotations were seen, a single default context bean is added.
     *
     * @param abd     After bean discovery event.
     * @param manager Bean manager.
     */
    protected void registerManagedCamelContext(@Observes AfterBeanDiscovery abd, BeanManager manager) {
        // lets ensure we have at least one camel context
        if (camelContextConfigMap.isEmpty()) {
            abd.addBean(new CamelContextBean(manager));
        } else {
            Set<Map.Entry<String, CamelContextConfig>> entries = camelContextConfigMap.entrySet();
            for (Map.Entry<String, CamelContextConfig> entry : entries) {
                String name = entry.getKey();
                CamelContextConfig config = entry.getValue();
                CamelContextBean camelContextBean = new CamelContextBean(manager,
                        "CamelContext:" + name, name, config);
                camelContextBeans.add(camelContextBean);
                abd.addBean(camelContextBean);
            }
        }
    }

    /**
     * Lets detect all beans annotated with @Consume and
     * beans of type {@link RouteBuilder} which are annotated with {@link org.apache.camel.cdi.ContextName}
     * so they can be auto-registered
     */
    public void detectConsumeBeans(@Observes ProcessBean<?> event) {
        final Bean<?> bean = event.getBean();
        Class<?> beanClass = bean.getBeanClass();
        ReflectionHelper.doWithMethods(beanClass, new ReflectionHelper.MethodCallback() {
            @Override
            public void doWith(Method method) throws IllegalArgumentException, IllegalAccessException {
                Consume consume = method.getAnnotation(Consume.class);
                if (consume != null) {
                    // any @Consume method makes the whole bean eager (see startConsumeBeans)
                    eagerBeans.add(bean);
                }
            }
        });
    }

    /**
     * Lets detect all beans annotated of type {@link RouteBuilder}
     * which are annotated with {@link org.apache.camel.cdi.ContextName}
     * so they can be auto-registered
     */
    public void detectRouteBuilderBeans(@Observes ProcessBean<?> event) {
        final Bean<?> bean = event.getBean();
        Class<?> beanClass = bean.getBeanClass();
        if (isRoutesBean(beanClass)) {
            addRouteBuilderBean(bean, beanClass.getAnnotation(ContextName.class));
        }
    }

    // Records the bean on the configuration of the named context; no-op when the
    // annotation is absent (un-named routes beans are not auto-registered).
    private void addRouteBuilderBean(Bean<?> bean, ContextName annotation) {
        if (annotation != null) {
            String contextName = annotation.value();
            CamelContextConfig config = getCamelConfig(contextName);
            config.addRouteBuilderBean(bean);
        }
    }

    // Returns the config for the given context name, creating and caching it on first use.
    private CamelContextConfig getCamelConfig(final String contextName) {
        CamelContextConfig config = camelContextConfigMap.get(contextName);
        if (config == null) {
            config = new CamelContextConfig();
            camelContextConfigMap.put(contextName, config);
        }
        return config;
    }

    /**
     * Lets detect all producer methods creating instances of {@link RouteBuilder} which are annotated with {@link org
     * .apache.camel.cdi.ContextName}
     * so they can be auto-registered
     */
    public void detectProducerRoutes(@Observes ProcessProducerMethod<?, ?> event) {
        Annotated annotated = event.getAnnotated();
        ContextName annotation = annotated.getAnnotation(ContextName.class);
        Class<?> returnType = event.getAnnotatedProducerMethod().getJavaMember().getReturnType();
        if (isRoutesBean(returnType)) {
            addRouteBuilderBean(event.getBean(), annotation);
        }
    }

    /**
     * Lets force the CDI container to create all beans annotated with @Consume so that the consumer becomes active.
     * Also configures every CamelContext bean registered by this extension.
     */
    public void startConsumeBeans(@Observes AfterDeploymentValidation event, BeanManager beanManager)
        throws Exception {
        for (CamelContextBean bean : camelContextBeans) {
            String name = bean.getCamelContextName();
            CamelContext context = getCamelContext(name);
            if (context == null) {
                throw new IllegalStateException(
                        "CamelContext '" + name + "' has not been injected into the CamelContextMap");
            }
            bean.configureCamelContext((CdiCamelContext) context);
        }

        for (Bean<?> bean : eagerBeans) {
            // force lazy creation to start the consumer
            CreationalContext<?> creationalContext = beanManager.createCreationalContext(bean);
            beanManager.getReference(bean, bean.getBeanClass(), creationalContext);
        }
    }

    /**
     * Lets perform injection of all beans which use Camel annotations.
     * <p>
     * Wraps the injection target so that, after the container's own postConstruct,
     * the collected @Produce/@EndpointInject/@Consume members are processed.
     */
    public void onInjectionTarget(@Observes ProcessInjectionTarget<?> event) {
        final InjectionTarget injectionTarget = event.getInjectionTarget();
        AnnotatedType annotatedType = event.getAnnotatedType();
        final Class<Object> beanClass = annotatedType.getJavaClass();
        // TODO this is a bit of a hack - what should the bean name be?
        final String beanName = injectionTarget.toString();
        ContextName contextName = annotatedType.getAnnotation(ContextName.class);
        final BeanAdapter adapter = createBeanAdapter(beanClass, contextName);
        if (!adapter.isEmpty()) {
            DelegateInjectionTarget newTarget = new DelegateInjectionTarget(injectionTarget) {
                @Override
                public void postConstruct(Object instance) {
                    super.postConstruct(instance);
                    // now lets do the post instruct to inject our Camel injections
                    adapter.inject(CamelExtension.this, instance, beanName);
                }
            };
            event.setInjectionTarget(newTarget);
        }
    }

    /**
     * Perform injection on an existing bean such as a test case which is created directly by a testing framework.
     * <p/>
     * This is because BeanProvider.injectFields() does not invoke the onInjectionTarget() method so the injection
     * of @Produce / @EndpointInject and processing of the @Consume annotations are not performed.
     */
    public void inject(Object bean) {
        Class<?> beanClass = bean.getClass();
        ContextName contextName = beanClass.getAnnotation(ContextName.class);
        final BeanAdapter adapter = createBeanAdapter(beanClass, contextName);
        if (!adapter.isEmpty()) {
            // TODO this is a bit of a hack - what should the bean name be?
            final String beanName = bean.toString();
            adapter.inject(this, bean, beanName);
        }
    }

    // Scans the class for Camel annotations (@Produce/@EndpointInject on fields and
    // methods, @Consume on methods) and records them on a BeanAdapter for later injection.
    private BeanAdapter createBeanAdapter(Class<?> beanClass, ContextName contextName) {
        final BeanAdapter adapter = new BeanAdapter(contextName);
        ReflectionHelper.doWithFields(beanClass, new ReflectionHelper.FieldCallback() {
            @Override
            public void doWith(Field field) throws IllegalArgumentException, IllegalAccessException {
                Produce produce = field.getAnnotation(Produce.class);
                // a field annotated both @Produce and @Inject is left to CDI to populate
                if (produce != null && !injectAnnotatedField(field)) {
                    adapter.addProduceField(field);
                }
                EndpointInject endpointInject = field.getAnnotation(EndpointInject.class);
                if (endpointInject != null) {
                    adapter.addEndpointField(field);
                }
            }
        });
        ReflectionHelper.doWithMethods(beanClass, new ReflectionHelper.MethodCallback() {
            @Override
            public void doWith(Method method) throws IllegalArgumentException, IllegalAccessException {
                Consume consume = method.getAnnotation(Consume.class);
                if (consume != null) {
                    adapter.addConsumeMethod(method);
                }
                Produce produce = method.getAnnotation(Produce.class);
                if (produce != null) {
                    adapter.addProduceMethod(method);
                }
                EndpointInject endpointInject = method.getAnnotation(EndpointInject.class);
                if (endpointInject != null) {
                    adapter.addEndpointMethod(method);
                }
            }
        });
        return adapter;
    }

    // Builds a Camel bean post processor bound to the named context.
    // Throws IllegalArgumentException when the context is unknown.
    protected DefaultCamelBeanPostProcessor getPostProcessor(String context) {
        CamelContext camelContext = getCamelContext(context);
        if (camelContext != null) {
            return new DefaultCamelBeanPostProcessor(camelContext);
        } else {
            throw new IllegalArgumentException("No such CamelContext '" + context + "' available!");
        }
    }

    // Resolves the CamelContextMap lazily on first use, then looks the context up by name.
    protected CamelContext getCamelContext(String context) {
        if (camelContextMap == null) {
            camelContextMap = BeanProvider.getContextualReference(CamelContextMap.class);
            ObjectHelper.notNull(camelContextMap, "Could not resolve CamelContextMap");
        }
        return camelContextMap.getCamelContext(context);
    }

    /**
     * Returns true if this field is annotated with @Inject
     */
    protected static boolean injectAnnotatedField(Field field) {
        return field.getAnnotation(Inject.class) != null;
    }

    // A "routes bean" is a concrete RoutesBuilder or RouteContainer implementation.
    protected boolean isRoutesBean(Class<?> returnType) {
        return (RoutesBuilder.class.isAssignableFrom(returnType)
                || RouteContainer.class.isAssignableFrom(returnType))
                && !Modifier.isAbstract(returnType.getModifiers());
    }
}
package org.apache.solr.highlight;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.postingshighlight.DefaultPassageFormatter;
import org.apache.lucene.search.postingshighlight.Passage;
import org.apache.lucene.search.postingshighlight.PassageFormatter;
import org.apache.lucene.search.postingshighlight.PassageScorer;
import org.apache.lucene.search.postingshighlight.PostingsHighlighter;
import org.apache.lucene.search.postingshighlight.WholeBreakIterator;
import org.apache.solr.common.params.HighlightParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.search.DocIterator;
import org.apache.solr.search.DocList;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.util.plugin.PluginInfoInitialized;

import java.io.IOException;
import java.text.BreakIterator;
import java.util.Collections;
import java.util.Locale;
import java.util.Map;
import java.util.Set;

/**
 * Highlighter impl that uses {@link PostingsHighlighter}
 * <p>
 * Example configuration:
 * <pre class="prettyprint">
 * &lt;requestHandler name="standard" class="solr.StandardRequestHandler"&gt;
 *   &lt;lst name="defaults"&gt;
 *     &lt;int name="hl.snippets"&gt;1&lt;/int&gt;
 *     &lt;str name="hl.tag.pre"&gt;&amp;lt;em&amp;gt;&lt;/str&gt;
 *     &lt;str name="hl.tag.post"&gt;&amp;lt;/em&amp;gt;&lt;/str&gt;
 *     &lt;str name="hl.tag.ellipsis"&gt;... &lt;/str&gt;
 *     &lt;bool name="hl.defaultSummary"&gt;true&lt;/bool&gt;
 *     &lt;str name="hl.encoder"&gt;simple&lt;/str&gt;
 *     &lt;float name="hl.score.k1"&gt;1.2&lt;/float&gt;
 *     &lt;float name="hl.score.b"&gt;0.75&lt;/float&gt;
 *     &lt;float name="hl.score.pivot"&gt;87&lt;/float&gt;
 *     &lt;str name="hl.bs.language"&gt;&lt;/str&gt;
 *     &lt;str name="hl.bs.country"&gt;&lt;/str&gt;
 *     &lt;str name="hl.bs.variant"&gt;&lt;/str&gt;
 *     &lt;str name="hl.bs.type"&gt;SENTENCE&lt;/str&gt;
 *     &lt;int name="hl.maxAnalyzedChars"&gt;10000&lt;/int&gt;
 *     &lt;str name="hl.multiValuedSeparatorChar"&gt; &lt;/str&gt;
 *     &lt;bool name="hl.highlightMultiTerm"&gt;false&lt;/bool&gt;
 *   &lt;/lst&gt;
 * &lt;/requestHandler&gt;
 * </pre>
 * ...
 * <pre class="prettyprint">
 * &lt;searchComponent class="solr.HighlightComponent" name="highlight"&gt;
 *   &lt;highlighting class="org.apache.solr.highlight.PostingsSolrHighlighter"/&gt;
 * &lt;/searchComponent&gt;
 * </pre>
 * <p>
 * Notes:
 * <ul>
 *   <li>fields to highlight must be configured with storeOffsetsWithPositions="true"
 *   <li>hl.q (string) can specify the query
 *   <li>hl.fl (string) specifies the field list.
 *   <li>hl.snippets (int) specifies how many underlying passages form the resulting snippet.
 *   <li>hl.tag.pre (string) specifies text which appears before a highlighted term.
 *   <li>hl.tag.post (string) specifies text which appears after a highlighted term.
 *   <li>hl.tag.ellipsis (string) specifies text which joins non-adjacent passages.
 *   <li>hl.defaultSummary (bool) specifies if a field should have a default summary.
 *   <li>hl.encoder (string) can be 'html' (html escapes content) or 'simple' (no escaping).
 *   <li>hl.score.k1 (float) specifies bm25 scoring parameter 'k1'
 *   <li>hl.score.b (float) specifies bm25 scoring parameter 'b'
 *   <li>hl.score.pivot (float) specifies bm25 scoring parameter 'avgdl'
 *   <li>hl.bs.type (string) specifies how to divide text into passages: [SENTENCE, LINE, WORD, CHAR, WHOLE]
 *   <li>hl.bs.language (string) specifies language code for BreakIterator. default is empty string (root locale)
 *   <li>hl.bs.country (string) specifies country code for BreakIterator. default is empty string (root locale)
 *   <li>hl.bs.variant (string) specifies country code for BreakIterator. default is empty string (root locale)
 *   <li>hl.maxAnalyzedChars specifies how many characters at most will be processed in a document.
 *   <li>hl.multiValuedSeparatorChar specifies the logical separator between values for multi-valued fields.
 *   <li>hl.highlightMultiTerm enables highlighting for range/wildcard/fuzzy/prefix queries.
 *       NOTE: currently hl.maxAnalyzedChars cannot yet be specified per-field
 * </ul>
 *
 * @lucene.experimental
 */
public class PostingsSolrHighlighter extends SolrHighlighter implements PluginInfoInitialized {

  /** No plugin configuration is consumed; all options are read per-request. */
  @Override
  public void init(PluginInfo info) {}

  /**
   * Highlights the given docs for the requested fields and returns the results
   * as a named list keyed by document unique key.
   *
   * @return encoded snippets, or null when highlighting is disabled for the request
   */
  @Override
  public NamedList<Object> doHighlighting(DocList docs, Query query, SolrQueryRequest req, String[] defaultFields) throws IOException {
    final SolrParams params = req.getParams();

    // if highlighting isnt enabled, then why call doHighlighting?
    if (!isHighlightingEnabled(params)) return null;

    SolrIndexSearcher searcher = req.getSearcher();
    int[] docIDs = toDocIDs(docs);

    // fetch the unique keys
    String[] keys = getUniqueKeys(searcher, docIDs);

    // query-time parameters
    String[] fieldNames = getHighlightFields(query, req, defaultFields);

    // per-field snippet count (hl.snippets, defaulting to 1)
    int maxPassages[] = new int[fieldNames.length];
    for (int i = 0; i < fieldNames.length; i++) {
      maxPassages[i] = params.getFieldInt(fieldNames[i], HighlightParams.SNIPPETS, 1);
    }

    PostingsHighlighter highlighter = getHighlighter(req);
    Map<String,String[]> snippets = highlighter.highlightFields(fieldNames, query, searcher, docIDs, maxPassages);
    return encodeSnippets(keys, fieldNames, snippets);
  }

  /** Creates an instance of the Lucene PostingsHighlighter. Provided for subclass extension so that
   *  a subclass can return a subclass of {@link PostingsSolrHighlighter.SolrExtendedPostingsHighlighter}. */
  protected PostingsHighlighter getHighlighter(SolrQueryRequest req) {
    return new SolrExtendedPostingsHighlighter(req);
  }

  /**
   * Encodes the resulting snippets into a namedlist
   * @param keys the document unique keys
   * @param fieldNames field names to highlight in the order
   * @param snippets map from field name to snippet array for the docs
   * @return encoded namedlist of summaries
   */
  protected NamedList<Object> encodeSnippets(String[] keys, String[] fieldNames, Map<String,String[]> snippets) {
    NamedList<Object> list = new SimpleOrderedMap<>();
    for (int i = 0; i < keys.length; i++) {
      NamedList<Object> summary = new SimpleOrderedMap<>();
      for (String field : fieldNames) {
        String snippet = snippets.get(field)[i];
        // box in an array to match the format of existing highlighters,
        // even though it's always one element.
        if (snippet == null) {
          summary.add(field, new String[0]);
        } else {
          summary.add(field, new String[] { snippet });
        }
      }
      list.add(keys[i], summary);
    }
    return list;
  }

  /** Converts solr's DocList to the int[] docIDs */
  protected int[] toDocIDs(DocList docs) {
    int[] docIDs = new int[docs.size()];
    DocIterator iterator = docs.iterator();
    for (int i = 0; i < docIDs.length; i++) {
      if (!iterator.hasNext()) {
        // DocList promised more documents than its iterator yields
        throw new AssertionError();
      }
      docIDs[i] = iterator.nextDoc();
    }
    if (iterator.hasNext()) {
      // DocList yielded more documents than its reported size
      throw new AssertionError();
    }
    return docIDs;
  }

  /** Retrieves the unique keys for the topdocs to key the results.
   *  Returns an array of nulls when the schema declares no unique key field. */
  protected String[] getUniqueKeys(SolrIndexSearcher searcher, int[] docIDs) throws IOException {
    IndexSchema schema = searcher.getSchema();
    SchemaField keyField = schema.getUniqueKeyField();
    if (keyField != null) {
      // only load the key field from each stored document
      Set<String> selector = Collections.singleton(keyField.getName());
      String uniqueKeys[] = new String[docIDs.length];
      for (int i = 0; i < docIDs.length; i++) {
        int docid = docIDs[i];
        Document doc = searcher.doc(docid, selector);
        String id = schema.printableUniqueKey(doc);
        uniqueKeys[i] = id;
      }
      return uniqueKeys;
    } else {
      return new String[docIDs.length];
    }
  }

  /** PostingsHighlighter subclass returned by
   *  {@link #getHighlighter(org.apache.solr.request.SolrQueryRequest)}; it resolves
   *  every extension point from per-request (and per-field) Solr parameters. */
  public class SolrExtendedPostingsHighlighter extends PostingsHighlighter {
    protected final SolrParams params;
    protected final IndexSchema schema;

    public SolrExtendedPostingsHighlighter(SolrQueryRequest req) {
      // hl.maxAnalyzedChars bounds how much of each document is processed
      super(req.getParams().getInt(HighlightParams.MAX_CHARS, PostingsHighlighter.DEFAULT_MAX_LENGTH));
      this.params = req.getParams();
      this.schema = req.getSchema();
    }

    @Override
    protected Passage[] getEmptyHighlight(String fieldName, BreakIterator bi, int maxPassages) {
      // hl.defaultSummary (default true): fall back to leading passages when nothing matched
      boolean defaultSummary = params.getFieldBool(fieldName, HighlightParams.DEFAULT_SUMMARY, true);
      if (defaultSummary) {
        return super.getEmptyHighlight(fieldName, bi, maxPassages);
      } else {
        //TODO reuse logic of DefaultSolrHighlighter.alternateField
        return new Passage[0];
      }
    }

    @Override
    protected PassageFormatter getFormatter(String fieldName) {
      String preTag = params.getFieldParam(fieldName, HighlightParams.TAG_PRE, "<em>");
      String postTag = params.getFieldParam(fieldName, HighlightParams.TAG_POST, "</em>");
      String ellipsis = params.getFieldParam(fieldName, HighlightParams.TAG_ELLIPSIS, "... ");
      String encoder = params.getFieldParam(fieldName, HighlightParams.ENCODER, "simple");
      return new DefaultPassageFormatter(preTag, postTag, ellipsis, "html".equals(encoder));
    }

    @Override
    protected PassageScorer getScorer(String fieldName) {
      // BM25-style parameters, see class javadoc for hl.score.*
      float k1 = params.getFieldFloat(fieldName, HighlightParams.SCORE_K1, 1.2f);
      float b = params.getFieldFloat(fieldName, HighlightParams.SCORE_B, 0.75f);
      float pivot = params.getFieldFloat(fieldName, HighlightParams.SCORE_PIVOT, 87f);
      return new PassageScorer(k1, b, pivot);
    }

    @Override
    protected BreakIterator getBreakIterator(String field) {
      String language = params.getFieldParam(field, HighlightParams.BS_LANGUAGE);
      String country = params.getFieldParam(field, HighlightParams.BS_COUNTRY);
      String variant = params.getFieldParam(field, HighlightParams.BS_VARIANT);
      Locale locale = parseLocale(language, country, variant);
      String type = params.getFieldParam(field, HighlightParams.BS_TYPE);
      return parseBreakIterator(type, locale);
    }

    @Override
    protected char getMultiValuedSeparator(String field) {
      // displayed when a multi-valued field's values are concatenated; must be a single char
      String sep = params.getFieldParam(field, HighlightParams.MULTI_VALUED_SEPARATOR, " ");
      if (sep.length() != 1) {
        throw new IllegalArgumentException(HighlightParams.MULTI_VALUED_SEPARATOR + " must be exactly one character.");
      }
      return sep.charAt(0);
    }

    @Override
    protected Analyzer getIndexAnalyzer(String field) {
      // returning an analyzer enables multi-term (wildcard/fuzzy/range) highlighting
      if (params.getFieldBool(field, HighlightParams.HIGHLIGHT_MULTI_TERM, false)) {
        return schema.getIndexAnalyzer();
      } else {
        return null;
      }
    }
  }

  /** parse a break iterator type for the specified locale.
   *  A null type defaults to SENTENCE; unknown types raise IllegalArgumentException. */
  protected BreakIterator parseBreakIterator(String type, Locale locale) {
    if (type == null || "SENTENCE".equals(type)) {
      return BreakIterator.getSentenceInstance(locale);
    } else if ("LINE".equals(type)) {
      return BreakIterator.getLineInstance(locale);
    } else if ("WORD".equals(type)) {
      return BreakIterator.getWordInstance(locale);
    } else if ("CHARACTER".equals(type)) {
      return BreakIterator.getCharacterInstance(locale);
    } else if ("WHOLE".equals(type)) {
      return new WholeBreakIterator();
    } else {
      throw new IllegalArgumentException("Unknown " + HighlightParams.BS_TYPE + ": " + type);
    }
  }

  /** parse a locale from a language+country+variant spec.
   *  All-null inputs yield the root locale; a variant without a country is rejected.
   *  NOTE(review): country-without-language and variant-without-language fall through to
   *  the single-argument Locale constructors with a null language — presumably those
   *  combinations never occur in practice; confirm against callers. */
  protected Locale parseLocale(String language, String country, String variant) {
    if (language == null && country == null && variant == null) {
      return Locale.ROOT;
    } else if (language != null && country == null && variant != null) {
      throw new IllegalArgumentException("To specify variant, country is required");
    } else if (language != null && country != null && variant != null) {
      return new Locale(language, country, variant);
    } else if (language != null && country != null) {
      return new Locale(language, country);
    } else {
      return new Locale(language);
    }
  }
}
/** * Copyright notice * * This file is part of the Processing library `gwoptics' * http://www.gwoptics.org/processing/gwoptics_p5lib/ * * Copyright (C) 2009 onwards Daniel Brown and Andreas Freise * * This library is free software; you can redistribute it and/or modify it under * the terms of the GNU Lesser General Public License version 2.1 as published * by the Free Software Foundation. * * This library is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS * FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more * details. * * You should have received a copy of the GNU Lesser General Public License * along with this library; if not, write to the Free Software Foundation, Inc., * 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package org.gwoptics.graphics.graph3D; import org.gwoptics.graphics.GWColour; import org.gwoptics.graphics.Renderable; import processing.core.PApplet; import processing.core.PConstants; /** * <p> SquareGridMesh is a square shaped grid with variable resolution along * both sides. Each point can have its height set allowing surfaces to be * generated. This object is originally designed to work with the SurfaceGraph3D * control, but is Usable without this for other purposes. </p> * * <p> Each point is coloured according to a supplied IColourmap, solid fill * colour or by wireframe. The colour and point positions are both stored in 2 * arrays. Default values are a resolution along both sides of 10 squares each * 10 units in length. </p> * * <p> <b>Important Note:</b> It must be noted that although due to notation the * Z axis points upwards, in world space the Y axis points up. Therefore Z <-> Y * when it comes to plotting data. 
</p> * * @author Daniel Brown 8/6/09 * @since 0.1.1 * @see Renderable * @see IColourmap */ public class SquareGridMesh extends Renderable { private int _X_size = 11; //number of points along x axis private int _Y_size = 11; //number of points along z axis private float _dx = 10; //x distance between points private float _dy = 10; //z distance between points protected float[][][] _vertexs; protected int[][][] _colour; /** * Specifies whether to use a colourmap to colour each vertex */ public boolean isColoured; /** * If no colourmap is provided this is the stroke colour for the grid. Only * applies if isColoured is false. */ public GWColour strokeColour; /** * States whether to fill the grid using fillColour. Only applies if * isColoured is false. */ public boolean isFilled; /** * If no colourmap is provided this is the fill colour for the grid. Only * applies if isColoured is false. */ public GWColour fillColour; /** * States whether to stroke the grid using strokeColour. Only applies if * isColoured is false. */ public boolean isStroked; public float getWidth() { return _dx * (_X_size - 1); } public float getLength() { return _dy * (_Y_size - 1); } /** * Allows user to specify dimensions of the grid needed. By default the grid * is rendered in wireframe mode with stroke colour as white. 
* * @param X number of squares along the x axis * @param Y number of squares along the y axis * @param dx size of square in x direction * @param dy size of square in y direction * @param parent PApplet that the grid is rendered in */ public SquareGridMesh(int X, int Y, float dx, float dz, PApplet parent) { super(parent); if (X <= 0 || Y <= 0) { throw new IllegalArgumentException("Grid size dimensions should be greater than 0."); } //Add one to resolution so it determines the number of squares not points _X_size = X + 1; _Y_size = Y + 1; _dx = dx; _dy = dz; isColoured = false; isFilled = false; isStroked = true; strokeColour = new GWColour(1, 1, 1); fillColour = new GWColour(0.5f, 0.5f, 0.5f); _vertexs = new float[_X_size][_Y_size][3]; _colour = new int[_X_size][_Y_size][3]; //as the x and z components never change on the grid just the y we set //all the values now and then the y later for (int i = 0; i < _vertexs.length; i++) { for (int j = 0; j < _vertexs[0].length; j++) { _vertexs[i][j][0] = i * _dx; _vertexs[i][j][2] = j * _dy; } } } /** * This function sets the Z value of a given point at X and Y. X and Y both * refer to the index of the point. * * @param X * @param Y * @param Z */ public void setZValue(int X, int Y, float Z) { if (X < 0 || Y < 0 || X > _X_size - 1 || Y > _Y_size - 1) { throw new ArrayIndexOutOfBoundsException(); } _vertexs[X][Y][1] = Z;//remember plot Z as Y } /** * Sets the colour of a given point at indexes X and Y. * * @param X * @param Y * @param c */ public void setVertexColour(int X, int Y, GWColour c) { if (X < 0 || Y < 0 || X > _X_size - 1 || Y > _Y_size - 1) { throw new ArrayIndexOutOfBoundsException(); } _colour[X][Y][0] = (int) (c.R * 255); _colour[X][Y][1] = (int) (c.G * 255); _colour[X][Y][2] = (int) (c.B * 255); } /** * Returns height of point at index X and Y. 
*/ public float getZValue(int X, int Y) { return _vertexs[X][Y][1]; } public void draw() { _parent.pushMatrix(); if (isColoured) { _ColouredDraw(); } else { _noColourDraw(); } _parent.popMatrix(); } /** * This function is used when no colourmap is given */ private void _noColourDraw() { //moved these 2 check outside the loop if (isFilled) { _parent.fill(fillColour.toInt()); } else { _parent.noFill(); } if (isStroked) { _parent.stroke(strokeColour.toInt()); } else { _parent.noStroke(); } for (int i = 0; i < _vertexs.length - 1; i++) { _parent.beginShape(PConstants.TRIANGLE_STRIP); for (int j = 0; j < _vertexs[0].length; j++) { //as in setZValue() earlier we saved the 'Z' as Y component we simply plot // the points as normal now _parent.vertex(_vertexs[i][j][0], _vertexs[i][j][1], _vertexs[i][j][2]); _parent.vertex(_vertexs[i + 1][j][0], _vertexs[i + 1][j][1], _vertexs[i + 1][j][2]); } _parent.endShape(); } } /** * This function is used when colours have been applied to each vertex */ private void _ColouredDraw() { int k; _parent.noStroke(); for (int i = 0; i < _vertexs.length - 1; i++) { _parent.beginShape(PConstants.TRIANGLE_STRIP); for (int j = 0; j < _vertexs[0].length; j++) { k = i + 1; //as in setZValue() earlier we saved the 'Z' as Y component we simply plot // the points as normal now _parent.fill(_colour[i][j][0], _colour[i][j][1], _colour[i][j][2]); _parent.vertex(_vertexs[i][j][0], _vertexs[i][j][1], _vertexs[i][j][2]); _parent.fill(_colour[k][j][0], _colour[k][j][1], _colour[k][j][2]); _parent.vertex(_vertexs[i + 1][j][0], _vertexs[i + 1][j][1], _vertexs[i + 1][j][2]); } _parent.endShape(); } } /** * Returns number of squares along X side. */ public int sizeX() { return _X_size - 1; }//1 is subtracted as we added one in the constructor //for the total number of points rather than squares. /** * Returns number of squares along Z side. */ public int sizeY() { return _Y_size - 1; } }
package com.ad.zakatrizki.fragment; import android.app.ProgressDialog; import android.content.Context; import android.content.Intent; import android.os.Bundle; import android.support.design.widget.CoordinatorLayout; import android.support.design.widget.FloatingActionButton; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentActivity; import android.support.v4.app.FragmentManager; import android.support.v4.widget.SwipeRefreshLayout; import android.support.v7.widget.CardView; import android.support.v7.widget.DefaultItemAnimator; import android.support.v7.widget.GridLayoutManager; import android.support.v7.widget.RecyclerView; import android.text.Html; import android.text.InputType; import android.util.DisplayMetrics; import android.util.Log; import android.util.TypedValue; import android.view.KeyEvent; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.view.inputmethod.InputMethodManager; import android.widget.EditText; import android.widget.ProgressBar; import android.widget.TextView; import com.ad.zakatrizki.R; import com.ad.zakatrizki.Zakat; import com.ad.zakatrizki.activity.CalonMustahiqDetailActivity; import com.ad.zakatrizki.activity.CariCalonMustahiqActivity; import com.ad.zakatrizki.activity.DrawerActivity; import com.ad.zakatrizki.adapter.CalonMustahiqAdapter; import com.ad.zakatrizki.model.CalonMustahiq; import com.ad.zakatrizki.model.Mustahiq; import com.ad.zakatrizki.model.Refresh; import com.ad.zakatrizki.utils.ApiHelper; import com.ad.zakatrizki.utils.CustomVolley; import com.ad.zakatrizki.utils.Prefs; import com.ad.zakatrizki.utils.TextUtils; import com.android.volley.Request; import com.android.volley.RequestQueue; import com.joanzapata.iconify.IconDrawable; import com.joanzapata.iconify.fonts.MaterialCommunityIcons; import com.joanzapata.iconify.fonts.MaterialIcons; import com.joanzapata.iconify.widget.IconButton; import com.mugen.Mugen; import 
com.mugen.MugenCallbacks; import com.sdsmdg.tastytoast.TastyToast; import org.greenrobot.eventbus.EventBus; import org.greenrobot.eventbus.Subscribe; import org.greenrobot.eventbus.ThreadMode; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import java.util.ArrayList; import butterknife.BindBool; import butterknife.BindView; import butterknife.ButterKnife; import butterknife.OnClick; import butterknife.Unbinder; import static android.content.Context.INPUT_METHOD_SERVICE; public class CalonMustahiqListFragment extends Fragment implements CalonMustahiqAdapter.OnCalonMustahiqItemClickListener, SwipeRefreshLayout.OnRefreshListener, AddRatingFragment.RatingListener, CustomVolley.OnCallbackResponse, ManageCalonMustahiqFragment.AddEditCalonMustahiqListener { private static final String TAG_MORE = "TAG_MORE"; private static final String TAG_AWAL = "TAG_AWAL"; private static final String TAG_NEW = "TAG_NEW"; private static final String TAG_DELETE = "TAG_DELETE"; private static final String TAG_ATAS = "atas"; private static final String TAG_BAWAH = "bawah"; public CalonMustahiqAdapter adapterCalonMustahiq; @BindBool(R.bool.is_tablet) boolean isTablet; @BindView(R.id.recyclerview) RecyclerView recyclerView; @BindView(R.id.swipe_container) SwipeRefreshLayout swipeContainer; @BindView(R.id.progress_more_data) ProgressBar progressMoreData; @BindView(R.id.no_data) IconButton noData; @BindView(R.id.fab_scroll_up) FloatingActionButton fabScrollUp; @BindView(R.id.fab_action) com.github.clans.fab.FloatingActionButton fabAction; @BindView(R.id.coordinatorLayout) CoordinatorLayout coordinatorLayout; //error @BindView(R.id.error_message) View errorMessage; @BindView(R.id.loading) ProgressBar loading; @BindView(R.id.text_error) TextView textError; @BindView(R.id.try_again) TextView tryAgain; @BindView(R.id.search) EditText search; @BindView(R.id.parent_search) CardView parentSearch; private ArrayList<CalonMustahiq> dataCalonMustahiqs = new 
ArrayList<>(); private GridLayoutManager mLayoutManager; private Integer position_delete; private ProgressDialog dialogProgress; private FragmentActivity activity; private Unbinder butterknife; private boolean isFinishLoadingAwalData = true; private boolean isLoadingMoreData = false; private boolean isFinishMoreData = false; private int page = 1; private boolean isRefresh = false; private CustomVolley customVolley; private RequestQueue queue; private int mPreviousVisibleItem; public CalonMustahiqListFragment() { } /** * Returns a new instance of this fragment for the given section * number. */ public static CalonMustahiqListFragment newInstance() { CalonMustahiqListFragment fragment = new CalonMustahiqListFragment(); return fragment; } // private String session_key; void ScrollUp() { recyclerView.smoothScrollToPosition(0); } @OnClick(R.id.fab_action) void AddCalonMustahiq() { FragmentManager fragmentManager = getChildFragmentManager(); ManageCalonMustahiqFragment manageCalonMustahiq = new ManageCalonMustahiqFragment(); manageCalonMustahiq.setTargetFragment(this, 0); manageCalonMustahiq.setCancelable(false); manageCalonMustahiq.setDialogTitle("Calon Mustahiq"); manageCalonMustahiq.setAction("add"); manageCalonMustahiq.show(fragmentManager, "Manage Calon Mustahiq"); } @OnClick(R.id.try_again) void TryAgain() { RefreshData(); } @OnClick(R.id.btn_search) void btn_search() { Search(); } @Override public void onAttach(Context context) { super.onAttach(context); if (context instanceof DrawerActivity) { // activity = (DrawerActivity) context; } activity = getActivity(); } private String keyword = null; @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View rootView = inflater.inflate(R.layout.fragment_list, container, false); butterknife = ButterKnife.bind(this, rootView); customVolley = new CustomVolley(activity); customVolley.setOnCallbackResponse(this); try { keyword = getArguments().getString(Zakat.KEYWORD); } 
catch (Exception e) { } // Configure the swipe refresh layout swipeContainer.setOnRefreshListener(this); swipeContainer.setColorSchemeResources(R.color.blue_light, R.color.green_light, R.color.orange_light, R.color.red_light); TypedValue typed_value = new TypedValue(); activity.getTheme().resolveAttribute(android.support.v7.appcompat.R.attr.actionBarSize, typed_value, true); swipeContainer.setProgressViewOffset(false, 0, getResources().getDimensionPixelSize(typed_value.resourceId)); //search search.setHint("Cari Nama atau Alamat Calon Mustahiq..."); search.setInputType(InputType.TYPE_CLASS_TEXT); search.setOnEditorActionListener(new TextView.OnEditorActionListener() { @Override public boolean onEditorAction(TextView textView, int i, KeyEvent keyEvent) { Search(); return false; } }); hideSoftKeyboard(); if (!TextUtils.isNullOrEmpty(keyword)) parentSearch.setVisibility(View.GONE); //inisial adapterMustahiq adapterCalonMustahiq = new CalonMustahiqAdapter(activity, dataCalonMustahiqs, isTablet); adapterCalonMustahiq.setValSearchAlamat(keyword); adapterCalonMustahiq.setOnCalonMustahiqItemClickListener(this); //recyclerView recyclerView.setItemAnimator(new DefaultItemAnimator()); recyclerView.setHasFixedSize(true); //inisial layout manager /* int grid_column_count = getResources().getInteger(R.integer.grid_column_count); StaggeredGridLayoutManager mLayoutManager = new StaggeredGridLayoutManager(grid_column_count, StaggeredGridLayoutManager.VERTICAL); */ // final LinearLayoutManager mLayoutManager = new LinearLayoutManager(getActivity()); // mLayoutManager.setOrientation(LinearLayoutManager.VERTICAL); mLayoutManager = new GridLayoutManager(activity, getNumberOfColumns()); // set layout manager recyclerView.setLayoutManager(mLayoutManager); // set adapterCalonMustahiq recyclerView.setAdapter(adapterCalonMustahiq); //handle ringkas dataCalonMustahiqs Mugen.with(recyclerView, new MugenCallbacks() { @Override public void onLoadMore() { if (isFinishLoadingAwalData && 
!isFinishMoreData && adapterCalonMustahiq.getItemCount() > 0) { getDataFromServer(TAG_MORE); } } @Override public boolean isLoading() { return isLoadingMoreData; } @Override public boolean hasLoadedAllItems() { return false; } }).start(); //setup fab fabAction.setImageDrawable( new IconDrawable(getActivity(), MaterialIcons.md_add) .colorRes(R.color.white) .actionBarSize()); fabScrollUp.setImageDrawable( new IconDrawable(getActivity(), MaterialCommunityIcons.mdi_arrow_up) .colorRes(R.color.primary)); if (Prefs.getTipeUser(getActivity()).equalsIgnoreCase("2")) { recyclerView.addOnScrollListener(new RecyclerView.OnScrollListener() { @Override public void onScrolled(RecyclerView recyclerView, int dx, int dy) { super.onScrolled(recyclerView, dx, dy); int firstVisibleItem = mLayoutManager.findFirstVisibleItemPosition(); if (firstVisibleItem > mPreviousVisibleItem) { fabAction.hide(true); } else if (firstVisibleItem < mPreviousVisibleItem) { fabAction.show(true); } mPreviousVisibleItem = firstVisibleItem; } }); fabAction.setVisibility(View.VISIBLE); } noData.setText(Html.fromHtml("<center><h1>{mdi-calendar}</h1></br> Tidak ada calon mustahiq ...</center>")); showNoData(false); /* ========================================================================================= ==================== Get Data List (CalonMustahiq) ================================================ ============================================================================================*/ if (savedInstanceState == null || !savedInstanceState.containsKey(Zakat.CALON_MUSTAHIQ_ID)) { getDataFromServer(TAG_AWAL); } else { dataCalonMustahiqs = savedInstanceState.getParcelableArrayList(Zakat.CALON_MUSTAHIQ_ID); page = savedInstanceState.getInt(Zakat.PAGE); isLoadingMoreData = savedInstanceState.getBoolean(Zakat.IS_LOADING_MORE_DATA); isFinishLoadingAwalData = savedInstanceState.getBoolean(Zakat.IS_FINISH_LOADING_AWAL_DATA); if (!isFinishLoadingAwalData) { getDataFromServer(TAG_AWAL); } else if 
(isLoadingMoreData) { adapterCalonMustahiq.notifyDataSetChanged(); checkData(); getDataFromServer(TAG_MORE); } else { adapterCalonMustahiq.notifyDataSetChanged(); checkData(); } } /* ========================================================================================= ==================== End Get Data List (CalonMustahiq) ============================================ ============================================================================================*/ return rootView; } @Override public void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); if (mLayoutManager != null && adapterCalonMustahiq != null) { outState.putBoolean(Zakat.IS_FINISH_LOADING_AWAL_DATA, isFinishLoadingAwalData); outState.putBoolean(Zakat.IS_LOADING_MORE_DATA, isLoadingMoreData); outState.putInt(Zakat.PAGE, page); outState.putParcelableArrayList(Zakat.data, dataCalonMustahiqs); } } private void showProgresMore(boolean show) { if (show) { progressMoreData.setVisibility(View.VISIBLE); } else { progressMoreData.setVisibility(View.GONE); } } private void showNoData(boolean show) { if (show) { noData.setVisibility(View.VISIBLE); } else { noData.setVisibility(View.GONE); } } private void ProgresRefresh(boolean show) { if (show) { swipeContainer.setRefreshing(true); swipeContainer.setEnabled(false); } else { swipeContainer.setEnabled(true); swipeContainer.setRefreshing(false); } } private void getDataFromServer(final String TAG) { /*queue = customVolley.Rest(Request.Method.GET, Zakat.api_test + "?" 
+ Zakat.app_key + "=" + Zakat.value_app_key + "&session_key=" + session_key + "&PAGE=" + PAGE + "&limit=" + Zakat.LIMIT_DATA, null, TAG);*/ queue = customVolley.Rest(Request.Method.GET, getUrlToDownload(page), null, TAG); } public String getUrlToDownload(int page) { return ApiHelper.getCalonMustahiqLink(getActivity(), page, keyword); } protected void DrawDataAllData(String position, String tag, String response) { try { if (isRefresh) { adapterCalonMustahiq.delete_all(); } JSONObject json = new JSONObject(response); Boolean isSuccess = Boolean.parseBoolean(json.getString(Zakat.isSuccess)); String message = json.getString(Zakat.message); if (isSuccess) { JSONArray items_obj = json.getJSONArray(Zakat.calon_mustahiq); int jumlah_list_data = items_obj.length(); if (jumlah_list_data > 0) { for (int i = 0; i < jumlah_list_data; i++) { JSONObject obj = items_obj.getJSONObject(i); setDataObject(position, obj); } adapterCalonMustahiq.notifyDataSetChanged(); } else { switch (tag) { case TAG_MORE: isFinishMoreData = true; // TastyToast.makeText(activity, "tidak ada dataCalonMustahiqs lama...", TastyToast.LENGTH_LONG, TastyToast.INFO); break; case TAG_AWAL: // TastyToast.makeText(activity, "tidak ada dataCalonMustahiqs...", TastyToast.LENGTH_LONG, TastyToast.INFO); break; case TAG_NEW: // TastyToast.makeText(activity, "tidak ada dataCalonMustahiqs baru...", TastyToast.LENGTH_LONG, TastyToast.INFO); break; } } if (isTablet && page == 1 && adapterCalonMustahiq.data.size() > 0) { adapterCalonMustahiq.setSelected(0); ((DrawerActivity) getActivity()).loadDetailCalonMustahiqFragmentWith(adapterCalonMustahiq.data.get(0).id_calon_mustahiq); } page = page + 1; } else { TastyToast.makeText(activity, message, TastyToast.LENGTH_LONG, TastyToast.SUCCESS); } checkData(); } catch (JSONException e) { e.printStackTrace(); TastyToast.makeText(activity, "Parsing dataCalonMustahiqs error ...", TastyToast.LENGTH_LONG, TastyToast.ERROR); } } private void checkData() { if 
(adapterCalonMustahiq.getItemCount() > 0) { showNoData(false); } else { showNoData(true); } } private void ResponeDelete(String response) { try { JSONObject json = new JSONObject(response); Boolean isSuccess = Boolean.parseBoolean(json.getString(Zakat.isSuccess)); String message = json.getString(Zakat.message); if (isSuccess) { adapterCalonMustahiq.remove(position_delete); checkData(); } else { TastyToast.makeText(activity, message, TastyToast.LENGTH_LONG, TastyToast.SUCCESS); } } catch (JSONException e) { e.printStackTrace(); TastyToast.makeText(activity, "Parsing dataCalonMustahiqs error ...", TastyToast.LENGTH_LONG, TastyToast.ERROR); } } private void setDataObject(String position, JSONObject obj) throws JSONException { //parse object String id_calon_mustahiq = obj.getString(Zakat.id_calon_mustahiq); String nama_calon_mustahiq = obj.getString(Zakat.nama_calon_mustahiq); String alamat_calon_mustahiq = obj.getString(Zakat.alamat_calon_mustahiq); String latitude_calon_mustahiq = obj.getString(Zakat.latitude_calon_mustahiq); String longitude_calon_mustahiq = obj.getString(Zakat.longitude_calon_mustahiq); String no_identitas_calon_mustahiq = obj.getString(Zakat.no_identitas_calon_mustahiq); String no_telp_calon_mustahiq = obj.getString(Zakat.no_telp_calon_mustahiq); String jumlah_anak_calon_mustahiq = obj.getString(Zakat.jumlah_anak_calon_mustahiq); String status_pernikahan_calon_mustahiq = obj.getString(Zakat.status_pernikahan_calon_mustahiq); String status_tempat_tinggal_calon_mustahiq = obj.getString(Zakat.status_tempat_tinggal_calon_mustahiq); String status_pekerjaan_calon_mustahiq = obj.getString(Zakat.status_pekerjaan_calon_mustahiq); String id_user_perekomendasi = obj.getString(Zakat.id_user_perekomendasi); String nama_perekomendasi_calon_mustahiq = obj .getString(Zakat.nama_perekomendasi_calon_mustahiq); String alasan_perekomendasi_calon_mustahiq = obj .getString(Zakat.alasan_perekomendasi_calon_mustahiq); String photo_1 = obj .getString(Zakat.photo_1); 
String photo_2 = obj .getString(Zakat.photo_2); String photo_3 = obj .getString(Zakat.photo_3); String caption_photo_1 = obj .getString(Zakat.caption_photo_1); String caption_photo_2 = obj .getString(Zakat.caption_photo_2); String caption_photo_3 = obj .getString(Zakat.caption_photo_3); String status_calon_mustahiq = obj.getString(Zakat.status_calon_mustahiq); String jumlah_rating = obj.getString(Zakat.jumlah_rating); String jumlah_rating_amil_zakat = obj.getString(Zakat.jumlah_rating_amil_zakat); Log.v("jumlah_rating", jumlah_rating + ""); //set map object AddAndSetMapData( position, id_calon_mustahiq, nama_calon_mustahiq, alamat_calon_mustahiq, latitude_calon_mustahiq, longitude_calon_mustahiq, no_identitas_calon_mustahiq, no_telp_calon_mustahiq, jumlah_anak_calon_mustahiq, status_pernikahan_calon_mustahiq, status_tempat_tinggal_calon_mustahiq, status_pekerjaan_calon_mustahiq, id_user_perekomendasi, nama_perekomendasi_calon_mustahiq, alasan_perekomendasi_calon_mustahiq, photo_1, photo_2, photo_3, caption_photo_1, caption_photo_2, caption_photo_3, status_calon_mustahiq, jumlah_rating, jumlah_rating_amil_zakat ); } private void AddAndSetMapData( String position, String id_calon_mustahiq, String nama_calon_mustahiq, String alamat_calon_mustahiq, String latitude_calon_mustahiq, String longitude_calon_mustahiq, String no_identitas_calon_mustahiq, String no_telp_calon_mustahiq, String jumlah_anak_calon_mustahiq, String status_pernikahan_calon_mustahiq, String status_tempat_tinggal_calon_mustahiq, String status_pekerjaan_calon_mustahiq, String id_user_perekomendasi, String nama_perekomendasi_calon_mustahiq, String alasan_perekomendasi_calon_mustahiq, String photo_1, String photo_2, String photo_3, String caption_photo_1, String caption_photo_2, String caption_photo_3, String status_calon_mustahiq, String jumlah_rating, String jumlah_rating_amil_zakat) { CalonMustahiq calon_mustahiq = new CalonMustahiq( id_calon_mustahiq, nama_calon_mustahiq, alamat_calon_mustahiq, 
latitude_calon_mustahiq, longitude_calon_mustahiq, no_identitas_calon_mustahiq, no_telp_calon_mustahiq, jumlah_anak_calon_mustahiq, status_pernikahan_calon_mustahiq, status_tempat_tinggal_calon_mustahiq, status_pekerjaan_calon_mustahiq, id_user_perekomendasi, nama_perekomendasi_calon_mustahiq, alasan_perekomendasi_calon_mustahiq, photo_1, photo_2, photo_3, caption_photo_1, caption_photo_2, caption_photo_3, status_calon_mustahiq, jumlah_rating, jumlah_rating_amil_zakat ); if (position.equals(TAG_BAWAH)) { dataCalonMustahiqs.add(calon_mustahiq); } else { dataCalonMustahiqs.add(0, calon_mustahiq); } } @Override public void onRefresh() { RefreshData(); } public void RefreshData() { // if (adapterCalonMustahiq.getItemCount() > 1) { isRefresh = true; isLoadingMoreData = false; isFinishLoadingAwalData = true; isFinishMoreData = false; page = 1; showNoData(false); /* } else { isRefresh = false; }*/ getDataFromServer(TAG_AWAL); } private void startProgress(String TAG) { if (TAG.equals(TAG_DELETE)) { TAG = "Delete CalonMustahiq"; } dialogProgress = ProgressDialog.show(getActivity(), TAG, "Please wait...", true); } private void stopProgress(String TAG) { if (dialogProgress != null) dialogProgress.dismiss(); } @Override public void onVolleyStart(String TAG) { if (TAG.equals(TAG_DELETE)) { startProgress(TAG_DELETE); } else { showProgresMore(false); if (TAG.equals(TAG_AWAL)) { ProgresRefresh(true); isFinishLoadingAwalData = false; errorMessage.setVisibility(View.GONE); if (adapterCalonMustahiq.getItemCount() == 0) { loading.setVisibility(View.VISIBLE); } } else { if (TAG.equals(TAG_MORE)) { isLoadingMoreData = true; isFinishMoreData = false; showProgresMore(true); } } } } @Override public void onVolleyEnd(String TAG) { if (TAG.equals(TAG_DELETE)) { stopProgress(TAG_DELETE); } else { ProgresRefresh(false); if (TAG.equals(TAG_AWAL)) { loading.setVisibility(View.GONE); } } } @Override public void onVolleySuccessResponse(String TAG, String response) { if (TAG.equals(TAG_DELETE)) { 
ResponeDelete(response); } else { if (TAG.equals(TAG_AWAL)) { errorMessage.setVisibility(View.GONE); DrawDataAllData(TAG_BAWAH, TAG, response); isFinishLoadingAwalData = true; } if (TAG.equals(TAG_MORE)) { DrawDataAllData(TAG_BAWAH, TAG, response); isLoadingMoreData = false; } if (TAG.equals(TAG_NEW)) { DrawDataAllData(TAG_ATAS, TAG, response); } isRefresh = false; showProgresMore(false); } } @Override public void onVolleyErrorResponse(String TAG, String response) { if (TAG.equals(TAG_DELETE)) { TastyToast.makeText(activity, "Error hapus calon mustahiq...", TastyToast.LENGTH_LONG, TastyToast.ERROR); } else { if (TAG.equals(TAG_AWAL)) { isFinishLoadingAwalData = false; if (adapterCalonMustahiq.getItemCount() == 0) { errorMessage.setVisibility(View.VISIBLE); } else { errorMessage.setVisibility(View.GONE); } } if (TAG.equals(TAG_MORE)) { isFinishMoreData = false; isLoadingMoreData = false; showProgresMore(false); } } } @Override public void onDestroyView() { super.onDestroyView(); butterknife.unbind(); if (queue != null) { queue.cancelAll(TAG_AWAL); queue.cancelAll(TAG_MORE); queue.cancelAll(TAG_NEW); queue.cancelAll(TAG_DELETE); } } @Override public void onActionClick(View v, int position) { int viewId = v.getId(); /* if (viewId == R.id.btn_action) { OpenAtion(v, position); }*/ } @Override public void onRootClick(View v, int position) { position_delete = position; if (isTablet) { adapterCalonMustahiq.setSelected(position); ((DrawerActivity) getActivity()).loadDetailCalonMustahiqFragmentWith(adapterCalonMustahiq.data.get(position).id_calon_mustahiq); } else { Intent intent = new Intent(activity, CalonMustahiqDetailActivity.class); intent.putExtra(Zakat.CALON_MUSTAHIQ_ID, adapterCalonMustahiq.data.get(position).id_calon_mustahiq); startActivity(intent); } } /* public void OpenAtion(View v, final int position) { final String id_calon_mustahiq = adapterCalonMustahiq.dataCalonMustahiqs.get(position).id_calon_mustahiq PopupMenu popup = new PopupMenu(activity, v, 
Gravity.RIGHT); popup.getMenuInflater().inflate(R.menu.action_manage, popup.getMenu()); popup.setOnMenuItemClickListener(new PopupMenu.OnMenuItemClickListener() { public boolean onMenuItemClick(MenuItem item) { int which = item.getItemId(); if (which == R.id.action_edit) { Intent myIntent = new Intent(getActivity(), actionEditActivity.class); activity.startActivityForResult(myIntent, 1); } if (which == R.id.action_delete) { new AlertDialog.Builder(getActivity()) .setIcon( new IconDrawable(getActivity(), MaterialCommunityIcons.mdi_alert_octagon) .colorRes(R.color.primary) .actionBarSize()) .setTitle("Hapus CalonMustahiq") .setMessage("Apa anda yakin akan menghapus calon_mustahiq ini?") .setPositiveButton("Ya", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { position_delete = position; queue = customVolley.Rest(Request.Method.GET, ApiHelper.getDeleteCalonMustahiqLink(getActivity(), idgambar), null, TAG_DELETE); } }) .setNegativeButton("Tidak", null) .show(); } return true; } }); // Force icons to show try { Field mFieldPopup = popup.getClass().getDeclaredField("mPopup"); mFieldPopup.setAccessible(true); MenuPopupHelper mPopup = (MenuPopupHelper) mFieldPopup.get(popup); mPopup.setForceShowIcon(true); } catch (Exception e) { Log.w("TAG", "error forcing menu icons to show", e); return; } popup.show(); }*/ public int getNumberOfColumns() { // Get screen width DisplayMetrics displayMetrics = getResources().getDisplayMetrics(); float widthPx = displayMetrics.widthPixels; if (isTablet) { widthPx = widthPx / 3; } // Calculate desired width float desiredPx = getResources().getDimensionPixelSize(R.dimen.movie_list_card_width); int columns = Math.round(widthPx / desiredPx); return columns > 1 ? 
columns : 1; } @Override public void onFinishEditCalonMustahiq(CalonMustahiq calon_mustahiq) { } @Override public void onFinishAddCalonMustahiq(CalonMustahiq calon_mustahiq) { adapterCalonMustahiq.data.add(0, calon_mustahiq); adapterCalonMustahiq.notifyDataSetChanged(); if (isTablet) { adapterCalonMustahiq.setSelected(0); ((DrawerActivity) getActivity()).loadDetailCalonMustahiqFragmentWith(adapterCalonMustahiq.data.get(0).id_calon_mustahiq); } FragmentManager fragmentManager = getChildFragmentManager(); AddRatingFragment add = new AddRatingFragment(); add.setTargetFragment(this, 0); add.setData(calon_mustahiq); add.show(fragmentManager, "Add Rating"); } @Override public void onFinishDeleteCalonMustahiq(CalonMustahiq calon_mustahiq) { } private void Search() { String val_search = search.getText().toString().trim(); if (!TextUtils.isNullOrEmpty(val_search)) { search.setText(""); Intent intent = new Intent(activity, CariCalonMustahiqActivity.class); intent.putExtra(Zakat.KEYWORD, val_search); startActivity(intent); } } public void hideSoftKeyboard() { if (getActivity().getCurrentFocus() != null) { InputMethodManager inputMethodManager = (InputMethodManager) getActivity().getSystemService(INPUT_METHOD_SERVICE); inputMethodManager.hideSoftInputFromWindow(getActivity().getCurrentFocus().getWindowToken(), 0); } } @Subscribe(sticky = true, threadMode = ThreadMode.MAIN) public void onRefresh(Refresh cp) { if (cp.getRefresh()) { adapterCalonMustahiq.remove(position_delete); } Refresh stickyEvent = EventBus.getDefault().getStickyEvent(Refresh.class); if (stickyEvent != null) { EventBus.getDefault().removeStickyEvent(stickyEvent); } } @Override public void onStart() { super.onStart(); EventBus.getDefault().register(this); } @Override public void onStop() { EventBus.getDefault().unregister(this); super.onStop(); } @Override public void onFinishRating(Mustahiq mustahiq) { } @Override public void onFinishRating(CalonMustahiq calonMustahiq) { 
adapterCalonMustahiq.notifyDataSetChanged(); } }
/* * Copyright 2012 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.execapp; import azkaban.Constants; import azkaban.event.Event; import azkaban.event.EventListener; import azkaban.execapp.event.FlowWatcher; import azkaban.execapp.event.LocalFlowWatcher; import azkaban.execapp.event.RemoteFlowWatcher; import azkaban.execapp.metric.NumFailedFlowMetric; import azkaban.executor.ExecutableFlow; import azkaban.executor.ExecutionOptions; import azkaban.executor.ExecutorLoader; import azkaban.executor.ExecutorManagerException; import azkaban.executor.Status; import azkaban.jobtype.JobTypeManager; import azkaban.jobtype.JobTypeManagerException; import azkaban.metric.MetricReportManager; import azkaban.project.ProjectLoader; import azkaban.project.ProjectWhitelist; import azkaban.project.ProjectWhitelist.WhitelistType; import azkaban.sla.SlaOption; import azkaban.spi.AzkabanEventReporter; import azkaban.spi.EventType; import azkaban.storage.StorageManager; import azkaban.utils.FileIOUtils; import azkaban.utils.FileIOUtils.JobMetaData; import azkaban.utils.FileIOUtils.LogData; import azkaban.utils.JSONUtils; import azkaban.utils.Pair; import azkaban.utils.Props; import azkaban.utils.ThreadPoolExecutingListener; import azkaban.utils.TrackingThreadPool; import java.io.File; import java.io.FilenameFilter; import java.io.IOException; import java.lang.Thread.State; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import 
java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.TimeUnit;
import javax.annotation.Nullable;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.apache.commons.io.FileUtils;
import org.apache.log4j.Logger;

/**
 * Execution manager for the server side execution.
 *
 * When a flow is submitted to FlowRunnerManager, it is in the {@link Status#PREPARING} status. When
 * a flow is about to be executed by FlowRunner, its status is updated to {@link Status#RUNNING}.
 *
 * Two main data structures are used in this class to maintain flows.
 *
 * runningFlows: this is used as a bookkeeping for submitted flows in FlowRunnerManager. It has
 * nothing to do with the executor service that is used to execute the flows. This bookkeeping is
 * used at the time of canceling or killing a flow. The flows in this data structure are removed in
 * the handleEvent method.
 *
 * submittedFlows: this is used to keep track of the execution of the flows, so it has the mapping
 * between a Future<?> and an execution id. This would allow us to find out the execution ids of the
 * flows that are in the Status.PREPARING status. The entries in this map are removed once the flow
 * execution is completed.
*/
@Singleton
public class FlowRunnerManager implements EventListener, ThreadPoolExecutingListener {

  private static final Logger logger = Logger.getLogger(FlowRunnerManager.class);

  // Property keys controlling the flow-executor thread pool.
  private static final String EXECUTOR_USE_BOUNDED_THREADPOOL_QUEUE =
      "executor.use.bounded.threadpool.queue";
  private static final String EXECUTOR_THREADPOOL_WORKQUEUE_SIZE =
      "executor.threadpool.workqueue.size";
  private static final String EXECUTOR_FLOW_THREADS = "executor.flow.threads";
  private static final String FLOW_NUM_JOB_THREADS = "flow.num.job.threads";

  // recently finished secs to clean up. 1 minute
  private static final int RECENTLY_FINISHED_TIME_TO_LIVE = 60 * 1000;

  private static final int DEFAULT_NUM_EXECUTING_FLOWS = 30;
  private static final int DEFAULT_FLOW_NUM_JOB_TREADS = 10;

  // this map is used to store the flows that have been submitted to
  // the executor service. Once a flow has been submitted, it is either
  // in the queue waiting to be executed or in executing state.
  private final Map<Future<?>, Integer> submittedFlows = new ConcurrentHashMap<>();
  // Bookkeeping of submitted flows, keyed by execution id (used for cancel/kill).
  private final Map<Integer, FlowRunner> runningFlows = new ConcurrentHashMap<>();
  private final Map<Integer, ExecutableFlow> recentlyFinishedFlows = new ConcurrentHashMap<>();
  private final Map<Pair<Integer, Integer>, ProjectVersion> installedProjects;
  private final TrackingThreadPool executorService;
  private final CleanerThread cleanerThread;
  private final ExecutorLoader executorLoader;
  private final ProjectLoader projectLoader;
  private final JobTypeManager jobtypeManager;
  private final FlowPreparer flowPreparer;
  private final TriggerManager triggerManager;
  private final AzkabanEventReporter azkabanEventReporter;
  private final Props azkabanProps;
  private final File executionDirectory;
  private final File projectDirectory;
  private final Object executionDirDeletionSync = new Object();

  private int numThreads = DEFAULT_NUM_EXECUTING_FLOWS;
  private int threadPoolQueueSize = -1;
  private int numJobThreadPerFlow = DEFAULT_FLOW_NUM_JOB_TREADS;

  private Props globalProps;
  private long lastCleanerThreadCheckTime = -1;
  private long executionDirRetention = 1 * 24 * 60 * 60 * 1000; // 1 Day
  // We want to limit the log sizes to about 20 megs
  private String jobLogChunkSize = "5MB";
  private int jobLogNumFiles = 4;
  // If true, jobs will validate proxy user against a list of valid proxy users.
  private boolean validateProxyUser = false;
  // date time of the the last flow submitted.
  private long lastFlowSubmittedDate = 0;
  // whether the current executor is active
  private volatile boolean isExecutorActive = false;

  /**
   * Wires loaders/managers, creates the execution and project directories
   * (setting the setgid bit on a freshly created execution dir), sizes the
   * flow thread pool from configuration, and starts the cleaner thread.
   *
   * @throws IOException if global properties cannot be read or the setgid
   *     subprocess cannot be spawned
   */
  @Inject
  public FlowRunnerManager(final Props props,
      final ExecutorLoader executorLoader,
      final ProjectLoader projectLoader,
      final StorageManager storageManager,
      final TriggerManager triggerManager,
      @Nullable final AzkabanEventReporter azkabanEventReporter) throws IOException {
    this.azkabanProps = props;
    this.executionDirRetention = props.getLong("execution.dir.retention",
        this.executionDirRetention);
    this.azkabanEventReporter = azkabanEventReporter;
    logger.info("Execution dir retention set to " + this.executionDirRetention + " ms");

    this.executionDirectory = new File(props.getString("azkaban.execution.dir", "executions"));
    if (!this.executionDirectory.exists()) {
      this.executionDirectory.mkdirs();
      setgidPermissionOnExecutionDirectory();
    }
    this.projectDirectory = new File(props.getString("azkaban.project.dir", "projects"));
    if (!this.projectDirectory.exists()) {
      this.projectDirectory.mkdirs();
    }

    this.installedProjects = loadExistingProjects();

    // azkaban.temp.dir
    this.numThreads = props.getInt(EXECUTOR_FLOW_THREADS, DEFAULT_NUM_EXECUTING_FLOWS);
    this.numJobThreadPerFlow = props.getInt(FLOW_NUM_JOB_THREADS, DEFAULT_FLOW_NUM_JOB_TREADS);
    this.executorService = createExecutorService(this.numThreads);

    // Create a flow preparer
    this.flowPreparer = new FlowPreparer(storageManager, this.executionDirectory,
        this.projectDirectory, this.installedProjects);

    this.executorLoader = executorLoader;
    this.projectLoader = projectLoader;
    this.triggerManager = triggerManager;

    this.jobLogChunkSize = this.azkabanProps.getString("job.log.chunk.size", "5MB");
    this.jobLogNumFiles = this.azkabanProps.getInt("job.log.backup.index", 4);
    this.validateProxyUser = this.azkabanProps.getBoolean("proxy.user.lock.down", false);

    this.cleanerThread = new CleanerThread();
    this.cleanerThread.start();

    final String globalPropsPath = props.getString("executor.global.properties", null);
    if (globalPropsPath != null) {
      this.globalProps = new Props(null, globalPropsPath);
    }

    this.jobtypeManager = new JobTypeManager(props.getString(
        AzkabanExecutorServer.JOBTYPE_PLUGIN_DIR, JobTypeManager.DEFAULT_JOBTYPEPLUGINDIR),
        this.globalProps, getClass().getClassLoader());
  }

  /**
   * Setting the gid bit on the execution directory forces all files/directories created within the
   * directory to be a part of the group associated with the azkaban process. Then, when users
   * create their own files, the azkaban cleanup thread can properly remove them.
   *
   * Java does not provide a standard library api for setting the gid bit because the gid bit is
   * system dependent, so the only way to set this bit is to start a new process and run the shell
   * command "chmod g+s " + execution directory name.
   *
   * Note that this should work on most Linux distributions and MacOS, but will not work on
   * Windows.
   */
  private void setgidPermissionOnExecutionDirectory() throws IOException {
    logger.info("Creating subprocess to run shell command: chmod g+s "
        + this.executionDirectory.toString());
    Runtime.getRuntime().exec("chmod g+s " + this.executionDirectory.toString());
  }

  private TrackingThreadPool createExecutorService(final int nThreads) {
    final boolean useNewThreadPool =
        this.azkabanProps.getBoolean(EXECUTOR_USE_BOUNDED_THREADPOOL_QUEUE, false);
    logger.info("useNewThreadPool: " + useNewThreadPool);

    if (useNewThreadPool) {
      this.threadPoolQueueSize =
          this.azkabanProps.getInt(EXECUTOR_THREADPOOL_WORKQUEUE_SIZE, nThreads);
      logger.info("workQueueSize: " + this.threadPoolQueueSize);

      // using a bounded queue for the work queue. The default rejection policy
      // {@ThreadPoolExecutor.AbortPolicy} is used
      final TrackingThreadPool executor =
          new TrackingThreadPool(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS,
              new LinkedBlockingQueue<>(this.threadPoolQueueSize), this);
      return executor;
    } else {
      // the old way of using unbounded task queue.
      // if the running tasks are taking a long time or stuck, this queue
      // will be very very long.
return new TrackingThreadPool(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>(), this); } } private Map<Pair<Integer, Integer>, ProjectVersion> loadExistingProjects() { final Map<Pair<Integer, Integer>, ProjectVersion> allProjects = new HashMap<>(); for (final File project : this.projectDirectory.listFiles(new FilenameFilter() { String pattern = "[0-9]+\\.[0-9]+"; @Override public boolean accept(final File dir, final String name) { return name.matches(this.pattern); } })) { if (project.isDirectory()) { try { final String fileName = new File(project.getAbsolutePath()).getName(); final int projectId = Integer.parseInt(fileName.split("\\.")[0]); final int versionNum = Integer.parseInt(fileName.split("\\.")[1]); final ProjectVersion version = new ProjectVersion(projectId, versionNum, project); allProjects.put(new Pair<>(projectId, versionNum), version); } catch (final Exception e) { e.printStackTrace(); } } } return allProjects; } public void setExecutorActive(final boolean isActive) { this.isExecutorActive = isActive; } public long getLastFlowSubmittedTime() { // Note: this is not thread safe and may result in providing dirty data. // we will provide this data as is for now and will revisit if there // is a string justification for change. 
return this.lastFlowSubmittedDate; } public Props getGlobalProps() { return this.globalProps; } public void setGlobalProps(final Props globalProps) { this.globalProps = globalProps; } public void deleteDirectory(final ProjectVersion pv) throws IOException { synchronized (pv) { logger.warn("Deleting project: " + pv); final File installedDir = pv.getInstalledDir(); if (installedDir != null && installedDir.exists()) { FileUtils.deleteDirectory(installedDir); } } } public void submitFlow(final int execId) throws ExecutorManagerException { // Load file and submit if (this.runningFlows.containsKey(execId)) { throw new ExecutorManagerException("Execution " + execId + " is already running."); } ExecutableFlow flow = null; flow = this.executorLoader.fetchExecutableFlow(execId); if (flow == null) { throw new ExecutorManagerException("Error loading flow with exec " + execId); } // Sets up the project files and execution directory. this.flowPreparer.setup(flow); // Setup flow runner FlowWatcher watcher = null; final ExecutionOptions options = flow.getExecutionOptions(); if (options.getPipelineExecutionId() != null) { final Integer pipelineExecId = options.getPipelineExecutionId(); final FlowRunner runner = this.runningFlows.get(pipelineExecId); if (runner != null) { watcher = new LocalFlowWatcher(runner); } else { watcher = new RemoteFlowWatcher(pipelineExecId, this.executorLoader); } } int numJobThreads = this.numJobThreadPerFlow; if (options.getFlowParameters().containsKey(FLOW_NUM_JOB_THREADS)) { try { final int numJobs = Integer.valueOf(options.getFlowParameters().get( FLOW_NUM_JOB_THREADS)); if (numJobs > 0 && (numJobs <= numJobThreads || ProjectWhitelist .isProjectWhitelisted(flow.getProjectId(), WhitelistType.NumJobPerFlow))) { numJobThreads = numJobs; } } catch (final Exception e) { throw new ExecutorManagerException( "Failed to set the number of job threads " + options.getFlowParameters().get(FLOW_NUM_JOB_THREADS) + " for flow " + execId, e); } } final FlowRunner 
runner = new FlowRunner(flow, this.executorLoader, this.projectLoader, this.jobtypeManager, this.azkabanProps, this.azkabanEventReporter); runner.setFlowWatcher(watcher) .setJobLogSettings(this.jobLogChunkSize, this.jobLogNumFiles) .setValidateProxyUser(this.validateProxyUser) .setNumJobThreads(numJobThreads).addListener(this); configureFlowLevelMetrics(runner); // Check again. if (this.runningFlows.containsKey(execId)) { throw new ExecutorManagerException("Execution " + execId + " is already running."); } // Finally, queue the sucker. this.runningFlows.put(execId, runner); try { // The executorService already has a queue. // The submit method below actually returns an instance of FutureTask, // which implements interface RunnableFuture, which extends both // Runnable and Future interfaces final Future<?> future = this.executorService.submit(runner); // keep track of this future this.submittedFlows.put(future, runner.getExecutionId()); // update the last submitted time. this.lastFlowSubmittedDate = System.currentTimeMillis(); } catch (final RejectedExecutionException re) { throw new ExecutorManagerException( "Azkaban server can't execute any more flows. " + "The number of running flows has reached the system configured limit." 
+ "Please notify Azkaban administrators"); } } /** * Configure Azkaban metrics tracking for a new flowRunner instance */ private void configureFlowLevelMetrics(final FlowRunner flowRunner) { logger.info("Configuring Azkaban metrics tracking for flow runner object"); if (MetricReportManager.isAvailable()) { final MetricReportManager metricManager = MetricReportManager.getInstance(); // Adding NumFailedFlow Metric listener flowRunner.addListener((NumFailedFlowMetric) metricManager .getMetricFromName(NumFailedFlowMetric.NUM_FAILED_FLOW_METRIC_NAME)); } } public void cancelJobBySLA(final int execId, final String jobId) throws ExecutorManagerException { final FlowRunner flowRunner = this.runningFlows.get(execId); if (flowRunner == null) { throw new ExecutorManagerException("Execution " + execId + " is not running."); } for (final JobRunner jobRunner : flowRunner.getActiveJobRunners()) { if (jobRunner.getJobId().equals(jobId)) { logger.info("Killing job " + jobId + " in execution " + execId + " by SLA"); jobRunner.killBySLA(); break; } } } public void cancelFlow(final int execId, final String user) throws ExecutorManagerException { final FlowRunner runner = this.runningFlows.get(execId); if (runner == null) { throw new ExecutorManagerException("Execution " + execId + " is not running."); } runner.kill(user); } public void pauseFlow(final int execId, final String user) throws ExecutorManagerException { final FlowRunner runner = this.runningFlows.get(execId); if (runner == null) { throw new ExecutorManagerException("Execution " + execId + " is not running."); } runner.pause(user); } public void resumeFlow(final int execId, final String user) throws ExecutorManagerException { final FlowRunner runner = this.runningFlows.get(execId); if (runner == null) { throw new ExecutorManagerException("Execution " + execId + " is not running."); } runner.resume(user); } public void retryFailures(final int execId, final String user) throws ExecutorManagerException { final FlowRunner 
runner = this.runningFlows.get(execId); if (runner == null) { throw new ExecutorManagerException("Execution " + execId + " is not running."); } runner.retryFailures(user); } public ExecutableFlow getExecutableFlow(final int execId) { final FlowRunner runner = this.runningFlows.get(execId); if (runner == null) { return this.recentlyFinishedFlows.get(execId); } return runner.getExecutableFlow(); } @Override public void handleEvent(final Event event) { if (event.getType() == EventType.FLOW_FINISHED || event.getType() == EventType.FLOW_STARTED) { final FlowRunner flowRunner = (FlowRunner) event.getRunner(); final ExecutableFlow flow = flowRunner.getExecutableFlow(); if (event.getType() == EventType.FLOW_FINISHED) { this.recentlyFinishedFlows.put(flow.getExecutionId(), flow); logger.info("Flow " + flow.getExecutionId() + " is finished. Adding it to recently finished flows list."); this.runningFlows.remove(flow.getExecutionId()); } else if (event.getType() == EventType.FLOW_STARTED) { // add flow level SLA checker this.triggerManager .addTrigger(flow.getExecutionId(), SlaOption.getFlowLevelSLAOptions(flow)); } } } public LogData readFlowLogs(final int execId, final int startByte, final int length) throws ExecutorManagerException { final FlowRunner runner = this.runningFlows.get(execId); if (runner == null) { throw new ExecutorManagerException("Running flow " + execId + " not found."); } final File dir = runner.getExecutionDir(); if (dir != null && dir.exists()) { try { synchronized (this.executionDirDeletionSync) { if (!dir.exists()) { throw new ExecutorManagerException( "Execution dir file doesn't exist. 
Probably has beend deleted"); } final File logFile = runner.getFlowLogFile(); if (logFile != null && logFile.exists()) { return FileIOUtils.readUtf8File(logFile, startByte, length); } else { throw new ExecutorManagerException("Flow log file doesn't exist."); } } } catch (final IOException e) { throw new ExecutorManagerException(e); } } throw new ExecutorManagerException( "Error reading file. Log directory doesn't exist."); } public LogData readJobLogs(final int execId, final String jobId, final int attempt, final int startByte, final int length) throws ExecutorManagerException { final FlowRunner runner = this.runningFlows.get(execId); if (runner == null) { throw new ExecutorManagerException("Running flow " + execId + " not found."); } final File dir = runner.getExecutionDir(); if (dir != null && dir.exists()) { try { synchronized (this.executionDirDeletionSync) { if (!dir.exists()) { throw new ExecutorManagerException( "Execution dir file doesn't exist. Probably has beend deleted"); } final File logFile = runner.getJobLogFile(jobId, attempt); if (logFile != null && logFile.exists()) { return FileIOUtils.readUtf8File(logFile, startByte, length); } else { throw new ExecutorManagerException("Job log file doesn't exist."); } } } catch (final IOException e) { throw new ExecutorManagerException(e); } } throw new ExecutorManagerException( "Error reading file. Log directory doesn't exist."); } public List<Object> readJobAttachments(final int execId, final String jobId, final int attempt) throws ExecutorManagerException { final FlowRunner runner = this.runningFlows.get(execId); if (runner == null) { throw new ExecutorManagerException("Running flow " + execId + " not found."); } final File dir = runner.getExecutionDir(); if (dir == null || !dir.exists()) { throw new ExecutorManagerException( "Error reading file. 
Log directory doesn't exist."); } try { synchronized (this.executionDirDeletionSync) { if (!dir.exists()) { throw new ExecutorManagerException( "Execution dir file doesn't exist. Probably has beend deleted"); } final File attachmentFile = runner.getJobAttachmentFile(jobId, attempt); if (attachmentFile == null || !attachmentFile.exists()) { return null; } final List<Object> jobAttachments = (ArrayList<Object>) JSONUtils.parseJSONFromFile(attachmentFile); return jobAttachments; } } catch (final IOException e) { throw new ExecutorManagerException(e); } } public JobMetaData readJobMetaData(final int execId, final String jobId, final int attempt, final int startByte, final int length) throws ExecutorManagerException { final FlowRunner runner = this.runningFlows.get(execId); if (runner == null) { throw new ExecutorManagerException("Running flow " + execId + " not found."); } final File dir = runner.getExecutionDir(); if (dir != null && dir.exists()) { try { synchronized (this.executionDirDeletionSync) { if (!dir.exists()) { throw new ExecutorManagerException( "Execution dir file doesn't exist. Probably has beend deleted"); } final File metaDataFile = runner.getJobMetaDataFile(jobId, attempt); if (metaDataFile != null && metaDataFile.exists()) { return FileIOUtils.readUtf8MetaDataFile(metaDataFile, startByte, length); } else { throw new ExecutorManagerException("Job log file doesn't exist."); } } } catch (final IOException e) { throw new ExecutorManagerException(e); } } throw new ExecutorManagerException( "Error reading file. 
Log directory doesn't exist."); } public long getLastCleanerThreadCheckTime() { return this.lastCleanerThreadCheckTime; } public boolean isCleanerThreadActive() { return this.cleanerThread.isAlive(); } public State getCleanerThreadState() { return this.cleanerThread.getState(); } public boolean isExecutorThreadPoolShutdown() { return this.executorService.isShutdown(); } public int getNumQueuedFlows() { return this.executorService.getQueue().size(); } public int getNumRunningFlows() { return this.executorService.getActiveCount(); } public String getRunningFlowIds() { // The in progress tasks are actually of type FutureTask final Set<Runnable> inProgressTasks = this.executorService.getInProgressTasks(); final List<Integer> runningFlowIds = new ArrayList<>(inProgressTasks.size()); for (final Runnable task : inProgressTasks) { // add casting here to ensure it matches the expected type in // submittedFlows final Integer execId = this.submittedFlows.get((Future<?>) task); if (execId != null) { runningFlowIds.add(execId); } else { logger.warn("getRunningFlowIds: got null execId for task: " + task); } } Collections.sort(runningFlowIds); return runningFlowIds.toString(); } public String getQueuedFlowIds() { final List<Integer> flowIdList = new ArrayList<>(this.executorService.getQueue().size()); for (final Runnable task : this.executorService.getQueue()) { final Integer execId = this.submittedFlows.get(task); if (execId != null) { flowIdList.add(execId); } else { logger .warn("getQueuedFlowIds: got null execId for queuedTask: " + task); } } Collections.sort(flowIdList); return flowIdList.toString(); } public int getMaxNumRunningFlows() { return this.numThreads; } public int getTheadPoolQueueSize() { return this.threadPoolQueueSize; } public void reloadJobTypePlugins() throws JobTypeManagerException { this.jobtypeManager.loadPlugins(); } public int getTotalNumExecutedFlows() { return this.executorService.getTotalTasks(); } @Override public void beforeExecute(final Runnable 
r) { } @Override public void afterExecute(final Runnable r) { this.submittedFlows.remove(r); } /** * This shuts down the flow runner. The call is blocking and awaits execution of all jobs. */ public void shutdown() { logger.warn("Shutting down FlowRunnerManager..."); this.executorService.shutdown(); boolean result = false; while (!result) { logger.info("Awaiting Shutdown. # of executing flows: " + getNumRunningFlows()); try { result = this.executorService.awaitTermination(1, TimeUnit.MINUTES); } catch (final InterruptedException e) { logger.error(e); } } logger.warn("Shutdown FlowRunnerManager complete."); } /** * This attempts shuts down the flow runner immediately (unsafe). This doesn't wait for jobs to * finish but interrupts all threads. */ public void shutdownNow() { logger.warn("Shutting down FlowRunnerManager now..."); this.executorService.shutdownNow(); this.triggerManager.shutdown(); } /** * Deleting old execution directory to free disk space. */ public void deleteExecutionDirectory() { logger.warn("Deleting execution dir: " + this.executionDirectory.getAbsolutePath()); try { FileUtils.deleteDirectory(this.executionDirectory); } catch (final IOException e) { logger.error(e); } } private class CleanerThread extends Thread { // Every hour, clean execution dir. 
private static final long EXECUTION_DIR_CLEAN_INTERVAL_MS = 60 * 60 * 1000; // Every 5 mins clean the old project dir private static final long OLD_PROJECT_DIR_INTERVAL_MS = 5 * 60 * 1000; // Every 2 mins clean the recently finished list private static final long RECENTLY_FINISHED_INTERVAL_MS = 2 * 60 * 1000; // Every 5 mins kill flows running longer than allowed max running time private static final long LONG_RUNNING_FLOW_KILLING_INTERVAL_MS = 5 * 60 * 1000; private final long flowMaxRunningTimeInMins = FlowRunnerManager.this.azkabanProps.getInt( Constants.ConfigurationKeys.AZKABAN_MAX_FLOW_RUNNING_MINS, -1); private boolean shutdown = false; private long lastExecutionDirCleanTime = -1; private long lastOldProjectCleanTime = -1; private long lastRecentlyFinishedCleanTime = -1; private long lastLongRunningFlowCleanTime = -1; public CleanerThread() { this.setName("FlowRunnerManager-Cleaner-Thread"); setDaemon(true); } public void shutdown() { this.shutdown = true; this.interrupt(); } private boolean isFlowRunningLongerThan(final ExecutableFlow flow, final long flowMaxRunningTimeInMins) { final Set<Status> nonFinishingStatusAfterFlowStarts = new HashSet<>( Arrays.asList(Status.RUNNING, Status.QUEUED, Status.PAUSED, Status.FAILED_FINISHING)); return nonFinishingStatusAfterFlowStarts.contains(flow.getStatus()) && flow.getStartTime() > 0 && TimeUnit.MILLISECONDS.toMinutes(System.currentTimeMillis() - flow.getStartTime()) >= flowMaxRunningTimeInMins; } @Override public void run() { while (!this.shutdown) { synchronized (this) { try { FlowRunnerManager.this.lastCleanerThreadCheckTime = System.currentTimeMillis(); logger.info("# of executing flows: " + getNumRunningFlows()); // Cleanup old stuff. 
final long currentTime = System.currentTimeMillis(); if (currentTime - RECENTLY_FINISHED_INTERVAL_MS > this.lastRecentlyFinishedCleanTime) { logger.info("Cleaning recently finished"); cleanRecentlyFinished(); this.lastRecentlyFinishedCleanTime = currentTime; } if (currentTime - OLD_PROJECT_DIR_INTERVAL_MS > this.lastOldProjectCleanTime && FlowRunnerManager.this.isExecutorActive) { logger.info("Cleaning old projects"); cleanOlderProjects(); this.lastOldProjectCleanTime = currentTime; } if (currentTime - EXECUTION_DIR_CLEAN_INTERVAL_MS > this.lastExecutionDirCleanTime) { logger.info("Cleaning old execution dirs"); cleanOlderExecutionDirs(); this.lastExecutionDirCleanTime = currentTime; } if (this.flowMaxRunningTimeInMins > 0 && currentTime - LONG_RUNNING_FLOW_KILLING_INTERVAL_MS > this.lastLongRunningFlowCleanTime) { logger.info(String.format("Killing long jobs running longer than %s mins", this.flowMaxRunningTimeInMins)); for (final FlowRunner flowRunner : FlowRunnerManager.this.runningFlows.values()) { if (isFlowRunningLongerThan(flowRunner.getExecutableFlow(), this.flowMaxRunningTimeInMins)) { logger.info(String .format("Killing job [id: %s, status: %s]. It has been running for %s mins", flowRunner.getExecutableFlow().getId(), flowRunner.getExecutableFlow().getStatus(), TimeUnit.MILLISECONDS .toMinutes(System.currentTimeMillis() - flowRunner.getExecutableFlow() .getStartTime()))); flowRunner.kill(); } } this.lastLongRunningFlowCleanTime = currentTime; } wait(RECENTLY_FINISHED_TIME_TO_LIVE); } catch (final InterruptedException e) { logger.info("Interrupted. 
Probably to shut down."); } catch (final Throwable t) { logger.warn( "Uncaught throwable, please look into why it is not caught", t); } } } } private void cleanOlderExecutionDirs() { final File dir = FlowRunnerManager.this.executionDirectory; final long pastTimeThreshold = System.currentTimeMillis() - FlowRunnerManager.this.executionDirRetention; final File[] executionDirs = dir .listFiles(path -> path.isDirectory() && path.lastModified() < pastTimeThreshold); for (final File exDir : executionDirs) { try { final int execId = Integer.valueOf(exDir.getName()); if (FlowRunnerManager.this.runningFlows.containsKey(execId) || FlowRunnerManager.this.recentlyFinishedFlows.containsKey(execId)) { continue; } } catch (final NumberFormatException e) { logger.error("Can't delete exec dir " + exDir.getName() + " it is not a number"); continue; } synchronized (FlowRunnerManager.this.executionDirDeletionSync) { try { FileUtils.deleteDirectory(exDir); } catch (final IOException e) { logger.error("Error cleaning execution dir " + exDir.getPath(), e); } } } } private void cleanRecentlyFinished() { final long cleanupThreshold = System.currentTimeMillis() - RECENTLY_FINISHED_TIME_TO_LIVE; final ArrayList<Integer> executionToKill = new ArrayList<>(); for (final ExecutableFlow flow : FlowRunnerManager.this.recentlyFinishedFlows.values()) { if (flow.getEndTime() < cleanupThreshold) { executionToKill.add(flow.getExecutionId()); } } for (final Integer id : executionToKill) { logger.info("Cleaning execution " + id + " from recently finished flows list."); FlowRunnerManager.this.recentlyFinishedFlows.remove(id); } } private void cleanOlderProjects() { final Map<Integer, ArrayList<ProjectVersion>> projectVersions = new HashMap<>(); for (final ProjectVersion version : FlowRunnerManager.this.installedProjects.values()) { ArrayList<ProjectVersion> versionList = projectVersions.get(version.getProjectId()); if (versionList == null) { versionList = new ArrayList<>(); 
projectVersions.put(version.getProjectId(), versionList); } versionList.add(version); } final HashSet<Pair<Integer, Integer>> activeProjectVersions = new HashSet<>(); for (final FlowRunner runner : FlowRunnerManager.this.runningFlows.values()) { final ExecutableFlow flow = runner.getExecutableFlow(); activeProjectVersions.add(new Pair<>(flow .getProjectId(), flow.getVersion())); } for (final Map.Entry<Integer, ArrayList<ProjectVersion>> entry : projectVersions .entrySet()) { // Integer projectId = entry.getKey(); final ArrayList<ProjectVersion> installedVersions = entry.getValue(); // Keep one version of the project around. if (installedVersions.size() == 1) { continue; } Collections.sort(installedVersions); for (int i = 0; i < installedVersions.size() - 1; ++i) { final ProjectVersion version = installedVersions.get(i); final Pair<Integer, Integer> versionKey = new Pair<>(version.getProjectId(), version.getVersion()); if (!activeProjectVersions.contains(versionKey)) { try { logger.info("Removing old unused installed project " + version.getProjectId() + ":" + version.getVersion()); deleteDirectory(version); FlowRunnerManager.this.installedProjects.remove(new Pair<>(version .getProjectId(), version.getVersion())); } catch (final IOException e) { logger.error(e); } } } } } } }
/*******************************************************************************
 * Copyright 2015 DANS - Data Archiving and Networked Services
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/
package nl.knaw.dans.common.lang.log;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Reporter of {@link Event}s. Dispatches each event, stamped with its caller and level, to every
 * registered {@link Report}. When no report has been registered, events fall back to a
 * {@link LoggerReport}.
 *
 * @see RL
 */
public class Reporter
{

    private static final Logger logger = LoggerFactory.getLogger(Reporter.class);

    private File reportLocation;
    private boolean allRW;
    private List<Report> reports = new ArrayList<Report>();

    /** Creates a reporter with the default report location and no reports registered. */
    public Reporter()
    {

    }

    /**
     * Creates a reporter writing under the given directory, without relaxed file permissions.
     *
     * @param reportDirectoryName directory to write reports under
     */
    public Reporter(final String reportDirectoryName)
    {
        this(reportDirectoryName, false);
    }

    /**
     * Creates a reporter writing under the given directory.
     *
     * @param reportDirectoryName directory to write reports under
     * @param allRW whether the report location should be read/writable for all
     */
    public Reporter(final String reportDirectoryName, final boolean allRW)
    {
        this(new File(reportDirectoryName), allRW);
    }

    /**
     * Creates a reporter writing under the given directory.
     *
     * @param reportDirectory directory to write reports under
     * @param allRW whether the report location should be read/writable for all
     */
    public Reporter(final File reportDirectory, final boolean allRW)
    {
        this.allRW = allRW;
        this.reportLocation = reportDirectory;
    }

    /**
     * The currently registered reports. When none have been registered yet, a
     * {@link LoggerReport} is lazily added so events are never silently dropped.
     */
    public List<Report> getReports()
    {
        if (reports.isEmpty())
        {
            reports.add(new LoggerReport());
        }
        return reports;
    }

    /** Registers a report, pointing it at this reporter's location first. */
    public void addReport(final Report report)
    {
        report.setReportLocation(getReportLocation(), allRW);
        reports.add(report);
    }

    /** Unregisters a report; returns whether it was present. */
    public boolean removeReport(final Report report)
    {
        return reports.remove(report);
    }

    /** Replaces the registered reports and points each of them at the current location. */
    public void setReports(final List<Report> reports)
    {
        this.reports = reports;
        setLocationOnReports();
    }

    private void setLocationOnReports()
    {
        for (final Report target : this.reports)
        {
            target.setReportLocation(getReportLocation(), allRW);
        }
    }

    /**
     * The directory reports are written under; lazily falls back to
     * {@link RL#DEFAULT_REPORT_LOCATION} and prepares it on first access.
     */
    public File getReportLocation()
    {
        if (reportLocation == null)
        {
            reportLocation = new File(RL.DEFAULT_REPORT_LOCATION);
            prepareReportLocation();
        }
        return reportLocation;
    }

    /** Sets and prepares the report location, and re-points all registered reports at it. */
    public void setReportLocation(final File location, final boolean allReadWrite)
    {
        this.reportLocation = location;
        this.allRW = allReadWrite;
        prepareReportLocation();
        setLocationOnReports();
    }

    private void prepareReportLocation()
    {
        try
        {
            RL.prepareReportLocation(reportLocation, allRW);
        }
        catch (final IOException e)
        {
            // An unusable report location is unrecoverable here; surface it unchecked.
            throw new RLRuntimeException(e);
        }
    }

    /** Dispatches an info-level event to all reports. */
    public void info(final Event event)
    {
        event.setCaller(getCaller());
        event.setLevel(Event.INFO);
        for (final Report target : getReports())
        {
            target.info(event);
        }
    }

    /** Dispatches a warning-level event to all reports. */
    public void warn(final Event event)
    {
        event.setCaller(getCaller());
        event.setLevel(Event.WARNING);
        for (final Report target : getReports())
        {
            target.warn(event);
        }
    }

    /** Dispatches an error-level event to all reports. */
    public void error(final Event event)
    {
        event.setCaller(getCaller());
        event.setLevel(Event.ERROR);
        for (final Report target : getReports())
        {
            target.error(event);
        }
    }

    /** Closes all reports. */
    public void close()
    {
        for (final Report target : getReports())
        {
            target.close();
        }
        logger.info("Closed reports");
    }

    /**
     * Walks the current stack and returns the first frame that is not part of the reporting
     * machinery itself (Thread, Reporter, a Reporter subclass, or RL). When no such frame is
     * found, an empty caller is returned.
     */
    private Caller getCaller()
    {
        for (final StackTraceElement frame : Thread.currentThread().getStackTrace())
        {
            final String candidate = frame.getClassName();
            final boolean internal = candidate.equals(Thread.class.getName()) //
                    || candidate.equals(Reporter.class.getName()) //
                    || candidate.equals(this.getClass().getName()) //
                    || candidate.equals(RL.class.getName());
            if (!internal)
            {
                final String link = String.format("%s.%s (%s:%d)", candidate, frame.getMethodName(), frame.getFileName(), frame.getLineNumber());
                return new Caller(candidate, link);
            }
        }
        return new Caller("", "");
    }

    /** Immutable value holding the reporting caller's class name and a source link. */
    public static class Caller
    {

        private final String className;
        private final String sourceLink;

        public Caller(final String className, final String sourceLink)
        {
            this.className = className;
            this.sourceLink = sourceLink;
        }

        public String getClassName()
        {
            return className;
        }

        public String getSourceLink()
        {
            return sourceLink;
        }
    }
}
/** Notice of modification as required by the LGPL * This file was modified by Gemstone Systems Inc. on * $Date$ **/ // $Id: IpAddress.java,v 1.29 2005/11/07 09:44:17 belaban Exp $ package com.gemstone.org.jgroups.stack; import com.gemstone.org.jgroups.Address; import com.gemstone.org.jgroups.Global; import com.gemstone.org.jgroups.JChannel; import com.gemstone.org.jgroups.JGroupsVersion; import com.gemstone.org.jgroups.util.GemFireTracer; import com.gemstone.org.jgroups.util.StreamableFixedID; import java.io.*; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.SocketAddress; /** * Network-dependent address (Internet). Generated by the bottommost layer of the protocol * stack (UDP). Contains an InetAddress and port. * @author Bela Ban */ public class IpAddress implements StreamableFixedID, Address { private static final long serialVersionUID = -294637383250428305L; private InetAddress ip_addr=null; private int port=0; private byte[] additional_data=null; protected static final GemFireTracer log=GemFireTracer.getLog(IpAddress.class); private static final String MEMBER_WEIGHT_PREFIX = "zzzmbrwgt"; // GemStoneAddition public static boolean resolve_dns=true; // GemStoneAddition - resolve names by default private transient int size=-1; /** GemStoneAddition - can this member become the group coordinator? */ private boolean shouldNotBeCoordinator; /** GemStoneAddition - does this member have split-brain detection enabled? */ private boolean splitBrainEnabled; private byte memberWeight; /** GemStoneAddition - member GemFire version */ private transient short version = JGroupsVersion.CURRENT_ORDINAL; // static { // /* Trying to get value of resolve_dns. 
PropertyPermission not granted if // * running in an untrusted environment with JNLP */ // try { // resolve_dns=Boolean.valueOf(System.getProperty("resolve.dns", "false")).booleanValue(); // } // catch (SecurityException ex){ // resolve_dns=false; // } // } // GemStoneAddition - defaults to true public boolean preferredForCoordinator() { return !shouldNotBeCoordinator; } // GemStoneAddition - defaults to false public boolean splitBrainEnabled() { return this.splitBrainEnabled; } // GemStoneAddition - member weight public void setMemberWeight(int weight) { this.memberWeight = (byte)Math.min(weight, 255); } // GemStoneAddition - member weight public int getMemberWeight() { return this.memberWeight; } /** GemStoneAddition - pids help with debugging quite a bit */ public void setProcessId(int pid) { this.processId = pid; } /** GemStoneAddition - get the pid if any */ public int getProcessId() { return this.processId; } /** GemStoneAddition - can this member be the GMS coordinator? */ public void shouldntBeCoordinator(boolean shouldNotBe) { this.shouldNotBeCoordinator = shouldNotBe; } /** GemStoneAddition - sets whether this member has split-brain detection enabled */ public void splitBrainEnabled(boolean enabled) { this.splitBrainEnabled = enabled; } /** * GemstoneAddition returns a number that can be used to differentiate two * addresses with the same InetAddress and port. This ID is not used in * equality comparisons so that the coordinator can distinguish between * old and new IDs. Instead, equality comparisons use the birthViewId so * that post-join comparisons of IpAddresses can easily distinguish between * new and old, reused addresses. */ public int getUniqueID() { return this.directPort != 0? this.directPort : this.processId; } /** * get the roles of this GemFire member (GemStoneAddition) */ public String getName() { return this.name == null? 
"" : this.name; } /** * get the roles of this GemFire member (GemStoneAddition) */ public String[] getRoles() { if (this.additional_data != null) { try { DataInput di = new DataInputStream(new ByteArrayInputStream(this.additional_data)); return JChannel.getGfFunctions().readStringArray(di); } catch (Exception e) { throw new RuntimeException("unable to read roles", e); } } return new String[0]; } /** * set the durable client attributes (GemStoneAddition) */ public void setDurableClientAttributes(Object d) { durableClientAttributes = d; } /** * get the durable client attributes (GemStoneAddition) */ public Object getDurableClientAttributes() { return durableClientAttributes; } /** * set the roles of this GemFire member (GemStoneAddition) */ public void setRoles(String[] roles) { // use additional_data to hold roles if (roles.length > 0) { try { ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutput dao = new DataOutputStream(baos); JChannel.getGfFunctions().writeStringArray(roles, dao); this.additional_data = baos.toByteArray(); } catch (Exception e) { throw new RuntimeException("unable to serialize roles", e); } } } /** GemStoneAddition - process id */ private int processId; private int vmKind; private int birthViewId = -1; private int directPort; private String name; private Object durableClientAttributes; // Used only by Externalization public IpAddress() { } public IpAddress(String i, int p) { port=p; try { ip_addr=InetAddress.getByName(i); } catch(Exception e) { if(log.isWarnEnabled()) log.warn("failed to get " + i + ": " + e); } if(this.ip_addr == null) setAddressToLocalHost(); // setGemFireAttributes(MemberAttributes.DEFAULT); // GemStoneAddition } public IpAddress(InetAddress i, int p) { ip_addr=i; port=p; if(this.ip_addr == null) setAddressToLocalHost(); JChannel.getGfFunctions().setDefaultGemFireAttributes(this); // GemStoneAddition } private void setAddressToLocalHost() { try { // GemStoneAddition - use the configured GemFire bind address, if 
present, as the default String bindAddress = System.getProperty("gemfire.jg-bind-address"); if (bindAddress != null && bindAddress.length() > 0) { ip_addr=InetAddress.getByName(bindAddress); } else { ip_addr=JChannel.getGfFunctions().getLocalHost(); // get first NIC found (on multi-homed systems) } size=-1; // GemStoneAddition setSize(size(version)); } catch(Exception e) { if(log.isWarnEnabled()) log.warn("caught unexpected exception", e); } } public IpAddress(int port) { this.port=port; setAddressToLocalHost(); JChannel.getGfFunctions().setDefaultGemFireAttributes(this); // GemStoneAddition } public final InetAddress getIpAddress() {return ip_addr;} public final int getPort() {return port;} public final SocketAddress getSocketAddress() { // GemStoneAddition return new InetSocketAddress(ip_addr, port); } /** GemStoneAddition - cache the result of querying */ private transient boolean isMcastAddr; private transient boolean isMcastAddrCached; public final boolean isMulticastAddress() { if (!isMcastAddrCached) { isMcastAddr = ip_addr != null && ip_addr.isMulticastAddress(); isMcastAddrCached = true; } return isMcastAddr; } /** * Returns the additional_data. * @return byte[] */ public final byte[] getAdditionalData() { return additional_data; } /** * Sets the additional_data. 
* @param additional_data The additional_data to set */ public final void setAdditionalData(byte[] additional_data) { this.additional_data = additional_data; size=-1; // GemStoneAddition setSize(size(version)); } // GemStoneAddition public int getVmKind() { return this.vmKind; } // GemStoneAddition public void setVmKind(int vmKind) { this.vmKind = vmKind; } // GemStoneAddition public int getDirectPort() { return this.directPort; } // GemStoneAddition public void setDirectPort(int directPort) { this.directPort = directPort; } // GemStoneAddition public int getBirthViewId() { return this.birthViewId; } // GemStoneAddition public void setBirthViewId(long vid) { this.birthViewId = (int)(vid & Integer.MAX_VALUE); } // GemStoneAddition public final short getVersionOrdinal() { return this.version; } // GemStoneAddition public final void setVersionOrdinal(short version) { this.version = version; } // GemStoneAddition public void setName(String v) { if (name == null) { this.name = ""; } else { this.name = v; } } /** * Establishes an order between 2 addresses. Assumes other contains non-null IpAddress. * Excludes channel_name from comparison. * @return 0 for equality, value less than 0 if smaller, greater than 0 if greater. */ public final int compare(IpAddress other) { return compareTo(other); } /** * implements the java.lang.Comparable interface * @see java.lang.Comparable * @param o - the Object to be compared * @return a negative integer, zero, or a positive integer as this object is less than, * equal to, or greater than the specified object. * @exception java.lang.ClassCastException - if the specified object's type prevents it * from being compared to this Object. */ public final int compareTo(Object o) { // int h1, h2, rc; // added Nov 7 2005, makes sense with canonical addresses if(this == o) return 0; if ((o == null) || !(o instanceof IpAddress)) throw new ClassCastException("comparison between different classes: the other object is " + (o != null? 
o.getClass() : o)); IpAddress other = (IpAddress) o; if(ip_addr == null) if (other.ip_addr == null) return port < other.port ? -1 : (port > other.port ? 1 : 0); else return -1; // GemStoneAddition - use ipAddress bytes instead of hash, which is really a hash in Ipv6 addresses // h1=ip_addr.hashCode(); // h2=other.ip_addr.hashCode(); // rc=h1 < h2? -1 : h1 > h2? 1 : 0; byte[] myBytes = ip_addr.getAddress(); byte[] otherBytes = other.ip_addr.getAddress(); if (myBytes != otherBytes) { for (int i = 0; i < myBytes.length; i++) { if (i >= otherBytes.length) return -1; // same as far as they go, but shorter... if (myBytes[i] < otherBytes[i]) return -1; if (myBytes[i] > otherBytes[i]) return 1; } if (myBytes.length > otherBytes.length) return 1; // same as far as they go, but longer... } int comp = ((port < other.port) ? -1 : (port > other.port ? 1 : 0)); // GemStoneAddition - bug #41983, address of kill-9'd member is reused // before it can be ejected from membership if (comp == 0) { if (this.birthViewId >= 0 && other.birthViewId >= 0) { if (this.birthViewId < other.birthViewId) { comp = -1; } else if (other.birthViewId < this.birthViewId) { comp = 1; } } else if (this.processId != 0 && other.processId != 0) { // starting in 8.0 we also consider the processId. During startup // we may have a message from a member that hasn't finished joining // and address canonicalization may find an old address that has // the same addr:port. Since the new member doesn't have a viewId // its address will be equal to the old member's address unless // we also pay attention to the processId. 
if (this.processId < other.processId){ comp = -1; } else if (other.processId < this.processId) { comp = 1; } } } return comp; } @Override // GemStoneAddition public final boolean equals(Object obj) { if(this == obj) return true; // added Nov 7 2005, makes sense with canonical addresses if(obj == null) return false; if (!(obj instanceof IpAddress)) return false; // GemStoneAddition return compareTo(obj) == 0 ? true : false; } @Override // GemStoneAddition public final int hashCode() { return ip_addr != null ? ip_addr.hashCode() + port : port; } @Override // GemStoneAddition public String toString() { StringBuffer sb=new StringBuffer(); if(ip_addr == null) sb.append("<null>"); else { if(ip_addr.isMulticastAddress()) sb.append(ip_addr.getHostAddress()); else { String host_name=null; if(resolve_dns) // GemStoneAddition host_name=JChannel.getGfFunctions().getHostName(ip_addr); else host_name=ip_addr.getHostAddress(); appendShortName(host_name, sb); } } // GemStoneAddition - name and process id if (!"".equals(this.name) || processId > 0) { sb.append('('); if (!"".equals(this.name)) { sb.append(this.name); if (processId > 0) { sb.append(':'); } } if (processId > 0) { sb.append(processId); } String vmKindStr = JChannel.getGfFunctions().getVmKindString(vmKind); sb.append(vmKindStr); sb.append(')'); } // GemStoneAddition - coordinator inhibition if (this.splitBrainEnabled) { if (!this.shouldNotBeCoordinator) { sb.append("<ec>"); } } if (this.birthViewId >= 0) { sb.append("<v" + this.birthViewId + ">"); } if (this.version != JChannel.getGfFunctions().getCurrentVersionOrdinal()) { sb.append("(version:").append(this.version) .append(')'); } /* if (this.splitBrainEnabled) { sb.append("<sb>"); } */ sb.append(":" + port); //GemStoneAddition - don't print encoded additional_data // if(additional_data != null) // sb.append(" (additional data: ").append(additional_data.length).append(" bytes)"); return sb.toString(); } /** * Input: "daddy.nms.fnc.fujitsu.com", output: "daddy". 
Appends result to string buffer 'sb'. * @param hostname The hostname in long form. Guaranteed not to be null * @param sb The string buffer to which the result is to be appended */ private void appendShortName(String hostname, StringBuffer sb) { if(hostname == null) return; int index=hostname.indexOf('.'); if(index > 0 && !Character.isDigit(hostname.charAt(0))) sb.append(hostname.substring(0, index)); else sb.append(hostname); } public void writeExternal(ObjectOutput out) throws IOException { if(ip_addr != null) { byte[] address=ip_addr.getAddress(); out.writeByte(address.length); out.write(address, 0, address.length); } else { out.writeByte(0); } out.writeShort(port & 0xffff); out.writeInt(processId); // GemStoneAddition out.writeInt(directPort); out.writeByte(vmKind); out.writeInt(this.birthViewId); out.writeUTF(getName()); if(additional_data != null) { out.writeInt(additional_data.length); out.write(additional_data, 0, additional_data.length); } else { out.writeInt(0); } out.writeByte(getFlags()); // GemStoneAddition JGroupsVersion.writeOrdinal(out, this.version, true); // GemStoneAddition } public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { int len=in.readUnsignedByte(); if(len > 0) { byte[] a = new byte[len]; in.readFully(a); this.ip_addr=InetAddress.getByAddress(a); } //then read the port port=in.readUnsignedShort(); // GemStoneAddition - process id, etc processId = in.readInt(); directPort = in.readInt(); vmKind = in.readByte(); birthViewId = in.readInt(); name = in.readUTF(); len=in.readInt(); if(len > 0) { additional_data=new byte[len]; in.readFully(additional_data, 0, additional_data.length); } int flags = in.readUnsignedByte(); // GemStoneAddition setFlags(flags); readVersion(flags, in); // GemStoneAddition } public void writeTo(DataOutputStream out) throws IOException { toData(out); } public void readFrom(DataInputStream in) throws IOException { fromData(in); } public byte getFlags() { // GemStoneAddition - flags int 
flags = 0; if (this.shouldNotBeCoordinator) { flags |= 0x1; } if (this.splitBrainEnabled) { flags |= 0x2; } // always add version to flags but allow for absence of this flag flags |= 0x4; return (byte)(flags & 0xff); } public void setFlags(int flags) { // GemStoneAddition - flags if ((flags & 0x1) == 0x1) { this.shouldNotBeCoordinator = true; } if ((flags & 0x2) == 0x2) { this.splitBrainEnabled = true; } } // GemStoneAddition - version public void readVersion(int flags, DataInput in) throws IOException { if ((flags & 0x4) == 0x4) { this.version = JGroupsVersion.readOrdinal(in); if (this.version == 0) { this.version = JChannel.getGfFunctions().getCurrentVersionOrdinal(); } } } // GemStoneAddition - dataserializable public int getDSFID() { return IP_ADDRESS; } // GemStoneAddition - dataserializable public void toData(DataOutput out) throws IOException { byte[] address; if(ip_addr != null) { address=ip_addr.getAddress(); out.writeByte(address.length); out.write(address, 0, address.length); } else { out.writeByte(0); } out.writeShort(port); out.writeInt(processId); // GemStoneAddition out.writeInt(directPort); out.writeByte(vmKind); out.writeInt(birthViewId); out.writeUTF(getName()); // for 6.x piggyback the weight in the roles array. For 7.0 we will // need to add it as a field and, hopefully, add an extensible way to // add new attributes that an old version of the product can ignore. // The GossipServer FILE_FORMAT will need to be bumped when we do that. 
// out.writeByte(memberWeight); if (memberWeight > 0) { String[] forser; String[] roles = getRoles(); forser = new String[roles.length+1]; if (roles.length > 0) { System.arraycopy(roles, 0, forser, 0, roles.length); } forser[forser.length-1] = MEMBER_WEIGHT_PREFIX + memberWeight; ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutput dao = new DataOutputStream(baos); JChannel.getGfFunctions().writeStringArray(forser, dao); byte[] payload = baos.toByteArray(); out.writeInt(payload.length); out.write(payload, 0, payload.length); } else { if(additional_data != null) { out.writeInt(additional_data.length); out.write(additional_data, 0, additional_data.length); } else { out.writeInt(0); } } out.writeByte(getFlags()); JGroupsVersion.writeOrdinal(out, this.version, true); } // GemStoneAddition - dataserializable public void fromData(DataInput in) throws IOException { int len=in.readUnsignedByte(); if(len > 0) { byte[] a = new byte[len]; in.readFully(a); this.ip_addr=InetAddress.getByAddress(a); } port=in.readUnsignedShort(); processId = in.readInt(); // GemStoneAddition directPort = in.readInt(); vmKind = in.readByte(); birthViewId = in.readInt(); name = in.readUTF(); len=in.readInt(); if(len > 0) { additional_data=new byte[len]; in.readFully(additional_data, 0, additional_data.length); String roles[] = getRoles(); int lastIndex = roles.length-1; int numValidRoles = lastIndex; if (roles.length > 0 && roles[lastIndex].startsWith(MEMBER_WEIGHT_PREFIX)) { String weightString = roles[lastIndex].substring(MEMBER_WEIGHT_PREFIX.length()); memberWeight = Byte.parseByte(weightString); String[] newroles = new String[numValidRoles]; System.arraycopy(roles, 0, newroles, 0, numValidRoles); setRoles(newroles); } } // GemStoneAddition - flags int flags = in.readUnsignedByte(); setFlags(flags); // GemStoneAddition - version readVersion(flags, in); } // GemStoneAddition - for ack processing we don't need the whole // address byte[] cachedAddress; public void 
toDataShort(DataOutput out) throws IOException { byte[] address; if (cachedAddress != null) address = cachedAddress; else { address=ip_addr.getAddress(); cachedAddress = address; } out.writeByte(address.length); out.write(address, 0, address.length); out.writeShort(port); } // GemStoneAddition - for ack processing public void fromDataShort(DataInput in) throws IOException { int len=in.readUnsignedByte(); //read the four bytes byte[] a = new byte[len]; //in theory readFully(byte[]) should be faster //than read(byte[]) since latter reads // 4 bytes one at a time in.readFully(a); //look up an instance in the cache this.ip_addr=InetAddress.getByAddress(a); //then read the port port=in.readUnsignedShort(); } public int size(short version) { if(size >= 0) return size; // address length int tmp_size = Global.BYTE_SIZE; // address if(ip_addr != null) tmp_size+=ip_addr.getAddress().length; // 4 bytes for IPv4 // port tmp_size += Global.SHORT_SIZE; // PID tmp_size += Global.INT_SIZE; // GemStoneAddition // direct-port tmp_size += Global.INT_SIZE; // GemStoneAddition // vm-kind tmp_size += Global.BYTE_SIZE; // view-id tmp_size += Global.INT_SIZE; // GemStoneAddition // additional data size tmp_size += Global.INT_SIZE; // additional data if(additional_data != null) tmp_size+=additional_data.length; // flags tmp_size += Global.BYTE_SIZE; // version tmp_size += (this.version < 256? 
1 : 3); // GemStoneAddition - ignore durableClientAttributes in size calculations // since client IDs are never used in datagram size estimations setSize(tmp_size); return tmp_size; } @Override // GemStoneAddition public Object clone() { IpAddress ret=new IpAddress(ip_addr, port); ret.processId = this.processId; // GemStoneAddition ret.shouldNotBeCoordinator = this.shouldNotBeCoordinator; // GemStoneAddition ret.splitBrainEnabled = this.splitBrainEnabled; // GemStoneAddition ret.name = this.name; // GemStoneAddition ret.version = this.version; // GemStoneAddition ret.birthViewId = this.birthViewId; // GemStoneAddition if(additional_data != null) { ret.additional_data=new byte[additional_data.length]; System.arraycopy(additional_data, 0, ret.additional_data, 0, additional_data.length); } return ret; } @Override public short[] getSerializationVersions() { return null; } public void setBirthViewId(int birthViewId) { this.birthViewId = birthViewId; } public int getSize() { return size; } public void setSize(int size) { this.size = size; } }
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.facebook; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.camel.Consumer; import org.apache.camel.NoTypeConversionAvailableException; import org.apache.camel.Processor; import org.apache.camel.Producer; import org.apache.camel.component.facebook.config.FacebookEndpointConfiguration; import org.apache.camel.component.facebook.config.FacebookNameStyle; import org.apache.camel.component.facebook.data.FacebookMethodsType; import org.apache.camel.component.facebook.data.FacebookPropertiesHelper; import org.apache.camel.impl.DefaultEndpoint; import org.apache.camel.spi.Metadata; import org.apache.camel.spi.UriEndpoint; import org.apache.camel.spi.UriParam; import org.apache.camel.spi.UriPath; import org.apache.camel.util.EndpointHelper; import org.apache.camel.util.ObjectHelper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.apache.camel.component.facebook.data.FacebookMethodsTypeHelper.convertToGetMethod; import static org.apache.camel.component.facebook.data.FacebookMethodsTypeHelper.convertToSearchMethod; import static 
org.apache.camel.component.facebook.data.FacebookMethodsTypeHelper.getCandidateMethods;
import static org.apache.camel.component.facebook.data.FacebookMethodsTypeHelper.getMissingProperties;
import static org.apache.camel.component.facebook.data.FacebookPropertiesHelper.getEndpointPropertyNames;

/**
 * The Facebook component provides access to all of the Facebook APIs accessible using Facebook4J.
 *
 * It allows producing messages to retrieve, add, and delete posts, likes, comments, photos, albums, videos, photos,
 * checkins, locations, links, etc. It also supports APIs that allow polling for posts, users, checkins, groups, locations, etc.
 */
@UriEndpoint(firstVersion = "2.14.0", scheme = "facebook", title = "Facebook", syntax = "facebook:methodName",
    consumerClass = FacebookConsumer.class, label = "social")
public class FacebookEndpoint extends DefaultEndpoint implements FacebookConstants {

    private static final Logger LOG = LoggerFactory.getLogger(FacebookEndpoint.class);

    // resolved in initState(): EXACT, GET, SEARCH, or GET_AND_SEARCH depending on
    // which long-form method names matched (see initState for the resolution order)
    private FacebookNameStyle nameStyle;

    @UriPath(name = "methodName", description = "What operation to perform")
    @Metadata(required = "true")
    private String method;
    private FacebookMethodsType methodName;
    @UriParam
    private FacebookEndpointConfiguration configuration;
    // name of the exchange In-Body parameter; producer-only (rejected in createConsumer)
    @UriParam
    private String inBody;

    // candidate methods based on method name and endpoint configuration
    private List<FacebookMethodsType> candidates;

    /**
     * Creates the endpoint. The remaining URI path is taken as the Facebook method name;
     * candidate-method resolution is deferred to {@link #configureProperties(Map)}.
     *
     * @param uri               full endpoint URI
     * @param facebookComponent owning component
     * @param remaining         URI remainder, used as the method name
     * @param configuration     endpoint configuration (may be replaced/filled in configureProperties)
     */
    public FacebookEndpoint(String uri, FacebookComponent facebookComponent,
                            String remaining, FacebookEndpointConfiguration configuration) throws NoTypeConversionAvailableException {
        super(uri, facebookComponent);
        this.configuration = configuration;
        this.method = remaining;
    }

    /**
     * Creates a producer for this endpoint.
     */
    public Producer createProducer() throws Exception {
        return new FacebookProducer(this);
    }

    /**
     * Creates a polling consumer for this endpoint.
     *
     * @throws IllegalArgumentException if {@code inBody} is set, which is a producer-only option
     */
    public Consumer createConsumer(Processor processor) throws Exception {
        // make sure inBody is not set for consumers
        if (inBody != null) {
            throw new IllegalArgumentException("Option inBody is not supported for consumer endpoint");
        }
        final FacebookConsumer consumer = new FacebookConsumer(this, processor);
        // also set consumer.* properties
        configureConsumer(consumer);
        return consumer;
    }

    /**
     * This endpoint is a singleton: one instance is shared by all producers/consumers for the same URI.
     */
    public boolean isSingleton() {
        return true;
    }

    @Override
    public void configureProperties(Map<String, Object> options) {
        super.configureProperties(options);

        // set configuration properties first
        try {
            if (configuration == null) {
                configuration = new FacebookEndpointConfiguration();
            }
            // reference ("#bean") properties must be resolved before plain properties
            EndpointHelper.setReferenceProperties(getCamelContext(), configuration, options);
            EndpointHelper.setProperties(getCamelContext(), configuration, options);
        } catch (Exception e) {
            // surface configuration errors as IllegalArgumentException, preserving the cause
            throw new IllegalArgumentException(e.getMessage(), e);
        }

        // extract reading properties
        FacebookPropertiesHelper.configureReadingProperties(configuration, options);

        // validate configuration
        configuration.validate();
        // validate and initialize state
        initState();
    }

    /**
     * Resolves the candidate {@link FacebookMethodsType} list and the {@link FacebookNameStyle}
     * for the configured method name and endpoint properties.
     * Resolution order matters: an exact-name match wins (EXACT); otherwise get* then search*
     * long forms are tried, yielding GET, SEARCH, or GET_AND_SEARCH.
     *
     * @throws IllegalArgumentException if no operation matches the method name and arguments
     */
    private void initState() {
        // get endpoint property names
        final Set<String> arguments = new HashSet<String>();
        arguments.addAll(getEndpointPropertyNames(configuration));
        // add inBody argument for producers
        if (inBody != null) {
            arguments.add(inBody);
        }
        final String[] argNames = arguments.toArray(new String[arguments.size()]);

        candidates = new ArrayList<FacebookMethodsType>();
        candidates.addAll(getCandidateMethods(method, argNames));
        if (!candidates.isEmpty()) {
            // found an exact name match, allows disambiguation if needed
            this.nameStyle = FacebookNameStyle.EXACT;
        } else {
            // also search for long forms of method name, both get* and search*
            // Note that this set will be further sorted by Producers and Consumers
            // producers will prefer get* forms, and consumers should prefer search* forms
            candidates.addAll(getCandidateMethods(convertToGetMethod(method), argNames));
            if (!candidates.isEmpty()) {
                this.nameStyle = FacebookNameStyle.GET;
            }

            // remember how many get* candidates there were, to detect search* additions below
            int nGetMethods = candidates.size();
            candidates.addAll(getCandidateMethods(convertToSearchMethod(method), argNames));

            // error if there are no candidates
            if (candidates.isEmpty()) {
                throw new IllegalArgumentException(
                    String.format("No matching operation for %s, with arguments %s", method, arguments));
            }

            if (nameStyle == null) {
                // no get* methods found
                nameStyle = FacebookNameStyle.SEARCH;
            } else if (candidates.size() > nGetMethods) {
                // get* and search* methods found
                nameStyle = FacebookNameStyle.GET_AND_SEARCH;
            }
        }

        // log missing/extra properties for debugging
        if (LOG.isDebugEnabled()) {
            final Set<String> missing = getMissingProperties(method, nameStyle, arguments);
            if (!missing.isEmpty()) {
                LOG.debug("Method {} could use one or more properties from {}", method, missing);
            }
        }
    }

    public FacebookEndpointConfiguration getConfiguration() {
        return configuration;
    }

    /**
     * Returns the resolved candidate methods as an unmodifiable view.
     */
    public List<FacebookMethodsType> getCandidates() {
        return Collections.unmodifiableList(candidates);
    }

    public String getInBody() {
        return inBody;
    }

    public String getMethod() {
        return method;
    }

    public FacebookNameStyle getNameStyle() {
        return nameStyle;
    }

    /**
     * Sets the name of a parameter to be passed in the exchange In Body
     *
     * @throws IllegalArgumentException if {@code inBody} is not a valid endpoint property name
     */
    public void setInBody(String inBody) {
        // validate property name
        ObjectHelper.notNull(inBody, "inBody");
        if (!FacebookPropertiesHelper.getValidEndpointProperties().contains(inBody)) {
            throw new IllegalArgumentException("Unknown property " + inBody);
        }
        this.inBody = inBody;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.jena.query.text.analyzer;

import org.apache.commons.lang3.StringUtils;
import org.apache.jena.rdf.model.Resource;
import org.apache.lucene.analysis.Analyzer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.lang.reflect.Constructor;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Hashtable;
import java.util.List;
import java.util.Map.Entry;

/**
 * Static registry mapping BCP-47 language tags to Lucene {@link Analyzer}s,
 * plus caches for analyzers defined via the text:defineAnalyzers /
 * text:indexAnalyzer / text:searchFor assembler vocabulary.
 */
public class Util {

    private static Logger log = LoggerFactory.getLogger(Util.class) ;

    private static Hashtable<String, Class<?>> analyzersClasses; //mapping between BCP-47 language tags and lucene analyzersClasses
    private static Hashtable<String, Analyzer> cache = new Hashtable<>(); //to avoid unnecessary multiple analyzer instantiations

    // cache of defined text:defineAnalyzers
    private static Hashtable<String, Analyzer> definedAnalyzers = new Hashtable<>();

    // cache of defined text:indexAnalyzers
    private static Hashtable<String, Analyzer> indexAnalyzers = new Hashtable<>();

    // cache of text:searchFor language tags
    private static Hashtable<String, List<String>> searchForTags = new Hashtable<>();

    // map of auxiliary index info
    private static Hashtable<String, List<String>> auxIndexes = new Hashtable<>();

    // map of property resource to list of properties
    private static Hashtable<Resource, List<Resource>> propsLists = new Hashtable<>();

    // cache of effective fields
    private static Hashtable<String, Hashtable<String,String>> effectiveFields = new Hashtable<>();

    static {
        initAnalyzerDefs();
    }

    /**
     * Returns a cached analyzer instance for the given language tag, instantiating
     * it reflectively on first use.
     *
     * @param lang BCP-47 language tag; may be null
     * @return the analyzer, or null if the tag is null, unknown, or instantiation fails
     */
    public static Analyzer getLocalizedAnalyzer(String lang) {
        if (lang == null)
            return null;

        if (cache.containsKey(lang))
            return cache.get(lang);

        try {
            Class<?> analyzerClass = analyzersClasses.get(lang);
            if (analyzerClass == null)
                return null;
            Constructor<?> constructor = analyzerClass.getConstructor();
            Analyzer analyzer = (Analyzer) constructor.newInstance();
            cache.put(lang, analyzer);
            return analyzer;
        } catch (Exception e) {
            // Log through the class logger rather than printStackTrace so the
            // failure is visible in normal logging; still fall back to null.
            log.warn("Failed to instantiate analyzer for language tag: {}", lang, e);
            return null;
        }
    }

    /** Registers (or overrides) the analyzer for a language tag. */
    public static void addAnalyzer(String lang, Analyzer analyzer) {
        cache.put(lang, analyzer);
    }

    /** Returns the analyzer registered under the resource's URI via text:defineAnalyzers, or null. */
    public static Analyzer getDefinedAnalyzer(Resource key) {
        return definedAnalyzers.get(key.getURI());
    }

    /** Registers an analyzer under the resource's URI (text:defineAnalyzers). */
    public static void defineAnalyzer(Resource key, Analyzer analyzer) {
        definedAnalyzers.put(key.getURI(), analyzer);
    }

    /** Returns the index analyzer for a language tag, or null. */
    public static Analyzer getIndexAnalyzer(String tag) {
        return indexAnalyzers.get(tag);
    }

    /** Registers an index analyzer for a language tag (text:indexAnalyzer). */
    public static void addIndexAnalyzer(String tag, Analyzer analyzer) {
        indexAnalyzers.put(tag, analyzer);
    }

    /** Whether any index analyzers have been registered. */
    public static boolean usingIndexAnalyzers() {
        return !indexAnalyzers.isEmpty();
    }

    /** Associates a property resource with its list of member properties. */
    public static void addPropsList(Resource prop, List<Resource> list) {
        propsLists.put(prop, list);
    }

    /** Returns the property list registered for the resource, or null. */
    public static List<Resource> getPropList(Resource prop) {
        return propsLists.get(prop);
    }

    // for testing
    public static int sizePropsList() {
        return propsLists.size();
    }

    /** Whether text:searchFor tags have been registered for the given (non-blank) language tag. */
    public static boolean usingSearchFor(String lang) {
        return StringUtils.isNotBlank(lang) && searchForTags.containsKey(lang);
    }

    /**
     * If there are a list of tags to search for given the supplied lang tag, then the list is returned;
     * otherwise, a list of just the supplied lang tag
     *
     * @param lang
     * @return possibly empty list of tags to search for
     */
    public static List<String> getSearchForTags(String lang) {
        if (usingSearchFor(lang)) {
            return searchForTags.get(lang);
        } else if (StringUtils.isNotBlank(lang)) {
            return Arrays.asList(lang);
        } else {
            return new ArrayList<>();
        }
    }

    /** Registers the list of tags to search for in place of the given tag. */
    public static void addSearchForTags(String tag, List<String> tags) {
        searchForTags.put(tag, tags);
    }

    /** Returns the auxiliary index tags for a language tag, or null if none/blank. */
    public static List<String> getAuxIndexes(String tag) {
        return StringUtils.isNotEmpty(tag) ? auxIndexes.get(tag) : null;
    }

    /** Registers auxiliary index tags for a language tag. */
    public static void addAuxIndexes(String tag, List<String> tags) {
        auxIndexes.put(tag, tags);
    }

    /**
     * Builds the effectiveFields map from the accumulated auxIndexes and searchForTags:
     * for each doc tag and each of its auxiliary index tags, every searchFor tag of the
     * auxiliary index maps (doc tag, query tag) -> auxiliary index tag.
     * Call once after all assembler caching is complete.
     */
    public static void finishCaching() {
        log.trace("call finishCaching()");
        for (final Entry<String,List<String>> auxIndexesE : auxIndexes.entrySet()) {
            final String tag = auxIndexesE.getKey();  // ex: zh-hans
            final List<String> auxIndexesL = auxIndexesE.getValue();
            log.trace("finishCaching: tag: {}", tag);
            for (final String auxIndexTag : auxIndexesL) {
                // ex: auxIndexTag: zh-aux-han2pinyin
                log.trace("finishCaching: auxIndexTag: {}", auxIndexTag);
                // NOTE(review): assumes every auxIndexTag has an entry in searchForTags;
                // a missing entry would NPE here — confirm the assemblers guarantee this
                for (final String searchForTag : searchForTags.get(auxIndexTag)) {
                    // ex: zh-latn-pinyin
                    final Hashtable<String,String> res =
                            effectiveFields.computeIfAbsent(tag, x -> new Hashtable<String,String>());
                    log.trace("add effectiveField mapping: d:{} + q:{} = e:{}", tag, searchForTag, auxIndexTag);
                    res.put(searchForTag, auxIndexTag);
                }
            }
        }
    }

    /**
     * Returns the effective language tag to query under, given the document tag and
     * query tag; falls back to the document tag when no mapping exists.
     */
    public static String getEffectiveLang(final String docLang, final String queryLang) {
        final Hashtable<String,String> m = effectiveFields.get(docLang);
        if (m == null)
            return docLang;
        final String tag = m.get(queryLang);
        if (tag == null) {
            // fixed message: this method is getEffectiveLang, not getEffectiveFields
            log.info("getEffectiveLang got map for {} but couldn't find effective tag for {}", docLang, queryLang);
            return docLang;
        }
        return tag;
    }

    /** Populates the static tag -> Lucene analyzer class table. */
    private static void initAnalyzerDefs() {
        analyzersClasses = new Hashtable<>();
        analyzersClasses.put("ar", org.apache.lucene.analysis.ar.ArabicAnalyzer.class);
        analyzersClasses.put("bg", org.apache.lucene.analysis.bg.BulgarianAnalyzer.class);
        analyzersClasses.put("ca", org.apache.lucene.analysis.ca.CatalanAnalyzer.class);
        analyzersClasses.put("cs", org.apache.lucene.analysis.cz.CzechAnalyzer.class);
        analyzersClasses.put("da", org.apache.lucene.analysis.da.DanishAnalyzer.class);
        analyzersClasses.put("de", org.apache.lucene.analysis.de.GermanAnalyzer.class);
        analyzersClasses.put("el", org.apache.lucene.analysis.el.GreekAnalyzer.class);
        analyzersClasses.put("en", org.apache.lucene.analysis.en.EnglishAnalyzer.class);
        analyzersClasses.put("es", org.apache.lucene.analysis.es.SpanishAnalyzer.class);
        analyzersClasses.put("eu", org.apache.lucene.analysis.eu.BasqueAnalyzer.class);
        analyzersClasses.put("fa", org.apache.lucene.analysis.fa.PersianAnalyzer.class);
        analyzersClasses.put("fi", org.apache.lucene.analysis.fi.FinnishAnalyzer.class);
        analyzersClasses.put("fr", org.apache.lucene.analysis.fr.FrenchAnalyzer.class);
        analyzersClasses.put("ga", org.apache.lucene.analysis.ga.IrishAnalyzer.class);
        analyzersClasses.put("gl", org.apache.lucene.analysis.gl.GalicianAnalyzer.class);
        analyzersClasses.put("hi", org.apache.lucene.analysis.hi.HindiAnalyzer.class);
        analyzersClasses.put("hu", org.apache.lucene.analysis.hu.HungarianAnalyzer.class);
        analyzersClasses.put("hy", org.apache.lucene.analysis.hy.ArmenianAnalyzer.class);
        analyzersClasses.put("id", org.apache.lucene.analysis.id.IndonesianAnalyzer.class);
        analyzersClasses.put("it", org.apache.lucene.analysis.it.ItalianAnalyzer.class);
        analyzersClasses.put("ja", org.apache.lucene.analysis.cjk.CJKAnalyzer.class);
        analyzersClasses.put("ko", org.apache.lucene.analysis.cjk.CJKAnalyzer.class);
        analyzersClasses.put("lv", org.apache.lucene.analysis.lv.LatvianAnalyzer.class);
        analyzersClasses.put("nl", org.apache.lucene.analysis.nl.DutchAnalyzer.class);
        analyzersClasses.put("no", org.apache.lucene.analysis.no.NorwegianAnalyzer.class);
        analyzersClasses.put("pt", org.apache.lucene.analysis.pt.PortugueseAnalyzer.class);
        analyzersClasses.put("ro", org.apache.lucene.analysis.ro.RomanianAnalyzer.class);
        analyzersClasses.put("ru", org.apache.lucene.analysis.ru.RussianAnalyzer.class);
        analyzersClasses.put("sv", org.apache.lucene.analysis.sv.SwedishAnalyzer.class);
        analyzersClasses.put("th", org.apache.lucene.analysis.th.ThaiAnalyzer.class);
        analyzersClasses.put("tr", org.apache.lucene.analysis.tr.TurkishAnalyzer.class);
        analyzersClasses.put("zh", org.apache.lucene.analysis.cjk.CJKAnalyzer.class);
    }
}
package krawczls.deploymentManagement; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.util.ArrayList; import java.util.Date; import java.util.Random; import javax.jms.Connection; import javax.jms.Destination; import javax.jms.JMSException; import javax.jms.MessageProducer; import javax.jms.ObjectMessage; import javax.jms.Session; import javax.persistence.EntityManagerFactory; import javax.persistence.Persistence; import krawczls.deploymentManagement.ReplicationDeploymentProcessor; import krawczls.deploymentManagement.ReplicationDeploymentRoute; import krawczls.deploymentManagement.ReplicationStartContext; import krawczls.deploymentManagement.ReplicationStartProcessor; import krawczls.executionEngineManagement.WorkflowEngineProcessor; import krawczls.executionEngineManagement.WorkflowEngineRoute; import krawczls.executionEngineRegistry.WorkflowEngine; import krawczls.executionEngineRegistry.WorkflowEngineRegistry; import org.apache.activemq.ActiveMQConnectionFactory; //import org.apache.camel.builder.RouteBuilder; import org.apache.camel.impl.DefaultCamelContext; import org.apache.camel.impl.SimpleRegistry; import org.apache.commons.lang.SystemUtils; import constants.Constants; public class ReplicationStartContext { public static ActiveMQConnectionFactory connection_factory = new ActiveMQConnectionFactory("tcp://127.0.0.1:61616"); public static Connection connection = createMyConnection(connection_factory); public static int master = 0; public static int counter = 0; public static int heartbeatRate = 50000; public static int timeout = 100000; public static boolean withFailure = false; public static boolean initiateFailure = false; public static boolean withRoundRobin = false; public static boolean startFailureNow = false; public static boolean errorOccured = false; final static Random randomGenerator = new Random(); public static 
ArrayList<String> roundRobinEngineList = new ArrayList<String>(); public static Integer roundRobinIndex = 0; public static ArrayList<String> logs = new ArrayList<String>(); public static EntityManagerFactory emf = Persistence.createEntityManagerFactory("workflowEngineRegistry"); public static Connection createMyConnection(ActiveMQConnectionFactory factory) { Connection connection = null; try { connection = factory.createConnection(); } catch (final JMSException e) { e.printStackTrace(); } return connection; } public static void main(String[] args) throws Exception { System.out.println("Replication Context started"); emptyLogFile(); try { ReplicationStartContext.thread(new ReplicationStartContextThread(), false); Thread.sleep(1000); } catch (Exception e) { System.out.println("Replication Context could not be started."); } } public static void thread(Runnable runnable, boolean daemon) { Thread brokerThread = new Thread(runnable); brokerThread.setDaemon(daemon); brokerThread.start(); } public static void emptyLogFile() { try { File file; if(SystemUtils.IS_OS_UNIX) { file = new File("/home/ubuntu/logs/log.txt"); } else { file = new File("log.txt"); } if(file.exists()) { FileWriter fWriter = new FileWriter(file.getAbsoluteFile(), true); BufferedWriter bWriter = new BufferedWriter(fWriter); bWriter.write(""); bWriter.close(); } } catch(IOException e) { e.printStackTrace(); } } public static synchronized void writeToLogFile(String text) { try { File file; if(SystemUtils.IS_OS_UNIX) { file = new File("/home/ubuntu/logs/log.txt"); } else { file = new File("log.txt"); } if(!file.exists()) { file.createNewFile(); } FileWriter fWriter = new FileWriter(file.getAbsoluteFile(), true); BufferedWriter bWriter = new BufferedWriter(fWriter); bWriter.write(text); bWriter.newLine(); bWriter.close(); } catch(IOException e) { } } public static void decideWhetherAnEngineShouldFail(Random randomGenerator, int currentFail) { WorkflowEngineRegistry registry = new WorkflowEngineRegistry(); 
ArrayList<WorkflowEngine> workflowEngines = new ArrayList<WorkflowEngine>(); try { workflowEngines = registry.getAllActiveEngines(); } catch (Exception e) { e.printStackTrace(); } int failureProbability = 602; for(WorkflowEngine currentEngine : workflowEngines) { int randomNumber = randomGenerator.nextInt(failureProbability * workflowEngines.size()); if (Constants.DEBUG_LEVEL > 0) { System.out.println("currentFail: " + currentFail); System.out.println("randomNr: " + randomNumber); } if(currentFail > randomNumber) { errorOccured = true; //Send a failure message to the engine try { Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE); Destination failureDestination = session.createQueue("de.unistuttgart.rep." + currentEngine.getWorkflowEngineIp()); MessageProducer failureProducer = session.createProducer(failureDestination); // SynchronizationMessage syncMessage = new SynchronizationMessage(); // syncMessage.set_message("Fail"); ObjectMessage failureMessage = session.createObjectMessage(new Boolean(true)); failureProducer.send(failureMessage); } catch(Exception e) { } } } } public static void readRoundRobinConfig() { BufferedReader reader = null; String roundRobin = "false"; try { if(SystemUtils.IS_OS_UNIX) { reader = new BufferedReader(new FileReader ("/home/ubuntu/configs/robin.txt")); } else { reader = new BufferedReader(new FileReader ("robin.txt")); } roundRobin = reader.readLine(); System.out.println(roundRobin); } catch(IOException e) { e.printStackTrace(); } finally { try { if(reader != null) { reader.close(); } } catch(IOException e) { e.printStackTrace(); } } if(roundRobin.equals("true")) { withRoundRobin = true; } else { withRoundRobin = false; } } public static void readFailureConfig() { BufferedReader reader = null; String failure = "false"; String failureInitiator = "false"; try { if(SystemUtils.IS_OS_UNIX) { reader = new BufferedReader(new FileReader ("/home/ubuntu/configs/failure.txt")); } else { reader = new BufferedReader(new 
FileReader ("failure.txt")); } failure = reader.readLine(); failureInitiator = reader.readLine(); } catch(IOException e) { e.printStackTrace(); } finally { try { if(reader != null) { reader.close(); } } catch(IOException e) { e.printStackTrace(); } } if(failure.equals("true")) { withFailure = true; } else { withFailure = false; } if(failureInitiator.equals("true")) { initiateFailure = true; } else { initiateFailure = false; } } public static void getHeartbeatRateAndTimeout() { BufferedReader reader = null; try { if(SystemUtils.IS_OS_UNIX) { reader = new BufferedReader(new FileReader ("/home/ubuntu/configs/timeout.txt")); } else { reader = new BufferedReader(new FileReader ("timeout.txt")); } heartbeatRate = Integer.parseInt(reader.readLine()); timeout = Integer.parseInt(reader.readLine()); } catch(IOException e) { e.printStackTrace(); } finally { try { if(reader != null) { reader.close(); } } catch(IOException e) { e.printStackTrace(); } } } public static class ReplicationStartContextThread implements Runnable { public void run() { ReplicationStartContext.ReplicationStartContextThread replicationStartContextThread = this; synchronized (replicationStartContextThread) { try { SimpleRegistry registry = new SimpleRegistry(); registry.put("replicationDeploymentProcessor", new ReplicationDeploymentProcessor()); registry.put("replicationStartProcessor", new ReplicationStartProcessor()); registry.put("workflowEngineProcessor", new WorkflowEngineProcessor()); DefaultCamelContext context = new DefaultCamelContext(registry); //connection = connection_factory.createConnection(); connection.start(); Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE); session.createQueue("deploy"); session.createQueue("start"); session.createQueue("de.unistuttgart.rep"); context.addRoutes(new ReplicationDeploymentRoute()); context.addRoutes(new WorkflowEngineRoute()); emptyLogFile(); getHeartbeatRateAndTimeout(); System.out.println("This is the timeout: " + timeout); 
readRoundRobinConfig(); readFailureConfig(); context.start(); if(Constants.DEBUG_LEVEL > 0) { System.out.println("context running"); } if(withFailure && initiateFailure) { while(!startFailureNow) { this.wait(10000); } // this.wait(60000); int currentFail = 0; boolean x = true; long time = (new Date()).getTime(); while (x) { long curTime = (new Date()).getTime(); if(curTime - time >= 10000) { currentFail = currentFail+5; time = curTime; } this.wait(1000); final int finalFail = currentFail; Thread thread = new Thread(){ public void run(){ decideWhetherAnEngineShouldFail(randomGenerator, finalFail); } }; thread.start(); } } else { boolean x = true; while (x) { this.wait(10000); } } context.stop(); } catch (Exception e) { e.printStackTrace(); } finally { try { connection.close(); } catch (Exception e) { e.printStackTrace(); } emf.close(); } } } } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper.string;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.IndexableFieldType;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.Mapper.BuilderContext;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.junit.Before;
import org.junit.Test;

import java.util.Arrays;
import java.util.Collections;
import java.util.Map;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;

/**
 * Tests for {@code string} field mappings: ignore_above limits, analyzed and
 * not_analyzed defaults, analyzer serialization, term vectors, doc values,
 * and norms enabling/disabling via mapping merges.
 */
public class SimpleStringMappingTests extends ElasticsearchSingleNodeTest {

    // Field-data settings forcing the doc_values field-data format.
    private static Settings DOC_VALUES_SETTINGS = Settings.builder().put(FieldDataType.FORMAT_KEY, FieldDataType.DOC_VALUES_FORMAT_VALUE).build();

    IndexService indexService;
    DocumentMapperParser parser;

    @Before
    public void before() {
        // Fresh single-node index and mapper parser for every test.
        indexService = createIndex("test");
        parser = indexService.mapperService().documentMapperParser();
    }

    /**
     * ignore_above = 5: values of length &lt;= 5 are indexed, longer values
     * are dropped from the document.
     */
    @Test
    public void testLimit() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field").field("type", "string").field("ignore_above", 5).endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = parser.parse(mapping);

        // 4 chars: below the limit, field must be present.
        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("field", "1234")
                .endObject()
                .bytes());

        assertThat(doc.rootDoc().getField("field"), notNullValue());

        // 5 chars: exactly at the limit, still kept.
        doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("field", "12345")
                .endObject()
                .bytes());

        assertThat(doc.rootDoc().getField("field"), notNullValue());

        // 6 chars: above the limit, dropped.
        doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("field", "123456")
                .endObject()
                .bytes());

        assertThat(doc.rootDoc().getField("field"), nullValue());
    }

    /** Asserts the defaults for an analyzed string field: norms on, full postings, no term vectors. */
    private void assertDefaultAnalyzedFieldType(IndexableFieldType fieldType) {
        assertThat(fieldType.omitNorms(), equalTo(false));
        assertThat(fieldType.indexOptions(), equalTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS));
        assertThat(fieldType.storeTermVectors(), equalTo(false));
        assertThat(fieldType.storeTermVectorOffsets(), equalTo(false));
        assertThat(fieldType.storeTermVectorPositions(), equalTo(false));
        assertThat(fieldType.storeTermVectorPayloads(), equalTo(false));
    }

    /** Field-type equality on the subset of properties these tests care about. */
    private void assertEquals(IndexableFieldType ft1, IndexableFieldType ft2) {
        assertEquals(ft1.tokenized(), ft2.tokenized());
        assertEquals(ft1.omitNorms(), ft2.omitNorms());
        assertEquals(ft1.indexOptions(), ft2.indexOptions());
        assertEquals(ft1.storeTermVectors(), ft2.storeTermVectors());
        assertEquals(ft1.docValuesType(), ft2.docValuesType());
    }

    /**
     * Serializes the mapper back to JSON, re-parses it and checks a document
     * indexed through the round-tripped mapper produces the same field type.
     */
    private void assertParseIdemPotent(IndexableFieldType expected, DocumentMapper mapper) throws Exception {
        String mapping = mapper.toXContent(XContentFactory.jsonBuilder().startObject(), new ToXContent.MapParams(ImmutableMap.<String, String>of())).endObject().string();
        mapper = parser.parse(mapping);
        ParsedDocument doc = mapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("field", "2345")
                .endObject()
                .bytes());
        assertEquals(expected, doc.rootDoc().getField("field").fieldType());
    }

    /** A bare "string" field must get the analyzed defaults, idempotently. */
    @Test
    public void testDefaultsForAnalyzed() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field").field("type", "string").endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = parser.parse(mapping);

        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("field", "1234")
                .endObject()
                .bytes());

        IndexableFieldType fieldType = doc.rootDoc().getField("field").fieldType();
        assertDefaultAnalyzedFieldType(fieldType);
        assertParseIdemPotent(fieldType, defaultMapper);
    }

    /**
     * not_analyzed defaults (norms off, DOCS-only postings), then the same
     * settings made explicit, then the deprecated omit_norms spelling.
     */
    @Test
    public void testDefaultsForNotAnalyzed() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field").field("type", "string").field("index", "not_analyzed").endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = parser.parse(mapping);

        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("field", "1234")
                .endObject()
                .bytes());

        IndexableFieldType fieldType = doc.rootDoc().getField("field").fieldType();
        // not_analyzed implies norms disabled and docs-only postings.
        assertThat(fieldType.omitNorms(), equalTo(true));
        assertThat(fieldType.indexOptions(), equalTo(IndexOptions.DOCS));
        assertThat(fieldType.storeTermVectors(), equalTo(false));
        assertThat(fieldType.storeTermVectorOffsets(), equalTo(false));
        assertThat(fieldType.storeTermVectorPositions(), equalTo(false));
        assertThat(fieldType.storeTermVectorPayloads(), equalTo(false));
        assertParseIdemPotent(fieldType, defaultMapper);

        // now test it explicitly set
        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field").field("type", "string").field("index", "not_analyzed").startObject("norms").field("enabled", true).endObject().field("index_options", "freqs").endObject().endObject()
                .endObject().endObject().string();

        defaultMapper = parser.parse(mapping);

        doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("field", "1234")
                .endObject()
                .bytes());

        fieldType = doc.rootDoc().getField("field").fieldType();
        assertThat(fieldType.omitNorms(), equalTo(false));
        assertThat(fieldType.indexOptions(), equalTo(IndexOptions.DOCS_AND_FREQS));
        assertThat(fieldType.storeTermVectors(), equalTo(false));
        assertThat(fieldType.storeTermVectorOffsets(), equalTo(false));
        assertThat(fieldType.storeTermVectorPositions(), equalTo(false));
        assertThat(fieldType.storeTermVectorPayloads(), equalTo(false));
        assertParseIdemPotent(fieldType, defaultMapper);

        // also test the deprecated omit_norms
        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field").field("type", "string").field("index", "not_analyzed").field("omit_norms", false).endObject().endObject()
                .endObject().endObject().string();

        defaultMapper = parser.parse(mapping);

        doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("field", "1234")
                .endObject()
                .bytes());

        fieldType = doc.rootDoc().getField("field").fieldType();
        assertThat(fieldType.omitNorms(), equalTo(false));
        assertParseIdemPotent(fieldType, defaultMapper);
    }

    /**
     * search_quote_analyzer must only be serialized back when it was set
     * explicitly, not when it is inherited from the (search) analyzer.
     */
    @Test
    public void testSearchQuoteAnalyzerSerialization() throws Exception {
        // Cases where search_quote_analyzer should not be added to the mapping.
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties")
                .startObject("field1")
                .field("type", "string")
                .field("position_offset_gap", 1000)
                .endObject()
                .startObject("field2")
                .field("type", "string")
                .field("position_offset_gap", 1000)
                .field("analyzer", "standard")
                .endObject()
                .startObject("field3")
                .field("type", "string")
                .field("position_offset_gap", 1000)
                .field("analyzer", "standard")
                .field("search_analyzer", "simple")
                .endObject()
                .startObject("field4")
                .field("type", "string")
                .field("position_offset_gap", 1000)
                .field("analyzer", "standard")
                .field("search_analyzer", "simple")
                .field("search_quote_analyzer", "simple")
                .endObject()
                .endObject()
                .endObject().endObject().string();

        DocumentMapper mapper = parser.parse(mapping);
        for (String fieldName : Lists.newArrayList("field1", "field2", "field3", "field4")) {
            Map<String, Object> serializedMap = getSerializedMap(fieldName, mapper);
            assertFalse(serializedMap.containsKey("search_quote_analyzer"));
        }

        // Cases where search_quote_analyzer should be present.
        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties")
                .startObject("field1")
                .field("type", "string")
                .field("position_offset_gap", 1000)
                .field("search_quote_analyzer", "simple")
                .endObject()
                .startObject("field2")
                .field("type", "string")
                .field("position_offset_gap", 1000)
                .field("analyzer", "standard")
                .field("search_analyzer", "standard")
                .field("search_quote_analyzer", "simple")
                .endObject()
                .endObject()
                .endObject().endObject().string();

        mapper = parser.parse(mapping);
        for (String fieldName : Lists.newArrayList("field1", "field2")) {
            Map<String, Object> serializedMap = getSerializedMap(fieldName, mapper);
            assertEquals(serializedMap.get("search_quote_analyzer"), "simple");
        }
    }

    /** Serializes one field mapper to JSON and returns its settings map. */
    private Map<String, Object> getSerializedMap(String fieldName, DocumentMapper mapper) throws Exception {
        FieldMapper fieldMapper = mapper.mappers().smartNameFieldMapper(fieldName);
        XContentBuilder builder = JsonXContent.contentBuilder().startObject();
        fieldMapper.toXContent(builder, ToXContent.EMPTY_PARAMS).endObject();
        builder.close();

        Map<String, Object> fieldMap = JsonXContent.jsonXContent.createParser(builder.bytes()).mapAndClose();
        @SuppressWarnings("unchecked")
        Map<String, Object> result = (Map<String, Object>) fieldMap.get(fieldName);
        return result;
    }

    /** Every term_vector mapping option maps to the expected Lucene flags. */
    @Test
    public void testTermVectors() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties")
                .startObject("field1")
                .field("type", "string")
                .field("term_vector", "no")
                .endObject()
                .startObject("field2")
                .field("type", "string")
                .field("term_vector", "yes")
                .endObject()
                .startObject("field3")
                .field("type", "string")
                .field("term_vector", "with_offsets")
                .endObject()
                .startObject("field4")
                .field("type", "string")
                .field("term_vector", "with_positions")
                .endObject()
                .startObject("field5")
                .field("type", "string")
                .field("term_vector", "with_positions_offsets")
                .endObject()
                .startObject("field6")
                .field("type", "string")
                .field("term_vector", "with_positions_offsets_payloads")
                .endObject()
                .endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = parser.parse(mapping);

        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("field1", "1234")
                .field("field2", "1234")
                .field("field3", "1234")
                .field("field4", "1234")
                .field("field5", "1234")
                .field("field6", "1234")
                .endObject()
                .bytes());

        // "no": nothing stored.
        assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectors(), equalTo(false));
        assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorOffsets(), equalTo(false));
        assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorPositions(), equalTo(false));
        assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorPayloads(), equalTo(false));

        // "yes": vectors only.
        assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectors(), equalTo(true));
        assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorOffsets(), equalTo(false));
        assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorPositions(), equalTo(false));
        assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorPayloads(), equalTo(false));

        // "with_offsets".
        assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectors(), equalTo(true));
        assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorOffsets(), equalTo(true));
        assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorPositions(), equalTo(false));
        assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorPayloads(), equalTo(false));

        // "with_positions".
        assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectors(), equalTo(true));
        assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorOffsets(), equalTo(false));
        assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorPositions(), equalTo(true));
        assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorPayloads(), equalTo(false));

        // "with_positions_offsets".
        assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectors(), equalTo(true));
        assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorOffsets(), equalTo(true));
        assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorPositions(), equalTo(true));
        assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorPayloads(), equalTo(false));

        // "with_positions_offsets_payloads".
        assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectors(), equalTo(true));
        assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorOffsets(), equalTo(true));
        assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPositions(), equalTo(true));
        assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPayloads(), equalTo(true));
    }

    /** Doc values can be enabled through field-data settings ("doc_values" format). */
    public void testDocValuesFielddata() throws Exception {
        IndexService indexService = createIndex("index");
        DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
        final BuilderContext ctx = new BuilderContext(indexService.settingsService().getSettings(), new ContentPath(1));

        assertFalse(new StringFieldMapper.Builder("anything").index(false).build(ctx).hasDocValues());
        assertTrue(new StringFieldMapper.Builder("anything").index(false).fieldDataSettings(DOC_VALUES_SETTINGS).build(ctx).hasDocValues());
        assertTrue(new StringFieldMapper.Builder("anything").index(false).docValues(true).build(ctx).hasDocValues());

        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties")
                .startObject("str1")
                .field("type", "string")
                .startObject("fielddata")
                .field("format", "fst")
                .endObject()
                .endObject()
                .startObject("str2")
                .field("type", "string")
                .field("index", "not_analyzed")
                .startObject("fielddata")
                .field("format", "doc_values")
                .endObject()
                .endObject()
                .endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = parser.parse(mapping);

        ParsedDocument parsedDoc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("str1", "1234")
                .field("str2", "1234")
                .endObject()
                .bytes());
        final Document doc = parsedDoc.rootDoc();

        assertEquals(DocValuesType.NONE, docValuesType(doc, "str1"));
        assertEquals(DocValuesType.SORTED_SET, docValuesType(doc, "str2"));
    }

    /** Doc values are only legal on non-analyzed string fields. */
    public void testDocValues() throws Exception {
        // doc values only work on non-analyzed content
        final BuilderContext ctx = new BuilderContext(indexService.settingsService().getSettings(), new ContentPath(1));
        try {
            // Tokenized (analyzed) + doc_values must be rejected at build time.
            new StringFieldMapper.Builder("anything").docValues(true).build(ctx);
            fail();
        } catch (Exception e) { /* OK */ }

        assertFalse(new StringFieldMapper.Builder("anything").index(false).build(ctx).hasDocValues());
        assertTrue(new StringFieldMapper.Builder("anything").index(true).tokenized(false).build(ctx).hasDocValues());
        assertFalse(new StringFieldMapper.Builder("anything").index(true).tokenized(true).build(ctx).hasDocValues());
        assertFalse(new StringFieldMapper.Builder("anything").index(false).tokenized(false).docValues(false).build(ctx).hasDocValues());
        assertTrue(new StringFieldMapper.Builder("anything").index(false).docValues(true).build(ctx).hasDocValues());

        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties")
                .startObject("str1")
                .field("type", "string")
                .field("index", "no")
                .endObject()
                .startObject("str2")
                .field("type", "string")
                .field("index", "not_analyzed")
                .endObject()
                .startObject("str3")
                .field("type", "string")
                .field("index", "analyzed")
                .endObject()
                .startObject("str4")
                .field("type", "string")
                .field("index", "not_analyzed")
                .field("doc_values", false)
                .endObject()
                .startObject("str5")
                .field("type", "string")
                .field("index", "no")
                .field("doc_values", true)
                .endObject()
                .endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = parser.parse(mapping);

        ParsedDocument parsedDoc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("str1", "1234")
                .field("str2", "1234")
                .field("str3", "1234")
                .field("str4", "1234")
                .field("str5", "1234")
                .endObject()
                .bytes());
        final Document doc = parsedDoc.rootDoc();

        // Only not_analyzed fields get SORTED_SET by default; "no"-indexed
        // fields need explicit doc_values: true.
        assertEquals(DocValuesType.NONE, docValuesType(doc, "str1"));
        assertEquals(DocValuesType.SORTED_SET, docValuesType(doc, "str2"));
        assertEquals(DocValuesType.NONE, docValuesType(doc, "str3"));
        assertEquals(DocValuesType.NONE, docValuesType(doc, "str4"));
        assertEquals(DocValuesType.SORTED_SET, docValuesType(doc, "str5"));
    }

    // TODO: this function shouldn't be necessary. parsing should just add a single field that is indexed and dv
    public static DocValuesType docValuesType(Document document, String fieldName) {
        for (IndexableField field : document.getFields(fieldName)) {
            if (field.fieldType().docValuesType() != DocValuesType.NONE) {
                return field.fieldType().docValuesType();
            }
        }
        return DocValuesType.NONE;
    }

    /**
     * Disabling norms via a mapping merge is allowed; re-enabling them must
     * produce a merge conflict.
     */
    @Test
    public void testDisableNorms() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field").field("type", "string").endObject().endObject()
                .endObject().endObject().string();

        DocumentMapper defaultMapper = parser.parse(mapping);

        ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("field", "1234")
                .endObject()
                .bytes());

        IndexableFieldType fieldType = doc.rootDoc().getField("field").fieldType();
        assertEquals(false, fieldType.omitNorms());

        // Merging a norms-disabled mapping is a legal, conflict-free downgrade.
        String updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", false).endObject()
                .endObject().endObject().endObject().endObject().string();
        MergeResult mergeResult = defaultMapper.merge(parser.parse(updatedMapping).mapping(), false);
        assertFalse(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts());

        doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
                .startObject()
                .field("field", "1234")
                .endObject()
                .bytes());

        fieldType = doc.rootDoc().getField("field").fieldType();
        assertEquals(true, fieldType.omitNorms());

        // Trying to turn norms back on must conflict.
        updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", true).endObject()
                .endObject().endObject().endObject().endObject().string();
        mergeResult = defaultMapper.merge(parser.parse(updatedMapping).mapping(), true);
        assertTrue(mergeResult.hasConflicts());
        assertEquals(1, mergeResult.buildConflicts().length);
        assertTrue(mergeResult.buildConflicts()[0].contains("cannot enable norms"));
    }
}
/*
 * Copyright 2015-present Open Networking Laboratory
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.bgp.bgpio.protocol.linkstate;

import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Objects;

import org.jboss.netty.buffer.ChannelBuffer;
import org.onosproject.bgp.bgpio.exceptions.BgpParseException;
import org.onosproject.bgp.bgpio.types.BgpErrorType;
import org.onosproject.bgp.bgpio.types.BgpValueType;
import org.onosproject.bgp.bgpio.types.IPReachabilityInformationTlv;
import org.onosproject.bgp.bgpio.types.OspfRouteTypeTlv;
import org.onosproject.bgp.bgpio.types.attr.BgpAttrNodeMultiTopologyId;
import org.onosproject.bgp.bgpio.util.UnSupportedAttribute;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.MoreObjects;

/**
 * Provides Implementation of Local node descriptors and prefix descriptors.
 */
public class BgpPrefixLSIdentifier implements Comparable<Object> {

    protected static final Logger log = LoggerFactory.getLogger(BgpPrefixLSIdentifier.class);

    // Size in bytes of a TLV header: 2-byte type + 2-byte length.
    public static final int TYPE_AND_LEN = 4;

    private NodeDescriptors localNodeDescriptors;
    private List<BgpValueType> prefixDescriptor;

    /**
     * Resets parameters.
     */
    public BgpPrefixLSIdentifier() {
        this.localNodeDescriptors = null;
        this.prefixDescriptor = null;
    }

    /**
     * Constructor to initialize parameters.
     *
     * @param localNodeDescriptors Local node descriptors
     * @param prefixDescriptor Prefix Descriptors
     */
    public BgpPrefixLSIdentifier(NodeDescriptors localNodeDescriptors,
                                 List<BgpValueType> prefixDescriptor) {
        this.localNodeDescriptors = localNodeDescriptors;
        this.prefixDescriptor = prefixDescriptor;
    }

    /**
     * Reads the channel buffer and parses Prefix Identifier.
     *
     * @param cb ChannelBuffer
     * @param protocolId protocol ID
     * @return object of this class
     * @throws BgpParseException while parsing Prefix Identifier
     */
    public static BgpPrefixLSIdentifier parsePrefixIdendifier(ChannelBuffer cb, byte protocolId)
            throws BgpParseException {
        // Parse local node descriptor, then the prefix descriptors.
        // (Removed the placeholder allocations that were immediately
        // overwritten by the parse results.)
        NodeDescriptors localNodeDescriptors = parseLocalNodeDescriptors(cb, protocolId);
        List<BgpValueType> prefixDescriptor = parsePrefixDescriptors(cb);
        return new BgpPrefixLSIdentifier(localNodeDescriptors, prefixDescriptor);
    }

    /**
     * Parse local node descriptors.
     *
     * @param cb ChannelBuffer
     * @param protocolId protocol identifier
     * @return LocalNodeDescriptors
     * @throws BgpParseException while parsing local node descriptors
     */
    public static NodeDescriptors parseLocalNodeDescriptors(ChannelBuffer cb, byte protocolId)
            throws BgpParseException {
        // Copy taken up-front so error notifications can carry the raw,
        // unread bytes of the offending attribute.
        ChannelBuffer tempBuf = cb.copy();
        short type = cb.readShort();
        short length = cb.readShort();
        if (cb.readableBytes() < length) {
            //length + 4 implies data contains type, length and value
            throw new BgpParseException(BgpErrorType.UPDATE_MESSAGE_ERROR,
                    BgpErrorType.OPTIONAL_ATTRIBUTE_ERROR,
                    tempBuf.readBytes(cb.readableBytes() + TYPE_AND_LEN));
        }
        NodeDescriptors localNodeDescriptors = new NodeDescriptors();
        ChannelBuffer tempCb = cb.readBytes(length);
        if (type == NodeDescriptors.LOCAL_NODE_DES_TYPE) {
            localNodeDescriptors = NodeDescriptors.read(tempCb, length, type, protocolId);
        } else {
            // Local node descriptor TLV is mandatory here; any other type is
            // a malformed attribute list.
            throw new BgpParseException(BgpErrorType.UPDATE_MESSAGE_ERROR,
                    BgpErrorType.MALFORMED_ATTRIBUTE_LIST, null);
        }
        return localNodeDescriptors;
    }

    /**
     * Parse list of prefix descriptors.
     *
     * @param cb ChannelBuffer
     * @return list of prefix descriptors
     * @throws BgpParseException while parsing list of prefix descriptors
     */
    public static List<BgpValueType> parsePrefixDescriptors(ChannelBuffer cb)
            throws BgpParseException {
        LinkedList<BgpValueType> prefixDescriptor = new LinkedList<>();
        BgpValueType tlv = null;
        boolean isIpReachInfo = false;
        ChannelBuffer tempCb;
        // Counts BgpAttrNodeMultiTopologyId TLVs: at most one is allowed.
        int count = 0;

        while (cb.readableBytes() > 0) {
            ChannelBuffer tempBuf = cb.copy();
            short type = cb.readShort();
            short length = cb.readShort();
            if (cb.readableBytes() < length) {
                //length + 4 implies data contains type, length and value
                throw new BgpParseException(BgpErrorType.UPDATE_MESSAGE_ERROR,
                        BgpErrorType.OPTIONAL_ATTRIBUTE_ERROR,
                        tempBuf.readBytes(cb.readableBytes() + TYPE_AND_LEN));
            }
            tempCb = cb.readBytes(length);
            switch (type) {
            case OspfRouteTypeTlv.TYPE:
                tlv = OspfRouteTypeTlv.read(tempCb);
                break;
            case IPReachabilityInformationTlv.TYPE:
                tlv = IPReachabilityInformationTlv.read(tempCb, length);
                isIpReachInfo = true;
                break;
            case BgpAttrNodeMultiTopologyId.ATTRNODE_MULTITOPOLOGY:
                tlv = BgpAttrNodeMultiTopologyId.read(tempCb);
                count = count + 1;
                if (count > 1) {
                    //length + 4 implies data contains type, length and value
                    throw new BgpParseException(BgpErrorType.UPDATE_MESSAGE_ERROR,
                            BgpErrorType.OPTIONAL_ATTRIBUTE_ERROR,
                            tempBuf.readBytes(length + TYPE_AND_LEN));
                }
                break;
            default:
                // Unknown TLV: skip its value bytes and keep going.
                UnSupportedAttribute.skipBytes(tempCb, length);
            }
            prefixDescriptor.add(tlv);
        }

        // IP reachability information TLV is mandatory in a prefix descriptor.
        if (!isIpReachInfo) {
            throw new BgpParseException(BgpErrorType.UPDATE_MESSAGE_ERROR,
                    BgpErrorType.OPTIONAL_ATTRIBUTE_ERROR, null);
        }
        return prefixDescriptor;
    }

    /**
     * Returns local node descriptors.
     *
     * @return local node descriptors
     */
    public NodeDescriptors getLocalNodeDescriptors() {
        return this.localNodeDescriptors;
    }

    /**
     * Returns Prefix descriptors.
     *
     * @return Prefix descriptors
     */
    public List<BgpValueType> getPrefixdescriptor() {
        return this.prefixDescriptor;
    }

    @Override
    public int hashCode() {
        // FIX: previously this was Objects.hash(prefixDescriptor.hashCode(),
        // localNodeDescriptors), which double-hashed the list and threw NPE
        // for instances built via the no-arg constructor (null list).
        // Objects.hash handles nulls safely.
        return Objects.hash(localNodeDescriptors, prefixDescriptor);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj instanceof BgpPrefixLSIdentifier) {
            int countObjSubTlv = 0;
            int countOtherSubTlv = 0;
            boolean isCommonSubTlv = true;
            BgpPrefixLSIdentifier other = (BgpPrefixLSIdentifier) obj;

            // Order-insensitive comparison of the prefix descriptor TLVs:
            // same size and every element of one list present in the other.
            Iterator<BgpValueType> objListIterator = other.prefixDescriptor.iterator();
            countOtherSubTlv = other.prefixDescriptor.size();
            countObjSubTlv = prefixDescriptor.size();
            if (countObjSubTlv != countOtherSubTlv) {
                return false;
            } else {
                while (objListIterator.hasNext() && isCommonSubTlv) {
                    BgpValueType subTlv = objListIterator.next();
                    if (prefixDescriptor.contains(subTlv)
                            && other.prefixDescriptor.contains(subTlv)) {
                        isCommonSubTlv = Objects.equals(
                                prefixDescriptor.get(prefixDescriptor.indexOf(subTlv)),
                                other.prefixDescriptor.get(other.prefixDescriptor.indexOf(subTlv)));
                    } else {
                        isCommonSubTlv = false;
                    }
                }
                return isCommonSubTlv
                        && Objects.equals(this.localNodeDescriptors, other.localNodeDescriptors);
            }
        }
        return false;
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(getClass())
                .add("localNodeDescriptors", localNodeDescriptors)
                .add("prefixDescriptor", prefixDescriptor)
                .toString();
    }

    @Override
    public int compareTo(Object o) {
        if (this.equals(o)) {
            return 0;
        }
        // Order first by the local node descriptors, then by the TLV lists.
        int result = this.localNodeDescriptors
                .compareTo(((BgpPrefixLSIdentifier) o).localNodeDescriptors);
        if (result != 0) {
            return result;
        }
        int countOtherSubTlv = ((BgpPrefixLSIdentifier) o).prefixDescriptor.size();
        int countObjSubTlv = prefixDescriptor.size();
        if (countOtherSubTlv != countObjSubTlv) {
            return (countOtherSubTlv > countObjSubTlv) ? 1 : -1;
        }
        ListIterator<BgpValueType> listIterator = prefixDescriptor.listIterator();
        while (listIterator.hasNext()) {
            BgpValueType tlv1 = listIterator.next();
            // FIX: tlvFound must be reset for every element. It was declared
            // once outside the loop, so after the first matching TLV it stayed
            // true and later unmatched TLV types could never trigger the
            // "not found" result below.
            boolean tlvFound = false;
            for (BgpValueType tlv : ((BgpPrefixLSIdentifier) o).prefixDescriptor) {
                // Match TLVs by type, then compare their payloads.
                if (tlv.getType() == tlv1.getType()) {
                    result = prefixDescriptor.get(prefixDescriptor.indexOf(tlv1))
                            .compareTo(((BgpPrefixLSIdentifier) o).prefixDescriptor
                                    .get(((BgpPrefixLSIdentifier) o).prefixDescriptor.indexOf(tlv)));
                    if (result != 0) {
                        return result;
                    }
                    tlvFound = true;
                    break;
                }
            }
            if (!tlvFound) {
                return 1;
            }
        }
        return 0;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.zookeeper.test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.Collection;
import java.util.HashSet;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.log4j.Logger;
import org.apache.zookeeper.AsyncCallback;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.server.ZKDatabase;
import org.apache.zookeeper.server.quorum.Leader;
import org.apache.zookeeper.test.ClientBase.CountdownWatcher;
import org.junit.Test;

public class FollowerResyncConcurrencyTest extends QuorumBase {
    private static final Logger LOG = Logger.getLogger(FollowerResyncConcurrencyTest.class);
    public static final long CONNECTION_TIMEOUT = ClientTest.CONNECTION_TIMEOUT;

    // FIX: these were "private volatile int" fields incremented with ++ from
    // multiple ZooKeeper callback threads. A volatile increment is a
    // non-atomic read-modify-write, so updates could be lost and the exact
    // "counter == 14200" semaphore release could be missed entirely, hanging
    // the test until the tryAcquire timeout. AtomicInteger (whose import was
    // already present but unused) makes each increment atomic; using the
    // value returned by incrementAndGet() makes the increment and the
    // threshold check one atomic step.
    // 'errors' is recorded by the callbacks but never asserted on.
    private final AtomicInteger counter = new AtomicInteger(0);
    private final AtomicInteger errors = new AtomicInteger(0);

    /**
     * See ZOOKEEPER-962. This tests for one of the bugs hit while fixing this,
     * setting the ZXID of the SNAP packet
     * Starts up 3 ZKs. Shut down F1, write a node, restart the one that was shut down
     * The non-leader ZKs are writing to cluster
     * Shut down F1 again
     * Restart after sessions are expired, expect to get a snap file
     * Shut down, run some transactions through.
     * Restart to a diff while transactions are running in leader
     * @throws IOException
     * @throws InterruptedException
     * @throws KeeperException
     */
    @Test
    public void testResyncBySnapThenDiffAfterFollowerCrashes()
            throws IOException, InterruptedException, KeeperException, Throwable {
        final Semaphore sem = new Semaphore(0);

        QuorumUtil qu = new QuorumUtil(1);
        qu.startAll();
        CountdownWatcher watcher1 = new CountdownWatcher();
        CountdownWatcher watcher2 = new CountdownWatcher();
        CountdownWatcher watcher3 = new CountdownWatcher();

        // Find the leader's index in the ensemble.
        int index = 1;
        while (qu.getPeer(index).peer.leader == null) {
            index++;
        }

        Leader leader = qu.getPeer(index).peer.leader;
        assertNotNull(leader);

        /* Reusing the index variable to select a follower to connect to */
        index = (index == 1) ? 2 : 1;
        LOG.info("Connecting to follower:" + index);

        qu.shutdown(index);

        final ZooKeeper zk3 = createClient(qu.getPeer(3).peer.getClientPort(), watcher3);
        LOG.info("zk3 has session id 0x" + Long.toHexString(zk3.getSessionId()));

        // Write while the follower is down so it must resync on restart.
        zk3.create("/mybar", null, ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL_SEQUENTIAL);

        qu.restart(index);
        final ZooKeeper zk1 = createClient(qu.getPeer(index).peer.getClientPort(), watcher1);
        LOG.info("zk1 has session id 0x" + Long.toHexString(zk1.getSessionId()));

        final ZooKeeper zk2 = createClient(qu.getPeer(index).peer.getClientPort(), watcher2);
        LOG.info("zk2 has session id 0x" + Long.toHexString(zk2.getSessionId()));

        zk1.create("/first", new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);

        // Background writer started mid-run to generate txns while the
        // follower is resyncing off a diff. Total expected async callbacks:
        // 1000 (this thread) + 13000 (/mybar) + 200 (/newbaz) = 14200.
        Thread mytestfooThread = new Thread(new Runnable() {
            @Override
            public void run() {
                for (int i = 0; i < 1000; i++) {
                    zk3.create("/mytestfoo", null, ZooDefs.Ids.OPEN_ACL_UNSAFE,
                            CreateMode.EPHEMERAL_SEQUENTIAL, new AsyncCallback.StringCallback() {
                        @Override
                        public void processResult(int rc, String path, Object ctx, String name) {
                            int count = counter.incrementAndGet();
                            if (rc != 0) {
                                errors.incrementAndGet();
                            }
                            if (count == 14200) {
                                sem.release();
                            }
                        }
                    }, null);
                    if (i % 10 == 0) {
                        try {
                            Thread.sleep(100);
                        } catch (Exception e) {
                            // best-effort pacing only; safe to ignore
                        }
                    }
                }
            }
        });

        for (int i = 0; i < 13000; i++) {
            zk3.create("/mybar", null, ZooDefs.Ids.OPEN_ACL_UNSAFE,
                    CreateMode.EPHEMERAL_SEQUENTIAL, new AsyncCallback.StringCallback() {
                @Override
                public void processResult(int rc, String path, Object ctx, String name) {
                    int count = counter.incrementAndGet();
                    if (rc != 0) {
                        errors.incrementAndGet();
                    }
                    if (count == 14200) {
                        sem.release();
                    }
                }
            }, null);

            if (i == 5000) {
                qu.shutdown(index);
                LOG.info("Shutting down s1");
            }
            if (i == 12000) {
                //Restart off of snap, then get some txns for a log, then shut down
                qu.restart(index);
                Thread.sleep(300);
                qu.shutdown(index);
                mytestfooThread.start();
                Thread.sleep(300);
                qu.restart(index);
                LOG.info("Setting up server: " + index);
            }
            if ((i % 1000) == 0) {
                Thread.sleep(1000);
            }
            if (i % 50 == 0) {
                zk2.create("/newbaz", null, ZooDefs.Ids.OPEN_ACL_UNSAFE,
                        CreateMode.EPHEMERAL_SEQUENTIAL, new AsyncCallback.StringCallback() {
                    @Override
                    public void processResult(int rc, String path, Object ctx, String name) {
                        int count = counter.incrementAndGet();
                        if (rc != 0) {
                            errors.incrementAndGet();
                        }
                        if (count == 14200) {
                            sem.release();
                        }
                    }
                }, null);
            }
        }

        // Wait until all updates return
        if (!sem.tryAcquire(ClientBase.CONNECTION_TIMEOUT, TimeUnit.MILLISECONDS)) {
            LOG.warn("Did not aquire semaphore fast enough");
        }
        mytestfooThread.join(ClientBase.CONNECTION_TIMEOUT);
        if (mytestfooThread.isAlive()) {
            LOG.error("mytestfooThread is still alive");
        }
        Thread.sleep(1000);

        verifyState(qu, index, leader);

        zk1.close();
        zk2.close();
        zk3.close();

        qu.shutdownAll();
    }

    /**
     * This test:
     * Starts up 3 ZKs. The non-leader ZKs are writing to cluster
     * Shut down one of the non-leader ZKs.
     * Restart after sessions have expired but &lt;500 txns have taken place (get a diff)
     * Shut down immediately after restarting, start running separate thread with other transactions
     * Restart to a diff while transactions are running in leader
     *
     * Before fixes for ZOOKEEPER-962, restarting off of diff could get an inconsistent view of data
     * missing transactions that completed during diff syncing. Follower would also be considered
     * "restarted" before all forwarded transactions were completely processed, so restarting would
     * cause a snap file with a too-high zxid to be written, and transactions would be missed
     *
     * This test should pretty reliably catch the failure of restarting the server before all diff
     * messages have been processed, however, due to the transient nature of the system it may not
     * catch failures due to concurrent processing of transactions during the leader's diff
     * forwarding.
     *
     * @throws IOException
     * @throws InterruptedException
     * @throws KeeperException
     * @throws Throwable
     */
    @Test
    public void testResyncByDiffAfterFollowerCrashes()
            throws IOException, InterruptedException, KeeperException, Throwable {
        final Semaphore sem = new Semaphore(0);

        QuorumUtil qu = new QuorumUtil(1);
        qu.startAll();
        CountdownWatcher watcher1 = new CountdownWatcher();
        CountdownWatcher watcher2 = new CountdownWatcher();
        CountdownWatcher watcher3 = new CountdownWatcher();

        // Find the leader's index in the ensemble.
        int index = 1;
        while (qu.getPeer(index).peer.leader == null) {
            index++;
        }

        Leader leader = qu.getPeer(index).peer.leader;
        assertNotNull(leader);

        /* Reusing the index variable to select a follower to connect to */
        index = (index == 1) ? 2 : 1;
        LOG.info("Connecting to follower:" + index);

        final ZooKeeper zk1 = createClient(qu.getPeer(index).peer.getClientPort(), watcher1);
        LOG.info("zk1 has session id 0x" + Long.toHexString(zk1.getSessionId()));

        final ZooKeeper zk2 = createClient(qu.getPeer(index).peer.getClientPort(), watcher2);
        LOG.info("zk2 has session id 0x" + Long.toHexString(zk2.getSessionId()));

        final ZooKeeper zk3 = createClient(qu.getPeer(3).peer.getClientPort(), watcher3);
        LOG.info("zk3 has session id 0x" + Long.toHexString(zk3.getSessionId()));

        zk1.create("/first", new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
        zk2.create("/mybar", null, ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL_SEQUENTIAL);

        // Gate for the background writer: it spins until the follower has
        // been restarted, then issues 400 paced creates.
        final AtomicBoolean runNow = new AtomicBoolean(false);
        Thread mytestfooThread = new Thread(new Runnable() {
            @Override
            public void run() {
                int inSyncCounter = 0;
                while (inSyncCounter < 400) {
                    if (runNow.get()) {
                        zk3.create("/mytestfoo", null, ZooDefs.Ids.OPEN_ACL_UNSAFE,
                                CreateMode.EPHEMERAL_SEQUENTIAL, new AsyncCallback.StringCallback() {
                            @Override
                            public void processResult(int rc, String path, Object ctx, String name) {
                                int count = counter.incrementAndGet();
                                if (rc != 0) {
                                    errors.incrementAndGet();
                                }
                                if (count > 7300) {
                                    sem.release();
                                }
                            }
                        }, null);
                        try {
                            Thread.sleep(10);
                        } catch (Exception e) {
                            // best-effort pacing only; safe to ignore
                        }
                        inSyncCounter++;
                    } else {
                        Thread.yield();
                    }
                }
            }
        });
        mytestfooThread.start();

        for (int i = 0; i < 5000; i++) {
            zk2.create("/mybar", null, ZooDefs.Ids.OPEN_ACL_UNSAFE,
                    CreateMode.EPHEMERAL_SEQUENTIAL, new AsyncCallback.StringCallback() {
                @Override
                public void processResult(int rc, String path, Object ctx, String name) {
                    int count = counter.incrementAndGet();
                    if (rc != 0) {
                        errors.incrementAndGet();
                    }
                    if (count > 7300) {
                        sem.release();
                    }
                }
            }, null);

            if (i == 1000) {
                qu.shutdown(index);
                Thread.sleep(1100);
                LOG.info("Shutting down s1");
            }
            if (i == 1100 || i == 1150 || i == 1200) {
                Thread.sleep(1000);
            }
            if (i == 1200) {
                qu.startThenShutdown(index);
                runNow.set(true);
                qu.restart(index);
                LOG.info("Setting up server: " + index);
            }
            if (i >= 1000 && i % 2 == 0) {
                zk3.create("/newbaz", null, ZooDefs.Ids.OPEN_ACL_UNSAFE,
                        CreateMode.EPHEMERAL_SEQUENTIAL, new AsyncCallback.StringCallback() {
                    @Override
                    public void processResult(int rc, String path, Object ctx, String name) {
                        int count = counter.incrementAndGet();
                        if (rc != 0) {
                            errors.incrementAndGet();
                        }
                        if (count > 7300) {
                            sem.release();
                        }
                    }
                }, null);
            }
            if (i == 1050 || i == 1100 || i == 1150) {
                Thread.sleep(1000);
            }
        }

        // Wait until all updates return
        if (!sem.tryAcquire(ClientBase.CONNECTION_TIMEOUT, TimeUnit.MILLISECONDS)) {
            LOG.warn("Did not aquire semaphore fast enough");
        }
        mytestfooThread.join(ClientBase.CONNECTION_TIMEOUT);
        if (mytestfooThread.isAlive()) {
            LOG.error("mytestfooThread is still alive");
        }
        Thread.sleep(1000);

        // Verify that server is following and has the same epoch as the leader
        verifyState(qu, index, leader);

        zk1.close();
        zk2.close();
        zk3.close();

        qu.shutdownAll();
    }

    /**
     * Creates a client connected to localhost on the given port and waits for
     * the connection to be established before returning.
     */
    private static DisconnectableZooKeeper createClient(int port, CountdownWatcher watcher)
            throws IOException, TimeoutException, InterruptedException {
        DisconnectableZooKeeper zk = new DisconnectableZooKeeper(
                "127.0.0.1:" + port, ClientBase.CONNECTION_TIMEOUT, watcher);
        watcher.waitForConnected(CONNECTION_TIMEOUT);
        return zk;
    }

    /**
     * Verifies that the restarted peer is a follower in the leader's epoch and
     * that sessions and ephemerals agree across the restarted follower, the
     * clean follower (peer 3) and the leader.
     */
    private void verifyState(QuorumUtil qu, int index, Leader leader) {
        assertTrue("Not following", qu.getPeer(index).peer.follower != null);
        // Epoch is the high 32 bits of the zxid.
        long epochF = (qu.getPeer(index).peer.getActiveServer().getZxid() >> 32L);
        long epochL = (leader.getEpoch() >> 32L);
        assertTrue("Zxid: " + qu.getPeer(index).peer.getActiveServer()
                .getZKDatabase().getDataTreeLastProcessedZxid()
                + "Current epoch: " + epochF, epochF == epochL);
        int leaderIndex = (index == 1) ? 2 : 1;
        Collection<Long> sessionsRestarted =
                qu.getPeer(index).peer.getActiveServer().getZKDatabase().getSessions();
        Collection<Long> sessionsNotRestarted =
                qu.getPeer(leaderIndex).peer.getActiveServer().getZKDatabase().getSessions();

        for (Long l : sessionsRestarted) {
            assertTrue("Should have same set of sessions in both servers, did not expect: " + l,
                    sessionsNotRestarted.contains(l));
        }
        assertEquals("Should have same number of sessions",
                sessionsNotRestarted.size(), sessionsRestarted.size());
        ZKDatabase restarted = qu.getPeer(index).peer.getActiveServer().getZKDatabase();
        ZKDatabase clean = qu.getPeer(3).peer.getActiveServer().getZKDatabase();
        ZKDatabase lead = qu.getPeer(leaderIndex).peer.getActiveServer().getZKDatabase();
        for (Long l : sessionsRestarted) {
            assertTrue("Should have same set of sessions in both servers, did not expect: " + l,
                    sessionsNotRestarted.contains(l));
            HashSet ephemerals = restarted.getEphemerals(l);
            HashSet cleanEphemerals = clean.getEphemerals(l);
            for (Object o : cleanEphemerals) {
                if (!ephemerals.contains(o)) {
                    LOG.info("Restarted follower doesn't contain ephemeral " + o);
                }
            }
            HashSet leadEphemerals = lead.getEphemerals(l);
            for (Object o : leadEphemerals) {
                if (!cleanEphemerals.contains(o)) {
                    LOG.info("Follower doesn't contain ephemeral from leader " + o);
                }
            }
            assertEquals("Should have same number of ephemerals in both followers",
                    ephemerals.size(), cleanEphemerals.size());
            assertEquals("Leader should equal follower",
                    lead.getEphemerals(l).size(), cleanEphemerals.size());
        }
    }
}
package com.twitter.elephantbird.util; import java.util.ArrayList; import java.util.List; import java.util.Map; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.protobuf.ByteString; import com.google.protobuf.DescriptorProtos; import com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Label; import com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Type; import com.google.protobuf.Descriptors; import com.google.protobuf.Descriptors.DescriptorValidationException; import com.google.protobuf.Descriptors.FieldDescriptor; import com.google.protobuf.DynamicMessage; import com.google.protobuf.Message; import org.apache.thrift.TBase; import org.apache.thrift.TFieldIdEnum; import org.apache.thrift.protocol.TType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.twitter.elephantbird.thrift.TStructDescriptor; import com.twitter.elephantbird.thrift.TStructDescriptor.Field; import com.twitter.elephantbird.util.Pair; /** * Translates a Thrift object into a Protocol Buffer message. * <p> * For most data types, there is a 1:1 mapping between Thrift * and Protobufs, except for:<ul/> * * <li> enums: converted to their String representations. * <li> lists: added to the enclosing struct as a repeated field. * <li> sets: protobufs doesn't have native support for them so they're * treated as lists and we rely on thrift to enforce uniqueness. 
* <li> maps: protobufs also doesn't have native support for these so we * create an intermediate Message type with key and value fields of the * appropriate types and add these as repeated fields of the enclosing * struct.<ul/> * * @param <T> Source thrift class */ public class ThriftToDynamicProto<T extends TBase<?, ?>> { private static final Logger LOG = LoggerFactory.getLogger(ThriftToDynamicProto.class); private static final List<Pair<String, Type>> EMPTY_FIELDS = new ArrayList<Pair<String, Type>>(); private static final String MAP_KEY_FIELD_NAME = "key"; private static final String MAP_VALUE_FIELD_NAME = "value"; private boolean supportNestedObjects = false; private boolean ignoreUnsupportedTypes = false; private final Descriptors.FileDescriptor fileDescriptor; // a map of descriptors keyed to their typeName private Map<String, DescriptorProtos.DescriptorProto.Builder> descriptorBuilderMap = Maps.newHashMap(); // a map of message builders keyed to their typeName private Map<String, DynamicMessage.Builder> messageBuilderMap = Maps.newHashMap(); /** * Create a Dynamic Protocol Buffer message with fields equivalent to those in the provided * Thrift class. * @param thriftClass * @throws DescriptorValidationException */ public ThriftToDynamicProto(Class<T> thriftClass) throws DescriptorValidationException { this(thriftClass, new ArrayList<Pair<String, Type>>()); } public ThriftToDynamicProto(Class<T> thriftClass, boolean supportNestedObjects, boolean ignoreUnsupportedTypes) throws DescriptorValidationException { this(thriftClass, new ArrayList<Pair<String, Type>>(), supportNestedObjects, ignoreUnsupportedTypes); } /** * Create a converter that produces a Dynamic Protocol Buffer message with fields equivalent to * those in the provided Thrift class, plus additional fields as defined by extraFields. Nested * Thrift Structs will be ignored, but other unsupported types will throw an exception. 
   * @param thriftClass class to use to generate the protobuf schema
   * @param extraFields a list of pairs of (fieldName, protobufType) that defines extra fields to
   * add to the generated message.
   * @throws DescriptorValidationException
   */
  public ThriftToDynamicProto(Class<T> thriftClass, List<Pair<String, Type>> extraFields)
    throws DescriptorValidationException {
    // Delegates with supportNestedObjects=false, ignoreUnsupportedTypes=false.
    this(thriftClass, extraFields, false, false);
  }

  /**
   * Create a converter that produces a Dynamic Protocol Buffer message with fields equivalent to
   * those in the provided Thrift class, plus additional fields as defined by extraFields.
   * @param thriftClass class to use to generate the protobuf schema
   * @param extraFields a list of pairs of (fieldName, protobufType) that defines extra fields to
   * add to the generated message.
   * @param supportNestedObjects if true, builds a nested structure. Default is false, which will
   * silently ignore nested objects, which include structs, lists/sets of structs and maps of all
   * types. Lists/sets of base types are not considered to be nested objects.
   * @param ignoreUnsupportedTypes if true, ignores types that aren't supported. If false an
   * exception will be thrown when an unsupported type is encountered. Default is false.
   * @throws DescriptorValidationException
   */
  public ThriftToDynamicProto(Class<T> thriftClass, List<Pair<String, Type>> extraFields,
      boolean supportNestedObjects, boolean ignoreUnsupportedTypes)
    throws DescriptorValidationException {
    this.supportNestedObjects = supportNestedObjects;
    this.ignoreUnsupportedTypes = ignoreUnsupportedTypes;

    // setup the descriptor proto builder for the top-level class
    DescriptorProtos.DescriptorProto.Builder desBuilder =
      DescriptorProtos.DescriptorProto.newBuilder();
    desBuilder.setName(protoMessageType(thriftClass));
    // register before recursing so self-referencing schemas find this entry
    descriptorBuilderMap.put(desBuilder.getName(), desBuilder);

    // convert the thrift schema to a proto schema; this may add more entries
    // to descriptorBuilderMap for nested struct and map-entry message types
    thriftToProtoSchema(desBuilder, TStructDescriptor.getInstance(thriftClass), extraFields);

    // add all of the message types to the file descriptor proto builder
    DescriptorProtos.FileDescriptorProto.Builder fileDescProtoBuilder =
      DescriptorProtos.FileDescriptorProto.newBuilder();
    for (DescriptorProtos.DescriptorProto.Builder builder : descriptorBuilderMap.values()) {
      fileDescProtoBuilder.addMessageType(builder);
    }

    // create dynamic message builders to be cloned for all types
    Descriptors.FileDescriptor dynamicDescriptor = Descriptors.FileDescriptor.buildFrom(
      fileDescProtoBuilder.build(), new Descriptors.FileDescriptor[0]);
    for (String type : descriptorBuilderMap.keySet()) {
      Descriptors.Descriptor msgDescriptor = dynamicDescriptor.findMessageTypeByName(type);
      messageBuilderMap.put(type, DynamicMessage.newBuilder(msgDescriptor));
    }
    fileDescriptor = dynamicDescriptor;
  }

  /**
   * For the given thriftClass, return a Protobufs builder to build a similar protobuf class.
   * @param thriftClass The thrift class for which the builder is desired.
   * @return a protobuf message builder
   */
  public Message.Builder getBuilder(Class<? extends TBase<?, ?>> thriftClass) {
    // clone() so callers get a fresh builder each time; the map holds prototypes
    return messageBuilderMap.get(protoMessageType(thriftClass)).clone();
  }

  /**
   * Return Protobufs builder for a Map field.
   */
  private Message.Builder mapEntryProtoBuilder(TStructDescriptor descriptor, Field field) {
    return messageBuilderMap.get(mapProtoMessageType(descriptor, field)).clone();
  }

  /**
   * Maps the thrift struct's fields onto desBuilder, then appends the caller-supplied
   * extraFields after the highest thrift field id.
   */
  private void thriftToProtoSchema(DescriptorProtos.DescriptorProto.Builder desBuilder,
      TStructDescriptor fieldDesc, List<Pair<String, Type>> extraFields)
    throws DescriptorValidationException {
    int maxThriftId = doSchemaMapping(desBuilder, fieldDesc);

    // handle extra fields if they exist. Only supported on the top level message
    // (proto field numbers for thrift fields are fieldId + 1, so maxThriftId + 1
    // is already taken; the pre-increment below starts at maxThriftId + 2)
    int extraFieldIdx = maxThriftId + 1;
    for (Pair<String, Type> extraField : extraFields) {
      addProtoField(desBuilder, extraField.getFirst(), ++extraFieldIdx,
          extraField.getSecond(), false);
    }
  }

  /**
   * Adds a proto field to desBuilder for each supported thrift field.
   *
   * @return the largest thrift field id seen
   */
  private int doSchemaMapping(DescriptorProtos.DescriptorProto.Builder desBuilder,
      TStructDescriptor fieldDesc) throws DescriptorValidationException {
    int maxThriftId = 0;
    for (Field tField : fieldDesc.getFields()) {
      maxThriftId = Math.max(tField.getFieldId(), maxThriftId);
      if (supportNestedObjects && tField.isMap()) {
        // Maps become a repeated field of a synthetic key/value entry message;
        // the entry message type is memoized in descriptorBuilderMap.
        String typeName = mapProtoMessageType(fieldDesc, tField);
        if (descriptorBuilderMap.get(typeName) == null) {
          DescriptorProtos.DescriptorProto.Builder mapBuilder =
              mapDescriptorProtoBuilder(tField, typeName);
          descriptorBuilderMap.put(typeName, mapBuilder);
          addProtoField(desBuilder, tField.getName(), tField.getFieldId() + 1, typeName, true);
        }
      } else {
        // For lists/sets this resolves to the element field.
        Field field = resolveField(tField);
        Type protoType = thriftTypeToProtoType(field);
        boolean isContainer = isContainer(tField);
        if (supportNestedObjects && protoType == Type.TYPE_MESSAGE) {
          String typeName = resolveMessageTypeName(field.gettStructDescriptor());
          // Protobuf field ids start at 1. Thrift starts at 0.
          addProtoField(desBuilder, tField.getName(),
              tField.getFieldId() + 1, typeName, isContainer);
        } else if (protoType != null) {
          // When nested objects are unsupported, silently skip nested fields.
          if (supportNestedObjects
              || (!supportNestedObjects && !hasNestedObject(tField))) {
            addProtoField(desBuilder, tField.getName(),
                tField.getFieldId() + 1, protoType, isContainer);
          }
        }
      }
    }
    return maxThriftId;
  }

  // When dealing with a Set/List, we want to retrieve the relevant Field type
  private Field resolveField(Field inputField) {
    if (inputField.isList()) {
      return inputField.getListElemField();
    } else if (inputField.isSet()) {
      return inputField.getSetElemField();
    } else {
      return inputField;
    }
  }

  /**
   * Determines whether a field is considered to be a nested object based on:
   * - whether the field itself is a struct
   * - whether the field is a list/set of structs
   * - whether field is a Map
   */
  private boolean hasNestedObject(Field field) {
    return field.isStruct()
        || (field.isList() && field.getListElemField().isStruct())
        || (field.isSet() && field.getSetElemField().isStruct())
        || field.isMap();
  }

  /**
   * Generate a DescriptorProto.Builder for the Message type that will be used
   * to represent the entries of the input Map field.
   *
   * @param field a Map Field (field.isMap() == true)
   * @param typeName name of new message type
   */
  private DescriptorProtos.DescriptorProto.Builder mapDescriptorProtoBuilder(
      Field field, String typeName) throws DescriptorValidationException {
    DescriptorProtos.DescriptorProto.Builder mapBuilder =
        DescriptorProtos.DescriptorProto.newBuilder().setName(typeName);

    Field keyField = field.getMapKeyField();
    Field valueField = field.getMapValueField();

    DescriptorProtos.FieldDescriptorProto.Builder keyBuilder = mapKeyProtoBuilder();
    DescriptorProtos.FieldDescriptorProto.Builder valueBuilder = mapValueProtoBuilder();

    setBuilderTypeFromField(keyField, keyBuilder);
    setBuilderTypeFromField(valueField, valueBuilder);

    mapBuilder.addField(keyBuilder.build());
    mapBuilder.addField(valueBuilder.build());
    return mapBuilder;
  }

  // field descriptor for key field to be used in Message used to hold Map entries
  private DescriptorProtos.FieldDescriptorProto.Builder mapKeyProtoBuilder() {
    return fieldDescriptorProtoBuilder(MAP_KEY_FIELD_NAME, 1).setLabel(Label.LABEL_REQUIRED);
  }

  // field descriptor for value field to be used in Message used to hold Map entries
  private DescriptorProtos.FieldDescriptorProto.Builder mapValueProtoBuilder() {
    return fieldDescriptorProtoBuilder(MAP_VALUE_FIELD_NAME, 2).setLabel(Label.LABEL_REQUIRED);
  }

  // Sets either the scalar type or the message type name on the builder,
  // depending on what the thrift field maps to. Unsupported types (null
  // protoType) leave the builder's type unset.
  private void setBuilderTypeFromField(
    Field field,
    DescriptorProtos.FieldDescriptorProto.Builder builder
  ) throws DescriptorValidationException {
    Type valueProtoType = thriftTypeToProtoType(field);
    if (valueProtoType == Type.TYPE_MESSAGE) {
      builder.setTypeName(resolveMessageTypeName(field.gettStructDescriptor()));
    } else if (valueProtoType != null) {
      builder.setType(valueProtoType);
    }
  }

  /**
   * For a TStructDescriptor, resolves the typeName and optionally converts and memoizes its
   * schema.
   */
  private String resolveMessageTypeName(TStructDescriptor descriptor)
    throws DescriptorValidationException {
    String typeName = protoMessageType(descriptor.getThriftClass());
    // Anytime we have a new message typeName, we make sure that we have a builder for it.
    // If not, we create one.
    DescriptorProtos.DescriptorProto.Builder builder = descriptorBuilderMap.get(typeName);
    if (builder == null) {
      builder = DescriptorProtos.DescriptorProto.newBuilder();
      builder.setName(typeName);
      // register before recursing to terminate cycles in recursive structs
      descriptorBuilderMap.put(typeName, builder);
      doSchemaMapping(builder, descriptor);
    }
    return typeName;
  }

  // Adds a field of a scalar proto type to the message under construction.
  private void addProtoField(DescriptorProtos.DescriptorProto.Builder builder,
      String name, int fieldIdx, Type type, boolean isRepeated) {
    DescriptorProtos.FieldDescriptorProto.Builder fdBuilder =
      fieldDescriptorProtoBuilder(name, fieldIdx).setType(type);
    if (isRepeated) {
      fdBuilder.setLabel(Label.LABEL_REPEATED);
    }
    builder.addField(fdBuilder.build());
  }

  // Adds a field of a named message type to the message under construction.
  private void addProtoField(DescriptorProtos.DescriptorProto.Builder builder,
      String name, int fieldIdx, String type, boolean isRepeated) {
    DescriptorProtos.FieldDescriptorProto.Builder fdBuilder =
      fieldDescriptorProtoBuilder(name, fieldIdx).setTypeName(type);
    if (isRepeated) {
      fdBuilder.setLabel(Label.LABEL_REPEATED);
    }
    builder.addField(fdBuilder.build());
  }

  // Common name/number setup shared by both addProtoField overloads.
  private DescriptorProtos.FieldDescriptorProto.Builder fieldDescriptorProtoBuilder(
      String name, int fieldIdx) {
    DescriptorProtos.FieldDescriptorProto.Builder fdBuilder =
      DescriptorProtos.FieldDescriptorProto.newBuilder()
        .setName(name)
        .setNumber(fieldIdx);
    return fdBuilder;
  }

  /**
   * Convert a thrift object to a protobuf message.
* @param thriftObj thrift object * @return protobuf protobuf message */ @SuppressWarnings("unchecked") public Message convert(T thriftObj) { return doConvert((TBase<?, ?>) Preconditions.checkNotNull(thriftObj, "Can not convert a null object")); } /** * conver TBase object to Message object * @param thriftObj */ @SuppressWarnings("unchecked") public <F extends TFieldIdEnum> Message doConvert(TBase<?, F> thriftObj) { if (thriftObj == null) { return null; } Class<TBase<?, F>> clazz = (Class<TBase<?, F>>) thriftObj.getClass(); checkState(clazz); Message.Builder builder = getBuilder(clazz); TStructDescriptor fieldDesc = TStructDescriptor.getInstance(clazz); int fieldId = 0; for (Field tField : fieldDesc.getFields()) { // don't want to carry over default values from unset fields if (!thriftObj.isSet((F) tField.getFieldIdEnum()) || (!supportNestedObjects && hasNestedObject(tField))) { fieldId++; continue; } // recurse into the object if it's a struct, otherwise just add the field if (supportNestedObjects && tField.getType() == TType.STRUCT) { TBase<?, ?> fieldValue = (TBase<?, ?>) fieldDesc.getFieldValue(fieldId++, thriftObj); Message message = doConvert(fieldValue); if (message != null) { FieldDescriptor protoFieldDesc = builder.getDescriptorForType().findFieldByName( tField.getName()); builder.setField(protoFieldDesc, message); } } else { fieldId = convertField(thriftObj, builder, fieldDesc, fieldId, tField); } } return builder.build(); } private void checkState(Class<? extends TBase<?, ?>> thriftClass) { Preconditions.checkState(hasBuilder(thriftClass), "No message builder found for thrift class: " + thriftClass.getCanonicalName()); } private boolean hasBuilder(Class<? 
extends TBase<?, ?>> thriftClass) { return messageBuilderMap.get(protoMessageType(thriftClass)) != null; } private Object sanitizeRawValue(Object value, Field tField) { Object returnValue = value; if (tField.isEnum()) { // TODO: proper enum handling returnValue = returnValue.toString(); } else if (tField.isBuffer()) { returnValue = ByteString.copyFrom((byte[]) returnValue); } if (returnValue instanceof Byte) { returnValue = new Integer((Byte) returnValue); } else if (returnValue instanceof Short) { returnValue = new Integer((Short) returnValue); } return returnValue; } /* * Determines if the field in question is a Set or List of a Struct type */ private boolean isStructContainer(Field tField) { return (tField.isList() && tField.getListElemField().isStruct()) || (tField.isSet() && tField.getSetElemField().isStruct()); } @SuppressWarnings("unchecked") private int convertField(TBase<?, ?> thriftObj, Message.Builder builder, TStructDescriptor fieldDesc, int fieldId, Field tField) { int tmpFieldId = fieldId; FieldDescriptor protoFieldDesc = builder.getDescriptorForType().findFieldByName( tField.getName()); if (protoFieldDesc == null) { // not finding a field might be ok if we're ignoring an unsupported types Type protoType = thriftTypeToProtoType(tField); if (protoType == null && (ignoreUnsupportedTypes || (!supportNestedObjects && hasNestedObject(tField)))) { return tmpFieldId; // no-op } throw new RuntimeException("Field " + tField.getName() + " not found in dynamic protobuf."); } Object fieldValue = fieldDesc.getFieldValue(tmpFieldId++, thriftObj); if (fieldValue == null) { return tmpFieldId; } try { // For non-Map containers that contain struct types, // we have to convert each struct into a Message. 
if (isStructContainer(tField)) {
        List<Message> convertedStructs = Lists.newLinkedList();
        Iterable<TBase<?, ?>> structIterator = (Iterable<TBase<?, ?>>) fieldValue;
        for (TBase<?, ?> struct : structIterator) {
          convertedStructs.add(doConvert(struct));
        }
        fieldValue = convertedStructs;
      } else if (tField.isMap()) {
        // Maps are modeled as a repeated message type holding key/value pairs
        List<Message> convertedMapEntries = Lists.newLinkedList();
        Map<?, ?> rawMap = (Map) fieldValue;
        for (Map.Entry<?, ?> entry : rawMap.entrySet()) {
          Message.Builder mapBuilder = mapEntryProtoBuilder(fieldDesc, tField);
          Message msg = buildMapEntryMessage(mapBuilder, tField, entry.getKey(),
              entry.getValue());
          convertedMapEntries.add(msg);
        }
        fieldValue = convertedMapEntries;
      } else {
        // protobufs throws an exception if you try to set byte on an int32 field so we need to
        // convert it to an Integer before it gets set
        fieldValue = sanitizeRawValue(fieldValue, tField);
      }

      // Container types have to be added as repeated fields
      if (isContainer(tField)) {
        Iterable<?> container = (Iterable) fieldValue;
        for (Object obj : container) {
          builder.addRepeatedField(protoFieldDesc, obj);
        }
      } else {
        builder.setField(protoFieldDesc, fieldValue);
      }
    } catch (IllegalArgumentException e) {
      // log full context of the mismatched field before rethrowing
      LOG.error(String.format("Could not set protoField(index=%d, name=%s, type=%s) with "
          + "thriftField(index=%d, name=%s, type=%d, value=%s)",
          protoFieldDesc.getIndex(), protoFieldDesc.getName(), protoFieldDesc.getType(),
          tmpFieldId - 1, tField.getName(), tField.getType(), fieldValue), e);
      throw e;
    }
    return tmpFieldId;
  }

  /**
   * Builds a Message that contains the key value pair of a Map entry
   */
  private Message buildMapEntryMessage(Message.Builder mapBuilder, Field field, Object mapKey,
      Object mapValue) {
    FieldDescriptor keyFieldDescriptor =
        mapBuilder.getDescriptorForType().findFieldByName(MAP_KEY_FIELD_NAME);
    FieldDescriptor valueFieldDescriptor =
        mapBuilder.getDescriptorForType().findFieldByName(MAP_VALUE_FIELD_NAME);
    boolean isKeyStruct = field.getMapKeyField().isStruct();
    boolean isValueStruct = field.getMapValueField().isStruct();

    // structs recurse through doConvert; everything else is sanitized to a
    // protobuf-compatible scalar
    Object convertedKey;
    if (isKeyStruct) {
      convertedKey = doConvert((TBase<?, ?>) mapKey);
    } else {
      convertedKey = sanitizeRawValue(mapKey, field.getMapKeyField());
    }

    Object convertedValue;
    if (isValueStruct) {
      convertedValue = doConvert((TBase<?, ?>) mapValue);
    } else {
      convertedValue = sanitizeRawValue(mapValue, field.getMapValueField());
    }

    mapBuilder.setField(keyFieldDescriptor, convertedKey);
    mapBuilder.setField(valueFieldDescriptor, convertedValue);
    return mapBuilder.build();
  }

  // Checks a field to determine whether it's a List/Set/Map
  private boolean isContainer(Field field) {
    return field.isSet() || field.isList() || field.isMap();
  }

  /**
   * Maps a thrift wire type to the protobuf scalar Type it is encoded as.
   * Returns null for container types (handled elsewhere), for STRUCT when
   * nested objects are disabled, and for unsupported types when
   * ignoreUnsupportedTypes is set.
   */
  private Type thriftTypeToProtoType(Field tField) {
    byte thriftType = tField.getType();
    switch (thriftType) {
      case TType.BOOL:
        return Type.TYPE_BOOL;
      case TType.BYTE:
        return Type.TYPE_INT32;
      case TType.DOUBLE:
        return Type.TYPE_DOUBLE;
      case TType.I16:
        return Type.TYPE_INT32;
      case TType.I32:
        return Type.TYPE_INT32;
      case TType.I64:
        return Type.TYPE_INT64;
      case TType.STRING:
        // Thrift thinks bytes and strings are interchangeable. Protocol buffers are not insane.
        return tField.isBuffer() ? Type.TYPE_BYTES : Type.TYPE_STRING;
      case TType.ENUM:
        // TODO: proper enum handling. For now, convert to strings.
        return Type.TYPE_STRING;
      case TType.STRUCT:
        if (supportNestedObjects) {
          return Type.TYPE_MESSAGE;
        }
        return null;
      case TType.MAP:
        return null;
      case TType.SET:
        return null;
      case TType.LIST:
        return null;
      default:
        if (ignoreUnsupportedTypes) {
          LOG.warn("Thrift type " + thriftType + " not supported for field "
              + tField.getName() + ". Ignoring");
          return null;
        }
        throw new IllegalArgumentException("Can't map Thrift type " + thriftType
            + " to a Protobuf type for field: " + tField.getName());
    }
  }

  // name the proto message type after the thrift class name. Dots are not permitted in protobuf
  // names
  private String protoMessageType(Class<? extends TBase<?, ?>> thriftClass) {
    return thriftClass.getCanonicalName().replace(".", "_");
  }

  /**
   * name the proto message used for Map types after the thrift class name of the enclosing
   * struct and the field name
   */
  private String mapProtoMessageType(TStructDescriptor descriptor, Field field) {
    return String.format("%s_%s", protoMessageType(descriptor.getThriftClass()), field.getName());
  }

  // Given the class name, finds the corresponding Descriptor and return the appropriate
  // FieldDescriptor
  public FieldDescriptor getFieldDescriptor(Class<? extends TBase<?, ?>> thriftClass,
      String fieldName) {
    checkState(thriftClass);
    Descriptors.Descriptor descriptor = getBuilder(thriftClass).getDescriptorForType();
    return descriptor.findFieldByName(fieldName);
  }

  // Picks off the FileDescriptor for this instance
  public Descriptors.FileDescriptor getFileDescriptor() {
    return fileDescriptor;
  }
}
/* ###
 * IP: GHIDRA
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ghidra.framework.preferences;

import java.io.*;
import java.util.*;

import ghidra.framework.Application;
import ghidra.framework.GenericRunInfo;
import ghidra.util.Msg;
import util.CollectionUtils;
import utilities.util.FileUtilities;

/**
 * Uses Properties to manage user preferences as name/value pairs.  All methods
 * are static.
 */
public class Preferences {

	/**
	 * The <code>APPLICATION_PREFERENCES_FILENAME</code> is the default name for the user preferences file.
	 * @see ghidra.framework.preferences.Preferences
	 */
	public static final String APPLICATION_PREFERENCES_FILENAME = "preferences";

	/**
	 * Preference name of the user plugin path.
	 */
	private final static String USER_PLUGIN_PATH = "UserPluginPath";

	/**
	 * Preference name for the last opened archive directory.
	 */
	public static final String LAST_OPENED_ARCHIVE_DIRECTORY = "LastOpenedArchiveDirectory";

	/**
	 * Preference name for the project directory.
	 */
	public final static String PROJECT_DIRECTORY = "ProjectDirectory";

	/**
	 * Preference name for import directory that was last accessed for tools.
	 */
	public final static String LAST_TOOL_IMPORT_DIRECTORY = "LastToolImportDirectory";

	/**
	 * Preference name for export directory that was last accessed for tools.
	 */
	public final static String LAST_TOOL_EXPORT_DIRECTORY = "LastToolExportDirectory";

	/**
	 * Preference name for directory last selected for creating a new project.
	 */
	public final static String LAST_NEW_PROJECT_DIRECTORY = "LastNewProjectDirectory";

	/**
	 * Preference name for the import directory that was last accessed for
	 * domain files.
	 */
	public final static String LAST_IMPORT_DIRECTORY = "LastImportDirectory";

	/**
	 * Preference name for the export directory that was last accessed.
	 */
	public final static String LAST_EXPORT_DIRECTORY = "LastExportDirectory";

	/**
	 * The data storage for this class.
	 */
	private static Properties properties = new Properties();

	/**
	 * Data storage that contains preferences data from a previous installation.
	 */
	private static Properties previousProperties = new Properties();

	private static String filename = null;

	// Always attempt to load initial user preferences
	static {
		try {
			File userSettingsDir = Application.getUserSettingsDirectory();
			if (userSettingsDir != null) {
				load(userSettingsDir.getAbsolutePath() + File.separatorChar +
					APPLICATION_PREFERENCES_FILENAME);
			}
		}
		catch (Exception e) {
			Msg.error(Preferences.class, "Unexpected exception reading preferences file: ", e);
		}
	}

	/**
	 * Don't allow instantiation of this class.
	 */
	private Preferences() {
		// utils class
	}

	/**
	 * Initialize properties by reading name, values from the given filename.
	 * @param pathName name of preferences file to read in; could be null
	 * @throws IOException if there is a problem reading the file
	 */
	private static void load(String pathName) throws IOException {
		// create properties
		Msg.info(Preferences.class, "Loading user preferences: " + pathName);
		properties = new Properties();
		filename = pathName;
		File file = new File(pathName);
		if (file.exists()) {
			try (FileInputStream in = new FileInputStream(pathName)) {
				properties.load(in);
			}
		}

		// Try to load a previous installation's preferences so that they are usable as a
		// reference point for those clients that wish to maintain previous values.  Note that
		// not all previous values should be used in a new application, as that may
		// cause issues when running; for example, path preferences can cause compile issues.
		loadPreviousInstallationPreferences();
	}

	/**
	 * Loads preferences from a previous application installation, if any, into
	 * {@link #previousProperties}.
	 * @throws IOException if there is a problem reading the previous preferences file
	 */
	private static void loadPreviousInstallationPreferences() throws IOException {
		try (FileInputStream fis = getAlternateFileInputStream()) {
			if (fis != null) {
				previousProperties.load(fis);
			}
		}
	}

	/**
	 * Clears all properties in this Preferences object.
	 * <p>
	 * <b>Warning: </b>Save any changes pending before calling this method, as this call will
	 * erase any changes not written do disk via {@link #store()}
	 */
	public static void clear() {
		properties.clear();
	}

	/**
	 * Gets an input stream to a file that is the same named file within a different
	 * application version directory for this user.  This method will search for an
	 * alternate file based on the application version directories modification times
	 * and will use the first matching file it finds.
	 *
	 * @return a file input stream for an alternate file or null.
	 */
	private static FileInputStream getAlternateFileInputStream() {
		File previousFile =
			GenericRunInfo.getPreviousApplicationSettingsFile(APPLICATION_PREFERENCES_FILENAME);
		if (previousFile == null) {
			return null;
		}

		try {
			FileInputStream fis = new FileInputStream(previousFile);
			Msg.info(Preferences.class, "Loading previous preferences: " + previousFile);
			return fis;
		}
		catch (FileNotFoundException fnfe) {
			// Ignore so we can try another directory.
		}
		return null;
	}

	/**
	 * Removes the given preference from this preferences object.
	 *
	 * @param name the name of the preference key to remove.
	 * @return the value that was stored with the given key.
	 */
	public static String removeProperty(String name) {
		return (String) properties.remove(name);
	}

	/**
	 * Get the property with the given name.
	 * <p>
	 * Note: all <code>getProperty(...)</code> methods will first check {@link System#getProperty(String)}
	 * for a value first.  This allows users to override preferences from the command-line.
	 * @param name the property name
	 * @return the current property value; null if not set
	 */
	public static String getProperty(String name) {
		// prefer system properties, which enables uses to override preferences from the command-line
		String systemProperty = System.getProperty(name);
		if (systemProperty != null) {
			return systemProperty;
		}

		return properties.getProperty(name, null);
	}

	/**
	 * Get the property with the given name; if there is no property, return the defaultValue.
	 * <p>
	 * Note: all <code>getProperty(...)</code> methods will first check {@link System#getProperty(String)}
	 * for a value first.  This allows users to override preferences from the command-line.
	 * @param name the property name
	 * @param defaultValue the default value
	 * @return the property value; default value if not set
	 *
	 * @see #getProperty(String, String, boolean)
	 */
	public static String getProperty(String name, String defaultValue) {
		// prefer system properties, which enables uses to override preferences from the command-line
		String systemProperty = System.getProperty(name);
		if (systemProperty != null) {
			return systemProperty;
		}

		return properties.getProperty(name, defaultValue);
	}

	/**
	 * Get the property with the given name; if there is no property, return the defaultValue.
	 * <p>
	 * This version of <code>getProperty</code> will, when <code>useHistoricalValue</code> is true, look
	 * for the given preference value in the last used installation of the application.
	 * <p>
	 * Note: all <code>getProperty(...)</code> methods will first check {@link System#getProperty(String)}
	 * for a value first.  This allows users to override preferences from the command-line.
	 *
	 * @param name The name of the property for which to get a value
	 * @param defaultValue The value to use if there is no value yet set for the given name
	 * @param useHistoricalValue True signals to check the last used application installation for a
	 *        value for the given name <b>if that value has not yet been set</b>.
	 * @return the property with the given name; if there is no property,
	 *         return the defaultValue.
	 * @see #getProperty(String)
	 * @see #getProperty(String, String)
	 */
	public static String getProperty(String name, String defaultValue, boolean useHistoricalValue) {
		// prefer system properties, which enables uses to override preferences from the command-line
		String systemProperty = System.getProperty(name);
		if (systemProperty != null) {
			return systemProperty;
		}

		String currentValue = properties.getProperty(name);
		if (currentValue != null) {
			return currentValue;
		}

		if (!useHistoricalValue) {
			return defaultValue;
		}

		return previousProperties.getProperty(name, defaultValue);
	}

	/**
	 * Set the property value.  If a null value is passed, then the property is removed from
	 * this collection of preferences.
	 *
	 * @param name property name
	 * @param value value for property
	 */
	public static void setProperty(String name, String value) {
		if (value == null) {
			Msg.trace(Preferences.class, "clearing property " + name);
			properties.remove(name);
			return;
		}
		Msg.trace(Preferences.class, "setting property " + name + "=" + value);
		properties.setProperty(name, value);
	}

	/**
	 * Get a list of known property names.
	 * @return if there are no properties, an empty list is returned
	 */
	public static List<String> getPropertyNames() {
		Collection<String> backedCollection =
			CollectionUtils.asCollection(properties.keySet(), String.class);
		// snapshot copy; ArrayList is the conventional general-purpose List
		return new ArrayList<>(backedCollection);
	}

	/**
	 * Get the filename that will be used in the store() method.
	 * @return the filename
	 */
	public static String getFilename() {
		return filename;
	}

	/**
	 * Set the filename so that when the store() method is called, the
	 * preferences are written to this file.
	 * @param name the filename
	 */
	public static void setFilename(String name) {
		filename = name;
	}

	/**
	 * Store the preferences in a file for the current filename.
	 * @return true if the file was written
	 * @throws RuntimeException if the preferences filename was not set
	 */
	public static boolean store() {
		if (filename == null) {
			throw new RuntimeException("Preferences filename has not been set!");
		}

		Msg.trace(Preferences.class, "Storing user preferences: " + filename);

		// make sure the preferences directory exists.
		File file = new File(filename);
		if (!file.exists()) {
			FileUtilities.mkdirs(file.getParentFile());
		}

		// Save properties to file.  try-with-resources guarantees the stream is
		// flushed and closed on both the success and failure paths (the previous
		// manual close/finally pattern closed the stream twice and dropped the
		// exception from the error log).
		try (OutputStream os = new BufferedOutputStream(new FileOutputStream(filename))) {
			properties.store(os, "User Preferences");
			return true;
		}
		catch (IOException e) {
			Msg.error(Preferences.class, "Failed to store user preferences: " + filename, e);
		}
		return false;
	}

	/**
	 * Return the paths in the UserPluginPath property.
	 * Return zero length array if this property is not set.
	 * @return the paths
	 */
	public static String[] getPluginPaths() {
		List<String> list = getPluginPathList();
		if (list == null) {
			return new String[0];
		}
		return list.toArray(new String[0]);
	}

	/**
	 * Set the paths to be used as the UserPluginPath property.
	 * A null or empty array removes the property.
	 * @param paths the paths
	 */
	public static void setPluginPaths(String[] paths) {
		if (paths == null || paths.length == 0) {
			properties.remove(USER_PLUGIN_PATH);
			return;
		}
		// encode as a single path-separator-delimited string, the same
		// encoding getPluginPathList() parses
		properties.setProperty(USER_PLUGIN_PATH, String.join(File.pathSeparator, paths));
	}

	/**
	 * Parse the UserPluginPath property into its individual path entries.
	 * @return the list of paths, or null if the property is not set
	 */
	private static List<String> getPluginPathList() {
		String path = properties.getProperty(USER_PLUGIN_PATH);
		if (path == null) {
			return null;
		}
		List<String> list = new ArrayList<>(5);
		StringTokenizer st = new StringTokenizer(path, File.pathSeparator);
		while (st.hasMoreTokens()) {
			list.add(st.nextToken());
		}
		return list;
	}
}
package com.github.zarena; import java.io.*; import java.util.HashMap; import java.util.Map; import java.util.logging.Level; import com.github.customentitylibrary.CustomEntityLibrary; import com.github.zarena.afkmanager.AFKManager; import com.github.zarena.signs.ZShopSign; import com.github.zarena.signs.ZSign; import com.github.zarena.signs.ZTollSign; import com.github.zarena.utils.*; import net.milkbowl.vault.economy.Economy; import org.bukkit.Bukkit; import org.bukkit.GameMode; import org.bukkit.Location; import org.bukkit.World; import org.bukkit.configuration.ConfigurationSection; import org.bukkit.configuration.file.FileConfiguration; import org.bukkit.configuration.file.YamlConfiguration; import org.bukkit.configuration.serialization.ConfigurationSerialization; import org.bukkit.enchantments.Enchantment; import org.bukkit.inventory.ItemStack; import org.bukkit.plugin.Plugin; import org.bukkit.plugin.PluginManager; import org.bukkit.plugin.RegisteredServiceProvider; import org.bukkit.plugin.java.JavaPlugin; import com.github.zarena.commands.DSpawnCommands; import com.github.zarena.commands.ISpawnCommands; import com.github.zarena.commands.ZACommands; import com.github.zarena.commands.ZSignCommands; import com.github.zarena.commands.ZSpawnCommands; import com.github.zarena.entities.ZEntityTypeConfiguration; import com.github.zarena.events.GameStopCause; import com.github.zarena.events.GameStopEvent; import com.github.zarena.killcounter.KillCounter; import com.github.zarena.listeners.BlockListener; import com.github.zarena.listeners.EntityListener; import com.github.zarena.listeners.PlayerListener; import com.github.zarena.listeners.WorldListener; import com.github.zarena.signs.ZSignCustomItem; import com.github.zarena.spout.PlayerOptionsHandler; import com.github.zarena.spout.SpoutHandler; public class ZArena extends JavaPlugin { private static ZArena instance; private Economy economy; private KillCounter kc; private GameHandler gameHandler; //Game handler 
private PlayerOptionsHandler playerOptionsHandler;
    private boolean achievementsEnabled = false;
    // custom Configuration wrapper (project type) returned by the overridden getConfig()
    private Configuration config;
    // in-memory view of the crash-recovery stats file; see loadBackups()/onDisable()
    protected Configuration statsBackup;
    private boolean spoutEnabled = false;

    /**
     * Plugin enable hook: initializes integrations (Spout, AchievementsX, Vault,
     * kill counter, AFK kicker), loads configuration/entity/gamemode data,
     * restores crash backups, and registers commands, listeners, and the tick task.
     * NOTE(review): ordering below is load-bearing (e.g. gamemodes after entity
     * types, game handler before backups) — do not reorder casually.
     */
    public void onEnable()
    {
        instance = this;
        reloadConfig();
        PluginManager pm = Bukkit.getServer().getPluginManager();
        //Enable stuff
        CustomEntityLibrary.enable(this);
        Plugin spoutP = pm.getPlugin("Spout");
        if(spoutP != null)
        {
            spoutEnabled = true;
            SpoutHandler.enable();
        }
        Plugin achievementsP = pm.getPlugin("AchievementsX");
        if(achievementsP != null)
            achievementsEnabled = true;
        if(getConfig().getBoolean(ConfigEnum.ENABLE_KILLCOUNTER.toString()))
        {
            kc = new KillCounter();
            kc.enable();
        }
        if(getConfig().getBoolean(ConfigEnum.ENABLE_AFKKICKER.toString()))
        {
            new AFKManager().enable();
        }
        //Load some stuff the game handler relies on
        loadDonatorInfo();
        loadZSignCustomItems();
        registerSerializables();
        gameHandler = new GameHandler(); //Create the Game Handler...needs to be done so early because stuff below rely on it
        //If the server crashed, load backups of players data
        try
        {
            loadBackups();
        } catch(IOException e)
        {
            e.printStackTrace();
        }
        //Load more stuff
        loadEntityTypes();
        loadGamemodeTypes();//Note: Has to be after loadEntityTypes
        loadFiles();
        //Load the language file and intialize the messages
        ChatHelper.loadLanguageFile();
        Message.setMessages();
        //Load metrics
        try
        {
            Metrics metrics = new Metrics(this);
            metrics.start();
        } catch (IOException e) {/* Failed to submit the stats :-( */}
        //Load Vault economy
        if(getConfig().getBoolean(ConfigEnum.USE_VAULT.toString()) && Bukkit.getPluginManager().getPlugin("Vault") != null)
            setupEconomy();
        //Register command executors
        getCommand("zarena").setExecutor(new ZACommands());
        getCommand("zspawn").setExecutor(new ZSpawnCommands());
        getCommand("dspawn").setExecutor(new DSpawnCommands());
        getCommand("ispawn").setExecutor(new ISpawnCommands());
        getCommand("zsign").setExecutor(new ZSignCommands());
        //Register listeners
        new EntityListener().registerEvents(pm, this);
        new PlayerListener().registerEvents(pm, this);
        new WorldListener().registerEvents(pm, this);
        new BlockListener().registerEvents(pm, this);
        // drive onTick() once per server tick (period 1L)
        getServer().getScheduler().scheduleSyncRepeatingTask(this, new Runnable()
        {
            @Override
            public void run()
            {
                onTick();
            }
        }, 1L, 1L);
    }

    /**
     * Plugin disable hook: stops the game, fires GameStopEvent, saves state,
     * truncates the crash-backup file (a clean shutdown means backups are stale),
     * and resets static/instance state.
     */
    public void onDisable()
    {
        gameHandler.stop();
        GameStopEvent event = new GameStopEvent(GameStopCause.SERVER_STOP);
        Bukkit.getServer().getPluginManager().callEvent(event);
        if(getConfig().getBoolean(ConfigEnum.ENABLE_KILLCOUNTER.toString()))
            kc.disable();
        //Save stuff
        saveFiles();
        //Being as though the onDisable method got sucessfully called, we can clear the stat backups, as this isn't a crash
        try
        {
            File statsBackupFile = new File(Constants.BACKUP_PATH);
            // opening a PrintWriter truncates the file; the empty print is a no-op
            PrintWriter writer = new PrintWriter(statsBackupFile);
            writer.print("");
            writer.close();
        } catch(FileNotFoundException e)
        {
            ZArena.log(Level.WARNING, "Stats backup file was never properly created.");
        }
        //Reset static stuff
        instance = null;
        spoutEnabled = false;
    }

    /** Returns the plugin's custom Configuration instead of Bukkit's default. */
    @Override
    public FileConfiguration getConfig()
    {
        return config;
    }

    public Economy getEconomy()
    {
        return economy;
    }

    public GameHandler getGameHandler()
    {
        return gameHandler;
    }

    public PlayerOptionsHandler getPlayerOptionsHandler()
    {
        return playerOptionsHandler;
    }

    public static ZArena getInstance()
    {
        return instance;
    }

    public boolean isAchievementsEnabled()
    {
        return achievementsEnabled;
    }

    public boolean isSpoutEnabled()
    {
        return spoutEnabled;
    }

    /**
     * Restores per-player state (location, inventory, armor, gamemode, level,
     * money) from the crash-backup file written while a game was running, then
     * removes each restored player from the active game.
     * @throws IOException if the backup file cannot be created
     */
    private void loadBackups() throws IOException
    {
        File statsBackupFile = new File(Constants.BACKUP_PATH);
        if(!statsBackupFile.exists())
            statsBackupFile.createNewFile();
        statsBackup = Configuration.loadConfiguration(statsBackupFile);
        for(String key : statsBackup.getKeys(false))
        {
            ConfigurationSection section = statsBackup.getConfigurationSection(key);
            //Load location
            World world = Bukkit.getWorld(section.getString("world"));
            if(world == null)
                world = Bukkit.getWorlds().get(0); // fall back to the server's first world
            Location loc = new Location(world, section.getDouble("x"), section.getDouble("y"), section.getDouble("z"));
            //Load inventory
            ItemStack[] items = new ItemStack[0];
            if(section.getConfigurationSection("items") != null)
            {
                items = new ItemStack[section.getConfigurationSection("items").getKeys(false).size()];
                int index = 0;
                for(String itemKey : section.getConfigurationSection("items").getKeys(false))
                    items[index++] = ItemStack.deserialize(section.getConfigurationSection("items."+itemKey).getValues(true));
            }
            //Load armor
            ItemStack[] armor = new ItemStack[0];
            if(section.getConfigurationSection("armor") != null)
            {
                armor = new ItemStack[section.getConfigurationSection("armor").getKeys(false).size()];
                int index = 0;
                for(String itemKey : section.getConfigurationSection("armor").getKeys(false))
                    armor[index++] = ItemStack.deserialize(section.getConfigurationSection("armor."+itemKey).getValues(true));
            }
            //Load gamemode, level, and money
            GameMode gm = GameMode.getByValue(section.getInt("gamemode"));
            int level = section.getInt("level");
            double money = section.getDouble("money");
            //Restore pre game join stuff to player
            PlayerStats stats = new PlayerStats(key, loc, items, armor, gm, level, money);
            gameHandler.getPlayerStats().put(key, stats);
            gameHandler.removePlayer(key);
        }
    }

    /**
     * Reads donator permission data (start money, extra votes) from config and
     * registers the permission nodes.
     * NOTE(review): a malformed section causes an early return, skipping all
     * remaining sections and registration — presumably intentional, but a
     * `continue` may have been intended; verify.
     */
    private void loadDonatorInfo()
    {
        ConfigurationSection startMoney = getConfig().getConfigurationSection(ConfigEnum.START_MONEY.toString());
        for(String donatorSectionString : startMoney.getKeys(false))
        {
            ConfigurationSection donatorSection = startMoney.getConfigurationSection(donatorSectionString);
            if(!donatorSection.contains("permission name") || !donatorSection.contains("value"))
                return;
            String permissionName = donatorSection.getString("permission name");
            int value = donatorSection.getInt("value");
            Permissions.startMoneyPermissions.put(permissionName, value);
        }
        ConfigurationSection extraVotes = getConfig().getConfigurationSection(ConfigEnum.EXTRA_VOTES.toString());
        for(String donatorSectionString : extraVotes.getKeys(false))
        {
ConfigurationSection donatorSection = extraVotes.getConfigurationSection(donatorSectionString);
            if(!donatorSection.contains("permission name") || !donatorSection.contains("value"))
                return;
            String permissionName = donatorSection.getString("permission name");
            int value = donatorSection.getInt("value");
            Permissions.extraVotesPermissions.put(permissionName, value);
        }
        Permissions.registerDonatorPermNodes(getServer().getPluginManager());
    }

    /**
     * Loads all gamemode definitions from the gamemodes folder's .yml files.
     * The file named by the DEFAULT_GAMEMODE config entry becomes the default;
     * all others are added to the gamemode list. Falls back to built-in defaults
     * when no default file is found.
     */
    private void loadGamemodeTypes()
    {
        File gamemodesFolder = new File(Constants.GAMEMODES_FOLDER);
        if(!gamemodesFolder.isDirectory())
            gamemodesFolder.mkdirs();
        boolean defaultGamemodeFound = false;
        for(File file : gamemodesFolder.listFiles())
        {
            // only consider files with a .yml extension
            if(file.getName().substring(file.getName().lastIndexOf('.')).equals(".yml"))
            {
                YamlConfiguration gamemodeConfig = YamlConfiguration.loadConfiguration(file);
                Gamemode gamemode = new Gamemode(gamemodeConfig);
                if(file.getName().equals(getConfig().getString(ConfigEnum.DEFAULT_GAMEMODE.toString())))
                {
                    gameHandler.setDefaultGamemode(gamemode);
                    gameHandler.defaultGamemode = gamemode;
                    defaultGamemodeFound = true;
                }
                else
                    gameHandler.gamemodes.add(gamemode);
            }
        }
        if(!defaultGamemodeFound)
        {
            log(Level.WARNING, "ZArena: Default gamemode type file not found. Using default values");
            gameHandler.setDefaultGamemode(new Gamemode(new YamlConfiguration()));
        }
    }

    /**
     * Loads all entity type definitions (.yml) from the entities folder.
     * Files matching the configured DEFAULT_ZOMBIE/DEFAULT_WOLF/DEFAULT_SKELETON
     * names become the wave handler's defaults; the rest are added as extra types.
     * Each config is re-saved so missing keys get written back with defaults.
     */
    private void loadEntityTypes()
    {
        File entitiesFolder = new File(Constants.ENTITIES_FOLDER);
        if(!entitiesFolder.isDirectory())
            entitiesFolder.mkdirs();
        boolean defaultZombieFound = false;
        boolean defaultWolfFound = false;
        boolean defaultSkeletonFound = false;
        for(File file : entitiesFolder.listFiles())
        {
            if(file.getName().substring(file.getName().lastIndexOf('.')).equals(".yml"))
            {
                // local 'config' shadows the plugin's config field — this is the entity file's yaml
                YamlConfiguration config = YamlConfiguration.loadConfiguration(file);
                ZEntityTypeConfiguration entityConfig = new ZEntityTypeConfiguration(config);
                try {config.save(file);} catch(IOException e) {e.printStackTrace();}
                if(file.getName().equals(getConfig().getString(ConfigEnum.DEFAULT_ZOMBIE.toString())))
                {
                    gameHandler.getWaveHandler().defaultZombieType = entityConfig;
                    defaultZombieFound = true;
                }
                else if(file.getName().equals(getConfig().getString(ConfigEnum.DEFAULT_WOLF.toString())))
                {
                    gameHandler.getWaveHandler().defaultWolfType = entityConfig;
                    defaultWolfFound = true;
                }
                else if(file.getName().equals(getConfig().getString(ConfigEnum.DEFAULT_SKELETON.toString())))
                {
                    gameHandler.getWaveHandler().defaultSkeletonType = entityConfig;
                    defaultSkeletonFound = true;
                }
                else
                {
                    gameHandler.getWaveHandler().addType(entityConfig);
                }
            }
        }
        if(!defaultZombieFound)
        {
            log(Level.WARNING, "Default zombie not found, using default values.");
            gameHandler.getWaveHandler().defaultZombieType = new ZEntityTypeConfiguration(new YamlConfiguration());
        }
        if(!defaultWolfFound)
        {
            log(Level.WARNING, "Default wolf not found, using default values.");
            gameHandler.getWaveHandler().defaultWolfType = new ZEntityTypeConfiguration(new YamlConfiguration());
        }
        if(!defaultSkeletonFound)
        {
            log(Level.WARNING, "Default skeleton not found, using default values.");
            gameHandler.getWaveHandler().defaultSkeletonType = new ZEntityTypeConfiguration(new YamlConfiguration());
        }
    }

    /** Loads persisted data files: level handler state and (if Spout) player options. */
    private void loadFiles()
    {
        gameHandler.loadLevelHandler();
if(spoutEnabled)
            loadPlayerOptions();
    }

    /**
     * Deserializes the Spout player-options database from disk; on any failure
     * (including first run, when the file does not exist) a fresh handler is used.
     */
    private void loadPlayerOptions()
    {
        File path = new File(Constants.OPTIONS_PATH);
        try
        {
            FileInputStream fis = new FileInputStream(path);
            CustomObjectInputStream ois = new CustomObjectInputStream(fis);
            playerOptionsHandler = new PlayerOptionsHandler();
            playerOptionsHandler.readExternal(ois);
            ois.close();
            fis.close();
        } catch (Exception e)
        {
            log(Level.WARNING, "ZArena: Couldn't load the PlayerOptions database. Ignore if this is the first time the plugin has been run.");
            playerOptionsHandler = new PlayerOptionsHandler();
        }
    }

    /**
     * First-run setup: extracts the default entity, gamemode, language, and
     * config resources from the plugin jar into the plugin folder.
     */
    private void loadPluginFirstTime()
    {
        try
        {
            Utils.extractFromJar(new File(Constants.ENTITIES_FOLDER), "FastSkeleton.yml");
            Utils.extractFromJar(new File(Constants.ENTITIES_FOLDER), "FastZombie.yml");
            Utils.extractFromJar(new File(Constants.ENTITIES_FOLDER), "FireZombie.yml");
            Utils.extractFromJar(new File(Constants.ENTITIES_FOLDER), "GigaZombie.yml");
            Utils.extractFromJar(new File(Constants.ENTITIES_FOLDER), "HellHound.yml");
            Utils.extractFromJar(new File(Constants.ENTITIES_FOLDER), "NormalSkeleton.yml");
            Utils.extractFromJar(new File(Constants.ENTITIES_FOLDER), "NormalWolf.yml");
            Utils.extractFromJar(new File(Constants.ENTITIES_FOLDER), "NormalZombie.yml");
            Utils.extractFromJar(new File(Constants.ENTITIES_FOLDER), "StrongZombie.yml");
            Utils.extractFromJar(new File(Constants.ENTITIES_FOLDER), "WitherSkeleton.yml");
            Utils.extractFromJar(new File(Constants.ENTITIES_FOLDER), "ZombiePigman.yml");
            Utils.extractFromJar(new File(Constants.ENTITIES_FOLDER), "ZombieVillager.yml");
            Utils.extractFromJar(new File(Constants.GAMEMODES_FOLDER), "Apocalypse.yml");
            Utils.extractFromJar(new File(Constants.GAMEMODES_FOLDER), "Hardcore.yml");
            Utils.extractFromJar(new File(Constants.GAMEMODES_FOLDER), "NoBuying.yml");
            Utils.extractFromJar(new File(Constants.GAMEMODES_FOLDER), "Normal.yml");
            Utils.extractFromJar(new File(Constants.PLUGIN_FOLDER), "language.yml");
            Utils.extractFromJar(new File(Constants.PLUGIN_FOLDER), "config.yml");
        } catch (IOException e)
        {
            log(Level.WARNING, "ZArena: Error loading default files. You can download them manually from the plugins dev.bukkit.org page.");
        }
    }

    /**
     * Builds the sign custom-item registry from the CUSTOM_ITEMS config section.
     * Each section's name provides the (up to two word) display name; "type" is
     * required, everything else defaults. ZSignCustomItem registers itself on
     * construction.
     */
    private void loadZSignCustomItems()
    {
        ConfigurationSection customItems = getConfig().getConfigurationSection(ConfigEnum.CUSTOM_ITEMS.toString());
        for(String customItemString : customItems.getKeys(false))
        {
            ConfigurationSection customItem = customItems.getConfigurationSection(customItemString);
            if(!customItem.contains("type")) //The type is a necessary paramater of the custom item.
                continue;
            int type = customItem.getInt("type");
            int amount = customItem.getInt("amount", 1);
            short damageValue = (short)((int)customItem.getInt("damage value", 0));
            byte id = (byte) ((int)customItem.getInt("id", 0));
            // split the section name into at most two words for the sign's name lines
            String[] name = new String[2];
            String configName = customItem.getName();
            int spaceIndex = configName.indexOf(" ");
            if(spaceIndex == -1)
            {
                name[0] = configName;
                name[1] = "";
            }
            else
            {
                name[0] = configName.substring(0, spaceIndex);
                name[1] = configName.substring(spaceIndex + 1);
            }
            Map<Enchantment, Integer> enchantments = new HashMap<Enchantment, Integer>();
            for(String enchantName : customItem.getStringList("enchantments"))
            {
                // format: "<enchantmentId> [level]"; level defaults to 1
                String[] args = Utils.getConfigArgs(enchantName);
                Enchantment enchantment = Enchantment.getById(Utils.parseInt(enchantName.split("\\s")[0], -1));
                int level = (args.length > 0) ? Utils.parseInt(args[0], 1) : 1;
                enchantments.put(enchantment, level);
            }
            new ZSignCustomItem(name, type, amount, damageValue, id, enchantments); //Creation of new instances of this object automatically add the instance to a list of them
        }
    }

    // server tick counter used by onTick() to run once-per-second work
    private int tick;

    /** Called every server tick; every 20 ticks pushes Spout player-option updates. */
    private void onTick()
    {
        if(tick % 20 == 0)
        {
            if(spoutEnabled)
                SpoutHandler.updatePlayerOptions();
        }
        tick++;
    }

    /** Registers ConfigurationSerializable types so Bukkit can (de)serialize them from yaml. */
    private void registerSerializables()
    {
        ConfigurationSerialization.registerClass(ZSign.class);
        ConfigurationSerialization.registerClass(ZShopSign.class);
        ConfigurationSerialization.registerClass(ZTollSign.class);
        ConfigurationSerialization.registerClass(LocationSer.class);
        ConfigurationSerialization.registerClass(ZLevel.class);
    }

    /**
     * Loads (and migrates) the plugin config. Handles: first run (extract
     * defaults), pre-versioned configs (convert to the new layout), version 1
     * (rescale entity Speed values), and missing keys (backfill from the jar's
     * default config while preserving user values).
     */
    @Override
    public void reloadConfig()
    {
        File configFile = new File(Constants.PLUGIN_FOLDER+"/config.yml");
        if(!configFile.exists())
            loadPluginFirstTime();
        config = Configuration.loadConfiguration(configFile);
        //If true, update from old config to new config
        if(!config.contains(ConfigEnum.VERSION.toString()))
        {
            try
            {
                // copy the current config to a temp file, extract the fresh default,
                // then merge old values into the new layout
                File old = File.createTempFile("configTemp", "yml");
                old.deleteOnExit();
                FileInputStream input = new FileInputStream(configFile);
                FileOutputStream output = new FileOutputStream(old);
                byte[] buffer = new byte[1024]; //Create a buffer
                //Have the inputstream read the buffer and write it to it's new directory
                int read;
                while ((read = input.read(buffer)) > 0)
                    output.write(buffer, 0, read);
                Utils.extractFromJar(new File(Constants.PLUGIN_FOLDER), "config.yml", true);
                Configuration newConfig = Configuration.loadConfiguration(configFile);
                Configuration oldConfig = Configuration.loadConfiguration(old);
                Utils.convertToNewConfig(newConfig, oldConfig);
                newConfig.save(old);
                // NOTE(review): input/output streams are never closed here, and the
                // merged config is saved to the temp file, not configFile — verify intent
            } catch(IOException e)
            {
                e.printStackTrace();
            }
        }
        else if(config.getInt(ConfigEnum.VERSION.toString()) == 1)
        {
            // v1 -> v2 migration: entity Speed values changed scale
            File entitiesFolder = new File(Constants.ENTITIES_FOLDER);
            for(File file : entitiesFolder.listFiles())
            {
                if(file.getName().substring(file.getName().lastIndexOf('.')).equals(".yml"))
                {
                    // local 'config' shadows the field: this is the entity file's yaml
                    YamlConfiguration config = YamlConfiguration.loadConfiguration(file);
                    config.set("Speed", config.getDouble("Speed", .23) * 4.347826086956522);
                    try {config.save(file);} catch(IOException e) {e.printStackTrace();}
                }
            }
            config.set(ConfigEnum.VERSION.toString(), 2);
            try
            {
                config.save(configFile);
            } catch(IOException e)
            {
                e.printStackTrace();
            }
        }
        //Determine if any values are missing from the config
        boolean somethingMissing = false;
        for(ConfigEnum c : ConfigEnum.values())
        {
            if(!config.contains(c.toString()))
            {
                somethingMissing = true;
                break;
            }
        }
        //If any values are missing, set them to the defaults, which are gotten from the default config.yml in the
        //plugin's jar file
        if(somethingMissing)
        {
            Map<String, Object> oldValues = config.getValues(true);
            //Set the config back to the default so we preserve the correct order of stuff
            config = Configuration.loadConfiguration(ZArena.class.getResourceAsStream("/config.yml"));
            //Set back all of the user defined values
            for(Map.Entry<String, Object> e : oldValues.entrySet())
                config.set(e.getKey(), e.getValue());
            saveConfig();
        }
    }

    /** Persists level-handler state and (if Spout) player options. */
    private void saveFiles()
    {
        gameHandler.saveLevelHandler(true);
        if(spoutEnabled)
            savePlayerOptions();
    }

    /**
     * Serializes the Spout player-options database to disk.
     * NOTE(review): streams are not closed if writeExternal throws — consider
     * try-with-resources in a behavioral change.
     */
    private void savePlayerOptions()
    {
        File path = new File(Constants.OPTIONS_PATH);
        try
        {
            FileOutputStream fos = new FileOutputStream(path);
            ObjectOutputStream oos = new ObjectOutputStream(fos);
            playerOptionsHandler.writeExternal(oos);
            oos.close();
            fos.close();
        } catch (IOException e)
        {
            e.printStackTrace();
            log(Level.WARNING, "ZArena: Error saving the PlayerOptions database.");
        }
    }

    /**
     * Hooks the Vault economy service, if one is registered.
     * @return true if an economy provider was found and stored
     */
    private boolean setupEconomy()
    {
        RegisteredServiceProvider<Economy> economyProvider = getServer().getServicesManager().getRegistration(net.milkbowl.vault.economy.Economy.class);
        if (economyProvider != null)
            economy = economyProvider.getProvider();
        return (economy != null);
    }

    /** Logs through the plugin logger at the given level (requires the plugin to be enabled). */
    public static void log(Level level, String msg)
    {
        getInstance().getLogger().log(level, msg);
    }

    /** Convenience INFO-level logging. */
    public static void log(String msg)
    {
        log(Level.INFO, msg);
    }
}
/* * Copyright to the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.rioproject.monitor.service; import net.jini.id.Uuid; import org.rioproject.associations.AssociationDescriptor; import org.rioproject.associations.AssociationType; import org.rioproject.deploy.*; import org.rioproject.impl.servicebean.ServiceElementUtil; import org.rioproject.monitor.service.util.LoggingUtil; import org.rioproject.opstring.ServiceElement; import org.rioproject.sla.ServiceLevelAgreements; import org.rioproject.system.MeasuredResource; import org.rioproject.system.ResourceCapability; import org.rioproject.system.capability.PlatformCapability; import org.rioproject.system.capability.connectivity.TCPConnectivity; import org.rioproject.system.capability.platform.OperatingSystem; import org.rioproject.system.capability.platform.ProcessorArchitecture; import org.rioproject.system.capability.platform.StorageCapability; import org.rioproject.watch.ThresholdValues; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.rmi.MarshalledObject; import java.rmi.RemoteException; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; /** * An {@code InstantiatorResource} is the object being leased and controlled by the * {@code ServiceResource}, and represents an available {@link ServiceBeanInstantiator} service * that can be used to instantiate a service. 
*
 * @see org.rioproject.servicebean.ServiceBean
 * @see org.rioproject.deploy.ServiceBeanInstantiator
 *
 * @author Dennis Reedy
 */
public class InstantiatorResource {
    /** The ServiceBeanInstantiator */
    private final ServiceBeanInstantiator instantiator;
    /** The ServiceBeanInstantiator wrapped in a MarshalledObject */
    private final MarshalledObject<ServiceBeanInstantiator> wrappedServiceBeanInstantiator;
    /** The maximum number of services the ServiceBeanInstantiator can instantiate */
    private final AtomicInteger serviceLimit = new AtomicInteger(0);
    /**
     * An in-process counter indicating the InstantiatorResource is being used to
     * provision a service
     */
    private final AtomicInteger inProcessCounter = new AtomicInteger();
    /**
     * The handback option provided by the ServiceBeanInstantiator and sent back
     * to the ServiceBeanInstantiator as part of the ProvisionEvent
     */
    private final MarshalledObject<?> handback;
    /**
     * A copy of the ResourceCapability object from the ServiceBeanInstantiator.
     * Not final: refreshed via setResourceCapability(...) when the instantiator
     * reports updated capabilities.
     */
    private ResourceCapability resourceCapability;
    //private final Object resourceCapabilityLock = new Object();
    /**
     * Whether the instantiator is ready to accept requests for the
     * instantiation of dynamic services
     */
    private boolean dynamicEnabled = false;
    /**
     * Table of ServiceElement instances and the DeployedService instances the
     * InstantiatorResource has instantiated for each
     */
    private final Map<ServiceElement, List<DeployedService>> serviceElementMap = new ConcurrentHashMap<>();
    /** Table of in process (currently being provisioned) ServiceElement instances */
    private final Map<ServiceElement, Integer> inProcessMap = new ConcurrentHashMap<>();
    /**
     * Name of the ServiceBeanInstantiator
     */
    private final String instantiatorName;
    /**
     * The Uuid that has been assigned to the ServiceBeanInstantiator
     */
    private final Uuid instantiatorUuid;
    /** ServiceElements this instantiator has been recorded as unable to instantiate */
    private final List<ServiceElement> uninstantiables = new ArrayList<>();
    /** The Logger */
    private static final Logger logger = LoggerFactory.getLogger(InstantiatorResource.class);

    /**
     * Create an InstantiatorResource
     *
     * @param wrappedServiceBeanInstantiator The ServiceBeanInstantiator wrapped in a MarshalledObject
     * @param instantiator A ServiceBeanInstantiator
     * @param instantiatorName Name for the ServiceBeanInstantiator
     * @param instantiatorUuid The Uuid that has been assigned to the
     * ServiceBeanInstantiator, may be null
     * @param handback The handback object the ServiceBeanInstantiator has
     * provided, may be null
     * @param resourceCapability The ResourceCapability object for the
     * ServiceBeanInstantiator
     * @param serviceLimit The total number of services the ServiceBeanInstantiator
     * will allocate
     */
    public InstantiatorResource(MarshalledObject<ServiceBeanInstantiator> wrappedServiceBeanInstantiator,
                                ServiceBeanInstantiator instantiator,
                                String instantiatorName,
                                Uuid instantiatorUuid,
                                MarshalledObject<?> handback,
                                ResourceCapability resourceCapability,
                                int serviceLimit) {
        this.wrappedServiceBeanInstantiator = wrappedServiceBeanInstantiator;
        this.instantiator = instantiator;
        this.instantiatorName = instantiatorName;
        this.instantiatorUuid = instantiatorUuid;
        this.handback = handback;
        this.resourceCapability = resourceCapability;
        this.serviceLimit.set(serviceLimit);
    }

    MarshalledObject<ServiceBeanInstantiator> getWrappedServiceBeanInstantiator() {
        return wrappedServiceBeanInstantiator;
    }

    /**
     * Add a DeployedService instance to the serviceElementMap.
* @param newDeployedService The service to add
     */
    public void addDeployedService(DeployedService newDeployedService) {
        ServiceElement sElem = newDeployedService.getServiceElement();
        // computeIfAbsent keeps the get-or-create atomic on the ConcurrentHashMap;
        // the original containsKey/get/put sequence could race between callers
        List<DeployedService> list = serviceElementMap.computeIfAbsent(sElem, k -> new ArrayList<>());
        if (!list.contains(newDeployedService)) {
            list.add(newDeployedService);
        }
    }

    /**
     * Set the DeployedService instances
     *
     * @param deployedServices List of active & deployed services
     */
    void setDeployedServices(List<DeployedService> deployedServices) {
        serviceElementMap.clear();
        for (DeployedService deployedService : deployedServices) {
            addDeployedService(deployedService);
        }
    }

    /**
     * Get the name of the ServiceBeanInstantiator
     *
     * @return The name of the ServiceBeanInstantiator
     */
    public String getName() {
        return instantiatorName;
    }

    /**
     * Get the Uuid that has been assigned to the ServiceBeanInstantiator
     *
     * @return The Uuid for the ServiceBeanInstantiator
     */
    public Uuid getInstantiatorUuid() {
        return instantiatorUuid;
    }

    /**
     * Get the ServiceBeanInstantiator
     *
     * @return The ServiceBeanInstantiator
     */
    ServiceBeanInstantiator getServiceBeanInstantiator() {
        return instantiator;
    }

    /**
     * Get the active ServiceRecord instances for this InstantiatorResource
     *
     * @return Array of active ServiceRecord instances for this
     * InstantiatorResource
     *
     * @throws RemoteException If the active ServiceRecords cannot be obtained
     * after all retries
     */
    ServiceRecord[] getActiveServiceRecords() throws RemoteException {
        /*
         * Addresses an observed anomaly where for some reason we could not
         * communicate back to the Cybernode (the connection was reset). The
         * strategy is to retry 3 times, waiting 1 second between retries.
         */
        final int RETRY = 3;
        ServiceRecord[] records = null;
        RemoteException toThrow = null;
        for (int i = 0; i < RETRY; i++) {
            try {
                records = getInstantiator().getServiceRecords(ServiceRecord.ACTIVE_SERVICE_RECORD);
                // A retry can succeed after an earlier failure; the original kept the
                // stale exception and threw it even after a successful attempt.
                toThrow = null;
                break;
            } catch (RemoteException e) {
                logger.warn("Exception [{}] occurred, retry [{}] ....", e.getClass().getName(), i);
                toThrow = e;
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException ignore) {
                    logger.trace("Timeout Interrupted, handled");
                    Thread.currentThread().interrupt(); // preserve the interrupt status
                }
            }
        }
        if (toThrow != null)
            throw toThrow;
        return records;
    }

    /**
     * Get the ServiceRecord instances for a ServiceElement on this
     * InstantiatorResource
     *
     * @param elem The ServiceElement
     *
     * @return Array of ServiceRecord instances for a ServiceElement on this
     * InstantiatorResource. If there are no ServiceRecords, return a
     * zero-length array
     *
     * @throws RemoteException If the ServiceRecords cannot be obtained after
     * all retries
     */
    ServiceRecord[] getServiceRecords(ServiceElement elem) throws RemoteException {
        /* Same retry strategy as getActiveServiceRecords() */
        final int RETRY = 3;
        ServiceStatement statement = null;
        RemoteException toThrow = null;
        for (int i = 0; i < RETRY; i++) {
            try {
                statement = getInstantiator().getServiceStatement(elem);
                // Clear any failure recorded by an earlier attempt (see above)
                toThrow = null;
                break;
            } catch (RemoteException e) {
                logger.warn("Exception [{}] occurred, retry [{}] ....", e.getClass().getName(), i);
                toThrow = e;
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException ignore) {
                    logger.trace("Timeout Interrupted, handled");
                    Thread.currentThread().interrupt(); // preserve the interrupt status
                }
            }
        }
        if (toThrow != null)
            throw toThrow;
        return statement == null ? new ServiceRecord[0] : statement.getServiceRecords();
    }

    /**
     * Determine if the instance is found on this InstantiatorResource
     *
     * @param sElem The ServiceElement instance
     * @param uuid The id of the instance
     *
     * @return true if the instance is found on this InstantiatorResource
     */
    boolean hasServiceElementInstance(ServiceElement sElem, Uuid uuid) {
        // Single get() avoids the containsKey/get window the original had
        List<DeployedService> list = serviceElementMap.get(sElem);
        if (list == null)
            return false;
        for (DeployedService deployedService : list.toArray(new DeployedService[0])) {
            if (deployedService.getServiceBeanInstance().getServiceBeanID().equals(uuid)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Remove (decrement) a ServiceElement instance in the serviceElementMap.
     * If the ServiceElement exists in the table decrease its instance counter
     * by one. If the ServiceElement instances counter is decremented to zero,
     * remove the ServiceElement from the serviceElementMap
     *
     * @param sElem The ServiceElement instance to decrease
     * @param uuid The id of the instance to remove
     *
     * @return The removed ServiceBeanInstance, or null if no matching instance
     * was found
     */
    ServiceBeanInstance removeServiceElementInstance(ServiceElement sElem, Uuid uuid) {
        ServiceBeanInstance removedInstance = null;
        List<DeployedService> list = serviceElementMap.get(sElem);
        if (list != null) {
            list.removeIf(Objects::isNull);
            for (DeployedService deployedService : list) {
                if (deployedService.getServiceBeanInstance() != null &&
                    deployedService.getServiceBeanInstance().getServiceBeanID().equals(uuid)) {
                    // Safe despite for-each: we break immediately after the remove
                    list.remove(deployedService);
                    removedInstance = deployedService.getServiceBeanInstance();
                    break;
                }
            }
            if (list.isEmpty()) {
                serviceElementMap.remove(sElem);
            }
        }
        return removedInstance;
    }

    ServiceElement[] getServiceElements() {
        ServiceElement[] elems = new ServiceElement[serviceElementMap.size()];
        int i = 0;
        for
(Map.Entry<ServiceElement, List<DeployedService>> entry : serviceElementMap.entrySet()) {
            elems[i++] = entry.getKey();
        }
        return elems;
    }

    /**
     * Get the number of ServiceElement instances
     *
     * @param sElem The ServiceElement to count
     *
     * @return The number of instances of the ServiceElement the
     * ServiceBeanInstantiator has instantiated. If not found return 0
     */
    public int getServiceElementCount(ServiceElement sElem) {
        int numInstances = 0;
        if (serviceElementMap.containsKey(sElem)) {
            List<DeployedService> list = serviceElementMap.get(sElem);
            numInstances = list.size();
        }
        // NOTE: not a pure counter — this reconciles the local view against the
        // instantiator via a remote call and rebuilds the map entry on discrepancy
        try {
            ServiceBeanInstance[] instances = instantiator.getServiceBeanInstances(sElem);
            if (numInstances != instances.length) {
                logger.warn("Discrepancy in instances for [{}], recorded: {}, actual: {}",
                            LoggingUtil.getLoggingName(sElem), numInstances, instances.length);
                numInstances = instances.length;
                serviceElementMap.remove(sElem);
                for (ServiceBeanInstance instance : instances) {
                    // Rebuild with null ServiceDeployment info; only the count is authoritative here
                    addDeployedService(new DeployedService(sElem, instance, null));
                }
            }
        } catch (IOException e) {
            logger.warn("Issue communicating to [{}]", LoggingUtil.getLoggingName(sElem), e);
        }
        logger.trace("Get service element count for [{}], {} has {} instances",
                     LoggingUtil.getLoggingName(sElem), getName(), numInstances);
        return numInstances;
    }

    /**
     * Get the total number of all ServiceElement instances
     *
     * @return The total number of ServiceElement instances the
     * ServiceBeanInstantiator has instantiated
     */
    public int getServiceElementCount() {
        int totalInstances = 0;
        Set<ServiceElement> keys = serviceElementMap.keySet();
        for (ServiceElement key : keys) {
            List<DeployedService> list = serviceElementMap.get(key);
            totalInstances += list.size();
        }
        return totalInstances;
    }

    /**
     * Get the ServiceBeanInstantiator
     *
     * @return The Instantiator
     */
    public ServiceBeanInstantiator getInstantiator() {
        return instantiator;
    }

    /**
     * Get the number of active and in-process services
     *
     * @return The number of active and in-process services.
     */
    public int getServiceCount() {
        int count = getServiceElementCount();
        int inProcess = getInProcessCounter();
        return count + inProcess;
    }

    /**
     * Get the handback object
     *
     * @return The handback object
     */
    public MarshalledObject<?> getHandback() {
        return handback;
    }

    /**
     * Get the ServiceDeployment for a ServiceBeanInstance
     *
     * @param sElem The ServiceElement, used as the key to the serviceElementMap
     * @param instance The ServiceBeanInstance to locate
     * @return The ServiceDeployment for a ServiceBeanInstance, or if not
     * found return a null
     */
    DeployedService getServiceDeployment(ServiceElement sElem, ServiceBeanInstance instance) {
        DeployedService deployedService = null;
        if (serviceElementMap.containsKey(sElem)) {
            List<DeployedService> list = serviceElementMap.get(sElem);
            // Snapshot the list before scanning to tolerate concurrent mutation
            DeployedService[] services = list.toArray(new DeployedService[0]);
            for (DeployedService service : services) {
                if (service.getServiceBeanInstance().equals(instance)) {
                    deployedService = service;
                    break;
                }
            }
        }
        return deployedService;
    }

    /**
     * Get the ResourceCapability object
     *
     * @return The ResourceCapability of the ServiceBeanInstantiator
     */
    public ResourceCapability getResourceCapability() {
        return resourceCapability;
    }

    /**
     * Set the ResourceCapability
     *
     * @param resourceCapability The ResourceCapability object of the
     * ServiceBeanInstantiator
     */
    void setResourceCapability(ResourceCapability resourceCapability) {
        this.resourceCapability = resourceCapability;
    }

    /**
     * Set the serviceLimit property
     *
     * @param serviceLimit The maximum number of services the ServiceBeanInstantiator can instantiate
     */
    void setServiceLimit(int serviceLimit) {
        this.serviceLimit.set(serviceLimit);
    }

    /**
     * Get the serviceLimit property
     *
     * @return The maximum number of services the ServiceBeanInstantiator can instantiate
     */
    public int getServiceLimit() {
        return serviceLimit.get();
    }

    /**
     * Increment the in-process counter
     *
     * @param sElem The ServiceElement to add
     */
    public void incrementProvisionCounter(ServiceElement sElem) {
inProcessCounter.incrementAndGet(); if (inProcessMap.containsKey(sElem)) { int i = inProcessMap.get(sElem); i++; inProcessMap.put(sElem, i); } else { inProcessMap.put(sElem, 1); } } /** * Decrement the inprocess counter * * @param sElem The ServiceElement to remove */ public synchronized void decrementProvisionCounter(ServiceElement sElem) { if (inProcessCounter.get()>0) { inProcessCounter.decrementAndGet(); } if (inProcessMap.containsKey(sElem)) { int i = inProcessMap.get(sElem); i--; if (i==0) inProcessMap.remove(sElem); else inProcessMap.put(sElem, i); } } /** * Get the in-process counter value * * @return The in-process counter value */ public int getInProcessCounter() { return inProcessCounter.get(); } /** * Get the inprocess counter value for a ServiceElement * * @param sElem The ServiceElement to use * * @return The inprocess counter value for a ServiceElement */ public int getInProcessCounter(ServiceElement sElem) { int count = 0; if (inProcessMap.containsKey(sElem)) { count = inProcessMap.get(sElem); } return count; } /** * Get all in process elements, excluding the element passed in * * @param exclude The ServiceElement to exclude * * @return An array of ServiceElements */ ServiceElement[] getServiceElementsInprocess(ServiceElement exclude) { ArrayList<ServiceElement> list = new ArrayList<>(); Set<ServiceElement> keys = inProcessMap.keySet(); for (ServiceElement element : keys) { if (!element.equals(exclude)) list.add(element); } return list.toArray(new ServiceElement[0]); } /** * Get the host address of the ServiceBeanInstantiator * * @return The host address of the ServiceBeanInstantiator */ public String getHostAddress() { return (resourceCapability.getAddress()); } /** * Get the host name of the ServiceBeanInstantiator * * @return The host name of the ServiceBeanInstantiator */ String getHostName() { return (resourceCapability.getHostName()); } public void addUninstantiable(ServiceElement serviceElement) { uninstantiables.add(serviceElement); } public 
boolean isUninstantiable(ServiceElement serviceElement) { return uninstantiables.contains(serviceElement); } public void removeUninstantiable(ServiceElement serviceElement) { uninstantiables.remove(serviceElement); } /** * Set the dynamicEnabled attribute to <code>true</code> indicating that * the ServiceBeanInstantiator is available for the provisioning of * ServiceBean objects which have a provisioning type of <i>dynamic </i> */ public void setDynamicEnabledOn() { dynamicEnabled = true; } /** * Get the dynamicEnabled property * * @return <code>true</code> if the ServiceBeanInstantiator is available * for the provisioning of ServiceBean objects which have a provisioning * type of <i>dynamic </i>, otherwise return <code>false</code> */ public boolean getDynamicEnabled() { return dynamicEnabled; } /** * Determine if the provided {@code ProvisionRequest} can be instantiated on the * compute resource represented by this {@code InstantiatorResource}. If it is * determined that there are downloadable {@code PlatformCapability} components * which can meet the platform requirements the service has declared, these components will be * verified, and the targeted {@code InstantiatorResource} checked to ensure adequate disk space is available. * * @param provisionRequest The {@code ProvisionRequest} * @return Return true if the {@code InstantiatorResource} supports the * operational requirements of the {@code ProvisionRequest} * @throws ProvisionException If there are errors obtaining available disk space. Note this will only * happen if the {@code ProvisionRequest} contains downloadable {@code PlatformCapability} components and there * is a problem obtaining the size of the download. 
*/
    public boolean canProvision(final ProvisionRequest provisionRequest) throws ProvisionException {
        ServiceElement sElem = provisionRequest.getServiceElement();
        // Nothing planned means nothing to allocate
        if (sElem.getPlanned() == 0) {
            return false;
        }
        String provType = sElem.getProvisionType().toString();
        /*
         * Check if the serviceLimit has been reached.
         * NOTE(review): uses == rather than >=; if the count could ever overshoot
         * the limit this check would pass — confirm the limit cannot be exceeded.
         */
        if (getServiceElementCount() == serviceLimit.get() &&
            !provType.equals(ServiceElement.ProvisionType.FIXED.toString())) {
            String failureReason = String.format("%s not selected to allocate service [%s], it has reached it's service limit of [%d]",
                                                 getName(), LoggingUtil.getLoggingName(sElem), serviceLimit.get());
            provisionRequest.addFailureReason(failureReason);
            logger.debug(failureReason);
            return false;
        }
        /*
         * Check if the maximum amount per machine has been reached
         */
        if (sElem.getMaxPerMachine() != -1) {
            int serviceCount = getServiceElementCount(sElem);
            int inProcessCount = getInProcessCounter(sElem);
            int numInstances = serviceCount + inProcessCount;
            if (numInstances >= sElem.getMaxPerMachine()) {
                String failureReason = String.format("%s not selected to allocate service [%s], declaration specifies no more than %d services per machine, found %d",
                                                     getName(), LoggingUtil.getLoggingName(sElem), sElem.getMaxPerMachine(), numInstances);
                provisionRequest.addFailureReason(failureReason);
                logger.debug(failureReason);
                return false;
            }
        }
        /*
         * Fixed service allocation is similar to maxPerMachine, ensure that
         * there are not too many service allocated
         */
        if (sElem.getProvisionType() == ServiceElement.ProvisionType.FIXED) {
            int planned = sElem.getPlanned();
            int actual = getServiceElementCount(sElem) + getInProcessCounter(sElem);
            int numAllowed = planned - actual;
            if (numAllowed <= 0) {
                String failureReason = String.format("Do not allocate %s service [%s] to %s has [%d] instance(s), planned [%d]",
                                                     provType, LoggingUtil.getLoggingName(sElem), getName(), actual, planned);
                provisionRequest.addFailureReason(failureReason);
                logger.debug(failureReason);
                return false;
            } else {
                // NOTE(review): this branch records a "failure reason" even though provisioning
                // proceeds — presumably informational; confirm the intent with the design
                String failureReason = String.format("%s has [%d] instance(s), planned [%d] of %s service [%s]",
                                                     getName(), actual, planned, provType, LoggingUtil.getLoggingName(sElem));
                provisionRequest.addFailureReason(failureReason);
                logger.debug(failureReason);
            }
        }
        /* Colocation: every declared colocated association must already be present here */
        if (!AssociationMatcher.meetsColocationRequirements(sElem, this)) {
            StringBuilder b = new StringBuilder();
            b.append(getName()).append(" not selected to allocate ").append(LoggingUtil.getLoggingName(sElem));
            b.append(", required colocated services not present: ");
            AssociationDescriptor[] aDesc = ServiceElementUtil.getAssociationDescriptors(sElem, AssociationType.COLOCATED);
            int found = 0;
            for (AssociationDescriptor anADesc : aDesc) {
                if (found > 0)
                    b.append(", ");
                found++;
                b.append(anADesc.getName());
            }
            String failureReason = b.toString();
            provisionRequest.addFailureReason(failureReason);
            logger.debug(failureReason);
            return false;
        }
        /* Opposed associations: the service must not land next to services it opposes */
        if (!AssociationMatcher.meetsOpposedRequirements(sElem, this)) {
            String failureReason = AssociationMatcher.getLastErrorMessage();
            provisionRequest.addFailureReason(failureReason);
            logger.debug(failureReason);
            return false;
        }
        /* Reject the resource when any measured resource has breached its thresholds */
        if (!resourceCapability.measuredResourcesWithinRange()) {
            StringBuilder buffer = new StringBuilder();
            MeasuredResource[] m = resourceCapability.getMeasuredResources(ResourceCapability.MEASURED_RESOURCES_BREACHED);
            for (MeasuredResource aM : m) {
                buffer.append("\n");
                buffer.append("[").append(aM.getIdentifier()).append("] ");
                buffer.append("Low: ").append(aM.getThresholdValues().getLowThreshold()).append(", ");
                buffer.append("High: ").append(aM.getThresholdValues().getHighThreshold()).append(", ");
                buffer.append("Actual: ").append(aM.getValue());
            }
            String failureReason = String.format("%s not selected to allocate service [%s], MeasuredResources have exceeded threshold constraints: %s",
                                                 getName(), LoggingUtil.getLoggingName(sElem), buffer.toString());
            provisionRequest.addFailureReason(failureReason);
            logger.debug(failureReason);
            return false;
        }
        if (meetsGeneralRequirements(provisionRequest) && meetsQuantitativeRequirements(provisionRequest)) {
            Collection<SystemComponent> unsupportedReqs = meetsQualitativeRequirements(provisionRequest);
            if (unsupportedReqs.isEmpty()) {
                logger.debug("{} meets qualitative requirements for [{}]", getName(), LoggingUtil.getLoggingName(sElem));
                return true;
            } else {
                /* Create a String representation of the unsupportedReqs
                 * object for logging */
                int x = 0;
                StringBuilder buffer = new StringBuilder();
                for (SystemComponent unsupportedReq : unsupportedReqs) {
                    if (x > 0)
                        buffer.append(", ");
                    buffer.append("[").append(unsupportedReq.toString()).append("]");
                    x++;
                }
                String unsupportedReqsString = buffer.toString();
                logger.debug("{} does not meet requirements for {} service [{}]",
                             getName(), provType, LoggingUtil.getLoggingName(sElem));
                /* Determine if the resource supports persistent provisioning */
                if (!resourceCapability.supportsPersistentProvisioning()) {
                    String failureReason = String.format("Cannot allocate %s service [%s] to %s, required SystemComponents cannot be " +
                                                         "provisioned. This is because the %s is not configured for persistentProvisioning. " +
                                                         "If you want to enable this feature, verify the %s's configuration for the " +
                                                         "org.rioproject.cybernode.persistentProvisioning property is set to true",
                                                         provType, LoggingUtil.getLoggingName(sElem), getName(), getName(), getName());
                    provisionRequest.addFailureReason(failureReason);
                    logger.debug(failureReason);
                    return false;
                }
                /*
                 * Check if the unsupported PlatformCapability objects can be
                 * provisioned. If there are any that cannot be provisioned move
                 * onto the next resource
                 */
                boolean provisionableCaps = true;
                for (SystemComponent sysReq : unsupportedReqs) {
                    if (sysReq.getStagedSoftware() == null) {
                        provisionableCaps = false;
                        break;
                    }
                }
                if (!provisionableCaps) {
                    String failureReason = getName() + " does not meet requirements for " + provType + " service " +
                                           "[" + LoggingUtil.getLoggingName(sElem) + "] " + unsupportedReqsString;
                    provisionRequest.addFailureReason(failureReason);
                    logger.warn(failureReason);
                    return false;
                }
                /* Get the size of the download(s); a negative size from any staged
                 * artifact aborts the request below */
                long requiredSize = 0;
                IOException failed = null;
                try {
                    for (SystemComponent sysReq : unsupportedReqs) {
                        StagedSoftware download = sysReq.getStagedSoftware();
                        if (download != null) {
                            long size = download.getDownloadSize();
                            if (size < 0) {
                                logger.warn("Unable to obtain download size for {}, received {}, abort provision request",
                                            download.getLocation(), size);
                                requiredSize = size;
                                break;
                            }
                            requiredSize += size;
                            if (download.getPostInstallAttributes() != null &&
                                download.getPostInstallAttributes().getStagedData() != null) {
                                StagedData postInstall = download.getPostInstallAttributes().getStagedData();
                                size = postInstall.getDownloadSize();
                                if (size < 0) {
                                    logger.warn("Unable to obtain download size for PostInstall {}, abort provision request",
                                                postInstall.getLocation());
                                    requiredSize = size;
                                    break;
                                }
                                requiredSize += size;
                            }
                        }
                    }
                } catch(IOException e) {
                    failed = e;
                }
                if (requiredSize < 0 || failed != null)
                    throw new ProvisionException("Service ["+LoggingUtil.getLoggingName(sElem)+"] "+
                                                 "instantiation failed",
                                                 failed == null? new IOException("Unable to obtain download size"):failed,
                                                 true);
                /* Find out if the resource has the necessary disk-space */
                if (supportsStorageRequirement(requiredSize, resourceCapability.getPlatformCapabilities())) {
                    logger.debug("{} supports provisioning requirements for {} service [{}]",
                                 getName(), provType, LoggingUtil.getLoggingName(sElem));
                    sElem.setProvisionablePlatformCapabilities(unsupportedReqs);
                    return true;
                }
                double avail = getAvailableStorage(resourceCapability.getPlatformCapabilities());
                StringBuilder sb = new StringBuilder();
                sb.append(getName()).append(" ");
                if (avail > 0) {
                    /* For logging purposes compute the size in GB */
                    double GB = Math.pow(1024, 3);
                    avail = avail/GB;
                    sb.append("does not have adequate disk-space for ")
                      .append("[")
                      .append(LoggingUtil.getLoggingName(sElem)).append("] ")
                      .append("Required=")
                      .append(+requiredSize).append(", ")
                      .append("Available=").append(avail).append(" GB");
                } else {
                    sb.append("does not report a StorageCapability. ")
                      .append("Rio cannot allocate the ")
                      .append("[")
                      .append(LoggingUtil.getLoggingName(sElem))
                      .append("] ")
                      .append("service with a software download size of ")
                      .append(+requiredSize).append(". ")
                      .append("This may be due to a known limitation ")
                      .append("found when running the ").append(getName())
                      .append(" on a Windows machine, or if the ")
                      .append("DiskSpace monitor has been disabled. ")
                      .append("Check the Cybernode environment and ")
                      .append("configuration.");
                }
                String failureReason = sb.toString();
                provisionRequest.addFailureReason(failureReason);
                logger.warn(failureReason);
                return false;
            }
        } else {
            String failureReason = String.format("%s does not meet general or quantitative requirements for %s service [%s]",
                                                 getName(), provType, LoggingUtil.getLoggingName(sElem));
            logger.debug(failureReason);
            return false;
        }
    }

    /**
     * Determine if an Array of PlatformCapability components contains a
     * StorageCapability and if that StorageCapability has the requested disk
     * space size available
     *
     * @param requestedSize The size to verify
     * @param pCaps Array of PlatformCapability instances to use
     * @return Return true if the Array of PlatformCapability
     * components contains a StorageCapability and if that StorageCapability has
     * the requested disk space size available
     */
    private boolean supportsStorageRequirement(long requestedSize, PlatformCapability[] pCaps) {
        boolean supports = false;
        for (PlatformCapability pCap : pCaps) {
            if (pCap instanceof StorageCapability) {
                StorageCapability storage = (StorageCapability) pCap;
                supports = storage.supports(requestedSize);
                break; // only the first StorageCapability found is consulted
            }
        }
        return supports;
    }

    /**
     * Get the available storage from the StorageCapability
     *
     * @param pCaps Array of PlatformCapability instances to use
     *
     * @return The available storage from the StorageCapability.
If a * StorageCapability cannot be found return -1 */ private double getAvailableStorage(PlatformCapability[] pCaps) { double available = -1; for (PlatformCapability pCap : pCaps) { if (pCap instanceof StorageCapability) { StorageCapability storage = (StorageCapability) pCap; Double dCap = (Double) storage.getValue(StorageCapability.CAPACITY); if (dCap != null) { available = dCap; } break; } } return available; } /** * This method determines whether or not the defined criteria meets general * requirements: * <ul> * <li>If there is a cluster of machines defined, the compute resource is * defined in the cluster of machines that have been defined * </ul> * <br> * * @param provisionRequest The ProvisionRequest * @return Return true if the provided ResourceCapability meets * general requirements */ boolean meetsGeneralRequirements(final ProvisionRequest provisionRequest) { /* * If we have a cluster defined, then see if the provided resource has * either an IP address or hostname thats in the list of IP addresses * and hostnames in our machine cluster list. 
If it isnt in the list, * then there is no sense in proceeding */ ServiceElement sElem = provisionRequest.getServiceElement(); String[] machineCluster = sElem.getCluster(); if (machineCluster != null && machineCluster.length > 0) { logger.debug("ServiceBean [{}] has a cluster requirement", LoggingUtil.getLoggingName(sElem)); boolean found = false; for (String aMachineCluster : machineCluster) { if (aMachineCluster.equals(resourceCapability.getAddress()) || aMachineCluster.equalsIgnoreCase(resourceCapability.getHostName())) found = true; } if (!found) { StringBuilder builder = new StringBuilder(); for (String m : machineCluster) { if (builder.length()>0) builder.append(", "); builder.append(m); } String failureReason = String.format("%s not found in cluster requirement [%s] for [%s]", getName(), builder.toString(), LoggingUtil.getLoggingName(sElem)); provisionRequest.addFailureReason(failureReason); logger.debug(failureReason); return false; } } return true; } /** * This method verifies whether the ResourceCapability can support the * Qualitative Requirements specified by the ServiceBean * * @param request The ProvisionRequest object * @return A Collection of SystemRequirement objects which the * ResourceCapability does not support. 
If the Collection has zero entries, * then the provided ResourceCapability supports the Qualitative * Requirements specified by the ServiceBean */ Collection<SystemComponent> meetsQualitativeRequirements(final ProvisionRequest request) { ServiceElement sElem = request.getServiceElement(); ServiceLevelAgreements sla = sElem.getServiceLevelAgreements(); SystemComponent[] serviceRequirements = sla.getSystemRequirements().getSystemComponents(); List<SystemComponent> unsupportedRequirements = new ArrayList<>(); /* * If there are no PlatformCapability requirements we can return * successfully */ if (serviceRequirements.length == 0) return unsupportedRequirements; PlatformCapability[] platformCapabilities = resourceCapability.getPlatformCapabilities(); List<SystemComponent> operatingSystems = new ArrayList<>(); List<SystemComponent> architectures = new ArrayList<>(); List<SystemComponent> machineAddresses = new ArrayList<>(); List<SystemComponent> remaining = new ArrayList<>(); for (SystemComponent serviceRequirement : serviceRequirements) { if (isOperatingSystem(serviceRequirement)) { operatingSystems.add(serviceRequirement); } else if (isArchitecture(serviceRequirement)) { architectures.add(serviceRequirement); } else if (isMachineAddress(serviceRequirement)) { machineAddresses.add(serviceRequirement); } else { remaining.add(serviceRequirement); } } /* * Check if we have a match in one of the sought after architectures */ if (!architectures.isEmpty()) { ProcessorArchitecture architecture = getArchitecture(); Result result = check(architecture, architectures); if (!result.supported) { String failureReason = formatFailureReason(architectures, (String)architecture.getCapabilities().get(ProcessorArchitecture.ARCHITECTURE), "architecture", sElem, result.excluded.isEmpty(), ProcessorArchitecture.ARCHITECTURE); if (logger.isWarnEnabled()) { logger.warn(failureReason); } request.addFailureReason(failureReason); unsupportedRequirements.addAll(architectures); return 
unsupportedRequirements; } } /* * Check if we have a match in one of the sought after operating systems */ if (!operatingSystems.isEmpty()) { OperatingSystem operatingSystem = getOperatingSystem(); Result result = check(operatingSystem, operatingSystems); if (!result.supported) { String failureReason = formatFailureReason(operatingSystems, operatingSystem.getCapabilities().get(OperatingSystem.NAME).toString(), "operating system", sElem, result.excluded.isEmpty(), OperatingSystem.NAME); if (logger.isWarnEnabled()) { logger.warn(failureReason); } request.addFailureReason(failureReason); unsupportedRequirements.addAll(operatingSystems); return unsupportedRequirements; } } /* * Check if we have a match in one of the sought after machine addresses */ if (!machineAddresses.isEmpty()) { TCPConnectivity tcpConnectivity = getTCPConnectivity(); Result result = check(tcpConnectivity, machineAddresses); if (!result.supported) { String formattedComponents = formatSystemComponents(machineAddresses, TCPConnectivity.HOST_NAME, TCPConnectivity.HOST_ADDRESS); String failureReason; if (result.excluded.isEmpty()) { failureReason = String.format("The machine addresses being requested [%s] do not match the " + "target resource's machine name/ip [%s/%s] for [%s]", formattedComponents, tcpConnectivity.getCapabilities().get(TCPConnectivity.HOST_NAME), tcpConnectivity.getCapabilities().get(TCPConnectivity.HOST_ADDRESS), LoggingUtil.getLoggingName(sElem)); } else { failureReason = String.format("The target resource's machine name/ip [%s/%s] is on the exclusion list of [%s] for [%s]", tcpConnectivity.getCapabilities().get(TCPConnectivity.HOST_NAME), tcpConnectivity.getCapabilities().get(TCPConnectivity.HOST_ADDRESS), formattedComponents, LoggingUtil.getLoggingName(sElem)); } if (logger.isWarnEnabled()) { logger.warn(failureReason); } request.addFailureReason(failureReason); unsupportedRequirements.addAll(machineAddresses); return unsupportedRequirements; } } /* * Check remaining 
PlatformCapability objects for supportability */ for (SystemComponent serviceRequirement : remaining) { boolean supported = false; /* * Iterate through all resource PlatformCapability objects and see * if any of them supports the current PlatformCapability. If none * are found, then we don't have a match */ for (PlatformCapability platformCapability : platformCapabilities) { if (platformCapability.supports(serviceRequirement)) { if (serviceRequirement.exclude()) { continue; } supported = true; break; } } if (!supported) { unsupportedRequirements.add(serviceRequirement); } } return unsupportedRequirements; } private Result check(final PlatformCapability platformCapability, final List<SystemComponent> systemComponents) { Result result = new Result(); boolean supported = false; for (SystemComponent serviceRequirement : systemComponents) { if (serviceRequirement.exclude()) { if (platformCapability.supports(serviceRequirement)) { result.excluded.add(serviceRequirement); } else { supported = true; } break; } else { if (platformCapability.supports(serviceRequirement)) { supported = true; break; } } } result.supported = supported; return result; } private static class Result { boolean supported; List<SystemComponent> excluded = new ArrayList<>(); } private String formatSystemComponents(final List<SystemComponent> systemComponents, final String... keys) { StringBuilder builder = new StringBuilder(); for (String key : keys) { for (SystemComponent serviceRequirement : systemComponents) { if (builder.length()>0) builder.append(", "); String value = (String) serviceRequirement.getAttributes().get(key); if (value!=null) builder.append(value); } } return builder.toString(); } private String formatFailureReason(final List<SystemComponent> systemComponents, final String capability, final String name, final ServiceElement sElem, final boolean notExcluded, final String... 
keys) { String formattedComponents = formatSystemComponents(systemComponents, keys); String failureReason; if (notExcluded) { failureReason = String.format("The %ss being requested [%s] are not supported by the " + "target resource's %s [%s] for [%s]", name, formattedComponents, name, capability, LoggingUtil.getLoggingName(sElem)); } else { failureReason = String.format("The target resource's %s [%s] is on the exclusion list of [%s] for [%s]", name, capability, formattedComponents, LoggingUtil.getLoggingName(sElem)); } return failureReason; } private boolean isOperatingSystem(SystemComponent systemComponent) { String name = systemComponent.getName(); String className = systemComponent.getClassName(); if (className==null) { return name.equals(OperatingSystem.ID); } return systemComponent.getClassName().equals(OperatingSystem.class.getName()); } private boolean isArchitecture(SystemComponent systemComponent) { String name = systemComponent.getName(); String className = systemComponent.getClassName(); if (className==null) { return name.equals(ProcessorArchitecture.ID); } return systemComponent.getClassName().equals(ProcessorArchitecture.class.getName()); } private boolean isMachineAddress(SystemComponent systemComponent) { String name = systemComponent.getName(); String className = systemComponent.getClassName(); if (className==null) { return name.equals(TCPConnectivity.ID); } return systemComponent.getClassName().equals(TCPConnectivity.class.getName()); } /*boolean isHardwareRelated(SystemComponent systemComponent) { return isArchitecture(systemComponent) || isOperatingSystem(systemComponent) || isMachineAddress(systemComponent); }*/ private ProcessorArchitecture getArchitecture () { ProcessorArchitecture architecture = null; for (PlatformCapability platformCapability : resourceCapability.getPlatformCapabilities()) { if (platformCapability instanceof ProcessorArchitecture) { architecture = (ProcessorArchitecture) platformCapability; break; } } return architecture; } 
private OperatingSystem getOperatingSystem () { OperatingSystem operatingSystem = null; for (PlatformCapability platformCapability : resourceCapability.getPlatformCapabilities()) { if (platformCapability instanceof OperatingSystem) { operatingSystem = (OperatingSystem) platformCapability; break; } } return operatingSystem; } private TCPConnectivity getTCPConnectivity () { TCPConnectivity tcpConnectivity = null; for (PlatformCapability platformCapability : resourceCapability.getPlatformCapabilities()) { if (platformCapability instanceof TCPConnectivity) { tcpConnectivity = (TCPConnectivity) platformCapability; break; } } return tcpConnectivity; } /** * This method verifies whether the ResourceCapability can support the * Quantitative Requirements specified by the ServiceBean * * @param provisionRequest The ProvisionRequest * @return Return true if the provided ResourceCapability meets * Quantitative requirements */ boolean meetsQuantitativeRequirements(final ProvisionRequest provisionRequest) { ServiceElement sElem = provisionRequest.getServiceElement(); ServiceLevelAgreements sla = sElem.getServiceLevelAgreements(); boolean provisionable = true; String[] systemThresholdIDs = sla.getSystemRequirements().getSystemThresholdIDs(); if (systemThresholdIDs.length == 0) return (true); MeasuredResource[] measured = resourceCapability.getMeasuredResources(); /* * If the number of MeasuredCapabilities is less then what we are asking * for there is no reason to continue */ if (measured == null || measured.length < systemThresholdIDs.length) { StringBuilder message = new StringBuilder(); message.append(getName()).append(" "); if (measured==null) { message.append("has a [null] MeasuredCapability instance, ServiceBean ["); message.append(LoggingUtil.getLoggingName(sElem)).append("] "); message.append("has a requirement to test ").append(systemThresholdIDs.length); } else { message.append("only has [").append(measured.length).append("] MeasuredCapability instances, "); 
message.append("ServiceBean [").append(LoggingUtil.getLoggingName(sElem)).append("] "); message.append("has a requirement to test [").append(systemThresholdIDs.length).append("]"); } provisionRequest.addFailureReason(message.toString()); logger.debug(message.toString()); return false; } /* * Check each of the MeasuredResource objects */ for (String systemThresholdID : systemThresholdIDs) { boolean supported = false; ThresholdValues systemThreshold = sla.getSystemRequirements().getSystemThresholdValue(systemThresholdID); if (systemThresholdID.equals(SystemRequirements.SYSTEM)) { double systemUtilization = systemThreshold.getHighThreshold(); if (systemUtilization < resourceCapability.getUtilization()) { String failureReason = String.format("%s cannot meet system utilization requirement. Desired: %f, Actual: %f", getName(), systemUtilization, resourceCapability.getUtilization()); provisionRequest.addFailureReason(failureReason); logger.debug(failureReason); return (false); } else { supported = true; logger.debug("[System] utilization requirement met. Desired {}, Actual {}", systemUtilization, resourceCapability.getUtilization()); } } /* * Iterate through all resource MeasuredResource objects and see if * any of them supports the current MeasuredResource. If none are * found, then we don't have a match */ for (MeasuredResource mRes : measured) { if (mRes.getIdentifier().equals(systemThresholdID)) { if (mRes.evaluate(systemThreshold)) { supported = true; logger.debug("{} meets [{}] utilization requirement. Desired Low: {}, High: {}, Actual: {}", getName(), systemThresholdID, systemThreshold.getLowThreshold(), systemThreshold.getHighThreshold(), mRes.getValue()); break; } else { String failureReason = String.format("%s cannot meet [%s], utilization requirement. 
Desired Low: %f, High: %f, Actual: %f", getName(), systemThresholdID, systemThreshold.getLowThreshold(), systemThreshold.getHighThreshold(), mRes.getValue()); provisionRequest.addFailureReason(failureReason); logger.debug(failureReason); } } } if (!supported) { provisionable = false; break; } } return provisionable; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; InstantiatorResource that = (InstantiatorResource) o; return instantiatorUuid.equals(that.instantiatorUuid); } @Override public int hashCode() { return instantiatorUuid.hashCode(); } }
package net.maizegenetics.dna.snp;

import net.maizegenetics.dna.snp.depth.AlleleDepth;
import net.maizegenetics.dna.snp.genotypecall.GenotypeCallTable;
import net.maizegenetics.dna.map.Chromosome;
import net.maizegenetics.dna.map.PositionList;
import net.maizegenetics.dna.snp.bit.BitStorage;
import net.maizegenetics.taxa.TaxaList;
import net.maizegenetics.util.BitSet;

/**
 * A representation of the SNP and indel variation for a set of taxa and genomic
 * positions.
 * <p>
 * </p>
 * GenotypeTable always consist of a TaxaList, PositionList, and
 * GenotypeCallTable. Additionally, as needed they also can represent allele in
 * a bit form, with sequencing depth, or other scores (e.g. quality scores).
 * <p>
 * </p>
 * Use GenotypeTableBuilder to create GenotypeTable.
 *
 * @author Terry Casstevens
 * @author Ed Buckler
 */
public interface GenotypeTable {

    // Allele encodings. Interface fields are implicitly public static final.
    /**
     * This encoding is used to lump together allele values with frequencies too
     * low to be retained as one of the maximum number of alleles.
     */
    public static byte RARE_ALLELE = 0xE;
    public static byte RARE_DIPLOID_ALLELE = (byte) 0xEE;
    public static String RARE_ALLELE_STR = "Z";
    public static byte UNKNOWN_ALLELE = 0xF;
    public static byte UNKNOWN_DIPLOID_ALLELE = (byte) 0xFF;
    public static String UNKNOWN_ALLELE_STR = "N";
    public static String UNKNOWN_DIPLOID_ALLELE_STR = "N:N";
    public static char UNKNOWN_ALLELE_CHAR = 'N';

    /**
     * The kinds of site scores a genotype table can carry.
     * NOTE(review): "ImputedProbablity" is misspelled ("Probability"), but the
     * constant name is public API referenced by callers — do not rename casually.
     */
    public static enum SITE_SCORE_TYPE {

        None, MixedScoreTypes, QualityScore, ImputedProbablity, Dosage
    };

    /**
     * This defines the possible allele scope types.
     */
    public static enum ALLELE_SORT_TYPE {

        /**
         * This is the default where alleles are sorted by frequency. Same as
         * alleles().
         */
        Frequency,
        /**
         * This sorts alleles based on there depth value.
         */
        Depth,
        /**
         * This uses the allele frequency of a base/global Genotype table
         * determine sort order of alleles. That Genotype table is usually a
         * superset.
         */
        Global_Frequency,
        /**
         * This sorts alleles based on the reference sequence.
         */
        Reference
    };

    /**
     * This defines the possible alleles.
     */
    public static enum WHICH_ALLELE {

        /**
         * Major Allele - Most frequent allele.
         */
        Major(0),
        /**
         * Minor Allele - Second most frequent allele.
         */
        Minor(1),
        /**
         * Global Major Allele
         */
        Global_Major(2),
        /**
         * Global Minor Allele
         */
        Global_Minor(3),
        /**
         * Reference Allele
         */
        Reference(4),
        /**
         * Alternate to Reference Allele
         */
        Alternate(5),
        /**
         * High Coverage Allele
         */
        HighCoverage(6),
        /**
         * Low Coverage Allele
         */
        LowCoverage(7),
        /**
         * Remaining Minor Alleles
         */
        Minor2(8),
        Minor3(9),
        Minor4(10),
        Minor5(11);
        // Explicit index kept alongside each constant (mirrors ordinal, but stable by contract).
        private final int myIndex;
        /**
         * Count of the number of allele types
         */
        public final static int COUNT = WHICH_ALLELE.values().length;

        WHICH_ALLELE(int index) {
            myIndex = index;
        }

        /**
         * Sequential index that can be use for primitive arrays
         */
        public int index() {
            return myIndex;
        }
        // The alleles that participate in frequency-based sorting, most to least frequent.
        private static WHICH_ALLELE[] FREQ_ALLELES = new WHICH_ALLELE[]{Major, Minor, Minor2, Minor3, Minor4, Minor5};

        public static WHICH_ALLELE[] frequencyAlleles() {
            return FREQ_ALLELES;
        }
    };

    /**
     * Returns the immutable Genotype matrix. Taxa and Positions are not part of
     * the matrix. This method is used for copying Genotype tables, when either
     * the Taxa or Positions have changed.
     *
     * @return genotype matrix
     */
    public GenotypeCallTable genotypeMatrix();

    /**
     * Returns diploid value (genotype) for a given taxon and site.
     *
     * @param taxon taxon
     * @param site site
     *
     * @return high four bits generally encode the more frequent allele and the
     * lower four bits encode the less frequent allele.
     */
    public byte genotype(int taxon, int site);

    /**
     * Returns diploid values for given taxon and site. Same values as
     * genotype(), except two values are already separated into two bytes.
     *
     * @param taxon taxon
     * @param site site
     *
     * @return first byte (index 0) holds first allele value in right-most four
     * bits. second byte (index 1) holds second allele value in right-most four
     * bits.
     */
    public byte[] genotypeArray(int taxon, int site);

    /**
     * Returns diploid values for given taxon, chromosome, and physical
     * position. The chromosome and physical position should map to an unique
     * site.
     *
     * @param taxon taxon
     * @param chromosome chromosome
     * @param physicalPosition physical position
     *
     * @return first four bits are the first allele value and the second four
     * bits are the second allele value.
     */
    public byte genotype(int taxon, Chromosome chromosome, int physicalPosition);

    /**
     * Returns sequence of diploid allele values for given taxon in specified
     * range (end site excluded). Each value in array is what would be returned
     * by genotype().
     *
     * @param taxon taxon
     * @param startSite start site
     * @param endSite end site
     *
     * @return sequence of diploid allele values.
     */
    public byte[] genotypeRange(int taxon, int startSite, int endSite);

    /**
     * Returns sequence of diploid allele values for all sites for given taxon.
     * Each value in array is what would be returned by genotype().
     *
     * @param taxon taxon
     *
     * @return sequence of diploid allele values.
     */
    public byte[] genotypeAllSites(int taxon);

    /**
     * Returns sequence of diploid allele values for all taxa for given site.
     * Each value in array is what would be returned by genotype().
     *
     * @param site site
     *
     * @return sequence of diploid allele values.
     */
    public byte[] genotypeAllTaxa(int site);

    /**
     * Returns sequence of true/false values indicating whether taxon at each
     * site matches a specific allele (based on frequency). Allele number of
     * value 0 would be the major allele. Allele number of value 1 would be the
     * minor allele. Allele number of value 2 would be the third most frequent
     * allele value and so on.
     *
     * @param taxon taxon
     * @param allele allele
     *
     * @return sequence of true/false values.
     */
    public BitSet allelePresenceForAllSites(int taxon, WHICH_ALLELE allele);

    /**
     * Returns sequence of true/false values indicating whether taxon at sites
     * (in given blocks, 64 sites per block including start block but excluding
     * end block) matches a specific allele.
     *
     * @param taxon taxon
     * @param allele allele
     * @param startBlock starting block
     * @param endBlock end block
     *
     * @return sequence of true/false values.
     */
    public long[] allelePresenceForSitesBlock(int taxon, WHICH_ALLELE allele, int startBlock, int endBlock);

    /**
     * Returns sequence of true/false values indicating whether taxon at each
     * site for given parent matches a specific allele.
     *
     * @param taxon taxon
     * @param firstParent true for first parent (false for second parent)
     * @param allele allele
     *
     * @return sequence of true/false values.
     */
    public BitSet haplotypeAllelePresenceForAllSites(int taxon, boolean firstParent, WHICH_ALLELE allele);

    /**
     * Returns sequence of true/false values indicating whether site at each
     * taxon for given parent matches a specific allele (based on frequency).
     * Allele number of value 0 would be the major allele. Allele number of
     * value 1 would be the minor allele. Allele number of value 2 would be the
     * third most frequent allele value and so on.
     *
     * @param site site
     * @param firstParent true for first parent (false for second parent)
     * @param allele allele
     *
     * @return sequence of true/false values.
     */
    public BitSet haplotypeAllelePresenceForAllTaxa(int site, boolean firstParent, WHICH_ALLELE allele);

    /**
     * Returns sequence of true/false values indicating whether taxon at sites
     * (in given blocks, 64 sites per block including start block but excluding
     * end block) for given parent matches a specific allele (based on
     * frequency). Allele number of value 0 would be the major allele. Allele
     * number of value 1 would be the minor allele. Allele number of value 2
     * would be the third most frequent allele value and so on.
     *
     * @param taxon taxon
     * @param firstParent true for first parent (false for second parent)
     * @param allele allele
     * @param startBlock starting block
     * @param endBlock end block
     *
     * @return sequence of true/false values.
     */
    public long[] haplotypeAllelePresenceForSitesBlock(int taxon, boolean firstParent, WHICH_ALLELE allele, int startBlock, int endBlock);

    /**
     * Returns string representation of diploid values returned by genotype()
     * for given taxon and site. The two allele values will be separated by a
     * colon (:) delimiter.
     *
     * @param taxon taxon
     * @param site site
     *
     * @return string representation of diploid values.
     */
    public String genotypeAsString(int taxon, int site);

    /**
     * Returns string representation of diploid alleles for given taxon in
     * specified range (end site excluded). Each value in string is what would
     * be returned by genotypeAsString().
     *
     * @param taxon taxon
     * @param startSite start site
     * @param endSite end site
     *
     * @return string representation of alleles in range
     */
    public String genotypeAsStringRange(int taxon, int startSite, int endSite);

    /**
     * Returns string representation of diploid alleles for given taxon for all
     * sites. Each value in string is what would be returned by
     * genotypeAsString().
     *
     * @param taxon taxon
     *
     * @return string representation of alleles
     */
    public String genotypeAsStringRow(int taxon);

    /**
     * Returns string representation of diploid values returned by
     * genotypeArray() for given taxon and site. Same two allele values as
     * genotypeAsString(), except already separated into two Strings.
     *
     * @param taxon taxon
     * @param site site
     *
     * @return string representations of diploid values.
     */
    public String[] genotypeAsStringArray(int taxon, int site);

    /**
     * Return (haploid) reference allele values at given site.
     *
     * @param site site
     *
     * @return first four bits are the first allele value and the second four
     * bits are the second allele value.
     */
    public byte referenceAllele(int site);

    /**
     * Returns (haploid) reference alleles in specified range.
     * End site not included.
     *
     * @param startSite start site
     * @param endSite end site
     *
     * @return reference allele values.
     */
    public byte[] referenceAlleles(int startSite, int endSite);

    /**
     * Returns (haploid) reference alleles for all sites.
     *
     * @return reference allele values.
     */
    public byte[] referenceAlleleForAllSites();

    /**
     * Return whether this genotype table has defined reference sequence.
     *
     * @return true if this genotype table has reference sequence.
     */
    public boolean hasReference();

    /**
     * Returns whether allele values at given taxon and site are heterozygous.
     * If two values returned by genotype() are different, this will return
     * false.
     *
     * @param taxon taxon
     * @param site site
     *
     * @return whether heterozygous
     */
    public boolean isHeterozygous(int taxon, int site);

    /**
     * Returns number of heterozygous taxa at given site.
     *
     * @param site site
     *
     * @return number of heterozygous taxa
     */
    public int heterozygousCount(int site);

    /**
     * Get SNP ID for specified site.
     *
     * @param site site
     * @return site name
     */
    public String siteName(int site);

    /**
     * Returns total number of sites of this genotype table.
     *
     * @return number of sites
     */
    public int numberOfSites();

    /**
     * Return number of sites for given chromosome.
     *
     * @param chromosome chromosome
     *
     * @return number of sites
     */
    public int chromosomeSiteCount(Chromosome chromosome);

    /**
     * Get the first (inclusive) and last (inclusive) site of the specified
     * chromosome in this genotype table.
     *
     * @param chromosome chromosome
     *
     * @return first and last site
     */
    public int[] firstLastSiteOfChromosome(Chromosome chromosome);

    /**
     * Returns number of taxa
     *
     * @return number of taxa
     */
    public int numberOfTaxa();

    /**
     * Return the position list for the genotype table.
     *
     * @return PositionList for all sites.
     */
    public PositionList positions();

    /**
     * Returns the physical position at given site.
     *
     * @param site site
     *
     * @return physical position
     */
    public int chromosomalPosition(int site);

    /**
     * Return site of given physical position in chromosome. If the physical
     * position doesn't exist, (-(insertion point) - 1) is returned. If
     * chromosome is not found, an exception is thrown.
     *
     * @param physicalPosition physical position
     * @param chromosome chromosome. if null, the first chromosome is used.
     *
     * @return index
     */
    public int siteOfPhysicalPosition(int physicalPosition, Chromosome chromosome);

    /**
     * Return site of given physical position / SNP ID in chromosome. If the
     * physical position doesn't exist, (-(insertion point) - 1) is returned. If
     * chromosome is not found, an exception is thrown. This is to support
     * multiple sites with the same physical position but different SNP IDs.
     *
     * @param physicalPosition physical position
     * @param chromosome chromosome. if null, the first chromosome is used.
     * @param snpName SNP ID
     *
     * @return index
     */
    public int siteOfPhysicalPosition(int physicalPosition, Chromosome chromosome, String snpName);

    /**
     * Returns all physical positions.
     *
     * @return physical positions.
     */
    public int[] physicalPositions();

    /**
     * Return Chromosome Name for given site.
     *
     * @param site site
     *
     * @return Chromosome Name
     */
    public String chromosomeName(int site);

    /**
     * Return Chromosome for given site.
     *
     * @param site site
     *
     * @return Chromosome
     */
    public Chromosome chromosome(int site);

    /**
     * Return Chromosome with matching name. First to match will be returned.
     *
     * @param name name
     *
     * @return Chromosome
     */
    public Chromosome chromosome(String name);

    /**
     * Return all chromosomes.
     *
     * @return chromosomes
     */
    public Chromosome[] chromosomes();

    /**
     * Return number of chromosomes.
     *
     * @return number of chromosomes
     */
    public int numChromosomes();

    /**
     * Returns starting site for each chromosome.
     *
     * @return starting site for each chromosome.
     */
    public int[] chromosomesOffsets();

    /**
     * Returns the site score of the given taxon and site.
     *
     * @param taxon taxon index
     * @param site site
     *
     * @return site score.
     */
    public float siteScore(int taxon, int site);

    /**
     * Returns the site scores.
     *
     * @return site scores.
     */
    public float[][] siteScores();

    /**
     * Returns true if this genotype table has sequencing depth.
     *
     * @return true if this genotype table has sequencing depth.
     */
    public boolean hasDepth();

    /**
     * Returns true if this genotype table has site scores.
     *
     * @return true if this genotype table has site scores.
     */
    public boolean hasSiteScores();

    /**
     * Return what type of site scores this genotype table has.
     *
     * @return site score type.
     */
    public GenotypeTable.SITE_SCORE_TYPE siteScoreType();

    /**
     * Return size of indel at given site.
     *
     * @param site site
     *
     * @return indel size
     */
    public int indelSize(int site);

    /**
     * Returns whether give site is an indel.
     *
     * @param site site
     *
     * @return true if indel
     */
    public boolean isIndel(int site);

    /**
     * Returns whether all sites are polymorphic.
     *
     * @return true if all sites are polymorphic.
     */
    public boolean isAllPolymorphic();

    /**
     * Return whether given site is polymorphic.
     *
     * @param site site
     *
     * @return true if given site is polymorphic.
     */
    public boolean isPolymorphic(int site);

    /**
     * Return most common allele at given site. Gap is included as state.
     * Heterozygous count one for each allele value. Homozygous counts two for
     * the allele value.
     *
     * @param site site
     *
     * @return most common allele
     */
    public byte majorAllele(int site);

    /**
     * Return most common allele at given site. Gap is included as state.
     * Heterozygous count one for each allele value. Homozygous counts two for
     * the allele value.
     *
     * @param site site
     *
     * @return most common allele as String
     */
    public String majorAlleleAsString(int site);

    /**
     * Return most common minor allele at given site. Gap is included as state.
     * Heterozygous count one for each allele value. Homozygous counts two for
     * the allele value.
     *
     * @param site site
     *
     * @return most common minor allele
     */
    public byte minorAllele(int site);

    /**
     * Return most common minor allele at given site. Gap is included as state.
     * Heterozygous count one for each allele value. Homozygous counts two for
     * the allele value.
     *
     * @param site site
     *
     * @return most common minor allele as String
     */
    public String minorAlleleAsString(int site);

    /**
     * Return all minor alleles at given site. Gap is included as state.
     * Heterozygous count one for each allele value. Homozygous counts two for
     * the allele value.
     *
     * @param site site
     *
     * @return all minor alleles
     */
    public byte[] minorAlleles(int site);

    /**
     * Returns all alleles at given site in order of frequency. Gap is included
     * as state. Heterozygous count one for each allele value. Homozygous counts
     * two for the allele value.
     *
     * @param site site
     *
     * @return all alleles
     */
    public byte[] alleles(int site);

    /**
     * Return frequency for most common minor allele at given site. Gap is
     * included as state. Heterozygous count one for each allele value.
     * Homozygous counts two for the allele value.
     *
     * @param site site
     *
     * @return frequency
     */
    public double minorAlleleFrequency(int site);

    /**
     * Return frequency for major allele at given site. Gap is included as
     * state. Heterozygous count one for each allele value. Homozygous counts
     * two for the allele value.
     *
     * @param site site
     *
     * @return frequency
     */
    public double majorAlleleFrequency(int site);

    /**
     * Return taxa list of this genotype table.
     *
     * @return taxa list.
     */
    public TaxaList taxa();

    /**
     * Return taxa name at given index.
     *
     * @param index
     *
     * @return taxa name
     */
    public String taxaName(int index);

    /**
     * Gets the Genome Assembly.
     *
     * @return the genome assembly.
     */
    public String genomeVersion();

    /**
     * Return whether is positive strand at given site.
     *
     * @param site site
     *
     * @return whether is positive strand.
     */
    public boolean isPositiveStrand(int site);

    /**
     * Returns individual genotype tables within this genotype table.
     *
     * @return list of genotype tables.
     */
    public GenotypeTable[] compositeAlignments();

    /**
     * Return sorted list of alleles from highest frequency to lowest at given
     * site in genotype table. Resulting double dimension array holds alleles
     * (bytes) in result[0]. And the counts are in result[1]. Counts haploid
     * values twice and diploid values once. Higher ploids are not supported.
     *
     * @param site site
     *
     * @return sorted list of alleles and counts
     */
    public int[][] allelesSortedByFrequency(int site);

    /**
     * Return sorted list of diploid vales from highest frequency to lowest at
     * given site in genotype table. Resulting double dimension array holds
     * diploids (Strings) in result[0]. And the counts are in result[1]
     * (Integers).
     *
     * @param site site
     *
     * @return sorted list of diploids and counts
     */
    public Object[][] genosSortedByFrequency(int site);

    /**
     * Returns whether this genotype table is phased.
     *
     * @return true if phased.
     */
    public boolean isPhased();

    /**
     * Returns true if this genotype table retains rare alleles. If false, rare
     * alleles are recorded as unknown.
     *
     * @return whether rare alleles are retained.
     */
    public boolean retainsRareAlleles();

    /**
     * Returns allele values as strings for all sites. The first dimension of
     * the array indexes the sites. The second dimension indexes the allele
     * values for given site. The indices for the allele values are used as the
     * codes to store data. These codes (indices) are returned by the genotype()
     * methods. If only one array of allele values is returned, that is the
     * encoding for all sites.
     *
     * @return allele values for all sites.
     */
    public String[][] alleleDefinitions();

    /**
     * Same as alleleDefinitions() for only one site.
     *
     * @param site site
     *
     * @return allele values for given site.
     */
    public String[] alleleDefinitions(int site);

    /**
     * Returns String representation of allele value at site.
     *
     * @param site site
     * @param value allele value
     *
     * @return String representation
     */
    public String genotypeAsString(int site, byte value);

    /**
     * Returns String representation of diploid allele value at site.
     *
     * @param site site
     * @param value diploid allele value
     *
     * @return String representation
     */
    public String diploidAsString(int site, byte value);

    /**
     * Return max number of alleles defined for any given site.
     *
     * @return max number of alleles.
     */
    public int maxNumAlleles();

    /**
     * Returns total number of non-missing allele values for given site. This
     * can be twice the number of taxa, as diploid values are supported.
     *
     * @param site site
     * @return number of non-missing allele values.
     */
    public int totalGametesNonMissingForSite(int site);

    /**
     * Returns total number of non-missing taxa for given site. Taxa are
     * considered missing only if both allele values are Unknown (N).
     *
     * @param site site
     *
     * @return number of non-missing taxa..
     */
    public int totalNonMissingForSite(int site);

    /**
     * Returns the minor allele count for given site.
     *
     * @param site site
     * @return minor allele count
     */
    public int minorAlleleCount(int site);

    /**
     * Returns the major allele count for given site.
     *
     * @param site site
     * @return major allele count
     */
    public int majorAlleleCount(int site);

    /**
     * Returns counts of all diploid combinations from highest frequency to
     * lowest for whole genotype table. Resulting double dimension array holds
     * diploids (Strings) in result[0]. And the counts are in result[1] (Longs).
     *
     * @return diploid counts.
     */
    public Object[][] genoCounts();

    /**
     * Returns counts of all major/minor allele combinations from highest
     * frequency to lowest for whole genotype table. Resulting double dimension
     * array holds major/minor allele (Strings) in result[0]. And the counts are
     * in result[1] (Longs).
     *
     * @return diploid counts.
     */
    public Object[][] majorMinorCounts();

    /**
     * Returns total number of non-missing allele values for given taxon. This
     * can be twice the number of sites, as diploid values are supported.
     *
     * @param taxon taxon
     *
     * @return number of non-missing allele values.
     */
    public int totalGametesNonMissingForTaxon(int taxon);

    /**
     * Returns number of heterozygous sites at given taxon.
     *
     * @param taxon taxon
     *
     * @return number of heterozygous sites
     */
    public int heterozygousCountForTaxon(int taxon);

    /**
     * Returns total number of non-missing sites for given taxon. Sites are
     * considered missing only if both allele values are Unknown (N).
     *
     * @param taxon taxon
     *
     * @return number of non-missing sites.
     */
    public int totalNonMissingForTaxon(int taxon);

    /**
     * Returns allele depth object (null if not present)
     *
     * @return allele depth associated with genotypeTable
     */
    public AlleleDepth depth();

    /**
     * Returns depth count for each diploid allele at the given taxon and site.
     *
     * @param taxon taxon
     * @param site site
     *
     * @return two counts
     */
    public int[] depthForAlleles(int taxon, int site);

    /**
     * Returns all alleles at given site in order defined by scope.
     *
     * @param scope scope
     * @param site site
     *
     * @return alleles
     */
    public byte[] allelesBySortType(GenotypeTable.ALLELE_SORT_TYPE scope, int site);

    /**
     * Returns sequence of true/false values indicating whether site at each
     * taxon matches a specific allele.
     *
     * @param site site
     * @param allele allele
     *
     * @return sequence of true/false values.
     */
    public BitSet allelePresenceForAllTaxa(int site, WHICH_ALLELE allele);

    /**
     * Returns BitStorage for this Genotype
     *
     * @param allele allele
     *
     * @return BitStorage
     */
    public BitStorage bitStorage(GenotypeTable.WHICH_ALLELE allele);
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.kafka.streams.processor.internals;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.record.TimestampType;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.LongSerializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.utils.LogContext;
import org.apache.kafka.common.utils.Utils;
import org.apache.kafka.streams.errors.LogAndContinueExceptionHandler;
import org.apache.kafka.streams.errors.LogAndFailExceptionHandler;
import org.apache.kafka.streams.errors.StreamsException;
import org.apache.kafka.streams.processor.StateStore;
import org.apache.kafka.test.GlobalStateManagerStub;
import org.apache.kafka.test.MockProcessorNode;
import org.apache.kafka.test.MockSourceNode;
import org.apache.kafka.test.NoOpProcessorContext;
import org.junit.Before;
import org.junit.Test;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import static java.util.Arrays.asList;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Unit tests for {@link GlobalStateUpdateTask}: initialization of the state
 * manager / context / topology, record routing to the correct source node,
 * deserialization error handling, and offset checkpointing on flush.
 */
public class GlobalStateTaskTest {

    private final LogContext logContext = new LogContext();

    private final String topic1 = "t1";
    private final String topic2 = "t2";
    private final TopicPartition t1 = new TopicPartition(topic1, 1);
    private final TopicPartition t2 = new TopicPartition(topic2, 1);

    // topic1 carries String keys/values, topic2 carries Integer keys/values,
    // so a record routed to the wrong node fails deserialization.
    private final MockSourceNode sourceOne = new MockSourceNode<>(
        new String[]{topic1},
        new StringDeserializer(),
        new StringDeserializer());
    private final MockSourceNode sourceTwo = new MockSourceNode<>(
        new String[]{topic2},
        new IntegerDeserializer(),
        new IntegerDeserializer());
    private final MockProcessorNode processorOne = new MockProcessorNode<>();
    private final MockProcessorNode processorTwo = new MockProcessorNode<>();

    private final Map<TopicPartition, Long> offsets = new HashMap<>();
    private final NoOpProcessorContext context = new NoOpProcessorContext();

    private ProcessorTopology topology;
    private GlobalStateManagerStub stateMgr;
    private GlobalStateUpdateTask globalStateTask;

    @Before
    public void before() {
        final Set<String> stores = Utils.mkSet("t1-store", "t2-store");

        final Map<String, SourceNode> sourcesByTopic = new HashMap<>();
        sourcesByTopic.put(topic1, sourceOne);
        sourcesByTopic.put(topic2, sourceTwo);

        final Map<String, String> storeToChangelog = new HashMap<>();
        storeToChangelog.put("t1-store", topic1);
        storeToChangelog.put("t2-store", topic2);

        topology = ProcessorTopology.with(
            asList(sourceOne, sourceTwo, processorOne, processorTwo),
            sourcesByTopic,
            Collections.<StateStore>emptyList(),
            storeToChangelog);

        offsets.put(t1, 50L);
        offsets.put(t2, 100L);
        stateMgr = new GlobalStateManagerStub(stores, offsets);
        globalStateTask = new GlobalStateUpdateTask(
            topology,
            context,
            stateMgr,
            new LogAndFailExceptionHandler(),
            logContext);
    }

    @Test
    public void shouldInitializeStateManager() {
        final Map<TopicPartition, Long> startingOffsets = globalStateTask.initialize();
        assertTrue(stateMgr.initialized);
        assertEquals(offsets, startingOffsets);
    }

    @Test
    public void shouldInitializeContext() {
        globalStateTask.initialize();
        assertTrue(context.initialized);
    }

    @Test
    public void shouldInitializeProcessorTopology() {
        globalStateTask.initialize();
        assertTrue(sourceOne.initialized);
        assertTrue(sourceTwo.initialized);
        assertTrue(processorOne.initialized);
        assertTrue(processorTwo.initialized);
    }

    @Test
    public void shouldProcessRecordsForTopic() {
        globalStateTask.initialize();
        globalStateTask.update(new ConsumerRecord<>(topic1, 1, 1, "foo".getBytes(), "bar".getBytes()));
        assertEquals(1, sourceOne.numReceived);
        assertEquals(0, sourceTwo.numReceived);
    }

    @Test
    public void shouldProcessRecordsForOtherTopic() {
        final byte[] integerBytes = new IntegerSerializer().serialize("foo", 1);
        globalStateTask.initialize();
        globalStateTask.update(new ConsumerRecord<>(topic2, 1, 1, integerBytes, integerBytes));
        assertEquals(1, sourceTwo.numReceived);
        assertEquals(0, sourceOne.numReceived);
    }

    /**
     * Feed a raw record on topic2 into the given task and assert whether the
     * deserialization exception handler let it through or failed the task.
     */
    private void maybeDeserialize(final GlobalStateUpdateTask task,
                                  final byte[] rawKey,
                                  final byte[] rawValue,
                                  final boolean failExpected) {
        final ConsumerRecord<byte[], byte[]> rawRecord = new ConsumerRecord<>(
            topic2, 1, 1, 0L, TimestampType.CREATE_TIME,
            0L, 0, 0, rawKey, rawValue
        );
        task.initialize();
        try {
            task.update(rawRecord);
            if (failExpected) {
                fail("Should have failed to deserialize.");
            }
        } catch (final StreamsException e) {
            if (!failExpected) {
                fail("Shouldn't have failed to deserialize.");
            }
        }
    }

    @Test
    public void shouldThrowStreamsExceptionWhenKeyDeserializationFails() {
        // Long bytes can't be read back as an Integer key.
        final byte[] key = new LongSerializer().serialize(topic2, 1L);
        final byte[] recordValue = new IntegerSerializer().serialize(topic2, 10);
        maybeDeserialize(globalStateTask, key, recordValue, true);
    }

    @Test
    public void shouldThrowStreamsExceptionWhenValueDeserializationFails() {
        final byte[] key = new IntegerSerializer().serialize(topic2, 1);
        final byte[] recordValue = new LongSerializer().serialize(topic2, 10L);
        maybeDeserialize(globalStateTask, key, recordValue, true);
    }

    @Test
    public void shouldNotThrowStreamsExceptionWhenKeyDeserializationFailsWithSkipHandler() {
        final GlobalStateUpdateTask taskWithSkipHandler = new GlobalStateUpdateTask(
            topology,
            context,
            stateMgr,
            new LogAndContinueExceptionHandler(),
            logContext
        );
        final byte[] key = new LongSerializer().serialize(topic2, 1L);
        final byte[] recordValue = new IntegerSerializer().serialize(topic2, 10);
        maybeDeserialize(taskWithSkipHandler, key, recordValue, false);
    }

    @Test
    public void shouldNotThrowStreamsExceptionWhenValueDeserializationFails() {
        final GlobalStateUpdateTask taskWithSkipHandler = new GlobalStateUpdateTask(
            topology,
            context,
            stateMgr,
            new LogAndContinueExceptionHandler(),
            logContext
        );
        final byte[] key = new IntegerSerializer().serialize(topic2, 1);
        final byte[] recordValue = new LongSerializer().serialize(topic2, 10L);
        maybeDeserialize(taskWithSkipHandler, key, recordValue, false);
    }

    @Test
    public void shouldFlushStateManagerWithOffsets() throws IOException {
        // consumed offset 51 on t1 => checkpointed offset is 52 (next to read)
        final Map<TopicPartition, Long> expectedOffsets = new HashMap<>();
        expectedOffsets.put(t1, 52L);
        expectedOffsets.put(t2, 100L);
        globalStateTask.initialize();
        globalStateTask.update(new ConsumerRecord<>(topic1, 1, 51, "foo".getBytes(), "foo".getBytes()));
        globalStateTask.flushState();
        assertEquals(expectedOffsets, stateMgr.checkpointed());
    }

    @Test
    public void shouldCheckpointOffsetsWhenStateIsFlushed() {
        final Map<TopicPartition, Long> expectedOffsets = new HashMap<>();
        expectedOffsets.put(t1, 102L);
        expectedOffsets.put(t2, 100L);
        globalStateTask.initialize();
        globalStateTask.update(new ConsumerRecord<>(topic1, 1, 101, "foo".getBytes(), "foo".getBytes()));
        globalStateTask.flushState();
        assertThat(stateMgr.checkpointed(), equalTo(expectedOffsets));
    }

}
package com.elmakers.mine.bukkit.utility.platform.base;

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.util.logging.Level;

import org.bukkit.Bukkit;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.entity.Player;

import com.elmakers.mine.bukkit.utility.Base64Coder;
import com.elmakers.mine.bukkit.utility.CompatibilityConstants;
import com.elmakers.mine.bukkit.utility.ProfileCallback;
import com.elmakers.mine.bukkit.utility.ProfileResponse;
import com.elmakers.mine.bukkit.utility.UUIDCallback;
import com.elmakers.mine.bukkit.utility.platform.Platform;
import com.elmakers.mine.bukkit.utility.platform.SkinUtils;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

/**
 * Base implementation of {@link SkinUtils}: resolves player UUIDs and skin
 * profiles via the Mojang web APIs, with an in-memory cache, an on-disk
 * YAML cache under {@code data/profiles}, and a 10-minute holdoff after
 * failed lookups to avoid hammering the API.
 *
 * <p>Thread-safety: the in-memory caches are guarded by synchronizing on the
 * map instances themselves; per-name / per-UUID lock objects serialize
 * concurrent remote lookups for the same key. Callbacks for asynchronous
 * lookups are always delivered back on the main server thread.
 */
public abstract class SkinUtilsBase implements SkinUtils {
    protected final Platform platform;
    // Guarded by synchronized (responseCache)
    protected final Map<UUID, ProfileResponse> responseCache = new HashMap<>();
    // Guarded by synchronized (uuidCache)
    protected final Map<String, UUID> uuidCache = new HashMap<>();
    // Per-player-name lock objects; guarded by synchronized (loadingUUIDs)
    protected final Map<String, Object> loadingUUIDs = new HashMap<>();
    // Per-UUID lock objects; guarded by synchronized (loadingProfiles)
    protected final Map<UUID, Object> loadingProfiles = new HashMap<>();
    protected Gson gson;
    // Delay (ms) applied to the next remote lookup after a failure; 10 minutes.
    protected long holdoff = 0;

    protected SkinUtilsBase(final Platform platform) {
        this.platform = platform;
    }

    @Override
    public Gson getGson() {
        // Lazily created; not thread-safe, but Gson instances are cheap and
        // stateless so a rare double-create is harmless.
        if (gson == null) {
            gson = new Gson();
        }
        return gson;
    }

    /**
     * Extract the skin URL from a decoded Mojang "textures" property JSON
     * blob, or return null if no SKIN texture is present.
     */
    @Override
    public String getTextureURL(String texturesJson) {
        String url = null;
        JsonElement element = new JsonParser().parse(texturesJson);
        if (element != null && element.isJsonObject()) {
            JsonObject object = element.getAsJsonObject();
            JsonObject texturesObject = object.getAsJsonObject("textures");
            if (texturesObject != null && texturesObject.has("SKIN")) {
                JsonObject skin = texturesObject.getAsJsonObject("SKIN");
                if (skin != null && skin.has("url")) {
                    url = skin.get("url").getAsString();
                }
            }
        }
        return url;
    }

    @Override
    public String getOnlineSkinURL(Player player) {
        Object profile = getProfile(player);
        return profile == null ? null : getProfileURL(profile);
    }

    @Override
    public String getOnlineSkinURL(String playerName) {
        // Caller may pass a raw URL directly; pass it through unchanged.
        if (playerName.startsWith("http")) return playerName;
        Player player = platform.getDeprecatedUtils().getPlayerExact(playerName);
        String url = null;
        if (player != null) {
            url = getOnlineSkinURL(player);
        }
        return url;
    }

    /**
     * Fetch the body of the given URL as a UTF-8 string.
     * Blocking; must only be called from an async task.
     *
     * @throws IOException on connection or read failure
     */
    private String fetchURL(String urlString) throws IOException {
        // StringBuilder (not StringBuffer): builder is method-local, no
        // synchronization needed.
        StringBuilder response = new StringBuilder();
        URL url = new URL(urlString);
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setConnectTimeout(30000);
        conn.setReadTimeout(30000);
        conn.setInstanceFollowRedirects(true);
        try (InputStream in = conn.getInputStream()) {
            BufferedReader reader = new BufferedReader(
                new InputStreamReader(in, StandardCharsets.UTF_8));
            String inputLine = "";
            while ((inputLine = reader.readLine()) != null) {
                response.append(inputLine);
            }
        }
        return response.toString();
    }

    /** Delay further remote lookups by 10 minutes after a failure. */
    private void engageHoldoff() {
        holdoff = 10 * 60000;
    }

    /** Deliver a UUID lookup result on the main server thread. */
    private void synchronizeCallbackUUID(final UUIDCallback callback, final UUID uuid) {
        Bukkit.getScheduler().runTask(platform.getPlugin(), new Runnable() {
            @Override
            public void run() {
                callback.result(uuid);
            }
        });
    }

    /** Deliver a profile lookup result on the main server thread. */
    private void synchronizeCallbackProfile(final ProfileCallback callback, final ProfileResponse response) {
        Bukkit.getScheduler().runTask(platform.getPlugin(), new Runnable() {
            @Override
            public void run() {
                callback.result(response);
            }
        });
    }

    /**
     * Resolve a player name to a UUID. Resolution order: online player,
     * in-memory cache, on-disk cache, then the Mojang API (async). The
     * callback may be invoked synchronously (online/cached) or later on the
     * main thread (remote lookup).
     */
    @Override
    public void fetchUUID(final String playerName, final UUIDCallback callback) {
        final Player onlinePlayer = platform.getDeprecatedUtils().getPlayerExact(playerName);
        if (onlinePlayer != null) {
            final UUID uuid = onlinePlayer.getUniqueId();
            boolean contains;
            synchronized (uuidCache) {
                contains = uuidCache.containsKey(playerName);
                if (!contains) {
                    uuidCache.put(playerName, onlinePlayer.getUniqueId());
                }
            }
            if (!contains) {
                // First sighting: persist to the on-disk cache off the main thread.
                Bukkit.getScheduler().runTaskAsynchronously(platform.getPlugin(), new Runnable() {
                    @Override
                    public void run() {
                        File cacheFolder = new File(platform.getPlugin().getDataFolder(), "data/profiles");
                        if (!cacheFolder.exists()) {
                            cacheFolder.mkdirs();
                        }
                        try {
                            File playerCache = new File(cacheFolder, playerName + ".yml");
                            YamlConfiguration config = new YamlConfiguration();
                            config.set("uuid", uuid.toString());
                            config.save(playerCache);
                        } catch (IOException ex) {
                            platform.getLogger().log(Level.WARNING, "Error saving to player UUID cache", ex);
                        }
                    }
                });
            }
            callback.result(onlinePlayer.getUniqueId());
            return;
        }

        UUID cached;
        synchronized (uuidCache) {
            cached = uuidCache.get(playerName);
        }
        if (cached != null) {
            callback.result(cached);
            return;
        }

        // Remote lookup, delayed by the current holdoff (ticks = ms / 50).
        Bukkit.getScheduler().runTaskLaterAsynchronously(platform.getPlugin(), new Runnable() {
            @Override
            public void run() {
                // One lock object per player name so concurrent lookups for
                // the same name are serialized.
                Object lock;
                synchronized (loadingUUIDs) {
                    lock = loadingUUIDs.get(playerName);
                    if (lock == null) {
                        lock = new Object();
                        loadingUUIDs.put(playerName, lock);
                    }
                }
                synchronized (lock) {
                    // Re-check the cache: another thread may have finished
                    // the lookup while we waited for the lock.
                    UUID cached;
                    synchronized (uuidCache) {
                        cached = uuidCache.get(playerName);
                    }
                    if (cached != null) {
                        callback.result(cached);
                        return;
                    }
                    File cacheFolder = new File(platform.getPlugin().getDataFolder(), "data/profiles");
                    if (!cacheFolder.exists()) {
                        cacheFolder.mkdirs();
                    }
                    UUID uuid;
                    final File playerCache = new File(cacheFolder, playerName + ".yml");
                    try {
                        if (playerCache.exists()) {
                            YamlConfiguration config = YamlConfiguration.loadConfiguration(playerCache);
                            uuid = UUID.fromString(config.getString("uuid"));
                        } else {
                            String uuidJSON = fetchURL("https://api.mojang.com/users/profiles/minecraft/" + playerName);
                            if (uuidJSON.isEmpty()) {
                                if (CompatibilityConstants.DEBUG) platform.getLogger().warning("Got empty UUID JSON for " + playerName);
                                synchronizeCallbackUUID(callback, null);
                                return;
                            }
                            String uuidString = null;
                            JsonElement element = new JsonParser().parse(uuidJSON);
                            if (element != null && element.isJsonObject()) {
                                uuidString = element.getAsJsonObject().get("id").getAsString();
                            }
                            if (uuidString == null) {
                                engageHoldoff();
                                if (CompatibilityConstants.DEBUG) platform.getLogger().warning("Failed to parse UUID JSON for " + playerName + ", will not retry for 10 minutes");
                                synchronizeCallbackUUID(callback, null);
                                return;
                            }
                            if (CompatibilityConstants.DEBUG) platform.getLogger().info("Got UUID: " + uuidString + " for " + playerName);
                            // Mojang returns the UUID without dashes.
                            uuid = UUID.fromString(addDashes(uuidString));
                            YamlConfiguration config = new YamlConfiguration();
                            config.set("uuid", uuid.toString());
                            config.save(playerCache);
                        }
                        synchronized (uuidCache) {
                            uuidCache.put(playerName, uuid);
                        }
                    } catch (Exception ex) {
                        if (CompatibilityConstants.DEBUG) {
                            platform.getLogger().log(Level.WARNING, "Failed to fetch UUID for: " + playerName + ", will not retry for 10 minutes", ex);
                        } else {
                            platform.getLogger().log(Level.WARNING, "Failed to fetch UUID for: " + playerName + ", will not retry for 10 minutes");
                        }
                        engageHoldoff();
                        uuid = null;
                    }
                    synchronizeCallbackUUID(callback, uuid);
                }
            }
        }, holdoff / 50);
    }

    /**
     * Re-insert the dashes into a 32-character undashed UUID string so that
     * {@link UUID#fromString(String)} can parse it.
     */
    private String addDashes(String uuidString) {
        StringBuilder builder = new StringBuilder(uuidString);
        for (int i = 8, j = 0; i <= 20; i += 4, j++)
            builder.insert(i + j, '-');
        return builder.toString();
    }

    /** Resolve a player name to a profile by chaining the UUID lookup. */
    @Override
    public void fetchProfile(final String playerName, final ProfileCallback callback) {
        fetchUUID(playerName, new UUIDCallback() {
            @Override
            public void result(UUID uuid) {
                if (uuid != null) {
                    fetchProfile(uuid, callback);
                } else {
                    callback.result(null);
                }
            }
        });
    }

    /**
     * Resolve a UUID to a skin profile. Resolution order: online player,
     * in-memory cache, on-disk cache, then the Mojang session server (async).
     */
    @Override
    public void fetchProfile(final UUID uuid, final ProfileCallback callback) {
        final Player onlinePlayer = Bukkit.getPlayer(uuid);
        if (onlinePlayer != null) {
            boolean contains;
            final ProfileResponse response = new ProfileResponse(this, platform.getLogger(), onlinePlayer);
            synchronized (responseCache) {
                contains = responseCache.containsKey(uuid);
                if (!contains) {
                    responseCache.put(uuid, response);
                }
            }
            if (!contains) {
                // First sighting: persist to the on-disk cache off the main thread.
                Bukkit.getScheduler().runTaskAsynchronously(platform.getPlugin(), new Runnable() {
                    @Override
                    public void run() {
                        File cacheFolder = new File(platform.getPlugin().getDataFolder(), "data/profiles");
                        if (!cacheFolder.exists()) {
                            cacheFolder.mkdirs();
                        }
                        try {
                            File playerCache = new File(cacheFolder, uuid + ".yml");
                            YamlConfiguration config = new YamlConfiguration();
                            response.save(config);
                            config.save(playerCache);
                        } catch (IOException ex) {
                            platform.getLogger().log(Level.WARNING, "Error saving to player profile cache", ex);
                        }
                    }
                });
            }
            callback.result(response);
            return;
        }

        ProfileResponse cached;
        synchronized (responseCache) {
            cached = responseCache.get(uuid);
        }
        if (cached != null) {
            callback.result(cached);
            return;
        }

        final SkinUtils skinUtils = this;
        // Remote lookup, delayed by the current holdoff (ticks = ms / 50).
        Bukkit.getScheduler().runTaskLaterAsynchronously(platform.getPlugin(), new Runnable() {
            @Override
            public void run() {
                // One lock object per UUID so concurrent lookups for the
                // same profile are serialized.
                // FIX: previously this synchronized on loadingUUIDs while
                // reading/writing loadingProfiles; guard loadingProfiles
                // with its own monitor.
                Object lock;
                synchronized (loadingProfiles) {
                    lock = loadingProfiles.get(uuid);
                    if (lock == null) {
                        lock = new Object();
                        loadingProfiles.put(uuid, lock);
                    }
                }
                synchronized (lock) {
                    // Re-check the cache: another thread may have finished
                    // the lookup while we waited for the lock.
                    ProfileResponse cached;
                    synchronized (responseCache) {
                        cached = responseCache.get(uuid);
                    }
                    if (cached != null) {
                        callback.result(cached);
                        return;
                    }
                    File cacheFolder = new File(platform.getPlugin().getDataFolder(), "data/profiles");
                    if (!cacheFolder.exists()) {
                        cacheFolder.mkdirs();
                    }
                    final File playerCache = new File(cacheFolder, uuid + ".yml");
                    if (playerCache.exists()) {
                        YamlConfiguration config = YamlConfiguration.loadConfiguration(playerCache);
                        ProfileResponse fromCache = new ProfileResponse(skinUtils, config);
                        synchronized (responseCache) {
                            responseCache.put(uuid, fromCache);
                        }
                        synchronizeCallbackProfile(callback, fromCache);
                        return;
                    }
                    if (CompatibilityConstants.DEBUG) {
                        platform.getLogger().info("Fetching profile for " + uuid);
                    }
                    try {
                        // Session server expects the UUID without dashes.
                        String profileJSON = fetchURL("https://sessionserver.mojang.com/session/minecraft/profile/" + uuid.toString().replace("-", ""));
                        if (profileJSON.isEmpty()) {
                            synchronizeCallbackProfile(callback, null);
                            engageHoldoff();
                            if (CompatibilityConstants.DEBUG) platform.getLogger().warning("Failed to fetch profile JSON for " + uuid + ", will not retry for 10 minutes");
                            return;
                        }
                        if (CompatibilityConstants.DEBUG) platform.getLogger().info("Got profile: " + profileJSON);
                        JsonElement element = new JsonParser().parse(profileJSON);
                        if (element == null || !element.isJsonObject()) {
                            synchronizeCallbackProfile(callback, null);
                            engageHoldoff();
                            if (CompatibilityConstants.DEBUG) platform.getLogger().warning("Failed to parse profile JSON for " + uuid + ", will not retry for 10 minutes");
                            return;
                        }

                        // Find the base64-encoded "textures" property.
                        JsonObject profileJson = element.getAsJsonObject();
                        JsonArray properties = profileJson.getAsJsonArray("properties");
                        String encodedTextures = null;
                        for (int i = 0; i < properties.size(); i++) {
                            JsonElement property = properties.get(i);
                            if (property.isJsonObject()) {
                                JsonObject objectProperty = property.getAsJsonObject();
                                if (objectProperty.has("name") && objectProperty.has("value")) {
                                    if (objectProperty.get("name").getAsString().equals("textures")) {
                                        encodedTextures = objectProperty.get("value").getAsString();
                                        break;
                                    }
                                }
                            }
                        }
                        if (encodedTextures == null) {
                            synchronizeCallbackProfile(callback, null);
                            engageHoldoff();
                            if (CompatibilityConstants.DEBUG) platform.getLogger().warning("Failed to find textures in profile JSON, will not retry for 10 minutes");
                            return;
                        }
                        String decodedTextures = Base64Coder.decodeString(encodedTextures);
                        if (CompatibilityConstants.DEBUG) platform.getLogger().info("Decoded textures: " + decodedTextures);
                        String skinURL = getTextureURL(decodedTextures);

                        // A null skin URL here is normal if the player has no skin.
                        if (CompatibilityConstants.DEBUG) platform.getLogger().info("Got skin URL: " + skinURL + " for " + profileJson.get("name").getAsString());
                        ProfileResponse response = new ProfileResponse(skinUtils, uuid, profileJson.get("name").getAsString(), skinURL, profileJSON);
                        synchronized (responseCache) {
                            responseCache.put(uuid, response);
                        }
                        YamlConfiguration saveToCache = new YamlConfiguration();
                        response.save(saveToCache);
                        saveToCache.save(playerCache);
                        synchronizeCallbackProfile(callback, response);
                        // Successful fetch clears any failure holdoff.
                        holdoff = 0;
                    } catch (Exception ex) {
                        if (CompatibilityConstants.DEBUG) {
                            platform.getLogger().log(Level.WARNING, "Failed to fetch profile for: " + uuid + ", will not retry for 10 minutes", ex);
                        } else {
                            platform.getLogger().log(Level.WARNING, "Failed to fetch profile for: " + uuid + ", will not retry for 10 minutes");
                        }
                        engageHoldoff();
                        synchronizeCallbackProfile(callback, null);
                    }
                }
            }
        }, holdoff / 50);
    }
}
/* * Copyright 2017 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jbpm.test.functional.timer; import java.io.IOException; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import javax.naming.InitialContext; import javax.persistence.Persistence; import javax.sql.DataSource; import org.drools.core.time.TimerService; import org.jbpm.process.core.timer.TimerServiceRegistry; import org.jbpm.process.core.timer.impl.GlobalTimerService; import org.jbpm.process.core.timer.impl.QuartzSchedulerService; import org.jbpm.runtime.manager.impl.AbstractRuntimeManager; import org.jbpm.services.task.identity.JBossUserGroupCallbackImpl; import org.jbpm.test.listener.process.NodeLeftCountDownProcessEventListener; import org.junit.After; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; import org.kie.api.event.process.DefaultProcessEventListener; import org.kie.api.event.process.ProcessEventListener; import org.kie.api.event.process.ProcessNodeLeftEvent; import org.kie.api.event.process.ProcessStartedEvent; import org.kie.api.io.ResourceType; import 
org.kie.api.runtime.KieSession; import org.kie.api.runtime.manager.RuntimeEngine; import org.kie.api.runtime.manager.RuntimeEnvironment; import org.kie.api.runtime.manager.RuntimeEnvironmentBuilder; import org.kie.api.runtime.manager.RuntimeManager; import org.kie.api.runtime.manager.RuntimeManagerFactory; import org.kie.api.runtime.process.ProcessInstance; import org.kie.api.task.UserGroupCallback; import org.kie.internal.io.ResourceFactory; import org.kie.internal.runtime.manager.context.ProcessInstanceIdContext; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @RunWith(Parameterized.class) public class GlobalQuartzDBTimerServiceTest extends GlobalTimerServiceBaseTest { private int managerType; @Parameters public static Collection<Object[]> persistence() { Object[][] data = new Object[][] { { 1 }, { 2 }, { 3 } }; return Arrays.asList(data); }; public GlobalQuartzDBTimerServiceTest(int managerType) { this.managerType = managerType; } @Before public void setUp() throws IOException, SQLException { cleanupSingletonSessionId(); createTimerSchema(); System.setProperty("org.quartz.properties", "quartz-db.properties"); emf = Persistence.createEntityManagerFactory("org.jbpm.test.persistence"); globalScheduler = new QuartzSchedulerService(); ((QuartzSchedulerService)globalScheduler).forceShutdown(); } @After public void tearDown() throws IOException, SQLException { try { globalScheduler.shutdown(); } finally { dropTimerSchema(); System.clearProperty("org.quartz.properties"); cleanup(); } } @Override protected RuntimeManager getManager(RuntimeEnvironment environment, boolean waitOnStart) { RuntimeManager manager = null; if (managerType ==1) { manager = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment); } else if (managerType == 2) { manager = RuntimeManagerFactory.Factory.get().newPerRequestRuntimeManager(environment); } else if 
// NOTE(review): this chunk starts mid-way through a getManager(environment, waitOnStart)
// helper — the preceding "} else if" lives above this view. managerType presumably selects
// the RuntimeManager strategy (3 = per-process-instance); confirm against the method header.
(managerType == 3) {
    manager = RuntimeManagerFactory.Factory.get().newPerProcessInstanceRuntimeManager(environment);
} else {
    throw new IllegalArgumentException("Invalid runtime manager type");
}
if (waitOnStart) {
    // wait for the 2 seconds (default startup delay for quartz)
    try {
        Thread.sleep(2000);
    } catch (InterruptedException e) {
        // do nothing
    }
}
return manager;
}

/**
 * Verifies that once the runtime manager is closed (with cleanup), the timer-start
 * process stops firing: the expiration count observed at dispose time must not grow
 * after close(true).
 */
@Test(timeout=20000)
public void testTimerStartManagerClose() {
    NodeLeftCountDownProcessEventListener countDownListener = new NodeLeftCountDownProcessEventListener("StartProcess", 3);
    // a second scheduler instance kept alive so shutdown ordering can be controlled at the end
    QuartzSchedulerService additionalCopy = new QuartzSchedulerService();
    additionalCopy.initScheduler(null);
    // prepare listener to assert results
    final List<Long> timerExporations = new ArrayList<Long>();
    ProcessEventListener listener = new DefaultProcessEventListener(){
        @Override
        public void beforeProcessStarted(ProcessStartedEvent event) {
            timerExporations.add(event.getProcessInstance().getId());
        }
    };
    environment = RuntimeEnvironmentBuilder.Factory.get()
        .newDefaultBuilder()
        .entityManagerFactory(emf)
        .addAsset(ResourceFactory.newClassPathResource("org/jbpm/test/functional/timer/TimerStart2.bpmn2"), ResourceType.BPMN2)
        .schedulerService(globalScheduler)
        .registerableItemsFactory(new TestRegisterableItemsFactory(listener, countDownListener))
        .get();
    manager = getManager(environment, false);
    RuntimeEngine runtime = manager.getRuntimeEngine(ProcessInstanceIdContext.get());
    KieSession ksession = runtime.getKieSession();
    assertEquals(0, timerExporations.size());
    // wait until the timer-start event has launched the expected number of instances
    countDownListener.waitTillCompleted();
    manager.disposeRuntimeEngine(runtime);
    int atDispose = timerExporations.size();
    assertTrue(atDispose > 0);
    // close with cleanup: timer jobs must be removed so no further starts happen
    ((AbstractRuntimeManager)manager).close(true);
    countDownListener.reset(1);
    countDownListener.waitTillCompleted(3000);
    // count must be frozen at the value seen when the engine was disposed
    assertEquals(atDispose, timerExporations.size());
    additionalCopy.shutdown();
}

/**
 * Test that illustrates that jobs are persisted and survives server restart
 * and as soon as GlobalTimerService is active jobs are fired and it loads and aborts the
 * process instance to illustrate jobs are properly removed when instance is aborted
 * NOTE: this test is disabled by default as it requires real db (not in memory)
 * and test to be executed separately each with new jvm process
 */
@Test
@Ignore
public void testAbortGlobalTestService() throws Exception {
    RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get()
        .newDefaultBuilder()
        .entityManagerFactory(emf)
        .addAsset(ResourceFactory.newClassPathResource("org/jbpm/test/functional/timer/IntermediateCatchEventTimerCycle3.bpmn2"), ResourceType.BPMN2)
        .addConfiguration("drools.timerService", "org.jbpm.process.core.timer.impl.RegisteredTimerServiceDelegate")
        .get();
    RuntimeManager manger = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment);
    // build GlobalTimerService instance
    TimerService globalTs = new GlobalTimerService(manger, globalScheduler);
    // and register it in the registry under 'default' key
    TimerServiceRegistry.getInstance().registerTimerService("default", globalTs);
    // prepare listener to assert results
    final List<Long> timerExporations = new ArrayList<Long>();
    ProcessEventListener listener = new DefaultProcessEventListener(){
        @Override
        public void afterNodeLeft(ProcessNodeLeftEvent event) {
            if (event.getNodeInstance().getNodeName().equals("timer")) {
                timerExporations.add(event.getProcessInstance().getId());
            }
        }
    };
    // NOTE(review): id is hard-coded to -1; when run manually it is presumably edited to the
    // instance id persisted by a previous JVM run (see javadoc) — confirm before enabling.
    long id = -1;
    Thread.sleep(5000);
    RuntimeEngine runtime = manger.getRuntimeEngine(ProcessInstanceIdContext.get());
    KieSession ksession = runtime.getKieSession();
    ksession.addEventListener(listener);
    ksession.abortProcessInstance(id);
    ProcessInstance processInstance = ksession.getProcessInstance(id);
    assertNull(processInstance);
    // let's wait to ensure no more timers are expired and triggered
    Thread.sleep(3000);
    ksession.dispose();
}

/**
 * Test that illustrates that jobs are persisted and survives server restart
 * and as soon as GlobalTimerService is active jobs are fired
 * NOTE: this test is disabled by default as it requires real db (not in memory)
 * and test to be executed separately each with new jvm process
 */
@Test
@Ignore
public void testContinueGlobalTestService() throws Exception {
    RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get()
        .newDefaultBuilder()
        .entityManagerFactory(emf)
        .addAsset(ResourceFactory.newClassPathResource("org/jbpm/test/functional/timer/IntermediateCatchEventTimerCycle2.bpmn2"), ResourceType.BPMN2)
        .addConfiguration("drools.timerService", "org.jbpm.process.core.timer.impl.RegisteredTimerServiceDelegate")
        .get();
    RuntimeManager manger = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment);
    // build GlobalTimerService instance
    TimerService globalTs = new GlobalTimerService(manger, globalScheduler);
    // and register it in the registry under 'default' key
    TimerServiceRegistry.getInstance().registerTimerService("default", globalTs);
    // prepare listener to assert results
    final List<Long> timerExporations = new ArrayList<Long>();
    ProcessEventListener listener = new DefaultProcessEventListener(){
        @Override
        public void afterNodeLeft(ProcessNodeLeftEvent event) {
            if (event.getNodeInstance().getNodeName().equals("timer")) {
                timerExporations.add(event.getProcessInstance().getId());
            }
        }
    };
    // simply give the (externally restarted) scheduler time to fire the persisted jobs
    Thread.sleep(5000);
}

/**
 * JBPM-4443: a timer-cycle process started before manager.close() must continue firing
 * after the manager (and its default TimerService) is re-created, for a total of 2
 * expirations across the restart.
 */
@Test(timeout=20000)
public void testContinueTimer() {
    // JBPM-4443
    NodeLeftCountDownProcessEventListener countDownListener = new NodeLeftCountDownProcessEventListener("timer", 2);
    // prepare listener to assert results
    final List<Long> timerExporations = new ArrayList<Long>();
    ProcessEventListener listener = new DefaultProcessEventListener(){
        @Override
        public void afterNodeLeft(ProcessNodeLeftEvent event) {
            if (event.getNodeInstance().getNodeName().equals("timer")) {
                timerExporations.add(event.getProcessInstance().getId());
            }
        }
    };
    // No special configuration for TimerService in order to test RuntimeManager default
    environment = RuntimeEnvironmentBuilder.Factory.get()
        .newDefaultBuilder()
        .entityManagerFactory(emf)
        .addAsset(ResourceFactory.newClassPathResource("org/jbpm/test/functional/timer/IntermediateCatchEventTimerCycle4.bpmn2"), ResourceType.BPMN2)
        .registerableItemsFactory(new TestRegisterableItemsFactory(listener, countDownListener))
        .get();
    manager = getManager(environment, true);
    RuntimeEngine runtime = manager.getRuntimeEngine(ProcessInstanceIdContext.get());
    KieSession ksession = runtime.getKieSession();
    ProcessInstance processInstance = ksession.startProcess("IntermediateCatchEvent");
    manager.disposeRuntimeEngine(runtime);
    countDownListener.waitTillCompleted();
    manager.close();
    countDownListener.reset(1);
    // ---- restart ----
    environment = RuntimeEnvironmentBuilder.Factory.get()
        .newDefaultBuilder()
        .entityManagerFactory(emf)
        .addAsset(ResourceFactory.newClassPathResource("org/jbpm/test/functional/timer/IntermediateCatchEventTimerCycle4.bpmn2"), ResourceType.BPMN2)
        .registerableItemsFactory(new TestRegisterableItemsFactory(listener))
        .get();
    manager = getManager(environment, true);
    // NOTE(review): 'runtime' here is the engine obtained from the PREVIOUS manager; it is
    // disposed against the new one — looks intentional (cleanup of a stale handle) but verify.
    manager.disposeRuntimeEngine(runtime);
    countDownListener.waitTillCompleted(3000);
    assertEquals(2, timerExporations.size());
}

/**
 * Asserts that Quartz jobs created for a boundary timer are stored with
 * REQUESTS_RECOVERY = true and a JOB_NAME containing the timer's node name, by reading
 * the QRTZ_JOB_DETAILS table directly over JDBC ("jdbc/jbpm-ds" JNDI data source).
 */
@Test(timeout=20000)
public void testTimerRequiresRecoveryJobNameFlagSet() throws Exception {
    Properties properties= new Properties();
    properties.setProperty("mary", "HR");
    properties.setProperty("john", "HR");
    UserGroupCallback userGroupCallback = new JBossUserGroupCallbackImpl(properties);
    environment = RuntimeEnvironmentBuilder.Factory.get()
        .newDefaultBuilder()
        .entityManagerFactory(emf)
        .addAsset(ResourceFactory.newClassPathResource("org/jbpm/test/functional/timer/HumanTaskWithBoundaryTimer.bpmn"), ResourceType.BPMN2)
        .schedulerService(globalScheduler)
        .userGroupCallback(userGroupCallback)
        .get();
    manager = getManager(environment, true);
    RuntimeEngine runtime = manager.getRuntimeEngine(ProcessInstanceIdContext.get());
    KieSession ksession = runtime.getKieSession();
    Map<String, Object> params = new HashMap<String, Object>();
    params.put("test", "john");
    ProcessInstance processInstance = ksession.startProcess("PROCESS_1", params);
    Connection connection = null;
    Statement stmt = null;
    try {
        // inspect the Quartz job table directly to validate how the timer job was persisted
        connection = ((DataSource)InitialContext.doLookup("jdbc/jbpm-ds")).getConnection();
        stmt = connection.createStatement();
        ResultSet resultSet = stmt.executeQuery("select REQUESTS_RECOVERY, JOB_NAME from QRTZ_JOB_DETAILS");
        while(resultSet.next()) {
            boolean requestsRecovery = resultSet.getBoolean(1);
            assertEquals("Requests recovery must be set to true", true, requestsRecovery);
            String jobName = resultSet.getString(2);
            assertTrue(jobName + " does not contain timer name", jobName.contains("Boundary Event"));
        }
    } finally {
        if(stmt != null) {
            stmt.close();
        }
        if(connection != null) {
            connection.close();
        }
    }
    ksession.abortProcessInstance(processInstance.getId());
    manager.disposeRuntimeEngine(runtime);
}

/**
 * RHBPMS-4729: with a short misfire threshold (quartz-db-short-misfire.properties),
 * timer-start triggers missed during the simulated downtime must be handled per the
 * misfire policy, yielding 5 total process starts across the restart.
 */
@Test(timeout=25000)
public void testContinueTimerWithMisfire() throws Exception {
    // RHBPMS-4729
    System.setProperty("org.quartz.properties", "quartz-db-short-misfire.properties");
    NodeLeftCountDownProcessEventListener countDownListener = new NodeLeftCountDownProcessEventListener("StartProcess", 2);
    // prepare listener to assert results
    final List<Long> timerExporations = new ArrayList<Long>();
    ProcessEventListener listener = new DefaultProcessEventListener(){
        @Override
        public void beforeProcessStarted(ProcessStartedEvent event) {
            timerExporations.add(event.getProcessInstance().getId());
        }
    };
    // No special configuration for TimerService in order to test RuntimeManager default
    environment = RuntimeEnvironmentBuilder.Factory.get()
        .newDefaultBuilder()
        .entityManagerFactory(emf)
        .addAsset(ResourceFactory.newClassPathResource("org/jbpm/test/functional/timer/TimerStart2.bpmn2"), ResourceType.BPMN2)
        .registerableItemsFactory(new TestRegisterableItemsFactory(listener, countDownListener))
        .get();
    manager = getManager(environment, true);
    RuntimeEngine runtime = manager.getRuntimeEngine(ProcessInstanceIdContext.get());
    KieSession ksession = runtime.getKieSession();
    countDownListener.waitTillCompleted();
    manager.disposeRuntimeEngine(runtime);
    manager.close();
    System.out.println("==== manager.close() ====");
    countDownListener.reset(3);
    // Simulate interval between shutdown and start so the Trigger is older than (now - misfireThreshold)
    Thread.sleep(5000);
    // ---- restart ----
    environment = RuntimeEnvironmentBuilder.Factory.get()
        .newDefaultBuilder()
        .entityManagerFactory(emf)
        .addAsset(ResourceFactory.newClassPathResource("org/jbpm/test/functional/timer/TimerStart2.bpmn2"), ResourceType.BPMN2)
        .registerableItemsFactory(new TestRegisterableItemsFactory(listener, countDownListener))
        .get();
    manager = getManager(environment, true);
    countDownListener.waitTillCompleted(4000);
    // 2 before shutdown + 3 after restart (misfired + resumed) = 5 expected starts
    assertEquals(5, timerExporations.size());
}

/**
 * After close(true) on the manager, the Quartz tables QRTZ_JOB_DETAILS and QRTZ_TRIGGERS
 * must contain no leftover rows for the timer-start process; any surviving row fails the
 * test with its job/trigger identity in the message.
 */
@Test(timeout = 20000)
public void testQuartzJobDeletionOnManagerCloseWithTimerStart() throws Exception {
    NodeLeftCountDownProcessEventListener countDownListener = new NodeLeftCountDownProcessEventListener("StartProcess", 3);
    QuartzSchedulerService additionalCopy = new QuartzSchedulerService();
    additionalCopy.initScheduler(null);
    // prepare listener to assert results
    final List<Long> timerExporations = new ArrayList<Long>();
    ProcessEventListener listener = new DefaultProcessEventListener() {
        @Override
        public void beforeProcessStarted(ProcessStartedEvent event) {
            timerExporations.add(event.getProcessInstance().getId());
        }
    };
    environment = RuntimeEnvironmentBuilder.Factory.get()
        .newDefaultBuilder()
        .entityManagerFactory(emf)
        .addAsset(ResourceFactory.newClassPathResource("org/jbpm/test/functional/timer/TimerStart2.bpmn2"), ResourceType.BPMN2)
        .schedulerService(globalScheduler)
        .registerableItemsFactory(new TestRegisterableItemsFactory(listener, countDownListener))
        .get();
    manager = getManager(environment, false);
    RuntimeEngine runtime = manager.getRuntimeEngine(ProcessInstanceIdContext.get());
    KieSession ksession = runtime.getKieSession();
    assertEquals(0, timerExporations.size());
    countDownListener.waitTillCompleted();
    manager.disposeRuntimeEngine(runtime);
    int atDispose = timerExporations.size();
    assertTrue(atDispose > 0);
    ((AbstractRuntimeManager) manager).close(true);
    countDownListener.reset(1);
    countDownListener.waitTillCompleted(3000);
    assertEquals(atDispose, timerExporations.size());
    Connection connection = null;
    Statement stmt = null;
    try {
        // verify directly in the database that close(true) removed all jobs and triggers
        connection = ((DataSource) InitialContext.doLookup("jdbc/jbpm-ds")).getConnection();
        stmt = connection.createStatement();
        ResultSet resultSet = stmt.executeQuery("select JOB_NAME, JOB_GROUP from QRTZ_JOB_DETAILS");
        while (resultSet.next()) {
            String jobName = resultSet.getString(1);
            String jobGroup = resultSet.getString(2);
            fail("QRTZ_JOB_DETAILS table must be cleaned up. But a record exists :" + " jobName = " + jobName + ", jobGroup = " + jobGroup);
        }
        stmt.close();
        stmt = connection.createStatement();
        ResultSet resultSet2 = stmt.executeQuery("select TRIGGER_NAME, TRIGGER_GROUP from QRTZ_TRIGGERS");
        while (resultSet2.next()) {
            String triggerName = resultSet2.getString(1);
            String triggerGroup = resultSet2.getString(2);
            fail("QRTZ_TRIGGERS table must be cleaned up. But a record exists :" + " triggerName = " + triggerName + ", triggerGroup = " + triggerGroup);
        }
    } finally {
        if (stmt != null) {
            stmt.close();
        }
        if (connection != null) {
            connection.close();
        }
    }
    additionalCopy.shutdown();
}
}
/** * Copyright (C) 2008 Mathieu Carbou <mathieu.carbou@gmail.com> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.mycila.testing.plugin.db.api; import java.math.BigDecimal; import java.net.URL; import java.sql.Array; import java.sql.Blob; import java.sql.Clob; import java.sql.Date; import java.sql.Ref; import java.sql.Struct; import java.sql.Time; import java.sql.Timestamp; import java.sql.Types; /** * <P>The class that defines the constants that are used to identify generic * SQL types, called JDBC types. * The actual type constant values are equivalent to those in XOPEN. * <p/> * This class is never instantiated. * <p/> * See http://java.sun.com/j2se/1.4.2/docs/guide/jdbc/getstart/mapping.html */ public enum SqlType { /** * <P>The constant in the Java programming language, sometimes referred * to as a type code, that identifies the generic SQL type * <code>BIT</code>. */ BIT(Types.BIT, Boolean.class), /** * <P>The constant in the Java programming language, sometimes referred * to as a type code, that identifies the generic SQL type * <code>TINYINT</code>. */ TINYINT(Types.TINYINT, Byte.class), /** * <P>The constant in the Java programming language, sometimes referred * to as a type code, that identifies the generic SQL type * <code>SMALLINT</code>. */ SMALLINT(Types.SMALLINT, Short.class), /** * <P>The constant in the Java programming language, sometimes referred * to as a type code, that identifies the generic SQL type * <code>INTEGER</code>. 
*/ INTEGER(Types.INTEGER, Integer.class), /** * <P>The constant in the Java programming language, sometimes referred * to as a type code, that identifies the generic SQL type * <code>BIGINT</code>. */ BIGINT(Types.BIGINT, Long.class), /** * <P>The constant in the Java programming language, sometimes referred * to as a type code, that identifies the generic SQL type * <code>FLOAT</code>. */ FLOAT(Types.FLOAT, Double.class), /** * <P>The constant in the Java programming language, sometimes referred * to as a type code, that identifies the generic SQL type * <code>REAL</code>. */ REAL(Types.REAL, Float.class), /** * <P>The constant in the Java programming language, sometimes referred * to as a type code, that identifies the generic SQL type * <code>DOUBLE</code>. */ DOUBLE(Types.DOUBLE, Double.class), /** * <P>The constant in the Java programming language, sometimes referred * to as a type code, that identifies the generic SQL type * <code>NUMERIC</code>. */ NUMERIC(Types.NUMERIC, BigDecimal.class), /** * <P>The constant in the Java programming language, sometimes referred * to as a type code, that identifies the generic SQL type * <code>DECIMAL</code>. */ DECIMAL(Types.DECIMAL, BigDecimal.class), /** * <P>The constant in the Java programming language, sometimes referred * to as a type code, that identifies the generic SQL type * <code>CHAR</code>. */ CHAR(Types.CHAR, String.class), /** * <P>The constant in the Java programming language, sometimes referred * to as a type code, that identifies the generic SQL type * <code>VARCHAR</code>. */ VARCHAR(Types.VARCHAR, String.class), /** * <P>The constant in the Java programming language, sometimes referred * to as a type code, that identifies the generic SQL type * <code>LONGVARCHAR</code>. */ LONGVARCHAR(Types.LONGVARCHAR, String.class), /** * <P>The constant in the Java programming language, sometimes referred * to as a type code, that identifies the generic SQL type * <code>DATE</code>. 
*/ DATE(Types.DATE, Date.class), /** * <P>The constant in the Java programming language, sometimes referred * to as a type code, that identifies the generic SQL type * <code>TIME</code>. */ TIME(Types.TIME, Time.class), /** * <P>The constant in the Java programming language, sometimes referred * to as a type code, that identifies the generic SQL type * <code>TIMESTAMP</code>. */ TIMESTAMP(Types.TIMESTAMP, Timestamp.class), /** * <P>The constant in the Java programming language, sometimes referred * to as a type code, that identifies the generic SQL type * <code>BINARY</code>. */ BINARY(Types.BINARY, byte[].class), /** * <P>The constant in the Java programming language, sometimes referred * to as a type code, that identifies the generic SQL type * <code>VARBINARY</code>. */ VARBINARY(Types.VARBINARY, byte[].class), /** * <P>The constant in the Java programming language, sometimes referred * to as a type code, that identifies the generic SQL type * <code>LONGVARBINARY</code>. */ LONGVARBINARY(Types.LONGVARBINARY, byte[].class), /** * <P>The constant in the Java programming language, sometimes referred * to as a type code, that identifies the generic SQL type * <code>NULL</code>. */ NULL(Types.NULL, Void.class), /** * The constant in the Java programming language that indicates * that the SQL type is database-specific and * gets mapped to a Java object that can be accessed via * the methods <code>getObject</code> and <code>setObject</code>. */ OTHER(Types.OTHER, Object.class), /** * The constant in the Java programming language, sometimes referred to * as a type code, that identifies the generic SQL type * <code>JAVA_OBJECT</code>. * * @since 1.2 */ JAVA_OBJECT(Types.JAVA_OBJECT, Class.class), /** * The constant in the Java programming language, sometimes referred to * as a type code, that identifies the generic SQL type * <code>DISTINCT</code>. 
* * @since 1.2 */ DISTINCT(Types.DISTINCT, Object.class), /** * The constant in the Java programming language, sometimes referred to * as a type code, that identifies the generic SQL type * <code>STRUCT</code>. * * @since 1.2 */ STRUCT(Types.STRUCT, Struct.class), /** * The constant in the Java programming language, sometimes referred to * as a type code, that identifies the generic SQL type * <code>ARRAY</code>. * * @since 1.2 */ ARRAY(Types.ARRAY, Array.class), /** * The constant in the Java programming language, sometimes referred to * as a type code, that identifies the generic SQL type * <code>BLOB</code>. * * @since 1.2 */ BLOB(Types.BLOB, Blob.class), /** * The constant in the Java programming language, sometimes referred to * as a type code, that identifies the generic SQL type * <code>CLOB</code>. * * @since 1.2 */ CLOB(Types.CLOB, Clob.class), /** * The constant in the Java programming language, sometimes referred to * as a type code, that identifies the generic SQL type * <code>REF</code>. * * @since 1.2 */ REF(Types.REF, Ref.class), /** * The constant in the Java programming language, somtimes referred to * as a type code, that identifies the generic SQL type <code>DATALINK</code>. * * @since 1.4 */ DATALINK(Types.DATALINK, URL.class), /** * The constant in the Java programming language, somtimes referred to * as a type code, that identifies the generic SQL type <code>BOOLEAN</code>. * * @since 1.4 */ BOOLEAN(Types.BOOLEAN, Boolean.class), UNKNOWN(-1, Object.class); private final Class<?> javaType; private final int sqlType; private SqlType(int sqlType, Class<?> javaType) { this.sqlType = sqlType; this.javaType = javaType; } public int sqlType() { return sqlType; } public Class<?> javaType() { return javaType; } public static SqlType fromSqlType(int sqlType) { for (SqlType type : values()) { if (type.sqlType() == sqlType) { return type; } } return UNKNOWN; } }
package com.alorma.github.ui.activity.gists;

import android.content.Context;
import android.content.Intent;
import android.graphics.Color;
import android.os.Bundle;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.EditText;
import android.widget.Switch;
import com.afollestad.materialdialogs.MaterialDialog;
import com.alorma.github.R;
import com.alorma.github.sdk.bean.dto.response.Gist;
import com.alorma.github.sdk.bean.dto.response.GistFile;
import com.alorma.github.sdk.bean.dto.response.GistFilesMap;
import com.alorma.github.sdk.services.gists.PublishGistClient;
import com.alorma.github.ui.activity.base.BackActivity;
import com.alorma.github.ui.adapter.GistCreatedDetailFilesAdapter;
import com.alorma.github.ui.utils.DialogUtils;
import com.mikepenz.iconics.IconicsDrawable;
import com.mikepenz.octicons_typeface_library.Octicons;
import rx.Subscriber;
import rx.android.schedulers.AndroidSchedulers;
import rx.schedulers.Schedulers;

/**
 * Screen for composing a new Gist: collects a description, a public/private flag and a
 * list of files (each created/edited in GistEditorActivity), then publishes via
 * PublishGistClient on an Rx pipeline.
 *
 * Created by Bernat on 02/04/2015.
 */
public class CreateGistActivity extends BackActivity implements GistCreatedDetailFilesAdapter.GistCreateAdapterListener {

    // request codes distinguishing "create new file" from "edit existing file" results
    private static final int GIST_FILE_CREATOR = 540;
    private static final int GIST_FILE_EDITOR = 541;

    private GistCreatedDetailFilesAdapter adapter;
    // true when launched via ACTION_SEND (share intent); set in onCreate but not read here
    private boolean sharingMode;
    private EditText gistDescription;
    private Switch gistPrivate;
    private RecyclerView recyclerView;
    // adapter position of the file currently being edited; -1 after a result is consumed
    private int editingPosition;

    /** Builds a plain launch intent for this activity. */
    public static Intent createLauncherIntent(Context context) {
        return new Intent(context, CreateGistActivity.class);
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_create_gist);
        recyclerView = (RecyclerView) findViewById(R.id.recycler);
        recyclerView.setLayoutManager(new LinearLayoutManager(this));
        adapter = new GistCreatedDetailFilesAdapter(LayoutInflater.from(this));
        adapter.setGistCreateAdapterListener(this);
        recyclerView.setAdapter(adapter);
        sharingMode = Intent.ACTION_SEND.equals(getIntent().getAction());
        if (getSupportActionBar() != null) {
            getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        }
        gistDescription = (EditText) findViewById(R.id.gistDescription);
        gistPrivate = (Switch) findViewById(R.id.gistPrivate);
        // FAB opens a blank file editor for a new gist file
        FloatingActionButton fab = (FloatingActionButton) findViewById(R.id.fabButton);
        fab.setImageDrawable(new IconicsDrawable(this, Octicons.Icon.oct_gist_new).color(Color.WHITE).actionBar());
        fab.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                launchEmptyEditor();
            }
        });
    }

    /** Opens the editor with no file, forwarding any launch extras (e.g. shared text). */
    private void launchEmptyEditor() {
        Intent intent = GistEditorActivity.createLauncherIntent(this, getIntent().getExtras());
        startActivityForResult(intent, GIST_FILE_CREATOR);
    }

    /** Opens the editor pre-loaded with an existing file. */
    private void launchEditor(GistFile file) {
        Intent intent = GistEditorActivity.createLauncherIntent(this, file);
        startActivityForResult(intent, GIST_FILE_EDITOR);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.menu_create_gist, menu);
        MenuItem publishItem = menu.findItem(R.id.action_publish_gist);
        IconicsDrawable publishIcon = new IconicsDrawable(this, Octicons.Icon.oct_package);
        publishIcon.actionBar();
        publishIcon.color(Color.WHITE);
        publishItem.setIcon(publishIcon);
        return true;
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (data != null && resultCode == RESULT_OK) {
            GistFile file = (GistFile) data.getParcelableExtra(GistEditorActivity.EXTRA_FILE);
            if (file != null) {
                switch (requestCode) {
                    case GIST_FILE_CREATOR:
                        adapter.add(file);
                        break;
                    case GIST_FILE_EDITOR:
                        // editingPosition was recorded in updateFile() before launching the editor
                        adapter.update(editingPosition, file);
                        break;
                }
                editingPosition = -1;
            }
        }
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case android.R.id.home:
                // on back/up, confirm before discarding any files or a typed description
                String description = gistDescription.getText().toString();
                int gistFiles = adapter.getItemCount();
                if (gistFiles > 0) {
                    showDialogCancelGist();
                } else {
                    if (!TextUtils.isEmpty(description)) {
                        showDialogNotEmpty();
                    } else {
                        finish();
                    }
                }
                break;
            case R.id.action_publish_gist:
                publishGist();
                break;
        }
        return true;
    }

    /** Confirms discarding a gist that has only a description typed, no files. */
    private void showDialogNotEmpty() {
        MaterialDialog.Builder builder = new DialogUtils().builder(this);
        builder.content(R.string.gist_creator_not_empty);
        builder.positiveText(R.string.gist_creator_editor_discard);
        builder.negativeText(R.string.cancel);
        builder.onPositive((dialog1, which) -> finish());
        // NOTE(review): 'dialog' is not declared here — presumably a field inherited from
        // BackActivity; confirm in the base class.
        dialog = builder.show();
    }

    /** Confirms leaving when files exist: discard (positive), stay (negative) or publish (neutral). */
    private void showDialogCancelGist() {
        MaterialDialog.Builder builder = new DialogUtils().builder(this);
        builder.content(R.string.gist_creator_cancel_job);
        builder.positiveText(R.string.ok);
        builder.negativeText(R.string.cancel);
        builder.neutralText(R.string.publish_gist);
        builder.callback(new MaterialDialog.ButtonCallback() {
            @Override
            public void onPositive(MaterialDialog dialog) {
                super.onPositive(dialog);
                finish();
            }

            @Override
            public void onNeutral(MaterialDialog dialog) {
                super.onNeutral(dialog);
                publishGist();
            }
        });
        dialog = builder.show();
    }

    /**
     * Builds the Gist DTO from the UI state (skipping files with an empty name or empty
     * content) and publishes it asynchronously; closes the screen on success, shows a
     * snackbar on failure. No-op when the file list is empty.
     */
    private void publishGist() {
        if (adapter != null && adapter.getItemCount() > 0) {
            Gist gist = new Gist();
            // the switch expresses "private", the API field expresses "public"
            gist.isPublic = !gistPrivate.isChecked();
            gist.description = gistDescription.getText().toString();
            GistFilesMap files = new GistFilesMap();
            for (GistFile gistFile : adapter.getItems()) {
                if (!TextUtils.isEmpty(gistFile.filename) && !TextUtils.isEmpty(gistFile.content)) {
                    files.put(gistFile.filename, gistFile);
                }
            }
            gist.files = files;
            showProgressDialog(R.string.publishing_gist);
            PublishGistClient publishGistClient = new PublishGistClient(gist);
            publishGistClient.observable()
                .subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe(new Subscriber<Gist>() {
                    @Override
                    public void onCompleted() {

                    }

                    @Override
                    public void onError(Throwable e) {
                        hideProgressDialog();
                        Snackbar.make(recyclerView, R.string.publish_gist_fail, Snackbar.LENGTH_SHORT).show();
                    }

                    @Override
                    public void onNext(Gist gist) {
                        hideProgressDialog();
                        finish();
                    }
                });
        }
    }

    /** Adapter callback: remember the row being edited, then open the editor for it. */
    @Override
    public void updateFile(int position, GistFile gistFile) {
        this.editingPosition = position;
        launchEditor(gistFile);
    }

    /** Adapter callback: confirm and remove a file from the pending gist. */
    @Override
    public void removeFile(int position, final GistFile item) {
        MaterialDialog.Builder builder = new DialogUtils().builder(this);
        builder.content(getString(R.string.gist_creator_remove_file, item.filename));
        builder.positiveText(R.string.ok);
        builder.negativeText(R.string.cancel);
        builder.callback(new MaterialDialog.ButtonCallback() {
            @Override
            public void onPositive(MaterialDialog dialog) {
                super.onPositive(dialog);
                adapter.remove(item);
            }
        });
        dialog = builder.show();
    }
}
package com.ibm.developerWorks.weatherdata.alert;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Data holder for a single weather alert (headline, issuing office, affected area,
 * severity, timestamps, coordinates, plus category/response-type/flood sub-items).
 * Plain mutable POJO with one field + getter + setter per property; unmapped
 * properties are collected in {@link #getAdditionalProperties()}.
 * Field names mirror the wire format (snake_case), presumably so a JSON binder can
 * map them directly — confirm against the deserialization config before renaming.
 */
public class Alert {

    // 'class' is a reserved word, hence the underscore variants here and in the accessors
    private String _class;
    private String key;
    private Integer msg_type_cd;
    private String msg_type;
    private String pil;
    private String phenomena;
    private String significance;
    private String etn;
    private String office_cd;
    private String event_desc;
    private Integer severity_cd;
    private String severity;
    private String area_type;
    private String area_id;
    private String area_name;
    private String cntry_cd;
    private String cntry_name;
    private String headline_text;
    private String detail_key;
    private String source;
    private String issue_dt_tm_local;
    private String issue_dt_tm_tz_abbrv;
    private String identifier;
    private String proc_dt_tm_local;
    private String proc_dt_tm_tz_abbrv;
    private String office_name;
    private String office_st_cd;
    private String office_cntry_cd;
    private Double lat;
    private Double lon;
    private String st_cd;
    private String st_name;
    private List<Category> category_item = new ArrayList<Category>();
    private List<ResponseType> response_type_item = new ArrayList<ResponseType>();
    private Flood flood_item;
    // catch-all for properties with no dedicated field
    private Map<String, Object> additionalProperties = new HashMap<String, Object>();

    /** @return the _class value */
    public String getClass_() {
        return _class;
    }

    /** @param _class the class value */
    public void setClass_(String _class) {
        this._class = _class;
    }

    /** @return the key */
    public String getKey() {
        return key;
    }

    /** @param key the key */
    public void setKey(String key) {
        this.key = key;
    }

    /** @return the msg_type_cd */
    public Integer getMsg_type_cd() {
        return msg_type_cd;
    }

    /** @param msg_type_cd the msg_type_cd */
    public void setMsg_type_cd(Integer msg_type_cd) {
        this.msg_type_cd = msg_type_cd;
    }

    /** @return the msg_type */
    public String getMsg_type() {
        return msg_type;
    }

    /** @param msg_type the msg_type */
    public void setMsg_type(String msg_type) {
        this.msg_type = msg_type;
    }

    /** @return the pil */
    public String getPil() {
        return pil;
    }

    /** @param pil the pil */
    public void setPil(String pil) {
        this.pil = pil;
    }

    /** @return the phenomena */
    public String getPhenomena() {
        return phenomena;
    }

    /** @param phenomena the phenomena */
    public void setPhenomena(String phenomena) {
        this.phenomena = phenomena;
    }

    /** @return the significance */
    public String getSignificance() {
        return significance;
    }

    /** @param significance the significance */
    public void setSignificance(String significance) {
        this.significance = significance;
    }

    /** @return the etn */
    public String getEtn() {
        return etn;
    }

    /** @param etn the etn */
    public void setEtn(String etn) {
        this.etn = etn;
    }

    /** @return the office_cd */
    public String getOffice_cd() {
        return office_cd;
    }

    /** @param office_cd the office_cd */
    public void setOffice_cd(String office_cd) {
        this.office_cd = office_cd;
    }

    /** @return the event_desc */
    public String getEvent_desc() {
        return event_desc;
    }

    /** @param event_desc the event_desc */
    public void setEvent_desc(String event_desc) {
        this.event_desc = event_desc;
    }

    /** @return the severity_cd */
    public Integer getSeverity_cd() {
        return severity_cd;
    }

    /** @param severity_cd the severity_cd */
    public void setSeverity_cd(Integer severity_cd) {
        this.severity_cd = severity_cd;
    }

    /** @return the severity */
    public String getSeverity() {
        return severity;
    }

    /** @param severity the severity */
    public void setSeverity(String severity) {
        this.severity = severity;
    }

    /** @return the area_type */
    public String getArea_type() {
        return area_type;
    }

    /** @param area_type the area_type */
    public void setArea_type(String area_type) {
        this.area_type = area_type;
    }

    /** @return the area_id */
    public String getArea_id() {
        return area_id;
    }

    /** @param area_id the area_id */
    public void setArea_id(String area_id) {
        this.area_id = area_id;
    }

    /** @return the area_name */
    public String getArea_name() {
        return area_name;
    }

    /** @param area_name the area_name */
    public void setArea_name(String area_name) {
        this.area_name = area_name;
    }

    /** @return the cntry_cd */
    public String getCntry_cd() {
        return cntry_cd;
    }

    /** @param cntry_cd the cntry_cd */
    public void setCntry_cd(String cntry_cd) {
        this.cntry_cd = cntry_cd;
    }

    /** @return the cntry_name */
    public String getCntry_name() {
        return cntry_name;
    }

    /** @param cntry_name the cntry_name */
    public void setCntry_name(String cntry_name) {
        this.cntry_name = cntry_name;
    }

    /** @return the headline_text */
    public String getHeadline_text() {
        return headline_text;
    }

    /** @param headline_text the headline_text */
    public void setHeadline_text(String headline_text) {
        this.headline_text = headline_text;
    }

    /** @return the detail_key */
    public String getDetail_key() {
        return detail_key;
    }

    /** @param detail_key the detail_key */
    public void setDetail_key(String detail_key) {
        this.detail_key = detail_key;
    }

    /** @return the source */
    public String getSource() {
        return source;
    }

    /** @param source the source */
    public void setSource(String source) {
        this.source = source;
    }

    /** @return the issue_dt_tm_local */
    public String getIssue_dt_tm_local() {
        return issue_dt_tm_local;
    }

    /** @param issue_dt_tm_local the issue_dt_tm_local */
    public void setIssue_dt_tm_local(String issue_dt_tm_local) {
        this.issue_dt_tm_local = issue_dt_tm_local;
    }

    /** @return the issue_dt_tm_tz_abbrv */
    public String getIssue_dt_tm_tz_abbrv() {
        return issue_dt_tm_tz_abbrv;
    }

    /** @param issue_dt_tm_tz_abbrv the issue_dt_tm_tz_abbrv */
    public void setIssue_dt_tm_tz_abbrv(String issue_dt_tm_tz_abbrv) {
        this.issue_dt_tm_tz_abbrv = issue_dt_tm_tz_abbrv;
    }

    /** @return the identifier */
    public String getIdentifier() {
        return identifier;
    }

    /** @param identifier the identifier */
    public void setIdentifier(String identifier) {
        this.identifier = identifier;
    }

    /** @return the proc_dt_tm_local */
    public String getProc_dt_tm_local() {
        return proc_dt_tm_local;
    }

    /** @param proc_dt_tm_local the proc_dt_tm_local */
    public void setProc_dt_tm_local(String proc_dt_tm_local) {
        this.proc_dt_tm_local = proc_dt_tm_local;
    }

    /** @return the proc_dt_tm_tz_abbrv */
    public String getProc_dt_tm_tz_abbrv() {
        return proc_dt_tm_tz_abbrv;
    }

    /** @param proc_dt_tm_tz_abbrv the proc_dt_tm_tz_abbrv */
    public void setProc_dt_tm_tz_abbrv(String proc_dt_tm_tz_abbrv) {
        this.proc_dt_tm_tz_abbrv = proc_dt_tm_tz_abbrv;
    }

    /** @return the office_name */
    public String getOffice_name() {
        return office_name;
    }

    /** @param office_name the office_name */
    public void setOffice_name(String office_name) {
        this.office_name = office_name;
    }

    /** @return the office_st_cd */
    public String getOffice_st_cd() {
        return office_st_cd;
    }

    /** @param office_st_cd the office_st_cd */
    public void setOffice_st_cd(String office_st_cd) {
        this.office_st_cd = office_st_cd;
    }

    /** @return the office_cntry_cd */
    public String getOffice_cntry_cd() {
        return office_cntry_cd;
    }

    /** @param office_cntry_cd the office_cntry_cd */
    public void setOffice_cntry_cd(String office_cntry_cd) {
        this.office_cntry_cd = office_cntry_cd;
    }

    /** @return the lat */
    public Double getLat() {
        return lat;
    }

    /** @param lat the lat */
    public void setLat(Double lat) {
        this.lat = lat;
    }

    /** @return the lon */
    public Double getLon() {
        return lon;
    }

    /** @param lon the lon */
    public void setLon(Double lon) {
        this.lon = lon;
    }

    /** @return the st_cd */
    public String getSt_cd() {
        return st_cd;
    }

    /** @param st_cd the st_cd */
    public void setSt_cd(String st_cd) {
        this.st_cd = st_cd;
    }

    /** @return the st_name */
    public String getSt_name() {
        return st_name;
    }

    /** @param st_name the st_name */
    public void setSt_name(String st_name) {
        this.st_name = st_name;
    }

    // NOTE(review): lowercase 'c' is inconsistent with every other accessor here
    // (cf. getResponse_type_item). Not renamed — callers and bean introspection may
    // depend on the existing names.
    /** @return the category_item list */
    public List<Category> getcategory_item() {
        return category_item;
    }

    /** @param category_item the category_item list */
    public void setcategory_item(List<Category> category_item) {
        this.category_item = category_item;
    }

    /** @return the response_type_item list */
    public List<ResponseType> getResponse_type_item() {
        return response_type_item;
    }

    /** @param response_type_item the response_type_item list */
    public void setResponse_type_item(List<ResponseType> response_type_item) {
        this.response_type_item = response_type_item;
    }

    /** @return the flood_item */
    public Flood getFlood_item() {
        return flood_item;
    }

    /** @param flood_item the flood_item */
    public void setFlood_item(Flood flood_item) {
        this.flood_item = flood_item;
    }

    /** @return the live map of properties not covered by a dedicated field */
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    /** Records a property that has no dedicated field. */
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }
}
/* * The MIT License (MIT) * * Copyright (c) 2007-2015 Broad Institute * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package org.broad.igv.sam; import org.broad.igv.feature.BasicFeature; import org.broad.igv.feature.Exon; import org.broad.igv.feature.LocusScore; import org.broad.igv.feature.Strand; import org.broad.igv.track.WindowFunction; import java.awt.*; /** * Some alignment formats are parsed as Features. * <p/> * This is all getting rather circular, some refactoring is in order. 
 *
 * @author jrobinso
 * @date Aug 5, 2010
 */
public class FeatureWrappedAlignment implements Alignment {
    String readName;
    String chr;
    int start;
    int end;
    // One block per exon of the wrapped feature; null when the feature has no exons.
    AlignmentBlock[] blocks;
    Strand strand;

    /**
     * Wraps a {@link BasicFeature} so it can be rendered as an alignment.
     * Copies name, chromosome, start/end and strand; if the feature has exons,
     * each exon becomes an AlignmentBlock.
     */
    public FeatureWrappedAlignment(BasicFeature f) {
        this.readName = f.getName();
        this.chr = f.getChr();
        this.start = f.getStart();
        this.end = f.getEnd();
        strand = f.getStrand();

        if (f.getExonCount() > 0) {
            blocks = new AlignmentBlock[f.getExonCount()];
            int i = 0;
            for (Exon exon : f.getExons()) {
                int length = exon.getLength();
                // Zero-filled array passed as both bases and qualities —
                // NOTE(review): presumably a placeholder since features carry
                // no sequence/quality data; confirm AlignmentBlock tolerates this.
                byte[] seq = new byte[length];
                blocks[i] = new AlignmentBlock(getChr(), exon.getStart(), seq, seq);
                i++;
            }
        }
    }

    public String getReadName() {
        return readName;
    }

    // No sequence data is available for a wrapped feature.
    public String getReadSequence() {
        return null;
    }

    public String getChromosome() {
        return chr;
    }

    public String getChr() {
        return chr;
    }

    @Override
    public String getContig() {
        return chr;
    }

    public int getAlignmentStart() {
        return start;
    }

    // End-inclusive containment check.
    public boolean contains(double location) {
        return location >= start && location <= getEnd();
    }

    public AlignmentBlock[] getAlignmentBlocks() {
        return blocks;
    }

    // Features have no insertion information.
    public AlignmentBlock[] getInsertions() {
        return null;
    }

    // "*" is the SAM convention for "cigar unavailable".
    public String getCigarString() {
        return "*";
    }

    public int getInferredInsertSize() {
        return 0;
    }

    // 255 is the SAM convention for "mapping quality unavailable".
    public int getMappingQuality() {
        return 255;
    }

    public ReadMate getMate() {
        return null;
    }

    public boolean isProperPair() {
        return true;
    }

    public boolean isMapped() {
        return true;
    }

    public boolean isPaired() {
        return false;
    }

    public boolean isNegativeStrand() {
        return strand == Strand.NEGATIVE;
    }

    public boolean isDuplicate() {
        return false;
    }

    public float getScore() {
        return 1.0f;
    }

    // Immutable enough for display purposes; returns itself rather than copying.
    public LocusScore copy() {
        return this;
    }

    public String getClipboardString(double location) {
        return getValueString(location, null);
    }

    // Tooltip/popup text: read name plus computed length.
    public String getValueString(double position, WindowFunction windowFunction) {
        return readName + "<br>Read length = " + (getEnd() - getStart());
    }

    /**
     * @return the start
     */
    public int getStart() {
        return start;
    }

    /**
     * @param start the start to set
     */
    public void setStart(int start) {
        this.start = start;
    }

    /**
     * @return the end
     */
    public int getEnd() {
        return end;
    }

    public int getAlignmentEnd() {
        return end;
    }

    /**
     * @param end the end to set
     */
    public void setEnd(int end) {
        this.end = end;
    }

    // No per-base data: base and phred queries return 0.
    public byte getBase(double position) {
        return 0;
    }

    public byte getPhred(double position) {
        return 0;
    }

    public String getSample() {
        return null;
    }

    public String getReadGroup() {
        return null;
    }

    public String getLibrary() {
        return null;
    }

    // Attributes are not supported for feature-backed alignments.
    public Object getAttribute(String key) {
        return null;
    }

    // Intentional no-op: wrapped features have no mate sequence to record.
    public void setMateSequence(String sequence) {
    }

    public String getPairOrientation() {
        return null;
    }

    public boolean isSmallInsert() {
        return false;
    }

    public boolean isVendorFailedRead() {
        return false;
    }

    public Color getColor() {
        return null;
    }

    public char[] getGapTypes() {
        return null;
    }

    public boolean isFirstOfPair() {
        return false;
    }

    public boolean isSecondOfPair() {
        return false;
    }

    public Strand getFirstOfPairStrand() {
        return strand;
    }

    public Strand getSecondOfPairStrand() {
        return Strand.NONE;
    }

    public Strand getReadStrand() {
        return isNegativeStrand() ? Strand.NEGATIVE : Strand.POSITIVE;
    }

    @Override
    public void finish() {
    }

    @Override
    public boolean isPrimary() {
        return true;
    }

    @Override
    public boolean isSupplementary() {
        return false;
    }
}
/** * Copyright 2009 The Apache Software Foundation * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.io.hfile; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.nio.ByteBuffer; import java.util.Collection; import java.util.Map; import java.util.Random; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.io.hfile.LruBlockCache.EvictionThread; import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics; import org.apache.hadoop.hbase.regionserver.metrics.TestSchemaMetrics; import org.apache.hadoop.hbase.util.ClassSize; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; /** * Tests the concurrent LruBlockCache.<p> * * Tests will ensure it grows and shrinks in size properly, * evictions run when they're supposed to and do what they should, * and that cached blocks are accessible when expected to be. 
 */
@RunWith(Parameterized.class)
@Category(MediumTests.class)
public class TestLruBlockCache {

  // Metrics snapshot captured in setUp() and validated in tearDown().
  private Map<String, Long> startingMetrics;

  private final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

  /**
   * Parameterized: the suite runs once with table names included in schema
   * metrics and once without.
   */
  public TestLruBlockCache(boolean useTableName) {
    SchemaMetrics.setUseTableNameInTest(useTableName);
  }

  @Parameters
  public static Collection<Object[]> parameters() {
    return TestSchemaMetrics.parameters();
  }

  @Before
  public void setUp() throws Exception {
    startingMetrics = SchemaMetrics.getMetricsSnapshot();
  }

  @After
  public void tearDown() throws Exception {
    SchemaMetrics.validateMetricChanges(startingMetrics);
  }

  /**
   * Overfills a cache whose eviction thread is enabled and waits (bounded)
   * for the background thread to run exactly one eviction.
   */
  @Test
  public void testBackgroundEvictionThread() throws Exception {
    long maxSize = 100000;
    long blockSize = calculateBlockSizeDefault(maxSize, 9); // room for 9, will evict

    LruBlockCache cache = new LruBlockCache(maxSize, blockSize, TEST_UTIL.getConfiguration());

    CachedItem[] blocks = generateFixedBlocks(10, blockSize, "block");

    EvictionThread evictionThread = cache.getEvictionThread();
    assertTrue(evictionThread != null);

    // Make sure eviction thread has entered run method
    while (!evictionThread.isEnteringRun()) {
      Thread.sleep(1);
    }

    // Add all the blocks
    for (CachedItem block : blocks) {
      cache.cacheBlock(block.cacheKey, block);
    }

    // Let the eviction run; bail out after ~4s (20 * 200ms) if it never does.
    int n = 0;
    while(cache.getEvictionCount() == 0) {
      Thread.sleep(200);
      assertTrue(n++ < 20);
    }
    System.out.println("Background Evictions run: " + cache.getEvictionCount());

    // A single eviction run should have occurred
    // NOTE(review): arguments are (actual, expected) — reversed relative to
    // the JUnit assertEquals(expected, actual) convention.
    assertEquals(cache.getEvictionCount(), 1);
  }

  /**
   * Basic cache/retrieve round-trip with heap-size accounting and no evictions.
   */
  @Test
  public void testCacheSimple() throws Exception {

    long maxSize = 1000000;
    long blockSize = calculateBlockSizeDefault(maxSize, 101);

    LruBlockCache cache = new LruBlockCache(maxSize, blockSize, TEST_UTIL.getConfiguration());

    CachedItem[] blocks = generateRandomBlocks(100, blockSize);

    long expectedCacheSize = cache.heapSize();

    // Confirm empty
    for (CachedItem block : blocks) {
      assertTrue(cache.getBlock(block.cacheKey, true, false) == null);
    }

    // Add blocks
    for (CachedItem block : blocks) {
      cache.cacheBlock(block.cacheKey, block);
      expectedCacheSize += block.cacheBlockHeapSize();
    }

    // Verify correctly calculated cache heap size
    assertEquals(expectedCacheSize, cache.heapSize());

    // Check if all blocks are properly cached and retrieved
    for (CachedItem block : blocks) {
      HeapSize buf = cache.getBlock(block.cacheKey, true, false);
      assertTrue(buf != null);
      assertEquals(buf.heapSize(), block.heapSize());
    }

    // Second pass: the reads above promote blocks to the "multi" bucket;
    // verify that accounting and retrieval are unchanged by those reads.
    assertEquals(expectedCacheSize, cache.heapSize());

    // Check if all blocks are properly cached and retrieved
    for (CachedItem block : blocks) {
      HeapSize buf = cache.getBlock(block.cacheKey, true, false);
      assertTrue(buf != null);
      assertEquals(buf.heapSize(), block.heapSize());
    }

    // Expect no evictions
    assertEquals(0, cache.getEvictionCount());
    Thread t = new LruBlockCache.StatisticsThread(cache);
    t.start();
    t.join();
  }

  /**
   * With the eviction thread disabled, overfilling triggers one synchronous
   * eviction run that drops the two oldest blocks.
   */
  @Test
  public void testCacheEvictionSimple() throws Exception {

    long maxSize = 100000;
    long blockSize = calculateBlockSizeDefault(maxSize, 10);

    LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false, TEST_UTIL.getConfiguration());

    CachedItem[] blocks = generateFixedBlocks(10, blockSize, "block");

    long expectedCacheSize = cache.heapSize();

    // Add all the blocks
    for (CachedItem block : blocks) {
      cache.cacheBlock(block.cacheKey, block);
      expectedCacheSize += block.cacheBlockHeapSize();
    }

    // A single eviction run should have occurred
    assertEquals(1, cache.getEvictionCount());

    // Our expected size overruns acceptable limit
    assertTrue(expectedCacheSize >
      (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));

    // But the cache did not grow beyond max
    assertTrue(cache.heapSize() < maxSize);

    // And is still below the acceptable limit
    assertTrue(cache.heapSize() <
      (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));

    // All blocks except block 0 and 1 should be in the cache
    assertTrue(cache.getBlock(blocks[0].cacheKey, true, false) == null);
    assertTrue(cache.getBlock(blocks[1].cacheKey, true, false) == null);
    for(int i=2;i<blocks.length;i++) {
      // NOTE(review): (actual, expected) argument order.
      assertEquals(cache.getBlock(blocks[i].cacheKey, true, false),
          blocks[i]);
    }
  }

  /**
   * Verifies eviction fairness between the "single" (accessed once) and
   * "multi" (accessed more than once) priority buckets.
   */
  @Test
  public void testCacheEvictionTwoPriorities() throws Exception {

    long maxSize = 100000;
    long blockSize = calculateBlockSizeDefault(maxSize, 10);

    LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false, TEST_UTIL.getConfiguration());

    CachedItem[] singleBlocks = generateFixedBlocks(5, 10000, "single");
    CachedItem[] multiBlocks = generateFixedBlocks(5, 10000, "multi");

    long expectedCacheSize = cache.heapSize();

    // Add and get the multi blocks
    for (CachedItem block : multiBlocks) {
      cache.cacheBlock(block.cacheKey, block);
      expectedCacheSize += block.cacheBlockHeapSize();
      assertEquals(cache.getBlock(block.cacheKey, true, false), block);
    }

    // Add the single blocks (no get)
    for (CachedItem block : singleBlocks) {
      cache.cacheBlock(block.cacheKey, block);
      expectedCacheSize += block.heapSize();
    }

    // A single eviction run should have occurred
    // NOTE(review): (actual, expected) argument order here and below.
    assertEquals(cache.getEvictionCount(), 1);

    // We expect two entries evicted
    assertEquals(cache.getEvictedCount(), 2);

    // Our expected size overruns acceptable limit
    assertTrue(expectedCacheSize >
      (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));

    // But the cache did not grow beyond max
    assertTrue(cache.heapSize() <= maxSize);

    // And is now below the acceptable limit
    assertTrue(cache.heapSize() <=
      (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));

    // We expect fairness across the two priorities.
    // This test makes multi go barely over its limit, in-memory
    // empty, and the rest in single.  Two single evictions and
    // one multi eviction expected.
    assertTrue(cache.getBlock(singleBlocks[0].cacheKey, true, false) == null);
    assertTrue(cache.getBlock(multiBlocks[0].cacheKey, true, false) == null);

    // And all others to be cached
    for(int i=1;i<4;i++) {
      assertEquals(cache.getBlock(singleBlocks[i].cacheKey, true, false),
          singleBlocks[i]);
      assertEquals(cache.getBlock(multiBlocks[i].cacheKey, true, false),
          multiBlocks[i]);
    }
  }

  /**
   * Exercises all three priority buckets (single/multi/memory) with explicit
   * bucket ratios, tracking exact eviction and evicted-block counts as blocks
   * of different sizes are inserted and promoted.
   */
  @Test
  public void testCacheEvictionThreePriorities() throws Exception {

    long maxSize = 100000;
    long blockSize = calculateBlockSize(maxSize, 10);

    LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
        (int)Math.ceil(1.2*maxSize/blockSize),
        LruBlockCache.DEFAULT_LOAD_FACTOR,
        LruBlockCache.DEFAULT_CONCURRENCY_LEVEL,
        0.98f, // min
        0.99f, // acceptable
        0.33f, // single
        0.33f, // multi
        0.34f);// memory

    CachedItem[] singleBlocks = generateFixedBlocks(5, blockSize, "single");
    CachedItem[] multiBlocks = generateFixedBlocks(5, blockSize, "multi");
    CachedItem[] memoryBlocks = generateFixedBlocks(5, blockSize, "memory");

    long expectedCacheSize = cache.heapSize();

    // Add 3 blocks from each priority
    for(int i=0;i<3;i++) {

      // Just add single blocks
      cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]);
      expectedCacheSize += singleBlocks[i].cacheBlockHeapSize();

      // Add and get multi blocks
      cache.cacheBlock(multiBlocks[i].cacheKey, multiBlocks[i]);
      expectedCacheSize += multiBlocks[i].cacheBlockHeapSize();
      cache.getBlock(multiBlocks[i].cacheKey, true, false);

      // Add memory blocks as such
      cache.cacheBlock(memoryBlocks[i].cacheKey, memoryBlocks[i], true);
      expectedCacheSize += memoryBlocks[i].cacheBlockHeapSize();

    }

    // Do not expect any evictions yet
    assertEquals(0, cache.getEvictionCount());

    // Verify cache size
    assertEquals(expectedCacheSize, cache.heapSize());

    // Insert a single block, oldest single should be evicted
    cache.cacheBlock(singleBlocks[3].cacheKey, singleBlocks[3]);

    // Single eviction, one thing evicted
    assertEquals(1, cache.getEvictionCount());
    assertEquals(1, cache.getEvictedCount());

    // Verify oldest single block is the one evicted
    assertEquals(null, cache.getBlock(singleBlocks[0].cacheKey, true, false));

    // Change the oldest remaining single block to a multi
    cache.getBlock(singleBlocks[1].cacheKey, true, false);

    // Insert another single block
    cache.cacheBlock(singleBlocks[4].cacheKey, singleBlocks[4]);

    // Two evictions, two evicted.
    assertEquals(2, cache.getEvictionCount());
    assertEquals(2, cache.getEvictedCount());

    // Oldest multi block should be evicted now
    assertEquals(null, cache.getBlock(multiBlocks[0].cacheKey, true, false));

    // Insert another memory block
    cache.cacheBlock(memoryBlocks[3].cacheKey, memoryBlocks[3], true);

    // Three evictions, three evicted.
    assertEquals(3, cache.getEvictionCount());
    assertEquals(3, cache.getEvictedCount());

    // Oldest memory block should be evicted now
    assertEquals(null, cache.getBlock(memoryBlocks[0].cacheKey, true, false));

    // Add a block that is twice as big (should force two evictions)
    CachedItem[] bigBlocks = generateFixedBlocks(3, blockSize*3, "big");
    cache.cacheBlock(bigBlocks[0].cacheKey, bigBlocks[0]);

    // Four evictions, six evicted (inserted block 3X size, expect +3 evicted)
    assertEquals(4, cache.getEvictionCount());
    assertEquals(6, cache.getEvictedCount());

    // Expect three remaining singles to be evicted
    assertEquals(null, cache.getBlock(singleBlocks[2].cacheKey, true, false));
    assertEquals(null, cache.getBlock(singleBlocks[3].cacheKey, true, false));
    assertEquals(null, cache.getBlock(singleBlocks[4].cacheKey, true, false));

    // Make the big block a multi block
    cache.getBlock(bigBlocks[0].cacheKey, true, false);

    // Cache another single big block
    cache.cacheBlock(bigBlocks[1].cacheKey, bigBlocks[1]);

    // Five evictions, nine evicted (3 new)
    assertEquals(5, cache.getEvictionCount());
    assertEquals(9, cache.getEvictedCount());

    // Expect three remaining multis to be evicted
    assertEquals(null, cache.getBlock(singleBlocks[1].cacheKey, true, false));
    assertEquals(null, cache.getBlock(multiBlocks[1].cacheKey, true, false));
    assertEquals(null, cache.getBlock(multiBlocks[2].cacheKey, true, false));

    // Cache a big memory block
    cache.cacheBlock(bigBlocks[2].cacheKey, bigBlocks[2], true);

    // Six evictions, twelve evicted (3 new)
    assertEquals(6, cache.getEvictionCount());
    assertEquals(12, cache.getEvictedCount());

    // Expect three remaining in-memory to be evicted
    assertEquals(null, cache.getBlock(memoryBlocks[1].cacheKey, true, false));
    assertEquals(null, cache.getBlock(memoryBlocks[2].cacheKey, true, false));
    assertEquals(null, cache.getBlock(memoryBlocks[3].cacheKey, true, false));
  }

  /**
   * Scan resistance: a stream of once-read ("single") blocks should be evicted
   * in preference to frequently-read ("multi") blocks.
   */
  // test scan resistance
  @Test
  public void testScanResistance() throws Exception {

    long maxSize = 100000;
    long blockSize = calculateBlockSize(maxSize, 10);

    LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
        (int)Math.ceil(1.2*maxSize/blockSize),
        LruBlockCache.DEFAULT_LOAD_FACTOR,
        LruBlockCache.DEFAULT_CONCURRENCY_LEVEL,
        0.66f, // min
        0.99f, // acceptable
        0.33f, // single
        0.33f, // multi
        0.34f);// memory

    CachedItem[] singleBlocks = generateFixedBlocks(20, blockSize, "single");
    CachedItem[] multiBlocks = generateFixedBlocks(5, blockSize, "multi");

    // Add 5 multi blocks
    for (CachedItem block : multiBlocks) {
      cache.cacheBlock(block.cacheKey, block);
      cache.getBlock(block.cacheKey, true, false);
    }

    // Add 5 single blocks
    for(int i=0;i<5;i++) {
      cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]);
    }

    // An eviction ran
    assertEquals(1, cache.getEvictionCount());

    // To drop down to 2/3 capacity, we'll need to evict 4 blocks
    assertEquals(4, cache.getEvictedCount());

    // Should have been taken off equally from single and multi
    assertEquals(null, cache.getBlock(singleBlocks[0].cacheKey, true, false));
    assertEquals(null, cache.getBlock(singleBlocks[1].cacheKey, true, false));
    assertEquals(null, cache.getBlock(multiBlocks[0].cacheKey, true, false));
    assertEquals(null, cache.getBlock(multiBlocks[1].cacheKey, true, false));

    // Let's keep "scanning" by adding single blocks.  From here on we only
    // expect evictions from the single bucket.

    // Every time we reach 10 total blocks (every 4 inserts) we get 4 single
    // blocks evicted.  Inserting 13 blocks should yield 3 more evictions and
    // 12 more evicted.
    for(int i=5;i<18;i++) {
      cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]);
    }

    // 4 total evictions, 16 total evicted
    assertEquals(4, cache.getEvictionCount());
    assertEquals(16, cache.getEvictedCount());

    // Should now have 7 total blocks
    assertEquals(7, cache.size());

  }

  /**
   * Shrinking the cache via setMaxSize() should trigger one eviction run that
   * removes the oldest half of the blocks from every priority bucket.
   */
  // test setMaxSize
  @Test
  public void testResizeBlockCache() throws Exception {

    long maxSize = 300000;
    long blockSize = calculateBlockSize(maxSize, 31);

    LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
        (int)Math.ceil(1.2*maxSize/blockSize),
        LruBlockCache.DEFAULT_LOAD_FACTOR,
        LruBlockCache.DEFAULT_CONCURRENCY_LEVEL,
        0.98f, // min
        0.99f, // acceptable
        0.33f, // single
        0.33f, // multi
        0.34f);// memory

    CachedItem[] singleBlocks = generateFixedBlocks(10, blockSize, "single");
    CachedItem[] multiBlocks = generateFixedBlocks(10, blockSize, "multi");
    CachedItem[] memoryBlocks = generateFixedBlocks(10, blockSize, "memory");

    // Add all blocks from all priorities
    for(int i=0;i<10;i++) {

      // Just add single blocks
      cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]);

      // Add and get multi blocks
      cache.cacheBlock(multiBlocks[i].cacheKey, multiBlocks[i]);
      cache.getBlock(multiBlocks[i].cacheKey, true, false);

      // Add memory blocks as such
      cache.cacheBlock(memoryBlocks[i].cacheKey, memoryBlocks[i], true);
    }

    // Do not expect any evictions yet
    assertEquals(0, cache.getEvictionCount());

    // Resize to half capacity plus an extra block (otherwise we evict an extra)
    cache.setMaxSize((long)(maxSize * 0.5f));

    // Should have run a single eviction
    assertEquals(1, cache.getEvictionCount());

    // And we expect 1/2 of the blocks to be evicted
    assertEquals(15, cache.getEvictedCount());

    // And the oldest 5 blocks from each category should be gone
    for(int i=0;i<5;i++) {
      assertEquals(null, cache.getBlock(singleBlocks[i].cacheKey, true, false));
      assertEquals(null, cache.getBlock(multiBlocks[i].cacheKey, true, false));
      assertEquals(null, cache.getBlock(memoryBlocks[i].cacheKey, true, false));
    }

    // And the newest 5 blocks should still be accessible
    for(int i=5;i<10;i++) {
      assertEquals(singleBlocks[i], cache.getBlock(singleBlocks[i].cacheKey, true, false));
      assertEquals(multiBlocks[i], cache.getBlock(multiBlocks[i].cacheKey, true, false));
      assertEquals(memoryBlocks[i], cache.getBlock(memoryBlocks[i].cacheKey, true, false));
    }
  }

  /**
   * Exercises CacheStats' rolling hit/miss ratios over a 3-period window,
   * checking both the overall and caching-only ratios as periods roll off.
   */
  // test metricsPastNPeriods
  @Test
  public void testPastNPeriodsMetrics() throws Exception {
    double delta = 0.01;

    // 3 total periods
    CacheStats stats = new CacheStats(3);

    // No accesses, should be 0
    stats.rollMetricsPeriod();
    assertEquals(0.0, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(0.0, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 1, 1 hit caching, 1 hit non-caching, 2 miss non-caching
    // should be (2/4)=0.5 and (1/1)=1
    stats.hit(false);
    stats.hit(true);
    stats.miss(false);
    stats.miss(false);
    stats.rollMetricsPeriod();
    assertEquals(0.5, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(1.0, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 2, 1 miss caching, 3 miss non-caching
    // should be (2/8)=0.25 and (1/2)=0.5
    stats.miss(true);
    stats.miss(false);
    stats.miss(false);
    stats.miss(false);
    stats.rollMetricsPeriod();
    assertEquals(0.25, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(0.5, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 3, 2 hits of each type
    // should be (6/12)=0.5 and (3/4)=0.75
    stats.hit(false);
    stats.hit(true);
    stats.hit(false);
    stats.hit(true);
    stats.rollMetricsPeriod();
    assertEquals(0.5, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(0.75, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 4, evict period 1, two caching misses
    // should be (4/10)=0.4 and (2/5)=0.4
    stats.miss(true);
    stats.miss(true);
    stats.rollMetricsPeriod();
    assertEquals(0.4, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(0.4, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 5, evict period 2, 2 caching misses, 2 non-caching hit
    // should be (6/10)=0.6 and (2/6)=1/3
    stats.miss(true);
    stats.miss(true);
    stats.hit(false);
    stats.hit(false);
    stats.rollMetricsPeriod();
    assertEquals(0.6, stats.getHitRatioPastNPeriods(), delta);
    assertEquals((double)1/3, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 6, evict period 3
    // should be (2/6)=1/3 and (0/4)=0
    stats.rollMetricsPeriod();
    assertEquals((double)1/3, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(0.0, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 7, evict period 4
    // should be (2/4)=0.5 and (0/2)=0
    stats.rollMetricsPeriod();
    assertEquals(0.5, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(0.0, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 8, evict period 5
    // should be 0 and 0
    stats.rollMetricsPeriod();
    assertEquals(0.0, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(0.0, stats.getHitCachingRatioPastNPeriods(), delta);

    // period 9, one of each
    // should be (2/4)=0.5 and (1/2)=0.5
    stats.miss(true);
    stats.miss(false);
    stats.hit(true);
    stats.hit(false);
    stats.rollMetricsPeriod();
    assertEquals(0.5, stats.getHitRatioPastNPeriods(), delta);
    assertEquals(0.5, stats.getHitCachingRatioPastNPeriods(), delta);
  }

  /** Builds numBlocks CachedItems of identical size, keyed "pfx0", "pfx1", ... */
  private CachedItem[] generateFixedBlocks(int numBlocks, int size, String pfx) {
    CachedItem[] blocks = new CachedItem[numBlocks];
    for(int i=0;i<numBlocks;i++) {
      blocks[i] = new CachedItem(pfx + i, size);
    }
    return blocks;
  }

  /** long-size overload; narrows to int for the primary generator. */
  private CachedItem[] generateFixedBlocks(int numBlocks, long size, String pfx) {
    return generateFixedBlocks(numBlocks, (int)size, pfx);
  }

  /** Builds numBlocks CachedItems with random sizes in [1, maxSize]. */
  private CachedItem[] generateRandomBlocks(int numBlocks, long maxSize) {
    CachedItem[] blocks = new CachedItem[numBlocks];
    Random r = new Random();
    for(int i=0;i<numBlocks;i++) {
      blocks[i] = new CachedItem("block" + i, r.nextInt((int)maxSize)+1);
    }
    return blocks;
  }

  /**
   * Derives a per-block size such that numBlocks blocks (plus map/cache
   * overhead) fill ~99% of maxSize.
   */
  private long calculateBlockSize(long maxSize, int numBlocks) {
    long roughBlockSize = maxSize / numBlocks;
    int numEntries = (int)Math.ceil((1.2)*maxSize/roughBlockSize);
    long totalOverhead = LruBlockCache.CACHE_FIXED_OVERHEAD +
        ClassSize.CONCURRENT_HASHMAP +
        (numEntries * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
        (LruBlockCache.DEFAULT_CONCURRENCY_LEVEL * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
    long negateBlockSize = (long)(totalOverhead/numEntries);
    negateBlockSize += CachedBlock.PER_BLOCK_OVERHEAD;
    return ClassSize.align((long)Math.floor((roughBlockSize - negateBlockSize)*0.99f));
  }

  /**
   * Like calculateBlockSize but targets the cache's default acceptable-size
   * factor instead of a fixed 99%.
   */
  private long calculateBlockSizeDefault(long maxSize, int numBlocks) {
    long roughBlockSize = maxSize / numBlocks;
    int numEntries = (int)Math.ceil((1.2)*maxSize/roughBlockSize);
    long totalOverhead = LruBlockCache.CACHE_FIXED_OVERHEAD +
        ClassSize.CONCURRENT_HASHMAP +
        (numEntries * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
        (LruBlockCache.DEFAULT_CONCURRENCY_LEVEL * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
    long negateBlockSize = totalOverhead / numEntries;
    negateBlockSize += CachedBlock.PER_BLOCK_OVERHEAD;
    return ClassSize.align((long)Math.floor((roughBlockSize - negateBlockSize)*
        LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));
  }

  /** Minimal Cacheable implementation with a configurable reported heap size. */
  private static class CachedItem implements Cacheable {
    BlockCacheKey cacheKey;
    int size;

    CachedItem(String blockName, int size) {
      this.cacheKey = new BlockCacheKey(blockName, 0);
      this.size = size;
    }

    /** The size of this item reported to the block cache layer */
    @Override
    public long heapSize() {
      return ClassSize.align(size);
    }

    /** Size of the cache block holding this item. Used for verification. */
    public long cacheBlockHeapSize() {
      return CachedBlock.PER_BLOCK_OVERHEAD
          + ClassSize.align(cacheKey.heapSize())
          + ClassSize.align(size);
    }

    @Override
    public BlockType getBlockType() {
      return BlockType.DATA;
    }

    @Override
    public SchemaMetrics getSchemaMetrics() {
      return SchemaMetrics.getUnknownInstanceForTest();
    }

    @Override
    public int getSerializedLength() {
      return 0;
    }

    @Override
    public CacheableDeserializer<Cacheable> getDeserializer() {
      return null;
    }

    // Serialization is irrelevant for these in-memory test items: no-op.
    @Override
    public void serialize(ByteBuffer destination) {
    }
  }

  @org.junit.Rule
  public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
    new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
}
package org.keycloak.models.utils; import net.iharder.Base64; import org.jboss.logging.Logger; import org.keycloak.enums.SslRequired; import org.keycloak.migration.MigrationProvider; import org.keycloak.models.AuthenticationExecutionModel; import org.keycloak.models.AuthenticationFlowModel; import org.keycloak.models.AuthenticatorConfigModel; import org.keycloak.models.BrowserSecurityHeaders; import org.keycloak.models.ClaimMask; import org.keycloak.models.ClientModel; import org.keycloak.models.FederatedIdentityModel; import org.keycloak.models.IdentityProviderMapperModel; import org.keycloak.models.IdentityProviderModel; import org.keycloak.models.KeycloakSession; import org.keycloak.models.ModelException; import org.keycloak.models.PasswordPolicy; import org.keycloak.models.ProtocolMapperModel; import org.keycloak.models.RealmModel; import org.keycloak.models.RoleModel; import org.keycloak.models.UserConsentModel; import org.keycloak.models.UserCredentialModel; import org.keycloak.models.UserCredentialValueModel; import org.keycloak.models.UserFederationMapperModel; import org.keycloak.models.UserFederationProviderModel; import org.keycloak.models.UserModel; import org.keycloak.representations.idm.ApplicationRepresentation; import org.keycloak.representations.idm.AuthenticationExecutionRepresentation; import org.keycloak.representations.idm.AuthenticationFlowRepresentation; import org.keycloak.representations.idm.AuthenticatorConfigRepresentation; import org.keycloak.representations.idm.ClaimRepresentation; import org.keycloak.representations.idm.ClientRepresentation; import org.keycloak.representations.idm.CredentialRepresentation; import org.keycloak.representations.idm.FederatedIdentityRepresentation; import org.keycloak.representations.idm.IdentityProviderMapperRepresentation; import org.keycloak.representations.idm.IdentityProviderRepresentation; import org.keycloak.representations.idm.OAuthClientRepresentation; import 
org.keycloak.representations.idm.ProtocolMapperRepresentation;
import org.keycloak.representations.idm.RealmRepresentation;
import org.keycloak.representations.idm.RoleRepresentation;
import org.keycloak.representations.idm.ScopeMappingRepresentation;
import org.keycloak.representations.idm.SocialLinkRepresentation;
import org.keycloak.representations.idm.UserConsentRepresentation;
import org.keycloak.representations.idm.UserFederationMapperRepresentation;
import org.keycloak.representations.idm.UserFederationProviderRepresentation;
import org.keycloak.representations.idm.UserRepresentation;
import org.keycloak.util.UriUtils;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;

/**
 * Converts REST representation objects ({@code org.keycloak.representations.idm.*})
 * into the corresponding Keycloak model objects ({@code org.keycloak.models.*}),
 * both for full realm import and for incremental updates.
 */
public class RepresentationToModel {

    private static Logger logger = Logger.getLogger(RepresentationToModel.class);

    /**
     * Populates {@code newRealm} from a full realm representation: settings, keys,
     * themes, roles, clients, scope mappings, federation providers/mappers, users
     * and authentication flows. Fields absent from the JSON keep defaults (several
     * lifespans get explicit fallback values below).
     */
    public static void importRealm(KeycloakSession session, RealmRepresentation rep, RealmModel newRealm) {
        // Rewrite legacy "social" / "applications" JSON into the current representation first.
        convertDeprecatedSocialProviders(rep);
        convertDeprecatedApplications(session, rep);

        newRealm.setName(rep.getRealm());
        if (rep.isEnabled() != null) newRealm.setEnabled(rep.isEnabled());

        // Brute-force protection settings — applied only when present in the JSON.
        if (rep.isBruteForceProtected() != null) newRealm.setBruteForceProtected(rep.isBruteForceProtected());
        if (rep.getMaxFailureWaitSeconds() != null) newRealm.setMaxFailureWaitSeconds(rep.getMaxFailureWaitSeconds());
        if (rep.getMinimumQuickLoginWaitSeconds() != null) newRealm.setMinimumQuickLoginWaitSeconds(rep.getMinimumQuickLoginWaitSeconds());
        if (rep.getWaitIncrementSeconds() != null) newRealm.setWaitIncrementSeconds(rep.getWaitIncrementSeconds());
        if (rep.getQuickLoginCheckMilliSeconds() != null) newRealm.setQuickLoginCheckMilliSeconds(rep.getQuickLoginCheckMilliSeconds());
        if (rep.getMaxDeltaTimeSeconds() != null) newRealm.setMaxDeltaTimeSeconds(rep.getMaxDeltaTimeSeconds());
        if (rep.getFailureFactor() != null) newRealm.setFailureFactor(rep.getFailureFactor());

        // Event / audit configuration.
        if (rep.isEventsEnabled() != null) newRealm.setEventsEnabled(rep.isEventsEnabled());
        if (rep.getEventsExpiration() != null) newRealm.setEventsExpiration(rep.getEventsExpiration());
        if (rep.getEventsListeners() != null) newRealm.setEventsListeners(new HashSet<>(rep.getEventsListeners()));
        if (rep.isAdminEventsEnabled() != null) newRealm.setAdminEventsEnabled(rep.isAdminEventsEnabled());
        if (rep.isAdminEventsDetailsEnabled() != null) newRealm.setAdminEventsDetailsEnabled(rep.isAdminEventsDetailsEnabled());

        if (rep.getNotBefore() != null) newRealm.setNotBefore(rep.getNotBefore());

        // Token/session lifespans: unlike most fields, these fall back to explicit
        // defaults (in seconds) when missing from the representation.
        if (rep.getAccessTokenLifespan() != null) newRealm.setAccessTokenLifespan(rep.getAccessTokenLifespan());
        else newRealm.setAccessTokenLifespan(300);
        if (rep.getSsoSessionIdleTimeout() != null) newRealm.setSsoSessionIdleTimeout(rep.getSsoSessionIdleTimeout());
        else newRealm.setSsoSessionIdleTimeout(1800);
        if (rep.getSsoSessionMaxLifespan() != null) newRealm.setSsoSessionMaxLifespan(rep.getSsoSessionMaxLifespan());
        else newRealm.setSsoSessionMaxLifespan(36000);
        if (rep.getAccessCodeLifespan() != null) newRealm.setAccessCodeLifespan(rep.getAccessCodeLifespan());
        else newRealm.setAccessCodeLifespan(60);
        if (rep.getAccessCodeLifespanUserAction() != null) newRealm.setAccessCodeLifespanUserAction(rep.getAccessCodeLifespanUserAction());
        else newRealm.setAccessCodeLifespanUserAction(300);
        if (rep.getAccessCodeLifespanLogin() != null) newRealm.setAccessCodeLifespanLogin(rep.getAccessCodeLifespanLogin());
        else newRealm.setAccessCodeLifespanLogin(1800);

        if (rep.getSslRequired() != null) newRealm.setSslRequired(SslRequired.valueOf(rep.getSslRequired().toUpperCase()));
        if (rep.isRegistrationAllowed() != null) newRealm.setRegistrationAllowed(rep.isRegistrationAllowed());
        if (rep.isRegistrationEmailAsUsername() != null) newRealm.setRegistrationEmailAsUsername(rep.isRegistrationEmailAsUsername());
        if (rep.isRememberMe() != null) newRealm.setRememberMe(rep.isRememberMe());
        if (rep.isVerifyEmail() != null) newRealm.setVerifyEmail(rep.isVerifyEmail());
        if (rep.isResetPasswordAllowed() != null) newRealm.setResetPasswordAllowed(rep.isResetPasswordAllowed());
        if (rep.isEditUsernameAllowed() != null) newRealm.setEditUsernameAllowed(rep.isEditUsernameAllowed());

        // Realm key pair: generate a fresh pair unless BOTH halves are supplied.
        if (rep.getPrivateKey() == null || rep.getPublicKey() == null) {
            KeycloakModelUtils.generateRealmKeys(newRealm);
        } else {
            newRealm.setPrivateKeyPem(rep.getPrivateKey());
            newRealm.setPublicKeyPem(rep.getPublicKey());
        }
        if (rep.getCertificate() == null) {
            KeycloakModelUtils.generateRealmCertificate(newRealm);
        } else {
            newRealm.setCertificatePem(rep.getCertificate());
        }
        if (rep.getCodeSecret() == null) {
            newRealm.setCodeSecret(KeycloakModelUtils.generateCodeSecret());
        } else {
            newRealm.setCodeSecret(rep.getCodeSecret());
        }

        // Themes.
        if (rep.getLoginTheme() != null) newRealm.setLoginTheme(rep.getLoginTheme());
        if (rep.getAccountTheme() != null) newRealm.setAccountTheme(rep.getAccountTheme());
        if (rep.getAdminTheme() != null) newRealm.setAdminTheme(rep.getAdminTheme());
        if (rep.getEmailTheme() != null) newRealm.setEmailTheme(rep.getEmailTheme());

        // Required credentials default to password when none are listed.
        if (rep.getRequiredCredentials() != null) {
            for (String requiredCred : rep.getRequiredCredentials()) {
                addRequiredCredential(newRealm, requiredCred);
            }
        } else {
            addRequiredCredential(newRealm, CredentialRepresentation.PASSWORD);
        }
        if (rep.getPasswordPolicy() != null) newRealm.setPasswordPolicy(new PasswordPolicy(rep.getPasswordPolicy()));

        importIdentityProviders(rep, newRealm);
        importIdentityProviderMappers(rep, newRealm);

        if (rep.getClients() != null) {
            createClients(session, rep, newRealm);
        }

        if (rep.getRoles() != null) {
            if (rep.getRoles().getRealm() != null) { // realm roles
                for (RoleRepresentation roleRep : rep.getRoles().getRealm()) {
                    createRole(newRealm, roleRep);
                }
            }
            if (rep.getRoles().getClient() != null) {
                for (Map.Entry<String, List<RoleRepresentation>> entry :
rep.getRoles().getClient().entrySet()) {
                    ClientModel client = newRealm.getClientByClientId(entry.getKey());
                    if (client == null) {
                        throw new RuntimeException("App doesn't exist in role definitions: " + entry.getKey());
                    }
                    for (RoleRepresentation roleRep : entry.getValue()) {
                        // Application role may already exists (for example if it is defaultRole)
                        RoleModel role = roleRep.getId()!=null ? client.addRole(roleRep.getId(), roleRep.getName()) : client.addRole(roleRep.getName());
                        role.setDescription(roleRep.getDescription());
                    }
                }
            }

            // now that all roles are created, re-iterate and set up composites
            if (rep.getRoles().getRealm() != null) { // realm roles
                for (RoleRepresentation roleRep : rep.getRoles().getRealm()) {
                    RoleModel role = newRealm.getRole(roleRep.getName());
                    addComposites(role, roleRep, newRealm);
                }
            }
            if (rep.getRoles().getClient() != null) {
                for (Map.Entry<String, List<RoleRepresentation>> entry : rep.getRoles().getClient().entrySet()) {
                    ClientModel client = newRealm.getClientByClientId(entry.getKey());
                    if (client == null) {
                        throw new RuntimeException("App doesn't exist in role definitions: " + entry.getKey());
                    }
                    for (RoleRepresentation roleRep : entry.getValue()) {
                        RoleModel role = client.getRole(roleRep.getName());
                        addComposites(role, roleRep, newRealm);
                    }
                }
            }
        }

        // Setup realm default roles
        if (rep.getDefaultRoles() != null) {
            for (String roleString : rep.getDefaultRoles()) {
                newRealm.addDefaultRole(roleString.trim());
            }
        }
        // Setup client default roles
        if (rep.getClients() != null) {
            for (ClientRepresentation resourceRep : rep.getClients()) {
                if (resourceRep.getDefaultRoles() != null) {
                    ClientModel clientModel = newRealm.getClientByClientId(resourceRep.getClientId());
                    clientModel.updateDefaultRoles(resourceRep.getDefaultRoles());
                }
            }
        }

        // Now that all possible roles and clients are created, create scope mappings
        Map<String, ClientModel> appMap = newRealm.getClientNameMap();
        if (rep.getClientScopeMappings() != null) {
            for (Map.Entry<String, List<ScopeMappingRepresentation>> entry : rep.getClientScopeMappings().entrySet()) {
                ClientModel app = appMap.get(entry.getKey());
                if (app == null) {
                    throw new RuntimeException("Unable to find client role mappings for client: " + entry.getKey());
                }
                createClientScopeMappings(newRealm, app, entry.getValue());
            }
        }
        if (rep.getScopeMappings() != null) {
            for (ScopeMappingRepresentation scope : rep.getScopeMappings()) {
                ClientModel client = newRealm.getClientByClientId(scope.getClient());
                if (client == null) {
                    throw new RuntimeException("Unknown client specification in realm scope mappings");
                }
                for (String roleString : scope.getRoles()) {
                    // Realm roles referenced by scope mappings are created on demand.
                    RoleModel role = newRealm.getRole(roleString.trim());
                    if (role == null) {
                        role = newRealm.addRole(roleString.trim());
                    }
                    client.addScopeMapping(role);
                }
            }
        }

        if (rep.getSmtpServer() != null) {
            newRealm.setSmtpConfig(new HashMap(rep.getSmtpServer()));
        }

        if (rep.getBrowserSecurityHeaders() != null) {
            newRealm.setBrowserSecurityHeaders(rep.getBrowserSecurityHeaders());
        } else {
            newRealm.setBrowserSecurityHeaders(BrowserSecurityHeaders.defaultHeaders);
        }

        List<UserFederationProviderModel> providerModels = null;
        if (rep.getUserFederationProviders() != null) {
            providerModels = convertFederationProviders(rep.getUserFederationProviders());
            newRealm.setUserFederationProviders(providerModels);
        }
        if (rep.getUserFederationMappers() != null) {
            // Remove builtin mappers for federation providers, which have some mappers already provided in JSON (likely due to previous export)
            if (rep.getUserFederationProviders() != null) {
                Set<String> providerNames = new TreeSet<String>();
                for (UserFederationMapperRepresentation representation : rep.getUserFederationMappers()) {
                    providerNames.add(representation.getFederationProviderDisplayName());
                }
                for (String providerName : providerNames) {
                    for (UserFederationProviderModel providerModel : providerModels) {
                        if (providerName.equals(providerModel.getDisplayName())) {
                            Set<UserFederationMapperModel> toDelete = newRealm.getUserFederationMappersByFederationProvider(providerModel.getId());
                            for (UserFederationMapperModel mapperModel : toDelete) {
                                newRealm.removeUserFederationMapper(mapperModel);
                            }
                        }
                    }
                }
            }

            for (UserFederationMapperRepresentation representation : rep.getUserFederationMappers()) {
                newRealm.addUserFederationMapper(toModel(newRealm, representation));
            }
        }

        // create users and their role mappings and social mappings
        if (rep.getUsers() != null) {
            for (UserRepresentation userRep : rep.getUsers()) {
                UserModel user = createUser(session, newRealm, userRep, appMap);
            }
        }

        // Internationalization settings.
        if(rep.isInternationalizationEnabled() != null){
            newRealm.setInternationalizationEnabled(rep.isInternationalizationEnabled());
        }
        if(rep.getSupportedLocales() != null){
            newRealm.setSupportedLocales(new HashSet<String>(rep.getSupportedLocales()));
        }
        if(rep.getDefaultLocale() != null){
            newRealm.setDefaultLocale(rep.getDefaultLocale());
        }

        importAuthenticationFlows(newRealm, rep);
    }

    /**
     * Imports authenticator configs and authentication flows into the realm.
     * When the representation carries no flows (old export format), installs the
     * default flow set instead. Flows are created first, then executions are
     * attached in a second pass once every flow id exists.
     */
    public static void importAuthenticationFlows(RealmModel newRealm, RealmRepresentation rep) {
        if (rep.getAuthenticationFlows() == null) {
            // assume this is an old version being imported
            DefaultAuthenticationFlows.addFlows(newRealm);
        } else {
            for (AuthenticatorConfigRepresentation configRep : rep.getAuthenticatorConfig()) {
                AuthenticatorConfigModel model = toModel(configRep);
                newRealm.addAuthenticatorConfig(model);
            }
            for (AuthenticationFlowRepresentation flowRep : rep.getAuthenticationFlows()) {
                AuthenticationFlowModel model = toModel(flowRep);
                model = newRealm.addAuthenticationFlow(model);
            }
            for (AuthenticationFlowRepresentation flowRep : rep.getAuthenticationFlows()) {
                AuthenticationFlowModel model = newRealm.getFlowByAlias(flowRep.getAlias());
                for (AuthenticationExecutionRepresentation exeRep : flowRep.getAuthenticationExecutions()) {
                    AuthenticationExecutionModel execution = toModel(newRealm, exeRep);
                    execution.setParentFlow(model.getId());
                    newRealm.addAuthenticatorExecution(execution);
                }
            }
        }
    }

    private static void 
convertDeprecatedSocialProviders(RealmRepresentation rep) {
        // Legacy "social" realm config: translate each "<provider>.key"/"<provider>.secret"
        // pair into an IdentityProviderRepresentation, but only when no identity
        // providers were explicitly supplied.
        if (rep.isSocial() != null && rep.isSocial() && rep.getSocialProviders() != null && !rep.getSocialProviders().isEmpty() && rep.getIdentityProviders() == null) {
            Boolean updateProfileFirstLogin = rep.isUpdateProfileOnInitialSocialLogin() != null && rep.isUpdateProfileOnInitialSocialLogin();
            if (rep.getSocialProviders() != null) {
                logger.warn("Using deprecated 'social' configuration in JSON representation. It will be removed in future versions");
                List<IdentityProviderRepresentation> identityProviders = new LinkedList<>();
                for (String k : rep.getSocialProviders().keySet()) {
                    if (k.endsWith(".key")) {
                        // Provider alias is the prefix before the first dot.
                        String providerId = k.split("\\.")[0];
                        String key = rep.getSocialProviders().get(k);
                        String secret = rep.getSocialProviders().get(k.replace(".key", ".secret"));
                        IdentityProviderRepresentation identityProvider = new IdentityProviderRepresentation();
                        identityProvider.setAlias(providerId);
                        identityProvider.setProviderId(providerId);
                        identityProvider.setEnabled(true);
                        identityProvider.setUpdateProfileFirstLogin(updateProfileFirstLogin);
                        Map<String, String> config = new HashMap<>();
                        config.put("clientId", key);
                        config.put("clientSecret", secret);
                        identityProvider.setConfig(config);
                        identityProviders.add(identityProvider);
                    }
                }
                rep.setIdentityProviders(identityProviders);
            }
        }
    }

    /**
     * Rewrites a user's deprecated 'socialLinks' into 'federatedIdentities'.
     * Always clears socialLinks afterwards, even when no conversion happened.
     */
    private static void convertDeprecatedSocialProviders(UserRepresentation user) {
        if (user.getSocialLinks() != null && !user.getSocialLinks().isEmpty() && user.getFederatedIdentities() == null) {
            logger.warnf("Using deprecated 'socialLinks' configuration in JSON representation for user '%s'. It will be removed in future versions", user.getUsername());
            List<FederatedIdentityRepresentation> federatedIdentities = new LinkedList<>();
            for (SocialLinkRepresentation social : user.getSocialLinks()) {
                FederatedIdentityRepresentation federatedIdentity = new FederatedIdentityRepresentation();
                federatedIdentity.setIdentityProvider(social.getSocialProvider());
                federatedIdentity.setUserId(social.getSocialUserId());
                federatedIdentity.setUserName(social.getSocialUsername());
                federatedIdentities.add(federatedIdentity);
            }
            user.setFederatedIdentities(federatedIdentities);
        }
        user.setSocialLinks(null);
    }

    /**
     * Rewrites deprecated 'applications' / 'oauthClients' sections (and the related
     * application-keyed role/scope structures) into the unified 'clients' form.
     * Old claim masks are converted to protocol mappers via the MigrationProvider.
     */
    private static void convertDeprecatedApplications(KeycloakSession session, RealmRepresentation realm) {
        if (realm.getApplications() != null || realm.getOauthClients() != null) {
            if (realm.getClients() == null) {
                realm.setClients(new LinkedList<ClientRepresentation>());
            }
            List<ApplicationRepresentation> clients = new LinkedList<>();
            if (realm.getApplications() != null) {
                clients.addAll(realm.getApplications());
            }
            if (realm.getOauthClients() != null) {
                clients.addAll(realm.getOauthClients());
            }
            for (ApplicationRepresentation app : clients) {
                // Old "name" becomes the clientId.
                app.setClientId(app.getName());
                app.setName(null);
                if (app instanceof OAuthClientRepresentation) {
                    app.setConsentRequired(true);
                    app.setFullScopeAllowed(false);
                }
                if (app.getProtocolMappers() == null && app.getClaims() != null) {
                    long mask = getClaimsMask(app.getClaims());
                    List<ProtocolMapperRepresentation> convertedProtocolMappers = session.getProvider(MigrationProvider.class).getMappersForClaimMask(mask);
                    app.setProtocolMappers(convertedProtocolMappers);
                    app.setClaims(null);
                }
                realm.getClients().add(app);
            }
        }
        if (realm.getApplicationScopeMappings() != null && realm.getClientScopeMappings() == null) {
            realm.setClientScopeMappings(realm.getApplicationScopeMappings());
        }
        if (realm.getRoles() != null && realm.getRoles().getApplication() != null && realm.getRoles().getClient() == null) {
            realm.getRoles().setClient(realm.getRoles().getApplication());
        }
        if (realm.getUsers() != null) {
            for (UserRepresentation user : realm.getUsers()) {
                if (user.getApplicationRoles() != null && user.getClientRoles() == null) {
                    user.setClientRoles(user.getApplicationRoles());
                }
            }
        }
        // Composite roles may also still reference "application" instead of "client".
        if (realm.getRoles() != null && realm.getRoles().getRealm() != null) {
            for (RoleRepresentation role : realm.getRoles().getRealm()) {
                if (role.getComposites() != null && role.getComposites().getApplication() != null && role.getComposites().getClient() == null) {
                    role.getComposites().setClient(role.getComposites().getApplication());
                }
            }
        }
        if (realm.getRoles() != null && realm.getRoles().getClient() != null) {
            for (Map.Entry<String, List<RoleRepresentation>> clientRoles : realm.getRoles().getClient().entrySet()) {
                for (RoleRepresentation role : clientRoles.getValue()) {
                    if (role.getComposites() != null && role.getComposites().getApplication() != null && role.getComposites().getClient() == null) {
                        role.getComposites().setClient(role.getComposites().getApplication());
                    }
                }
            }
        }
    }

    /**
     * Applies the non-null fields of {@code rep} onto an existing realm. Unlike
     * importRealm, no defaults are substituted for absent fields, and a public key
     * value of "GENERATE" triggers regeneration of the realm key pair.
     */
    public static void updateRealm(RealmRepresentation rep, RealmModel realm) {
        if (rep.getRealm() != null) {
            realm.setName(rep.getRealm());
        }
        if (rep.isEnabled() != null) realm.setEnabled(rep.isEnabled());
        if (rep.isBruteForceProtected() != null) realm.setBruteForceProtected(rep.isBruteForceProtected());
        if (rep.getMaxFailureWaitSeconds() != null) realm.setMaxFailureWaitSeconds(rep.getMaxFailureWaitSeconds());
        if (rep.getMinimumQuickLoginWaitSeconds() != null) realm.setMinimumQuickLoginWaitSeconds(rep.getMinimumQuickLoginWaitSeconds());
        if (rep.getWaitIncrementSeconds() != null) realm.setWaitIncrementSeconds(rep.getWaitIncrementSeconds());
        if (rep.getQuickLoginCheckMilliSeconds() != null) realm.setQuickLoginCheckMilliSeconds(rep.getQuickLoginCheckMilliSeconds());
        if (rep.getMaxDeltaTimeSeconds() != null) realm.setMaxDeltaTimeSeconds(rep.getMaxDeltaTimeSeconds());
        if (rep.getFailureFactor() != null) 
realm.setFailureFactor(rep.getFailureFactor());
        if (rep.isRegistrationAllowed() != null) realm.setRegistrationAllowed(rep.isRegistrationAllowed());
        if (rep.isRegistrationEmailAsUsername() != null) realm.setRegistrationEmailAsUsername(rep.isRegistrationEmailAsUsername());
        if (rep.isRememberMe() != null) realm.setRememberMe(rep.isRememberMe());
        if (rep.isVerifyEmail() != null) realm.setVerifyEmail(rep.isVerifyEmail());
        if (rep.isResetPasswordAllowed() != null) realm.setResetPasswordAllowed(rep.isResetPasswordAllowed());
        if (rep.isEditUsernameAllowed() != null) realm.setEditUsernameAllowed(rep.isEditUsernameAllowed());
        if (rep.getSslRequired() != null) realm.setSslRequired(SslRequired.valueOf(rep.getSslRequired().toUpperCase()));
        if (rep.getAccessCodeLifespan() != null) realm.setAccessCodeLifespan(rep.getAccessCodeLifespan());
        if (rep.getAccessCodeLifespanUserAction() != null) realm.setAccessCodeLifespanUserAction(rep.getAccessCodeLifespanUserAction());
        if (rep.getAccessCodeLifespanLogin() != null) realm.setAccessCodeLifespanLogin(rep.getAccessCodeLifespanLogin());
        if (rep.getNotBefore() != null) realm.setNotBefore(rep.getNotBefore());
        if (rep.getAccessTokenLifespan() != null) realm.setAccessTokenLifespan(rep.getAccessTokenLifespan());
        if (rep.getSsoSessionIdleTimeout() != null) realm.setSsoSessionIdleTimeout(rep.getSsoSessionIdleTimeout());
        if (rep.getSsoSessionMaxLifespan() != null) realm.setSsoSessionMaxLifespan(rep.getSsoSessionMaxLifespan());
        if (rep.getRequiredCredentials() != null) {
            realm.updateRequiredCredentials(rep.getRequiredCredentials());
        }
        if (rep.getLoginTheme() != null) realm.setLoginTheme(rep.getLoginTheme());
        if (rep.getAccountTheme() != null) realm.setAccountTheme(rep.getAccountTheme());
        if (rep.getAdminTheme() != null) realm.setAdminTheme(rep.getAdminTheme());
        if (rep.getEmailTheme() != null) realm.setEmailTheme(rep.getEmailTheme());
        if (rep.isEventsEnabled() != null) realm.setEventsEnabled(rep.isEventsEnabled());
        if (rep.getEventsExpiration() != null) realm.setEventsExpiration(rep.getEventsExpiration());
        if (rep.getEventsListeners() != null) realm.setEventsListeners(new HashSet<>(rep.getEventsListeners()));
        if (rep.getEnabledEventTypes() != null) realm.setEnabledEventTypes(new HashSet<>(rep.getEnabledEventTypes()));
        if (rep.isAdminEventsEnabled() != null) realm.setAdminEventsEnabled(rep.isAdminEventsEnabled());
        if (rep.isAdminEventsDetailsEnabled() != null) realm.setAdminEventsDetailsEnabled(rep.isAdminEventsDetailsEnabled());
        if (rep.getPasswordPolicy() != null) realm.setPasswordPolicy(new PasswordPolicy(rep.getPasswordPolicy()));
        if (rep.getDefaultRoles() != null) {
            realm.updateDefaultRoles(rep.getDefaultRoles().toArray(new String[rep.getDefaultRoles().size()]));
        }
        if (rep.getSmtpServer() != null) {
            realm.setSmtpConfig(new HashMap(rep.getSmtpServer()));
        }
        if (rep.getBrowserSecurityHeaders() != null) {
            realm.setBrowserSecurityHeaders(rep.getBrowserSecurityHeaders());
        }
        if (rep.getUserFederationProviders() != null) {
            List<UserFederationProviderModel> providerModels = convertFederationProviders(rep.getUserFederationProviders());
            realm.setUserFederationProviders(providerModels);
        }

        // Sentinel value requesting a new key pair instead of supplying one.
        if ("GENERATE".equals(rep.getPublicKey())) {
            KeycloakModelUtils.generateRealmKeys(realm);
        }

        if(rep.isInternationalizationEnabled() != null){
            realm.setInternationalizationEnabled(rep.isInternationalizationEnabled());
        }
        if(rep.getSupportedLocales() != null){
            realm.setSupportedLocales(new HashSet<String>(rep.getSupportedLocales()));
        }
        if(rep.getDefaultLocale() != null){
            realm.setDefaultLocale(rep.getDefaultLocale());
        }
    }

    // Basic realm stuff

    /** Registers a required credential type (e.g. password) on the realm. */
    public static void addRequiredCredential(RealmModel newRealm, String requiredCred) {
        newRealm.addRequiredCredential(requiredCred);
    }

    /** Maps federation-provider representations 1:1 onto their model objects. */
    private static List<UserFederationProviderModel> convertFederationProviders(List<UserFederationProviderRepresentation> providers) {
        List<UserFederationProviderModel> result = new ArrayList<UserFederationProviderModel>();
        for (UserFederationProviderRepresentation representation : providers) {
            UserFederationProviderModel model = new UserFederationProviderModel(representation.getId(), representation.getProviderName(), representation.getConfig(), representation.getPriority(), representation.getDisplayName(),
                    representation.getFullSyncPeriod(), representation.getChangedSyncPeriod(), representation.getLastSync());
            result.add(model);
        }
        return result;
    }

    /**
     * Converts a federation-mapper representation to its model, resolving the
     * referenced federation provider by display name.
     *
     * @throws ModelException if the named federation provider does not exist in the realm
     */
    public static UserFederationMapperModel toModel(RealmModel realm, UserFederationMapperRepresentation rep) {
        UserFederationMapperModel model = new UserFederationMapperModel();
        model.setId(rep.getId());
        model.setName(rep.getName());
        model.setFederationMapperType(rep.getFederationMapperType());
        model.setConfig(rep.getConfig());

        UserFederationProviderModel fedProvider = KeycloakModelUtils.findUserFederationProviderByDisplayName(rep.getFederationProviderDisplayName(), realm);
        if (fedProvider == null) {
            // NOTE(review): message opens "[" after the mapper name but never closes it — cosmetic only.
            throw new ModelException("Couldn't find federation provider with display name [" + rep.getFederationProviderDisplayName() + "] referenced from mapper [" + rep.getName());
        }
        model.setFederationProviderId(fedProvider.getId());
        return model;
    }

    // Roles

    /** Creates a realm role from its representation, preserving an explicit id when present. */
    public static void createRole(RealmModel newRealm, RoleRepresentation roleRep) {
        RoleModel role = roleRep.getId()!=null ? 
realm.addClient(resourceRep.getId(), resourceRep.getClientId()) : realm.addClient(resourceRep.getClientId());
        if (resourceRep.getName() != null) client.setName(resourceRep.getName());
        if (resourceRep.isEnabled() != null) client.setEnabled(resourceRep.isEnabled());
        client.setManagementUrl(resourceRep.getAdminUrl());
        if (resourceRep.isSurrogateAuthRequired() != null) client.setSurrogateAuthRequired(resourceRep.isSurrogateAuthRequired());
        if (resourceRep.getBaseUrl() != null) client.setBaseUrl(resourceRep.getBaseUrl());
        if (resourceRep.isBearerOnly() != null) client.setBearerOnly(resourceRep.isBearerOnly());
        if (resourceRep.isConsentRequired() != null) client.setConsentRequired(resourceRep.isConsentRequired());
        if (resourceRep.isDirectGrantsOnly() != null) client.setDirectGrantsOnly(resourceRep.isDirectGrantsOnly());
        if (resourceRep.isPublicClient() != null) client.setPublicClient(resourceRep.isPublicClient());
        if (resourceRep.isFrontchannelLogout() != null) client.setFrontchannelLogout(resourceRep.isFrontchannelLogout());
        if (resourceRep.getProtocol() != null) client.setProtocol(resourceRep.getProtocol());
        if (resourceRep.isFullScopeAllowed() != null) {
            client.setFullScopeAllowed(resourceRep.isFullScopeAllowed());
        } else {
            // Default: full scope unless the client requires consent.
            client.setFullScopeAllowed(!client.isConsentRequired());
        }
        if (resourceRep.getNodeReRegistrationTimeout() != null) {
            client.setNodeReRegistrationTimeout(resourceRep.getNodeReRegistrationTimeout());
        } else {
            client.setNodeReRegistrationTimeout(-1);
        }
        client.updateClient();

        if (resourceRep.getNotBefore() != null) {
            client.setNotBefore(resourceRep.getNotBefore());
        }

        client.setSecret(resourceRep.getSecret());
        if (client.getSecret() == null) {
            KeycloakModelUtils.generateSecret(client);
        }

        if (resourceRep.getAttributes() != null) {
            for (Map.Entry<String, String> entry : resourceRep.getAttributes().entrySet()) {
                client.setAttribute(entry.getKey(), entry.getValue());
            }
        }

        if (resourceRep.getRedirectUris() != null) {
            for (String redirectUri : resourceRep.getRedirectUris()) {
                client.addRedirectUri(redirectUri);
            }
        }
        if (resourceRep.getWebOrigins() != null) {
            for (String webOrigin : resourceRep.getWebOrigins()) {
                logger.debugv("Client: {0} webOrigin: {1}", resourceRep.getClientId(), webOrigin);
                client.addWebOrigin(webOrigin);
            }
        } else {
            // add origins from redirect uris
            if (resourceRep.getRedirectUris() != null) {
                Set<String> origins = new HashSet<String>();
                for (String redirectUri : resourceRep.getRedirectUris()) {
                    logger.debugv("add redirect-uri to origin: {0}", redirectUri);
                    if (redirectUri.startsWith("http")) {
                        String origin = UriUtils.getOrigin(redirectUri);
                        logger.debugv("adding default client origin: {0}" , origin);
                        origins.add(origin);
                    }
                }
                if (origins.size() > 0) {
                    client.setWebOrigins(origins);
                }
            }
        }

        if (resourceRep.getRegisteredNodes() != null) {
            for (Map.Entry<String, Integer> entry : resourceRep.getRegisteredNodes().entrySet()) {
                client.registerNode(entry.getKey(), entry.getValue());
            }
        }

        if (addDefaultRoles && resourceRep.getDefaultRoles() != null) {
            client.updateDefaultRoles(resourceRep.getDefaultRoles());
        }

        if (resourceRep.getProtocolMappers() != null) {
            // first, remove all default/built in mappers
            Set<ProtocolMapperModel> mappers = client.getProtocolMappers();
            for (ProtocolMapperModel mapper : mappers) client.removeProtocolMapper(mapper);

            for (ProtocolMapperRepresentation mapper : resourceRep.getProtocolMappers()) {
                client.addProtocolMapper(toModel(mapper));
            }
        }

        return client;
    }

    /**
     * Applies the non-null fields of {@code rep} onto an existing client.
     * Redirect URIs and web origins are replaced wholesale (not merged).
     */
    public static void updateClient(ClientRepresentation rep, ClientModel resource) {
        if (rep.getClientId() != null) resource.setClientId(rep.getClientId());
        if (rep.getName() != null) resource.setName(rep.getName());
        if (rep.isEnabled() != null) resource.setEnabled(rep.isEnabled());
        if (rep.isBearerOnly() != null) resource.setBearerOnly(rep.isBearerOnly());
        if (rep.isConsentRequired() != null) resource.setConsentRequired(rep.isConsentRequired());
        if (rep.isDirectGrantsOnly() != null) resource.setDirectGrantsOnly(rep.isDirectGrantsOnly());
        if (rep.isPublicClient() != null) resource.setPublicClient(rep.isPublicClient());
        if (rep.isFullScopeAllowed() != null) resource.setFullScopeAllowed(rep.isFullScopeAllowed());
        if (rep.isFrontchannelLogout() != null) resource.setFrontchannelLogout(rep.isFrontchannelLogout());
        if (rep.getAdminUrl() != null) resource.setManagementUrl(rep.getAdminUrl());
        if (rep.getBaseUrl() != null) resource.setBaseUrl(rep.getBaseUrl());
        if (rep.isSurrogateAuthRequired() != null) resource.setSurrogateAuthRequired(rep.isSurrogateAuthRequired());
        if (rep.getNodeReRegistrationTimeout() != null) resource.setNodeReRegistrationTimeout(rep.getNodeReRegistrationTimeout());
        resource.updateClient();

        if (rep.getProtocol() != null) resource.setProtocol(rep.getProtocol());
        if (rep.getAttributes() != null) {
            for (Map.Entry<String, String> entry : rep.getAttributes().entrySet()) {
                resource.setAttribute(entry.getKey(), entry.getValue());
            }
        }

        if (rep.getNotBefore() != null) {
            resource.setNotBefore(rep.getNotBefore());
        }
        if (rep.getDefaultRoles() != null) {
            resource.updateDefaultRoles(rep.getDefaultRoles());
        }
        List<String> redirectUris = rep.getRedirectUris();
        if (redirectUris != null) {
            resource.setRedirectUris(new HashSet<String>(redirectUris));
        }
        List<String> webOrigins = rep.getWebOrigins();
        if (webOrigins != null) {
            resource.setWebOrigins(new HashSet<String>(webOrigins));
        }
        if (rep.getRegisteredNodes() != null) {
            for (Map.Entry<String, Integer> entry : rep.getRegisteredNodes().entrySet()) {
                resource.registerNode(entry.getKey(), entry.getValue());
            }
        }
    }

    /**
     * Builds a ClaimMask bitmask from the legacy claims representation: starts from
     * ClaimMask.ALL, then sets or clears each bit according to the boolean flags.
     */
    public static long getClaimsMask(ClaimRepresentation rep) {
        long mask = ClaimMask.ALL;

        if (rep.getAddress()) {
            mask |= ClaimMask.ADDRESS;
        } else {
            mask &= ~ClaimMask.ADDRESS;
        }
        if (rep.getEmail()) {
            mask |= ClaimMask.EMAIL;
        } else {
            mask &= ~ClaimMask.EMAIL;
        }
        if (rep.getGender()) {
            mask |= ClaimMask.GENDER;
        } else {
            mask &= ~ClaimMask.GENDER;
        }
        if (rep.getLocale()) {
            mask |= ClaimMask.LOCALE;
} else { mask &= ~ClaimMask.LOCALE; } if (rep.getName()) { mask |= ClaimMask.NAME; } else { mask &= ~ClaimMask.NAME; } if (rep.getPhone()) { mask |= ClaimMask.PHONE; } else { mask &= ~ClaimMask.PHONE; } if (rep.getPicture()) { mask |= ClaimMask.PICTURE; } else { mask &= ~ClaimMask.PICTURE; } if (rep.getProfile()) { mask |= ClaimMask.PROFILE; } else { mask &= ~ClaimMask.PROFILE; } if (rep.getUsername()) { mask |= ClaimMask.USERNAME; } else { mask &= ~ClaimMask.USERNAME; } if (rep.getWebsite()) { mask |= ClaimMask.WEBSITE; } else { mask &= ~ClaimMask.WEBSITE; } return mask; } // Scope mappings public static void createClientScopeMappings(RealmModel realm, ClientModel clientModel, List<ScopeMappingRepresentation> mappings) { for (ScopeMappingRepresentation mapping : mappings) { ClientModel client = realm.getClientByClientId(mapping.getClient()); if (client == null) { throw new RuntimeException("Unknown client specified in client scope mappings"); } for (String roleString : mapping.getRoles()) { RoleModel role = clientModel.getRole(roleString.trim()); if (role == null) { role = clientModel.addRole(roleString.trim()); } client.addScopeMapping(role); } } } // Users public static UserModel createUser(KeycloakSession session, RealmModel newRealm, UserRepresentation userRep, Map<String, ClientModel> clientMap) { convertDeprecatedSocialProviders(userRep); // Import users just to user storage. 
Don't federate UserModel user = session.userStorage().addUser(newRealm, userRep.getId(), userRep.getUsername(), false, false); user.setEnabled(userRep.isEnabled()); user.setCreatedTimestamp(userRep.getCreatedTimestamp()); user.setEmail(userRep.getEmail()); user.setEmailVerified(userRep.isEmailVerified()); user.setFirstName(userRep.getFirstName()); user.setLastName(userRep.getLastName()); user.setFederationLink(userRep.getFederationLink()); user.setTotp(userRep.isTotp()); if (userRep.getAttributes() != null) { for (Map.Entry<String, Object> entry : userRep.getAttributes().entrySet()) { Object value = entry.getValue(); if (value instanceof Collection) { Collection<String> colVal = (Collection<String>) value; user.setAttribute(entry.getKey(), new ArrayList<>(colVal)); } else if (value instanceof String) { // TODO: This is here just for backwards compatibility with KC 1.3 and earlier String stringVal = (String) value; user.setSingleAttribute(entry.getKey(), stringVal); } } } if (userRep.getRequiredActions() != null) { for (String requiredAction : userRep.getRequiredActions()) { user.addRequiredAction(UserModel.RequiredAction.valueOf(requiredAction)); } } if (userRep.getCredentials() != null) { for (CredentialRepresentation cred : userRep.getCredentials()) { updateCredential(user, cred); } } if (userRep.getFederatedIdentities() != null) { for (FederatedIdentityRepresentation identity : userRep.getFederatedIdentities()) { FederatedIdentityModel mappingModel = new FederatedIdentityModel(identity.getIdentityProvider(), identity.getUserId(), identity.getUserName()); session.users().addFederatedIdentity(newRealm, user, mappingModel); } } if (userRep.getRealmRoles() != null) { for (String roleString : userRep.getRealmRoles()) { RoleModel role = newRealm.getRole(roleString.trim()); if (role == null) { role = newRealm.addRole(roleString.trim()); } user.grantRole(role); } } if (userRep.getClientRoles() != null) { for (Map.Entry<String, List<String>> entry : 
userRep.getClientRoles().entrySet()) { ClientModel client = clientMap.get(entry.getKey()); if (client == null) { throw new RuntimeException("Unable to find client role mappings for client: " + entry.getKey()); } createClientRoleMappings(client, user, entry.getValue()); } } if (userRep.getClientConsents() != null) { for (UserConsentRepresentation consentRep : userRep.getClientConsents()) { UserConsentModel consentModel = toModel(newRealm, consentRep); user.addConsent(consentModel); } } return user; } // Detect if it is "plain-text" or "hashed" representation and update model according to it private static void updateCredential(UserModel user, CredentialRepresentation cred) { if (cred.getValue() != null) { UserCredentialModel plainTextCred = convertCredential(cred); user.updateCredential(plainTextCred); } else { UserCredentialValueModel hashedCred = new UserCredentialValueModel(); hashedCred.setType(cred.getType()); hashedCred.setDevice(cred.getDevice()); hashedCred.setHashIterations(cred.getHashIterations()); try { if (cred.getSalt() != null) hashedCred.setSalt(Base64.decode(cred.getSalt())); } catch (IOException ioe) { throw new RuntimeException(ioe); } hashedCred.setValue(cred.getHashedSaltedValue()); user.updateCredentialDirectly(hashedCred); } } public static UserCredentialModel convertCredential(CredentialRepresentation cred) { UserCredentialModel credential = new UserCredentialModel(); credential.setType(cred.getType()); credential.setValue(cred.getValue()); return credential; } // Role mappings public static void createClientRoleMappings(ClientModel clientModel, UserModel user, List<String> roleNames) { if (user == null) { throw new RuntimeException("User not found"); } for (String roleName : roleNames) { RoleModel role = clientModel.getRole(roleName.trim()); if (role == null) { role = clientModel.addRole(roleName.trim()); } user.grantRole(role); } } private static void importIdentityProviders(RealmRepresentation rep, RealmModel newRealm) { if 
(rep.getIdentityProviders() != null) { for (IdentityProviderRepresentation representation : rep.getIdentityProviders()) { newRealm.addIdentityProvider(toModel(representation)); } } } private static void importIdentityProviderMappers(RealmRepresentation rep, RealmModel newRealm) { if (rep.getIdentityProviderMappers() != null) { for (IdentityProviderMapperRepresentation representation : rep.getIdentityProviderMappers()) { newRealm.addIdentityProviderMapper(toModel(representation)); } } } public static IdentityProviderModel toModel(IdentityProviderRepresentation representation) { IdentityProviderModel identityProviderModel = new IdentityProviderModel(); identityProviderModel.setInternalId(representation.getInternalId()); identityProviderModel.setAlias(representation.getAlias()); identityProviderModel.setProviderId(representation.getProviderId()); identityProviderModel.setEnabled(representation.isEnabled()); identityProviderModel.setUpdateProfileFirstLoginMode(representation.getUpdateProfileFirstLoginMode()); identityProviderModel.setTrustEmail(representation.isTrustEmail()); identityProviderModel.setAuthenticateByDefault(representation.isAuthenticateByDefault()); identityProviderModel.setStoreToken(representation.isStoreToken()); identityProviderModel.setAddReadTokenRoleOnCreate(representation.isAddReadTokenRoleOnCreate()); identityProviderModel.setConfig(representation.getConfig()); return identityProviderModel; } public static ProtocolMapperModel toModel(ProtocolMapperRepresentation rep) { ProtocolMapperModel model = new ProtocolMapperModel(); model.setId(rep.getId()); model.setName(rep.getName()); model.setConsentRequired(rep.isConsentRequired()); model.setConsentText(rep.getConsentText()); model.setProtocol(rep.getProtocol()); model.setProtocolMapper(rep.getProtocolMapper()); model.setConfig(rep.getConfig()); return model; } public static IdentityProviderMapperModel toModel(IdentityProviderMapperRepresentation rep) { IdentityProviderMapperModel model = new 
IdentityProviderMapperModel(); model.setId(rep.getId()); model.setName(rep.getName()); model.setIdentityProviderAlias(rep.getIdentityProviderAlias()); model.setIdentityProviderMapper(rep.getIdentityProviderMapper()); model.setConfig(rep.getConfig()); return model; } public static UserConsentModel toModel(RealmModel newRealm, UserConsentRepresentation consentRep) { ClientModel client = newRealm.getClientByClientId(consentRep.getClientId()); if (client == null) { throw new RuntimeException("Unable to find client consent mappings for client: " + consentRep.getClientId()); } UserConsentModel consentModel = new UserConsentModel(client); if (consentRep.getGrantedRealmRoles() != null) { for (String roleName : consentRep.getGrantedRealmRoles()) { RoleModel role = newRealm.getRole(roleName); if (role == null) { throw new RuntimeException("Unable to find realm role referenced in consent mappings of user. Role name: " + roleName); } consentModel.addGrantedRole(role); } } if (consentRep.getGrantedClientRoles() != null) { for (Map.Entry<String, List<String>> entry : consentRep.getGrantedClientRoles().entrySet()) { String clientId2 = entry.getKey(); ClientModel client2 = newRealm.getClientByClientId(clientId2); if (client2 == null) { throw new RuntimeException("Unable to find client referenced in consent mappings. Client ID: " + clientId2); } for (String clientRoleName : entry.getValue()) { RoleModel clientRole = client2.getRole(clientRoleName); if (clientRole == null) { throw new RuntimeException("Unable to find client role referenced in consent mappings of user. 
Role name: " + clientRole + ", Client: " + clientId2); } consentModel.addGrantedRole(clientRole); } } } if (consentRep.getGrantedProtocolMappers() != null) { for (Map.Entry<String, List<String>> protocolEntry : consentRep.getGrantedProtocolMappers().entrySet()) { String protocol = protocolEntry.getKey(); for (String protocolMapperName : protocolEntry.getValue()) { ProtocolMapperModel protocolMapper = client.getProtocolMapperByName(protocol, protocolMapperName); if (protocolMapper == null) { throw new RuntimeException("Unable to find protocol mapper for protocol " + protocol + ", mapper name " + protocolMapperName); } consentModel.addGrantedProtocolMapper(protocolMapper); } } } return consentModel; } public static AuthenticationFlowModel toModel(AuthenticationFlowRepresentation rep) { AuthenticationFlowModel model = new AuthenticationFlowModel(); model.setBuiltIn(rep.isBuiltIn()); model.setTopLevel(rep.isTopLevel()); model.setProviderId(rep.getProviderId()); model.setAlias(rep.getAlias()); model.setDescription(rep.getDescription()); return model; } public static AuthenticationExecutionModel toModel(RealmModel realm, AuthenticationExecutionRepresentation rep) { AuthenticationExecutionModel model = new AuthenticationExecutionModel(); if (rep.getAuthenticatorConfig() != null) { AuthenticatorConfigModel config = realm.getAuthenticatorConfigByAlias(rep.getAuthenticatorConfig()); model.setAuthenticatorConfig(config.getId()); } model.setAuthenticator(rep.getAuthenticator()); model.setAutheticatorFlow(rep.isAutheticatorFlow()); if (rep.getFlowAlias() != null) { AuthenticationFlowModel flow = realm.getFlowByAlias(rep.getFlowAlias()); model.setFlowId(flow.getId()); } model.setPriority(rep.getPriority()); model.setUserSetupAllowed(rep.isUserSetupAllowed()); model.setRequirement(AuthenticationExecutionModel.Requirement.valueOf(rep.getRequirement())); return model; } public static AuthenticatorConfigModel toModel(AuthenticatorConfigRepresentation rep) { AuthenticatorConfigModel 
model = new AuthenticatorConfigModel(); model.setAlias(rep.getAlias()); model.setConfig(rep.getConfig()); return model; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.internal.cache.tier.sockets.command;

import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.geode.InvalidDeltaException;
import org.apache.geode.annotations.Immutable;
import org.apache.geode.cache.DynamicRegionFactory;
import org.apache.geode.cache.RegionDestroyedException;
import org.apache.geode.cache.ResourceException;
import org.apache.geode.cache.operations.PutOperationContext;
import org.apache.geode.distributed.internal.DistributionStats;
import org.apache.geode.internal.cache.EventID;
import org.apache.geode.internal.cache.EventIDHolder;
import org.apache.geode.internal.cache.LocalRegion;
import org.apache.geode.internal.cache.PartitionedRegion;
import org.apache.geode.internal.cache.tier.CachedRegionHelper;
import org.apache.geode.internal.cache.tier.Command;
import org.apache.geode.internal.cache.tier.MessageType;
import org.apache.geode.internal.cache.tier.sockets.BaseCommand;
import org.apache.geode.internal.cache.tier.sockets.CacheServerStats;
import org.apache.geode.internal.cache.tier.sockets.Message;
import org.apache.geode.internal.cache.tier.sockets.Part;
import org.apache.geode.internal.cache.tier.sockets.ServerConnection;
import org.apache.geode.internal.security.AuthorizeRequest;
import org.apache.geode.internal.security.SecurityService;
import org.apache.geode.security.GemFireSecurityException;
import org.apache.geode.security.ResourcePermission.Operation;
import org.apache.geode.security.ResourcePermission.Resource;

/**
 * Server-side handler for the 6.1-protocol PUT client message (supports delta puts).
 * Stateless singleton; dispatched per-request by the cache server.
 *
 * @since GemFire 6.1
 */
public class Put61 extends BaseCommand {

  // Immutable shared instance; the command carries no per-request state.
  @Immutable
  private static final Put61 singleton = new Put61();

  /** Returns the shared singleton instance of this command. */
  public static Command getCommand() {
    return singleton;
  }

  /**
   * Decodes a 6.1 put request, authorizes it, applies the put (full value, delta, or
   * null-create), then writes a reply or an error/exception response. Statistics are
   * updated at each phase boundary; the RESPONDED flag is set on every exit path.
   *
   * Message parts: 0=region name, 1=key, 2=isDelta flag, 3=value, 4=event id,
   * optional 5=callback argument.
   */
  @Override
  public void cmdExecute(final Message clientMessage, final ServerConnection serverConnection,
      final SecurityService securityService, long p_start)
      throws IOException, InterruptedException {
    long start = p_start;
    Part regionNamePart = null, keyPart = null, valuePart = null, callbackArgPart = null;
    String regionName = null;
    Object callbackArg = null, key = null;
    Part eventPart = null;
    StringBuilder errMessage = new StringBuilder();
    boolean isDelta = false;
    // NOTE(review): crHelper is fetched but not referenced below — presumably kept for
    // parity with sibling commands; confirm before removing.
    CachedRegionHelper crHelper = serverConnection.getCachedRegionHelper();
    CacheServerStats stats = serverConnection.getCacheServerStats();

    // requiresResponse = true;
    serverConnection.setAsTrue(REQUIRES_RESPONSE);
    {
      // account the time spent reading the request before processing starts
      long oldStart = start;
      start = DistributionStats.getStatTime();
      stats.incReadPutRequestTime(start - oldStart);
    }
    // Retrieve the data from the message parts
    regionNamePart = clientMessage.getPart(0);
    keyPart = clientMessage.getPart(1);
    try {
      isDelta = (Boolean) clientMessage.getPart(2).getObject();
    } catch (Exception e) {
      writeException(clientMessage, MessageType.PUT_DELTA_ERROR, e, false, serverConnection);
      serverConnection.setAsTrue(RESPONDED);
      // CachePerfStats not available here.
      return;
    }
    valuePart = clientMessage.getPart(3);
    eventPart = clientMessage.getPart(4);
    if (clientMessage.getNumberOfParts() > 5) {
      // optional sixth part carries the callback argument
      callbackArgPart = clientMessage.getPart(5);
      try {
        callbackArg = callbackArgPart.getObject();
      } catch (Exception e) {
        writeException(clientMessage, e, false, serverConnection);
        serverConnection.setAsTrue(RESPONDED);
        return;
      }
    }
    regionName = regionNamePart.getCachedString();
    try {
      key = keyPart.getStringOrObject();
    } catch (Exception e) {
      writeException(clientMessage, e, false, serverConnection);
      serverConnection.setAsTrue(RESPONDED);
      return;
    }
    final boolean isDebugEnabled = logger.isDebugEnabled();
    if (isDebugEnabled) {
      logger.debug("{}: Received 6.1{}put request ({} bytes) from {} for region {} key {}",
          serverConnection.getName(), (isDelta ? " delta " : " "),
          clientMessage.getPayloadLength(), serverConnection.getSocketString(), regionName, key);
    }

    // Process the put request
    if (key == null || regionName == null) {
      // reject requests with a missing key and/or region name; both problems are
      // accumulated into a single error message
      if (key == null) {
        String putMsg = " The input key for the 6.1 put request is null";
        if (isDebugEnabled) {
          logger.debug("{}:{}", serverConnection.getName(), putMsg);
        }
        errMessage.append(putMsg);
      }
      if (regionName == null) {
        String putMsg = " The input region name for the 6.1 put request is null";
        if (isDebugEnabled) {
          logger.debug("{}:{}", serverConnection.getName(), putMsg);
        }
        errMessage.append(putMsg);
      }
      writeErrorResponse(clientMessage, MessageType.PUT_DATA_ERROR, errMessage.toString(),
          serverConnection);
      serverConnection.setAsTrue(RESPONDED);
      return;
    }

    LocalRegion region = (LocalRegion) serverConnection.getCache().getRegion(regionName);
    if (region == null) {
      String reason = " was not found during 6.1 put request";
      writeRegionDestroyedEx(clientMessage, regionName, reason, serverConnection);
      serverConnection.setAsTrue(RESPONDED);
      return;
    }

    if (valuePart.isNull() && region.containsKey(key)) {
      // Invalid to 'put' a null value in an existing key
      String putMsg = " Attempted to 6.1 put a null value for existing key " + key;
      if (isDebugEnabled) {
        logger.debug("{}:{}", serverConnection.getName(), putMsg);
      }
      errMessage.append(putMsg);
      writeErrorResponse(clientMessage, MessageType.PUT_DATA_ERROR, errMessage.toString(),
          serverConnection);
      serverConnection.setAsTrue(RESPONDED);
      return;
    }

    // try {
    // this.eventId = (EventID)eventPart.getObject();
    // Rebuild the EventID from its optimized wire form: two consecutive longs
    // (thread id, sequence id) combined with this connection's member id bytes.
    ByteBuffer eventIdPartsBuffer = ByteBuffer.wrap(eventPart.getSerializedForm());
    long threadId = EventID.readEventIdPartsFromOptmizedByteArray(eventIdPartsBuffer);
    long sequenceId = EventID.readEventIdPartsFromOptmizedByteArray(eventIdPartsBuffer);
    EventID eventId =
        new EventID(serverConnection.getEventMemberIDByteArray(), threadId, sequenceId);

    try {
      Object value = null;
      if (!isDelta) {
        // for delta puts the full value stays null; the delta bytes are read later
        value = valuePart.getSerializedForm();
      }
      boolean isObject = valuePart.isObject();
      boolean isMetaRegion = region.isUsedForMetaRegion();
      clientMessage.setMetaRegion(isMetaRegion);

      securityService.authorize(Resource.DATA, Operation.WRITE, regionName, key);

      AuthorizeRequest authzRequest = null;
      if (!isMetaRegion) {
        authzRequest = serverConnection.getAuthzRequest();
      }
      if (authzRequest != null) {
        if (DynamicRegionFactory.regionIsDynamicRegionList(regionName)) {
          // a put into the dynamic-region list is really a region creation
          authzRequest.createRegionAuthorize((String) key);
        }
        // Allow PUT operations on meta regions (bug #38961)
        else {
          // the authorization callback may rewrite value/isObject/callbackArg
          PutOperationContext putContext =
              authzRequest.putAuthorize(regionName, key, value, isObject, callbackArg);
          value = putContext.getValue();
          isObject = putContext.isObject();
          callbackArg = putContext.getCallbackArg();
        }
      }
      // If the value is 1 byte and the byte represents null,
      // attempt to create the entry. This test needs to be
      // moved to DataSerializer or DataSerializer.NULL needs
      // to be publicly accessible.
      boolean result = false;
      if (value == null && !isDelta) {
        // Create the null entry. Since the value is null, the value of the
        // isObject
        // the true after null doesn't matter and is not used.
        result = region.basicBridgeCreate(key, null, true, callbackArg,
            serverConnection.getProxyID(), true, new EventIDHolder(eventId), false);
      } else {
        // Put the entry
        byte[] delta = null;
        if (isDelta) {
          delta = valuePart.getSerializedForm();
        }
        result = region.basicBridgePut(key, value, delta, isObject, callbackArg,
            serverConnection.getProxyID(), true, new EventIDHolder(eventId));
      }
      if (result) {
        serverConnection.setModificationInfo(true, regionName, key);
      } else {
        String message = serverConnection.getName() + ": Failed to 6.1 put entry for region "
            + regionName + " key " + key + " value " + valuePart;
        if (isDebugEnabled) {
          logger.debug(message);
        }
        throw new Exception(message);
      }
    } catch (RegionDestroyedException rde) {
      writeException(clientMessage, rde, false, serverConnection);
      serverConnection.setAsTrue(RESPONDED);
      return;
    } catch (ResourceException re) {
      writeException(clientMessage, re, false, serverConnection);
      serverConnection.setAsTrue(RESPONDED);
      return;
    } catch (InvalidDeltaException ide) {
      // delta could not be applied: tell the client so it can retry with the full value
      logger.info("Error applying delta for key {} of region {}: {}", key, regionName,
          ide.getMessage());
      writeException(clientMessage, MessageType.PUT_DELTA_ERROR, ide, false, serverConnection);
      serverConnection.setAsTrue(RESPONDED);
      region.getCachePerfStats().incDeltaFullValuesRequested();
      return;
    } catch (Exception ce) {
      // If an interrupted exception is thrown , rethrow it
      checkForInterrupt(serverConnection, ce);

      // If an exception occurs during the put, preserve the connection
      writeException(clientMessage, ce, false, serverConnection);
      serverConnection.setAsTrue(RESPONDED);
      if (ce instanceof GemFireSecurityException) {
        // Fine logging for security exceptions since these are already
        // logged by the security logger
        if (isDebugEnabled) {
          logger.debug("{}: Unexpected Security exception", serverConnection.getName(), ce);
        }
      } else if (isDebugEnabled) {
        logger.debug("{}: Unexpected Exception", serverConnection.getName(), ce);
      }
      return;
    } finally {
      // account processing time regardless of outcome
      long oldStart = start;
      start = DistributionStats.getStatTime();
      stats.incProcessPutTime(start - oldStart);
    }

    // Increment statistics and write the reply
    if (region instanceof PartitionedRegion) {
      PartitionedRegion pr = (PartitionedRegion) region;
      if (pr.getNetworkHopType() != PartitionedRegion.NETWORK_HOP_NONE) {
        // the put hopped to another member: pigg-back fresh metadata so the
        // client can route directly next time
        writeReplyWithRefreshMetadata(clientMessage, serverConnection, pr,
            pr.getNetworkHopType());
        pr.clearNetworkHopData();
      } else {
        writeReply(clientMessage, serverConnection);
      }
    } else {
      writeReply(clientMessage, serverConnection);
    }
    serverConnection.setAsTrue(RESPONDED);
    if (isDebugEnabled) {
      logger.debug("{}: Sent 6.1 put response back to {} for region {} key {} value {}",
          serverConnection.getName(), serverConnection.getSocketString(), regionName, key,
          valuePart);
    }
    stats.incWritePutResponseTime(DistributionStats.getStatTime() - start);
  }
}
/*
 * Copyright 2013 Mozilla Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.mozilla.bagheera.http;

import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.CONTENT_LENGTH;
import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.CONTENT_TYPE;
import static org.jboss.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST;
import static org.jboss.netty.handler.codec.http.HttpResponseStatus.CREATED;
import static org.jboss.netty.handler.codec.http.HttpResponseStatus.FORBIDDEN;
import static org.jboss.netty.handler.codec.http.HttpResponseStatus.INTERNAL_SERVER_ERROR;
import static org.jboss.netty.handler.codec.http.HttpResponseStatus.METHOD_NOT_ALLOWED;
import static org.jboss.netty.handler.codec.http.HttpResponseStatus.NOT_FOUND;
import static org.jboss.netty.handler.codec.http.HttpResponseStatus.OK;
import static org.jboss.netty.handler.codec.http.HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE;
import static org.jboss.netty.handler.codec.http.HttpVersion.HTTP_1_1;

import java.net.InetSocketAddress;
import java.net.URI;
import java.nio.channels.ClosedChannelException;
import java.util.List;

import org.apache.log4j.Logger;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelFutureListener;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelStateEvent;
import org.jboss.netty.channel.ExceptionEvent;
import org.jboss.netty.channel.MessageEvent;
import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
import org.jboss.netty.channel.group.ChannelGroup;
import org.jboss.netty.handler.codec.frame.TooLongFrameException;
import org.jboss.netty.handler.codec.http.DefaultHttpResponse;
import org.jboss.netty.handler.codec.http.HttpMethod;
import org.jboss.netty.handler.codec.http.HttpResponse;
import org.jboss.netty.handler.codec.http.HttpResponseStatus;
import org.jboss.netty.util.CharsetUtil;

import com.google.protobuf.ByteString;
import com.mozilla.bagheera.BagheeraProto.BagheeraMessage;
import com.mozilla.bagheera.BagheeraProto.BagheeraMessage.Operation;
import com.mozilla.bagheera.metrics.MetricsManager;
import com.mozilla.bagheera.producer.Producer;
import com.mozilla.bagheera.util.HttpUtil;
import com.mozilla.bagheera.validation.Validator;

/**
 * Terminal Netty upstream handler for the "submit" REST endpoint: converts
 * incoming HTTP requests into BagheeraMessage protobufs, forwards them to the
 * configured {@link Producer}, tracks per-namespace and global HTTP metrics,
 * and writes the HTTP response (closing the channel afterwards).
 */
public class SubmissionHandler extends SimpleChannelUpstreamHandler {

    // Header whose value lists document id(s) to delete when a replacement is submitted.
    public static final String HEADER_OBSOLETE_DOCUMENT = "X-Obsolete-Document";

    private static final Logger LOG = Logger.getLogger(SubmissionHandler.class);

    // REST endpoints
    public static final String ENDPOINT_SUBMIT = "submit";

    private final Producer producer;
    private final ChannelGroup channelGroup;
    private final MetricsManager metricsManager;

    /**
     * @param validator      NOTE(review): accepted but not stored or used anywhere in this
     *                       class — presumably validation happens upstream in the pipeline;
     *                       confirm before removing the parameter.
     * @param producer       sink that receives every built BagheeraMessage
     * @param channelGroup   group each opened channel is registered with (for shutdown)
     * @param metricsManager source of per-namespace and global HTTP metrics
     */
    public SubmissionHandler(Validator validator, Producer producer, ChannelGroup channelGroup,
                             MetricsManager metricsManager) {
        this.producer = producer;
        this.channelGroup = channelGroup;
        this.metricsManager = metricsManager;
    }

    // Records request method/size against both the namespace metric and the global metric.
    private void updateRequestMetrics(String namespace, String method, int size) {
        this.metricsManager.getHttpMetricForNamespace(namespace).updateRequestMetrics(method, size);
        this.metricsManager.getGlobalHttpMetric().updateRequestMetrics(method, size);
    }

    // Records a response status code; namespace metric is skipped when namespace is unknown.
    private void updateResponseMetrics(String namespace, int status) {
        if (namespace != null) {
            this.metricsManager.getHttpMetricForNamespace(namespace).updateResponseMetrics(status);
        }
        this.metricsManager.getGlobalHttpMetric().updateResponseMetrics(status);
    }

    /**
     * Handles POST/PUT: wraps the request body in a store message and sends it to the
     * producer, then issues any deletes requested via X-Obsolete-Document. Responds
     * 201 Created on success, 400 Bad Request when the body is empty.
     */
    private void handlePost(MessageEvent e, BagheeraHttpRequest request) {
        HttpResponseStatus status = BAD_REQUEST;
        ChannelBuffer content = request.getContent();
        String remoteIpAddress = HttpUtil.getRemoteAddr(request, ((InetSocketAddress)e.getChannel().getRemoteAddress()).getAddress().getHostAddress());
        if (content.readable() && content.readableBytes() > 0) {
            // Build a partial template first so the delete messages (below) share the
            // same namespace/ip/timestamp fields as the store message.
            BagheeraMessage.Builder templateBuilder = BagheeraMessage.newBuilder();
            setMessageFields(request, e, templateBuilder, System.currentTimeMillis(), false);
            BagheeraMessage template = templateBuilder.buildPartial();

            BagheeraMessage.Builder storeBuilder = BagheeraMessage.newBuilder(template);
            storeBuilder.setPayload(ByteString.copyFrom(content.toByteBuffer()));
            storeBuilder.setId(request.getId());
            producer.send(storeBuilder.build());

            if (request.containsHeader(HEADER_OBSOLETE_DOCUMENT)) {
                handleObsoleteDocuments(request,remoteIpAddress,request.getHeaders(HEADER_OBSOLETE_DOCUMENT), template);
            } else {
                LOG.info("IP "+remoteIpAddress+" "+request.getNamespace()+" HTTP_PUT "+request.getId());
            }
            status = CREATED;
        }

        updateRequestMetrics(request.getNamespace(), request.getMethod().getName(), content.readableBytes());
        writeResponse(status, e, request.getNamespace(), URI.create(request.getId()).toString());
    }

    /**
     * Populates the common BagheeraMessage fields (namespace, api version, partitions,
     * client ip, timestamp) from the request; the id is set only when {@code setId} is true.
     */
    protected void setMessageFields(BagheeraHttpRequest request, MessageEvent event,
                                    BagheeraMessage.Builder builder, long timestamp, boolean setId) {
        builder.setNamespace(request.getNamespace());
        if (request.getApiVersion() != null) {
            builder.setApiVersion(request.getApiVersion());
        }

        List<String> partitions = request.getPartitions();
        if (partitions != null && !partitions.isEmpty()) {
            builder.addAllPartition(partitions);
        }

        builder.setIpAddr(ByteString.copyFrom(HttpUtil.getRemoteAddr(request, ((InetSocketAddress)event.getChannel().getRemoteAddress()).getAddress())));
        builder.setTimestamp(timestamp);

        if (setId) {
            builder.setId(request.getId());
        }
    }

    /**
     * Emits one DELETE message (cloned from {@code template}) per obsolete document id
     * found in the given header values, then logs the combined id list.
     */
    private void handleObsoleteDocuments(BagheeraHttpRequest request, String remoteIpAddress,List<String> headers, BagheeraMessage template) {
        // According to RFC 2616, the standard for multi-valued document headers is
        // a comma-separated list:
        //   http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.2
        // ------------------------------------------------------------------
        //   Multiple message-header fields with the same field-name MAY be
        //   present in a message if and only if the entire field-value for
        //   that header field is defined as a comma-separated list
        //   [i.e., #(values)]. It MUST be possible to combine the multiple
        //   header fields into one "field-name: field-value" pair, without
        //   changing the semantics of the message, by appending each
        //   subsequent field-value to the first, each separated by a comma.
        //   The order in which header fields with the same field-name are
        //   received is therefore significant to the interpretation of the
        //   combined field value, and thus a proxy MUST NOT change the order
        //   of these field values when a message is forwarded.
        // ------------------------------------------------------------------
        String deleteIDs = "";
        for (String header : headers) {
            // Split on comma, delete each one.
            // The performance penalty for supporting multiple values is
            // tested in BagheeraHttpRequestTest.testSplitPerformance().
            if (header != null) {
                for (String obsoleteIdRaw : header.split(",")) {
                    deleteIDs += obsoleteIdRaw.trim()+",";
                    // Use the given message as a base for creating each delete message.
                    BagheeraMessage.Builder deleteBuilder = BagheeraMessage.newBuilder(template);
                    deleteBuilder.setOperation(Operation.DELETE);
                    deleteBuilder.setId(obsoleteIdRaw.trim());
                    producer.send(deleteBuilder.build());
                }
            }
        }
        // NOTE(review): deleteIDs intentionally keeps a trailing comma in the log line.
        LOG.info("IP "+remoteIpAddress+" "+request.getNamespace()+" HTTP_PUT "+request.getId()+" HTTP_DELETE "+deleteIDs);
    }

    /** Handles DELETE: forwards a DELETE message for the request id and responds 200 OK. */
    private void handleDelete(MessageEvent e, BagheeraHttpRequest request) {
        BagheeraMessage.Builder bmsgBuilder = BagheeraMessage.newBuilder();
        setMessageFields(request, e, bmsgBuilder, System.currentTimeMillis(), true);
        String remoteIpAddress = HttpUtil.getRemoteAddr(request, ((InetSocketAddress)e.getChannel().getRemoteAddress()).getAddress().getHostAddress());
        LOG.info("IP "+remoteIpAddress+" "+request.getNamespace()+" HTTP_DELETE "+request.getId());
        bmsgBuilder.setOperation(Operation.DELETE);
        producer.send(bmsgBuilder.build());

        updateRequestMetrics(request.getNamespace(), request.getMethod().getName(), 0);
        writeResponse(OK, e, request.getNamespace(), null);
    }

    /** Handles OPTIONS: answers a CORS preflight and closes the channel. */
    private void handleOptions(MessageEvent e, BagheeraHttpRequest request) {
        HttpResponse response = new DefaultHttpResponse(HTTP_1_1, OK);
        response.addHeader("Access-Control-Allow-Origin", "*");
        response.addHeader("Access-Control-Allow-Methods", "POST,PUT,DELETE");
        response.addHeader("Access-Control-Allow-Headers", "X-Requested-With, Content-Type, Content-Length");
        ChannelFuture future = e.getChannel().write(response);
        future.addListener(ChannelFutureListener.CLOSE);
    }

    /**
     * Writes an HTTP response with optional text entity, closes the channel when the
     * write completes, and records the status in the metrics.
     */
    private void writeResponse(HttpResponseStatus status, MessageEvent e, String namespace, String entity) {
        // Build the response object.
        HttpResponse response = new DefaultHttpResponse(HTTP_1_1, status);
        response.addHeader(CONTENT_TYPE, "plain/text");
        if (entity != null) {
            ChannelBuffer buf = ChannelBuffers.wrappedBuffer(entity.getBytes(CharsetUtil.UTF_8));
            response.setContent(buf);
            response.setHeader(CONTENT_LENGTH, response.getContent().readableBytes());
        }

        // Write response
        ChannelFuture future = e.getChannel().write(response);
        future.addListener(ChannelFutureListener.CLOSE);

        updateResponseMetrics(namespace, response.getStatus().getCode());
    }

    @Override
    public void channelOpen(ChannelHandlerContext ctx, ChannelStateEvent e) {
        // register every channel so the server can close them all on shutdown
        this.channelGroup.add(e.getChannel());
    }

    /**
     * Routes decoded requests by endpoint and method. Non-submit paths get 404;
     * anything that is not a BagheeraHttpRequest (decoder failure) gets 500.
     */
    @Override
    public void messageReceived(ChannelHandlerContext ctx, MessageEvent e) throws Exception {
        Object msg = e.getMessage();
        if (msg instanceof BagheeraHttpRequest) {
            BagheeraHttpRequest request = (BagheeraHttpRequest)e.getMessage();
            if (ENDPOINT_SUBMIT.equals(request.getEndpoint())) {
                if ((request.getMethod() == HttpMethod.POST || request.getMethod() == HttpMethod.PUT)) {
                    handlePost(e, request);
                } else if (request.getMethod() == HttpMethod.GET) {
                    // reads are not supported on the submit endpoint
                    writeResponse(METHOD_NOT_ALLOWED, e, request.getNamespace(), null);
                } else if (request.getMethod() == HttpMethod.DELETE) {
                    handleDelete(e, request);
                } else if (request.getMethod() == HttpMethod.OPTIONS) {
                    handleOptions(e,request);
                }
            } else {
                String remoteIpAddress = HttpUtil.getRemoteAddr(request, ((InetSocketAddress)e.getChannel().getRemoteAddress()).getAddress().getHostAddress());
                LOG.warn(String.format("Tried to access invalid resource - \"%s\" \"%s\"", remoteIpAddress, request.getHeader("User-Agent")));
                writeResponse(NOT_FOUND, e, null, null);
            }
        } else {
            writeResponse(INTERNAL_SERVER_ERROR, e, null, null);
        }
    }

    /**
     * Maps pipeline exceptions onto HTTP status codes; closed channels are ignored
     * (nothing can be written back), everything else gets a response and a close.
     */
    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) throws Exception {
        Throwable cause = e.getCause();
        HttpResponse response = null;
        if (cause instanceof ClosedChannelException) {
            // NOOP
        } else if (cause instanceof TooLongFrameException) {
            response = new DefaultHttpResponse(HTTP_1_1, REQUEST_ENTITY_TOO_LARGE);
        } else if (cause instanceof InvalidPathException) {
            response = new DefaultHttpResponse(HTTP_1_1, NOT_FOUND);
        } else if (cause instanceof HttpSecurityException) {
            LOG.error(cause.getMessage());
            response = new DefaultHttpResponse(HTTP_1_1, FORBIDDEN);
        } else {
            LOG.error(cause.getMessage());
            response = new DefaultHttpResponse(HTTP_1_1, INTERNAL_SERVER_ERROR);
        }

        if (response != null) {
            ChannelFuture future = e.getChannel().write(response);
            future.addListener(ChannelFutureListener.CLOSE);
            updateResponseMetrics(null, response.getStatus().getCode());
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.ProtoBase;
import org.apache.hadoop.yarn.api.records.impl.pb.ProtoUtils;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto;
import org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto;
import org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeActionProto;
import org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto;
import org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProtoOrBuilder;
import org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.SystemCredentialsForAppsProto;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse;
import org.apache.hadoop.yarn.server.api.records.MasterKey;
import org.apache.hadoop.yarn.server.api.records.NodeAction;
import org.apache.hadoop.yarn.server.api.records.impl.pb.MasterKeyPBImpl;

/**
 * Protobuf-backed implementation of {@link NodeHeartbeatResponse}.
 *
 * Follows the standard YARN PBImpl pattern: reads are served from either the
 * immutable {@code proto} (when {@code viaProto} is true) or the mutable
 * {@code builder}; repeated/message fields are additionally cached in local
 * Java collections ({@code containersToCleanup}, {@code systemCredentials},
 * etc.) so callers can mutate them cheaply. {@link #getProto()} folds all
 * local caches back into the builder and freezes the result. The ordering of
 * {@code maybeInitBuilder()} / merge calls is significant — do not reorder.
 */
public class NodeHeartbeatResponsePBImpl extends
    ProtoBase<NodeHeartbeatResponseProto> implements NodeHeartbeatResponse {
  // Backing proto; replaced by builder.build() whenever local state is merged.
  NodeHeartbeatResponseProto proto = NodeHeartbeatResponseProto.getDefaultInstance();
  NodeHeartbeatResponseProto.Builder builder = null;
  // True while `proto` is authoritative; false while `builder` holds pending edits.
  boolean viaProto = false;

  // Lazily-initialized local caches for repeated/map fields (null = not yet
  // materialized from the proto; non-null values shadow the proto contents).
  private List<ContainerId> containersToCleanup = null;
  private List<ContainerId> containersToBeRemovedFromNM = null;
  private List<ApplicationId> applicationsToCleanup = null;
  private Map<ApplicationId, ByteBuffer> systemCredentials = null;

  // Cached message-typed fields (shadow the proto when non-null).
  private MasterKey containerTokenMasterKey = null;
  private MasterKey nmTokenMasterKey = null;

  public NodeHeartbeatResponsePBImpl() {
    builder = NodeHeartbeatResponseProto.newBuilder();
  }

  public NodeHeartbeatResponsePBImpl(NodeHeartbeatResponseProto proto) {
    this.proto = proto;
    viaProto = true;
  }

  /** Merges all local caches into the proto and returns the frozen message. */
  public NodeHeartbeatResponseProto getProto() {
    mergeLocalToProto();
    proto = viaProto ? proto : builder.build();
    viaProto = true;
    return proto;
  }

  /** Pushes every non-null local cache into the builder. */
  private void mergeLocalToBuilder() {
    if (this.containersToCleanup != null) {
      addContainersToCleanupToProto();
    }
    if (this.applicationsToCleanup != null) {
      addApplicationsToCleanupToProto();
    }
    if (this.containersToBeRemovedFromNM != null) {
      addContainersToBeRemovedFromNMToProto();
    }
    if (this.containerTokenMasterKey != null) {
      builder.setContainerTokenMasterKey(
          convertToProtoFormat(this.containerTokenMasterKey));
    }
    if (this.nmTokenMasterKey != null) {
      builder.setNmTokenMasterKey(
          convertToProtoFormat(this.nmTokenMasterKey));
    }
    if (this.systemCredentials != null) {
      addSystemCredentialsToProto();
    }
  }

  /** Rewrites the repeated system-credentials field from the local map. */
  private void addSystemCredentialsToProto() {
    maybeInitBuilder();
    builder.clearSystemCredentialsForApps();
    for (Map.Entry<ApplicationId, ByteBuffer> entry : systemCredentials.entrySet()) {
      // duplicate() protects the cached ByteBuffer's position/limit from the copy.
      builder.addSystemCredentialsForApps(SystemCredentialsForAppsProto.newBuilder()
        .setAppId(convertToProtoFormat(entry.getKey()))
        .setCredentialsForApp(ProtoUtils.convertToProtoFormat(
            entry.getValue().duplicate())));
    }
  }

  private void mergeLocalToProto() {
    if (viaProto)
      maybeInitBuilder();
    mergeLocalToBuilder();
    proto = builder.build();
    viaProto = true;
  }

  /** Ensures `builder` exists and is seeded from `proto`; flips to builder mode. */
  private void maybeInitBuilder() {
    if (viaProto || builder == null) {
      builder = NodeHeartbeatResponseProto.newBuilder(proto);
    }
    viaProto = false;
  }

  @Override
  public int getResponseId() {
    NodeHeartbeatResponseProtoOrBuilder p = viaProto ? proto : builder;
    return (p.getResponseId());
  }

  @Override
  public void setResponseId(int responseId) {
    maybeInitBuilder();
    builder.setResponseId((responseId));
  }

  /** Returns the cached key if set, else lazily converts from the proto (null if absent). */
  @Override
  public MasterKey getContainerTokenMasterKey() {
    NodeHeartbeatResponseProtoOrBuilder p = viaProto ? proto : builder;
    if (this.containerTokenMasterKey != null) {
      return this.containerTokenMasterKey;
    }
    if (!p.hasContainerTokenMasterKey()) {
      return null;
    }
    this.containerTokenMasterKey =
        convertFromProtoFormat(p.getContainerTokenMasterKey());
    return this.containerTokenMasterKey;
  }

  @Override
  public void setContainerTokenMasterKey(MasterKey masterKey) {
    maybeInitBuilder();
    if (masterKey == null)
      builder.clearContainerTokenMasterKey();
    this.containerTokenMasterKey = masterKey;
  }

  /** Same lazy-cache pattern as {@link #getContainerTokenMasterKey()}. */
  @Override
  public MasterKey getNMTokenMasterKey() {
    NodeHeartbeatResponseProtoOrBuilder p = viaProto ? proto : builder;
    if (this.nmTokenMasterKey != null) {
      return this.nmTokenMasterKey;
    }
    if (!p.hasNmTokenMasterKey()) {
      return null;
    }
    this.nmTokenMasterKey =
        convertFromProtoFormat(p.getNmTokenMasterKey());
    return this.nmTokenMasterKey;
  }

  @Override
  public void setNMTokenMasterKey(MasterKey masterKey) {
    maybeInitBuilder();
    if (masterKey == null)
      builder.clearNmTokenMasterKey();
    this.nmTokenMasterKey = masterKey;
  }

  @Override
  public NodeAction getNodeAction() {
    NodeHeartbeatResponseProtoOrBuilder p = viaProto ? proto : builder;
    if (!p.hasNodeAction()) {
      return null;
    }
    return (convertFromProtoFormat(p.getNodeAction()));
  }

  @Override
  public void setNodeAction(NodeAction nodeAction) {
    maybeInitBuilder();
    if (nodeAction == null) {
      builder.clearNodeAction();
      return;
    }
    builder.setNodeAction(convertToProtoFormat(nodeAction));
  }

  @Override
  public String getDiagnosticsMessage() {
    NodeHeartbeatResponseProtoOrBuilder p = viaProto ? proto : builder;
    if (!p.hasDiagnosticsMessage()) {
      return null;
    }
    return p.getDiagnosticsMessage();
  }

  @Override
  public void setDiagnosticsMessage(String diagnosticsMessage) {
    maybeInitBuilder();
    if (diagnosticsMessage == null) {
      builder.clearDiagnosticsMessage();
      return;
    }
    builder.setDiagnosticsMessage((diagnosticsMessage));
  }

  @Override
  public List<ContainerId> getContainersToCleanup() {
    initContainersToCleanup();
    return this.containersToCleanup;
  }

  @Override
  public List<ContainerId> getContainersToBeRemovedFromNM() {
    initContainersToBeRemovedFromNM();
    return this.containersToBeRemovedFromNM;
  }

  /** Materializes the local containers-to-cleanup list from the proto (once). */
  private void initContainersToCleanup() {
    if (this.containersToCleanup != null) {
      return;
    }
    NodeHeartbeatResponseProtoOrBuilder p = viaProto ? proto : builder;
    List<ContainerIdProto> list = p.getContainersToCleanupList();
    this.containersToCleanup = new ArrayList<ContainerId>();
    for (ContainerIdProto c : list) {
      this.containersToCleanup.add(convertFromProtoFormat(c));
    }
  }

  /** Materializes the local containers-to-be-removed list from the proto (once). */
  private void initContainersToBeRemovedFromNM() {
    if (this.containersToBeRemovedFromNM != null) {
      return;
    }
    NodeHeartbeatResponseProtoOrBuilder p = viaProto ? proto : builder;
    List<ContainerIdProto> list = p.getContainersToBeRemovedFromNmList();
    this.containersToBeRemovedFromNM = new ArrayList<ContainerId>();
    for (ContainerIdProto c : list) {
      this.containersToBeRemovedFromNM.add(convertFromProtoFormat(c));
    }
  }

  @Override
  public void addAllContainersToCleanup(
      final List<ContainerId> containersToCleanup) {
    if (containersToCleanup == null)
      return;
    initContainersToCleanup();
    this.containersToCleanup.addAll(containersToCleanup);
  }

  @Override
  public void addContainersToBeRemovedFromNM(final List<ContainerId> containers) {
    if (containers == null)
      return;
    initContainersToBeRemovedFromNM();
    this.containersToBeRemovedFromNM.addAll(containers);
  }

  /**
   * Rewrites the repeated containers-to-cleanup field from the local cache,
   * converting lazily through a wrapping Iterable to avoid an extra list copy.
   */
  private void addContainersToCleanupToProto() {
    maybeInitBuilder();
    builder.clearContainersToCleanup();
    if (containersToCleanup == null)
      return;
    Iterable<ContainerIdProto> iterable = new Iterable<ContainerIdProto>() {
      @Override
      public Iterator<ContainerIdProto> iterator() {
        return new Iterator<ContainerIdProto>() {

          Iterator<ContainerId> iter = containersToCleanup.iterator();

          @Override
          public boolean hasNext() {
            return iter.hasNext();
          }

          @Override
          public ContainerIdProto next() {
            return convertToProtoFormat(iter.next());
          }

          @Override
          public void remove() {
            throw new UnsupportedOperationException();
          }
        };
      }
    };
    builder.addAllContainersToCleanup(iterable);
  }

  /** Same lazy-conversion pattern for the containers-to-be-removed field. */
  private void addContainersToBeRemovedFromNMToProto() {
    maybeInitBuilder();
    builder.clearContainersToBeRemovedFromNm();
    if (containersToBeRemovedFromNM == null)
      return;
    Iterable<ContainerIdProto> iterable = new Iterable<ContainerIdProto>() {
      @Override
      public Iterator<ContainerIdProto> iterator() {
        return new Iterator<ContainerIdProto>() {

          Iterator<ContainerId> iter = containersToBeRemovedFromNM.iterator();

          @Override
          public boolean hasNext() {
            return iter.hasNext();
          }

          @Override
          public ContainerIdProto next() {
            return convertToProtoFormat(iter.next());
          }

          @Override
          public void remove() {
            throw new UnsupportedOperationException();
          }
        };
      }
    };
    builder.addAllContainersToBeRemovedFromNm(iterable);
  }

  @Override
  public List<ApplicationId> getApplicationsToCleanup() {
    initApplicationsToCleanup();
    return this.applicationsToCleanup;
  }

  /** Materializes the local applications-to-cleanup list from the proto (once). */
  private void initApplicationsToCleanup() {
    if (this.applicationsToCleanup != null) {
      return;
    }
    NodeHeartbeatResponseProtoOrBuilder p = viaProto ? proto : builder;
    List<ApplicationIdProto> list = p.getApplicationsToCleanupList();
    this.applicationsToCleanup = new ArrayList<ApplicationId>();
    for (ApplicationIdProto c : list) {
      this.applicationsToCleanup.add(convertFromProtoFormat(c));
    }
  }

  @Override
  public void addAllApplicationsToCleanup(
      final List<ApplicationId> applicationsToCleanup) {
    if (applicationsToCleanup == null)
      return;
    initApplicationsToCleanup();
    this.applicationsToCleanup.addAll(applicationsToCleanup);
  }

  /** Same lazy-conversion pattern for the applications-to-cleanup field. */
  private void addApplicationsToCleanupToProto() {
    maybeInitBuilder();
    builder.clearApplicationsToCleanup();
    if (applicationsToCleanup == null)
      return;
    Iterable<ApplicationIdProto> iterable = new Iterable<ApplicationIdProto>() {
      @Override
      public Iterator<ApplicationIdProto> iterator() {
        return new Iterator<ApplicationIdProto>() {

          Iterator<ApplicationId> iter = applicationsToCleanup.iterator();

          @Override
          public boolean hasNext() {
            return iter.hasNext();
          }

          @Override
          public ApplicationIdProto next() {
            return convertToProtoFormat(iter.next());
          }

          @Override
          public void remove() {
            throw new UnsupportedOperationException();
          }
        };
      }
    };
    builder.addAllApplicationsToCleanup(iterable);
  }

  @Override
  public Map<ApplicationId, ByteBuffer> getSystemCredentialsForApps() {
    if (this.systemCredentials != null) {
      return this.systemCredentials;
    }
    initSystemCredentials();
    return systemCredentials;
  }

  /** Materializes the local credentials map from the repeated proto field. */
  private void initSystemCredentials() {
    NodeHeartbeatResponseProtoOrBuilder p = viaProto ? proto : builder;
    List<SystemCredentialsForAppsProto> list = p.getSystemCredentialsForAppsList();
    this.systemCredentials = new HashMap<ApplicationId, ByteBuffer> ();
    for (SystemCredentialsForAppsProto c : list) {
      ApplicationId appId = convertFromProtoFormat(c.getAppId());
      ByteBuffer byteBuffer = ProtoUtils.convertFromProtoFormat(c.getCredentialsForApp());
      this.systemCredentials.put(appId, byteBuffer);
    }
  }

  /** Replaces the cached credentials map with a defensive copy; no-op on null/empty input. */
  @Override
  public void setSystemCredentialsForApps(
      Map<ApplicationId, ByteBuffer> systemCredentials) {
    if (systemCredentials == null || systemCredentials.isEmpty()) {
      return;
    }
    maybeInitBuilder();
    this.systemCredentials = new HashMap<ApplicationId, ByteBuffer>();
    this.systemCredentials.putAll(systemCredentials);
  }

  @Override
  public long getNextHeartBeatInterval() {
    NodeHeartbeatResponseProtoOrBuilder p = viaProto ? proto : builder;
    return (p.getNextHeartBeatInterval());
  }

  @Override
  public void setNextHeartBeatInterval(long nextHeartBeatInterval) {
    maybeInitBuilder();
    builder.setNextHeartBeatInterval(nextHeartBeatInterval);
  }

  // ---- proto <-> record type conversion helpers ----

  private ContainerIdPBImpl convertFromProtoFormat(ContainerIdProto p) {
    return new ContainerIdPBImpl(p);
  }

  private ContainerIdProto convertToProtoFormat(ContainerId t) {
    return ((ContainerIdPBImpl) t).getProto();
  }

  private ApplicationIdPBImpl convertFromProtoFormat(ApplicationIdProto p) {
    return new ApplicationIdPBImpl(p);
  }

  private ApplicationIdProto convertToProtoFormat(ApplicationId t) {
    return ((ApplicationIdPBImpl) t).getProto();
  }

  private NodeAction convertFromProtoFormat(NodeActionProto p) {
    return NodeAction.valueOf(p.name());
  }

  private NodeActionProto convertToProtoFormat(NodeAction t) {
    return NodeActionProto.valueOf(t.name());
  }

  private MasterKeyPBImpl convertFromProtoFormat(MasterKeyProto p) {
    return new MasterKeyPBImpl(p);
  }

  private MasterKeyProto convertToProtoFormat(MasterKey t) {
    return ((MasterKeyPBImpl) t).getProto();
  }

  // NOTE(review): getter is synchronized but the setter below is not —
  // looks inconsistent; confirm the intended thread-safety contract.
  @Override
  public synchronized boolean getNextheartbeat() {
    NodeHeartbeatResponseProtoOrBuilder p = viaProto ? proto : builder;
    return (p.getNextheartbeat());
  }

  @Override
  public void setNextheartbeat(boolean nextHeartbeat) {
    maybeInitBuilder();
    builder.setNextheartbeat(nextHeartbeat);
  }
}
/*
 * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.ec2.model;

import java.io.Serializable;

/**
 * <p>
 * Describes a Spot fleet request.
 * </p>
 * Generated-style EC2 model bean: mutable setters, fluent {@code with*}
 * variants returning {@code this}, and value-based {@code toString}/
 * {@code hashCode}/{@code equals} over the four properties.
 */
public class SpotFleetRequestConfig implements Serializable, Cloneable {

    /** The ID of the Spot fleet request. */
    private String spotFleetRequestId;

    /**
     * The state of the Spot fleet request.
     * <p>
     * <b>Allowed Values: </b>submitted, active, cancelled, failed,
     * cancelled_running, cancelled_terminating, modifying (see {@link BatchState}).
     */
    private String spotFleetRequestState;

    /** Information about the configuration of the Spot fleet request. */
    private SpotFleetRequestConfigData spotFleetRequestConfig;

    /** The creation date and time of the request. */
    private java.util.Date createTime;

    /** @return The ID of the Spot fleet request. */
    public String getSpotFleetRequestId() {
        return spotFleetRequestId;
    }

    /** @param spotFleetRequestId The ID of the Spot fleet request. */
    public void setSpotFleetRequestId(String spotFleetRequestId) {
        this.spotFleetRequestId = spotFleetRequestId;
    }

    /**
     * Fluent setter for the Spot fleet request ID.
     *
     * @param spotFleetRequestId The ID of the Spot fleet request.
     * @return This object, for method chaining.
     */
    public SpotFleetRequestConfig withSpotFleetRequestId(String spotFleetRequestId) {
        this.spotFleetRequestId = spotFleetRequestId;
        return this;
    }

    /**
     * @return The state of the Spot fleet request.
     * @see BatchState
     */
    public String getSpotFleetRequestState() {
        return spotFleetRequestState;
    }

    /**
     * @param spotFleetRequestState The state of the Spot fleet request.
     * @see BatchState
     */
    public void setSpotFleetRequestState(String spotFleetRequestState) {
        this.spotFleetRequestState = spotFleetRequestState;
    }

    /**
     * Fluent setter for the Spot fleet request state (string form).
     *
     * @param spotFleetRequestState The state of the Spot fleet request.
     * @return This object, for method chaining.
     * @see BatchState
     */
    public SpotFleetRequestConfig withSpotFleetRequestState(String spotFleetRequestState) {
        this.spotFleetRequestState = spotFleetRequestState;
        return this;
    }

    /**
     * Enum overload; stores {@code spotFleetRequestState.toString()}.
     *
     * @param spotFleetRequestState The state of the Spot fleet request.
     * @see BatchState
     */
    public void setSpotFleetRequestState(BatchState spotFleetRequestState) {
        this.spotFleetRequestState = spotFleetRequestState.toString();
    }

    /**
     * Fluent enum overload for the Spot fleet request state.
     *
     * @param spotFleetRequestState The state of the Spot fleet request.
     * @return This object, for method chaining.
     * @see BatchState
     */
    public SpotFleetRequestConfig withSpotFleetRequestState(BatchState spotFleetRequestState) {
        this.spotFleetRequestState = spotFleetRequestState.toString();
        return this;
    }

    /** @return Information about the configuration of the Spot fleet request. */
    public SpotFleetRequestConfigData getSpotFleetRequestConfig() {
        return spotFleetRequestConfig;
    }

    /** @param spotFleetRequestConfig Information about the configuration of the Spot fleet request. */
    public void setSpotFleetRequestConfig(SpotFleetRequestConfigData spotFleetRequestConfig) {
        this.spotFleetRequestConfig = spotFleetRequestConfig;
    }

    /**
     * Fluent setter for the request configuration.
     *
     * @param spotFleetRequestConfig Information about the configuration of the Spot fleet request.
     * @return This object, for method chaining.
     */
    public SpotFleetRequestConfig withSpotFleetRequestConfig(SpotFleetRequestConfigData spotFleetRequestConfig) {
        this.spotFleetRequestConfig = spotFleetRequestConfig;
        return this;
    }

    /** @return The creation date and time of the request. */
    public java.util.Date getCreateTime() {
        return createTime;
    }

    /** @param createTime The creation date and time of the request. */
    public void setCreateTime(java.util.Date createTime) {
        this.createTime = createTime;
    }

    /**
     * Fluent setter for the creation timestamp.
     *
     * @param createTime The creation date and time of the request.
     * @return This object, for method chaining.
     */
    public SpotFleetRequestConfig withCreateTime(java.util.Date createTime) {
        this.createTime = createTime;
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. Only non-null properties are included.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getSpotFleetRequestId() != null) sb.append("SpotFleetRequestId: " + getSpotFleetRequestId() + ",");
        if (getSpotFleetRequestState() != null) sb.append("SpotFleetRequestState: " + getSpotFleetRequestState() + ",");
        if (getSpotFleetRequestConfig() != null) sb.append("SpotFleetRequestConfig: " + getSpotFleetRequestConfig() + ",");
        if (getCreateTime() != null) sb.append("CreateTime: " + getCreateTime() );
        sb.append("}");
        return sb.toString();
    }

    // Value-based hash over all four properties; consistent with equals below.
    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getSpotFleetRequestId() == null) ? 0 : getSpotFleetRequestId().hashCode());
        hashCode = prime * hashCode + ((getSpotFleetRequestState() == null) ? 0 : getSpotFleetRequestState().hashCode());
        hashCode = prime * hashCode + ((getSpotFleetRequestConfig() == null) ? 0 : getSpotFleetRequestConfig().hashCode());
        hashCode = prime * hashCode + ((getCreateTime() == null) ? 0 : getCreateTime().hashCode());
        return hashCode;
    }

    // Null-tolerant field-by-field equality (XOR pattern catches one-side-null).
    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null) return false;

        if (obj instanceof SpotFleetRequestConfig == false) return false;
        SpotFleetRequestConfig other = (SpotFleetRequestConfig)obj;

        if (other.getSpotFleetRequestId() == null ^ this.getSpotFleetRequestId() == null) return false;
        if (other.getSpotFleetRequestId() != null && other.getSpotFleetRequestId().equals(this.getSpotFleetRequestId()) == false) return false;
        if (other.getSpotFleetRequestState() == null ^ this.getSpotFleetRequestState() == null) return false;
        if (other.getSpotFleetRequestState() != null && other.getSpotFleetRequestState().equals(this.getSpotFleetRequestState()) == false) return false;
        if (other.getSpotFleetRequestConfig() == null ^ this.getSpotFleetRequestConfig() == null) return false;
        if (other.getSpotFleetRequestConfig() != null && other.getSpotFleetRequestConfig().equals(this.getSpotFleetRequestConfig()) == false) return false;
        if (other.getCreateTime() == null ^ this.getCreateTime() == null) return false;
        if (other.getCreateTime() != null && other.getCreateTime().equals(this.getCreateTime()) == false) return false;
        return true;
    }

    /** Shallow copy via {@link Object#clone()}; CloneNotSupported is impossible here. */
    @Override
    public SpotFleetRequestConfig clone() {
        try {
            return (SpotFleetRequestConfig) super.clone();

        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                    + "even though we're Cloneable!",
                    e);
        }

    }
}
/*
 * Copyright 2012 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.funkyandroid.droidcon.uk.iosched.ui;

import com.funkyandroid.droidcon.uk.iosched.R;
import com.funkyandroid.droidcon.uk.iosched.provider.ScheduleContract.Announcements;
import com.funkyandroid.droidcon.uk.iosched.util.TimeUtils;
import com.funkyandroid.droidcon.uk.iosched.util.UIUtils;

import android.content.ActivityNotFoundException;
import android.content.Intent;
import android.database.ContentObserver;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.support.v4.app.Fragment;
import android.support.v4.app.LoaderManager.LoaderCallbacks;
import android.support.v4.content.CursorLoader;
import android.support.v4.content.Loader;
import android.text.format.DateUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.view.animation.Animation;
import android.view.animation.TranslateAnimation;
import android.widget.TextView;

/**
 * A fragment used in {@link HomeActivity} that shows either a countdown,
 * Announcements, or 'thank you' text, at different times (before/during/after
 * the conference).
 */
public class WhatsOnFragment extends Fragment implements
        LoaderCallbacks<Cursor> {

    private static final int ANNOUNCEMENTS_LOADER_ID = 0;

    // Time each announcement stays on screen before cycling to the next.
    private static final int ANNOUNCEMENTS_CYCLE_INTERVAL_MILLIS = 6000;

    // Main-thread handler driving the countdown tick and announcement cycling.
    private Handler mHandler = new Handler();

    private TextView mCountdownTextView;
    private ViewGroup mRootView;
    private View mAnnouncementView;
    private Cursor mAnnouncementsCursor;       // loader-owned; nulled in onLoaderReset
    private String mLatestAnnouncementId;      // dedupes reloads of the same newest announcement
    private LayoutInflater mInflater;

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        mInflater = inflater;
        // NOTE(review): the two-arg inflate attaches to (and returns) `container`
        // here rather than inflating detached — presumably intentional since
        // refresh() repopulates mRootView via removeAllViews/addView; confirm
        // against the layout/host before changing to the 3-arg form.
        mRootView = (ViewGroup) inflater.inflate(R.layout.fragment_whats_on, container);
        refresh();
        return mRootView;
    }

    @Override
    public void onDetach() {
        super.onDetach();
        // Stop all pending countdown/cycle callbacks and the announcements observer.
        mHandler.removeCallbacksAndMessages(null);
        getActivity().getContentResolver().unregisterContentObserver(mObserver);
    }

    /**
     * Clears the current content and rebuilds it for the present phase:
     * countdown before the conference, announcements during, thank-you after.
     */
    private void refresh() {
        mHandler.removeCallbacksAndMessages(null);
        mRootView.removeAllViews();

        final long currentTimeMillis = UIUtils.getCurrentTime(getActivity());

        // Show Loading... and load the view corresponding to the current state
        if (currentTimeMillis < UIUtils.CONFERENCE_START_MILLIS) {
            setupBefore();
        } else if (currentTimeMillis > UIUtils.CONFERENCE_END_MILLIS) {
            setupAfter();
        } else {
            setupDuring();
        }
    }

    private void setupBefore() {
        // Before conference, show countdown.
        mCountdownTextView = (TextView) mInflater
                .inflate(R.layout.whats_on_countdown, mRootView, false);
        mRootView.addView(mCountdownTextView);
        mHandler.post(mCountdownRunnable);
    }

    private void setupAfter() {
        // After conference, show canned text.
        mInflater.inflate(R.layout.whats_on_thank_you, mRootView, true);
    }

    private void setupDuring() {
        // Start background query to load announcements; the observer triggers
        // a loader restart whenever the announcements table changes.
        getLoaderManager().initLoader(ANNOUNCEMENTS_LOADER_ID, null, this);
        getActivity().getContentResolver().registerContentObserver(
                Announcements.CONTENT_URI, true, mObserver);
    }

    /**
     * Event that updates countdown timer. Posts itself again to
     * {@link #mHandler} to continue updating time.
     */
    private Runnable mCountdownRunnable = new Runnable() {
        public void run() {
            int remainingSec = (int) Math.max(0,
                    (UIUtils.CONFERENCE_START_MILLIS - UIUtils
                            .getCurrentTime(getActivity())) / 1000);
            final boolean conferenceStarted = remainingSec == 0;

            if (conferenceStarted) {
                // Conference started while in countdown mode, switch modes and
                // bail on future countdown updates.
                mHandler.postDelayed(new Runnable() {
                    public void run() {
                        refresh();
                    }
                }, 100);
                return;
            }

            // Split remaining time into whole days + seconds-within-day.
            final int secs = remainingSec % 86400;
            final int days = remainingSec / 86400;
            final String str;
            if (days == 0) {
                str = getResources().getString(
                        R.string.whats_on_countdown_title_0,
                        DateUtils.formatElapsedTime(secs));
            } else {
                str = getResources().getQuantityString(
                        R.plurals.whats_on_countdown_title, days, days,
                        DateUtils.formatElapsedTime(secs));
            }
            mCountdownTextView.setText(str);

            // Repost ourselves to keep updating countdown
            mHandler.postDelayed(mCountdownRunnable, 1000);
        }
    };

    @Override
    public Loader<Cursor> onCreateLoader(int id, Bundle args) {
        return new CursorLoader(getActivity(), Announcements.CONTENT_URI,
                AnnouncementsQuery.PROJECTION, null, null,
                Announcements.DEFAULT_SORT);
    }

    @Override
    public void onLoadFinished(Loader<Cursor> loader, Cursor cursor) {
        // Fragment may have been detached while the load was in flight.
        if (getActivity() == null) {
            return;
        }

        if (cursor != null && cursor.getCount() > 0) {
            // Need to always set this because original gets unset in onLoaderReset
            mAnnouncementsCursor = cursor;
            cursor.moveToFirst();
            // Only update announcements if there's a new one
            String latestAnnouncementId = cursor.getString(AnnouncementsQuery.ANNOUNCEMENT_ID);
            if (!latestAnnouncementId.equals(mLatestAnnouncementId)) {
                mHandler.removeCallbacks(mCycleAnnouncementsRunnable);
                mLatestAnnouncementId = latestAnnouncementId;
                showAnnouncements();
            }
        } else {
            mHandler.removeCallbacks(mCycleAnnouncementsRunnable);
            showNoAnnouncements();
        }
    }

    @Override
    public void onLoaderReset(Loader<Cursor> loader) {
        mAnnouncementsCursor = null;
    }

    /**
     * Shows the announcements panel and starts cycling through them.
     */
    private void showAnnouncements() {
        mAnnouncementsCursor.moveToFirst();
        ViewGroup announcementsRootView = (ViewGroup) mInflater.inflate(
                R.layout.whats_on_announcements, mRootView, false);
        mAnnouncementView = announcementsRootView.findViewById(R.id.announcement_container);
        // Begin cycling in announcements
        mHandler.post(mCycleAnnouncementsRunnable);

        // "More" button opens the full announcements list.
        final View moreButton = announcementsRootView.findViewById(R.id.extra_button);
        moreButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View view) {
                startActivity(new Intent(getActivity(), AnnouncementsActivity.class));
            }
        });
        mRootView.removeAllViews();
        mRootView.addView(announcementsRootView);
    }

    /**
     * Animates the current announcement out, swaps in the next row from the
     * cursor (wrapping around), animates it in, and reschedules itself.
     */
    private Runnable mCycleAnnouncementsRunnable = new Runnable() {
        @Override
        public void run() {
            // First animate the current announcement out
            final int animationDuration = getResources().getInteger(android.R.integer.config_shortAnimTime);
            final int height = mAnnouncementView.getHeight();
            TranslateAnimation anim = new TranslateAnimation(0, 0, 0, height);
            anim.setDuration(animationDuration);
            anim.setAnimationListener(new Animation.AnimationListener() {
                @Override
                public void onAnimationEnd(Animation animation) {
                    // Set the announcement data
                    TextView titleView = (TextView) mAnnouncementView.findViewById(
                            R.id.announcement_title);
                    TextView agoView = (TextView) mAnnouncementView.findViewById(
                            R.id.announcement_ago);
                    titleView.setText(mAnnouncementsCursor.getString(
                            AnnouncementsQuery.ANNOUNCEMENT_TITLE));
                    long date = mAnnouncementsCursor.getLong(
                            AnnouncementsQuery.ANNOUNCEMENT_DATE);
                    String when = TimeUtils.getTimeAgo(date, getActivity());
                    agoView.setText(when);
                    final String url = mAnnouncementsCursor.getString(
                            AnnouncementsQuery.ANNOUNCEMENT_URL);
                    // Tapping the announcement opens its URL; silently ignore
                    // devices with no handler for the link.
                    mAnnouncementView.setOnClickListener(new OnClickListener() {
                        @Override
                        public void onClick(View view) {
                            Intent announcementIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(url));
                            announcementIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
                            try {
                                startActivity(announcementIntent);
                            } catch (ActivityNotFoundException ignored) {
                            }
                        }
                    });

                    // Advance to the next announcement, wrapping at the end.
                    int nextPosition = (mAnnouncementsCursor.getPosition() + 1)
                            % mAnnouncementsCursor.getCount();
                    mAnnouncementsCursor.moveToPosition(nextPosition);

                    // Animate the announcement in
                    TranslateAnimation anim = new TranslateAnimation(0, 0, height, 0);
                    anim.setDuration(animationDuration);
                    mAnnouncementView.startAnimation(anim);

                    mHandler.postDelayed(mCycleAnnouncementsRunnable,
                            ANNOUNCEMENTS_CYCLE_INTERVAL_MILLIS + animationDuration);
                }

                @Override
                public void onAnimationStart(Animation animation) {
                }

                @Override
                public void onAnimationRepeat(Animation animation) {
                }
            });
            mAnnouncementView.startAnimation(anim);
        }
    };

    /**
     * Shows a placeholder message when there are no announcements.
     */
    private void showNoAnnouncements() {
        mRootView.removeAllViews();
        mInflater.inflate(R.layout.empty_announcements, mRootView, true);
    }

    // Restarts the loader whenever the announcements content URI changes.
    private ContentObserver mObserver = new ContentObserver(new Handler()) {
        @Override
        public void onChange(boolean selfChange) {
            if (getActivity() == null) {
                return;
            }
            getLoaderManager().restartLoader(ANNOUNCEMENTS_LOADER_ID, null,
                    WhatsOnFragment.this);
        }
    };

    /** Projection and column indices for the announcements query. */
    private interface AnnouncementsQuery {
        String[] PROJECTION = {
                Announcements.ANNOUNCEMENT_ID,
                Announcements.ANNOUNCEMENT_TITLE,
                Announcements.ANNOUNCEMENT_DATE,
                Announcements.ANNOUNCEMENT_URL,
        };

        int ANNOUNCEMENT_ID = 0;
        int ANNOUNCEMENT_TITLE = 1;
        int ANNOUNCEMENT_DATE = 2;
        int ANNOUNCEMENT_URL = 3;
    }
}
package mekanism.common.content.transporter;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import mekanism.api.Coord4D;
import mekanism.api.EnumColor;
import mekanism.api.util.StackUtils;
import mekanism.common.base.ISideConfiguration;
import mekanism.common.content.transporter.TransporterStack.Path;
import mekanism.common.tile.TileEntityBin;
import mekanism.common.util.InventoryUtils;
import mekanism.common.util.MekanismUtils;
import net.minecraft.inventory.IInventory;
import net.minecraft.inventory.ISidedInventory;
import net.minecraft.item.ItemStack;
import net.minecraft.tileentity.TileEntity;
import net.minecraftforge.common.util.ForgeDirection;
import powercrystals.minefactoryreloaded.api.IDeepStorageUnit;
import cpw.mods.fml.common.Loader;

/**
 * Tracks all item stacks currently travelling through the transporter network and
 * predicts how much of a new stack a destination inventory can accept, taking
 * already-in-flight stacks into account so a destination is never over-committed.
 */
public class TransporterManager {

    /** All stacks currently in flight anywhere in the network (global registry). */
    public static Set<TransporterStack> flowingStacks = new HashSet<TransporterStack>();

    /** Clears the in-flight registry (e.g. on world/server reset). */
    public static void reset() {
        flowingStacks.clear();
    }

    /** Registers a stack that has started travelling. */
    public static void add(TransporterStack stack) {
        flowingStacks.add(stack);
    }

    /** Unregisters a stack that has arrived or been dropped. */
    public static void remove(TransporterStack stack) {
        flowingStacks.remove(stack);
    }

    /**
     * Returns every in-flight stack whose path currently terminates at the given
     * destination coordinate. Stacks with no path or a NONE path type are skipped.
     */
    public static List<TransporterStack> getStacksToDest(Coord4D dest) {
        List<TransporterStack> ret = new ArrayList<TransporterStack>();

        for(TransporterStack stack : flowingStacks) {
            if(stack != null && stack.pathType != Path.NONE && stack.hasPath()) {
                if(stack.getDest().equals(dest)) {
                    ret.add(stack);
                }
            }
        }

        return ret;
    }

    /**
     * Takes a deep snapshot of an inventory as seen from the given side, so
     * insertions can be simulated without touching the real inventory.
     *
     * For sided inventories only the slots accessible from the opposite of
     * {@code side} are copied; returns null if that side exposes no slots.
     * For a TileEntityBin the bin's current item count is captured as well.
     *
     * @param inv  the inventory to copy (double chests are normalized first)
     * @param side the side the insertion approaches from
     * @return a simulation copy, or null when a sided inventory exposes no slots
     */
    public static InventoryCopy copyInvFromSide(IInventory inv, int side) {
        inv = InventoryUtils.checkChestInv(inv);

        ItemStack[] ret = new ItemStack[inv.getSizeInventory()];

        if(!(inv instanceof ISidedInventory)) {
            for(int i = 0; i <= inv.getSizeInventory() - 1; i++) {
                ret[i] = inv.getStackInSlot(i) != null ? inv.getStackInSlot(i).copy() : null;
            }
        }
        else {
            ISidedInventory sidedInventory = (ISidedInventory)inv;
            int[] slots = sidedInventory.getAccessibleSlotsFromSide(ForgeDirection.getOrientation(side).getOpposite().ordinal());

            if(slots == null || slots.length == 0) {
                return null;
            }

            for(int get = 0; get <= slots.length - 1; get++) {
                int slotID = slots[get];
                ret[slotID] = sidedInventory.getStackInSlot(slotID) != null ? sidedInventory.getStackInSlot(slotID).copy() : null;
            }

            if(inv instanceof TileEntityBin) {
                return new InventoryCopy(ret, ((TileEntityBin)inv).getItemCount());
            }
            else {
                return new InventoryCopy(ret);
            }
        }

        return new InventoryCopy(ret);
    }

    /**
     * Simulates inserting an in-flight stack into the snapshot {@code copy},
     * mutating the snapshot to reflect where the items would land. The real
     * inventory is never modified.
     *
     * Early-outs (simulation records nothing): strict-input color mismatch on a
     * side-configurable machine (unless the stack is returning HOME), or a
     * MineFactoryReloaded deep-storage unit that is not a Mekanism bin.
     */
    public static void testInsert(IInventory inv, InventoryCopy copy, int side, TransporterStack stack) {
        ItemStack toInsert = stack.itemStack.copy();

        if(stack.pathType != Path.HOME && inv instanceof ISideConfiguration) {
            ISideConfiguration config = (ISideConfiguration)inv;
            int tileSide = config.getOrientation();
            EnumColor configColor = config.getEjector().getInputColor(ForgeDirection.getOrientation(MekanismUtils.getBaseOrientation(side, tileSide)).getOpposite());

            if(config.getEjector().hasStrictInput() && configColor != null && configColor != stack.color) {
                return;
            }
        }

        // DSUs report huge fake stack sizes; skip simulation for them entirely
        // (bins are handled explicitly below via binAmount).
        if(Loader.isModLoaded("MinefactoryReloaded") && inv instanceof IDeepStorageUnit && !(inv instanceof TileEntityBin)) {
            return;
        }

        if(!(inv instanceof ISidedInventory)) {
            for(int i = 0; i <= inv.getSizeInventory() - 1; i++) {
                // HOME-bound stacks (returning to origin) bypass slot validity checks.
                if(stack.pathType != Path.HOME) {
                    if(!inv.isItemValidForSlot(i, toInsert)) {
                        continue;
                    }
                }

                ItemStack inSlot = copy.inventory[i];

                if(inSlot == null) {
                    copy.inventory[i] = toInsert;
                    return;
                }
                else if(InventoryUtils.areItemsStackable(toInsert, inSlot) && inSlot.stackSize < inSlot.getMaxStackSize() && inSlot.stackSize < inv.getInventoryStackLimit()) {
                    if(inSlot.stackSize + toInsert.stackSize <= inSlot.getMaxStackSize()) {
                        // Everything fits; merge into this slot.
                        ItemStack toSet = toInsert.copy();
                        toSet.stackSize += inSlot.stackSize;
                        copy.inventory[i] = toSet;
                        return;
                    }
                    else {
                        // Partial fit: top this slot off and carry the remainder
                        // forward to the next candidate slot.
                        int rejects = (inSlot.stackSize + toInsert.stackSize) - inSlot.getMaxStackSize();

                        ItemStack toSet = toInsert.copy();
                        toSet.stackSize = inSlot.getMaxStackSize();

                        ItemStack remains = toInsert.copy();
                        remains.stackSize = rejects;

                        copy.inventory[i] = toSet;
                        toInsert = remains;
                    }
                }
            }
        }
        else {
            ISidedInventory sidedInventory = (ISidedInventory)inv;
            int[] slots = sidedInventory.getAccessibleSlotsFromSide(ForgeDirection.getOrientation(side).getOpposite().ordinal());

            if(slots != null && slots.length != 0) {
                // NOTE(review): special-case for bins approached from below (ordinal 0)
                // — swaps to the top-side slot list; presumably because a bin's bottom
                // face is output-only. Confirm against TileEntityBin.
                if(stack.pathType != Path.HOME && sidedInventory instanceof TileEntityBin && ForgeDirection.getOrientation(side).getOpposite().ordinal() == 0) {
                    slots = sidedInventory.getAccessibleSlotsFromSide(1);
                }

                if(inv instanceof TileEntityBin) {
                    int slot = slots[0];

                    if(!sidedInventory.isItemValidForSlot(slot, toInsert) || !sidedInventory.canInsertItem(slot, toInsert, ForgeDirection.getOrientation(side).getOpposite().ordinal())) {
                        return;
                    }

                    // Bins track a single item count rather than per-slot stacks.
                    int amountRemaining = ((TileEntityBin)inv).getMaxStoredCount()-copy.binAmount;
                    copy.binAmount += Math.min(amountRemaining, toInsert.stackSize);

                    return;
                }
                else {
                    for(int get = 0; get <= slots.length - 1; get++) {
                        int slotID = slots[get];

                        if(stack.pathType != Path.HOME) {
                            if(!sidedInventory.isItemValidForSlot(slotID, toInsert) || !sidedInventory.canInsertItem(slotID, toInsert, ForgeDirection.getOrientation(side).getOpposite().ordinal())) {
                                continue;
                            }
                        }

                        ItemStack inSlot = copy.inventory[slotID];

                        if(inSlot == null) {
                            copy.inventory[slotID] = toInsert;
                            return;
                        }
                        else if(InventoryUtils.areItemsStackable(toInsert, inSlot) && inSlot.stackSize < inSlot.getMaxStackSize() && inSlot.stackSize < inv.getInventoryStackLimit()) {
                            if(inSlot.stackSize + toInsert.stackSize <= inSlot.getMaxStackSize()) {
                                ItemStack toSet = toInsert.copy();
                                toSet.stackSize += inSlot.stackSize;
                                copy.inventory[slotID] = toSet;
                                return;
                            }
                            else {
                                int rejects = (inSlot.stackSize + toInsert.stackSize) - inSlot.getMaxStackSize();

                                ItemStack toSet = toInsert.copy();
                                toSet.stackSize = inSlot.getMaxStackSize();

                                ItemStack remains = toInsert.copy();
                                remains.stackSize = rejects;

                                copy.inventory[slotID] = toSet;
                                toInsert = remains;
                            }
                        }
                    }
                }
            }
        }
    }

    /**
     * True if at least one item of {@code stack} was accepted, i.e. the returned
     * remainder is null or strictly smaller than what was offered.
     */
    public static boolean didEmit(ItemStack stack, ItemStack returned) {
        return returned == null || returned.stackSize < stack.stackSize;
    }

    /**
     * Given the offered stack and the rejected remainder, returns the portion
     * that was actually consumed (the whole stack when nothing was rejected).
     */
    public static ItemStack getToUse(ItemStack stack, ItemStack returned) {
        if(returned == null || returned.stackSize == 0) {
            return stack;
        }

        return MekanismUtils.size(stack, stack.stackSize-returned.stackSize);
    }

    /**
     * Predicts how much of {@code itemStack} the destination tile would reject if
     * it were inserted now, accounting for all stacks already in flight toward it.
     *
     * Builds a snapshot of the destination's inventory, replays every queued
     * in-flight insertion into the snapshot via testInsert, then simulates
     * inserting {@code itemStack} into what remains.
     *
     * @return the rejected portion: the full stack when the tile cannot accept
     *         anything, null when everything fits, otherwise the leftover part.
     *
     * NOTE(review): the non-sided branch checks {@code toInsert == null}, which can
     * never be true here (dead condition), and the two rejection branches use
     * StackUtils.size vs MekanismUtils.size inconsistently — presumably equivalent
     * helpers; confirm before unifying.
     */
    public static ItemStack getPredictedInsert(TileEntity tileEntity, EnumColor color, ItemStack itemStack, int side) {
        if(!(tileEntity instanceof IInventory)) {
            return itemStack;
        }

        if(tileEntity instanceof ISideConfiguration) {
            ISideConfiguration config = (ISideConfiguration)tileEntity;
            int tileSide = config.getOrientation();
            EnumColor configColor = config.getEjector().getInputColor(ForgeDirection.getOrientation(MekanismUtils.getBaseOrientation(side, tileSide)).getOpposite());

            if(config.getEjector().hasStrictInput() && configColor != null && configColor != color) {
                return itemStack;
            }
        }

        IInventory inventory = (IInventory)tileEntity;
        InventoryCopy copy = copyInvFromSide(inventory, side);

        if(copy == null) {
            return itemStack;
        }

        // Replay every stack already heading to this destination into the snapshot.
        List<TransporterStack> insertQueue = getStacksToDest(Coord4D.get(tileEntity));

        for(TransporterStack tStack : insertQueue) {
            testInsert(inventory, copy, side, tStack);
        }

        ItemStack toInsert = itemStack.copy();

        if(!(inventory instanceof ISidedInventory)) {
            inventory = InventoryUtils.checkChestInv(inventory);

            for(int i = 0; i <= inventory.getSizeInventory() - 1; i++) {
                if(!inventory.isItemValidForSlot(i, toInsert)) {
                    continue;
                }

                ItemStack inSlot = copy.inventory[i];

                if(inSlot == null || toInsert == null) {
                    // Empty slot in the snapshot: the whole remainder fits.
                    return null;
                }
                else if(InventoryUtils.areItemsStackable(toInsert, inSlot) && inSlot.stackSize < inSlot.getMaxStackSize() && inSlot.stackSize < inventory.getInventoryStackLimit()) {
                    if(inSlot.stackSize + toInsert.stackSize <= inSlot.getMaxStackSize()) {
                        return null;
                    }
                    else {
                        int rejects = (inSlot.stackSize + toInsert.stackSize) - inSlot.getMaxStackSize();

                        if(rejects < toInsert.stackSize) {
                            toInsert = StackUtils.size(toInsert, rejects);
                        }
                    }
                }
            }
        }
        else {
            ISidedInventory sidedInventory = (ISidedInventory)inventory;
            int[] slots = sidedInventory.getAccessibleSlotsFromSide(ForgeDirection.getOrientation(side).getOpposite().ordinal());

            if(slots != null && slots.length != 0) {
                if(inventory instanceof TileEntityBin) {
                    int slot = slots[0];

                    if(!sidedInventory.isItemValidForSlot(slot, toInsert) || !sidedInventory.canInsertItem(slot, toInsert, ForgeDirection.getOrientation(side).getOpposite().ordinal())) {
                        return toInsert;
                    }

                    int amountRemaining = ((TileEntityBin)inventory).getMaxStoredCount()-copy.binAmount;

                    if(toInsert.stackSize <= amountRemaining) {
                        return null;
                    }
                    else {
                        return StackUtils.size(toInsert, toInsert.stackSize-amountRemaining);
                    }
                }
                else {
                    for(int get = 0; get <= slots.length - 1; get++) {
                        int slotID = slots[get];

                        if(!sidedInventory.isItemValidForSlot(slotID, toInsert) || !sidedInventory.canInsertItem(slotID, toInsert, ForgeDirection.getOrientation(side).getOpposite().ordinal())) {
                            continue;
                        }

                        ItemStack inSlot = copy.inventory[slotID];

                        if(inSlot == null) {
                            return null;
                        }
                        else if(InventoryUtils.areItemsStackable(toInsert, inSlot) && inSlot.stackSize < inSlot.getMaxStackSize() && inSlot.stackSize < inventory.getInventoryStackLimit()) {
                            if(inSlot.stackSize + toInsert.stackSize <= inSlot.getMaxStackSize()) {
                                return null;
                            }
                            else {
                                int rejects = (inSlot.stackSize + toInsert.stackSize) - inSlot.getMaxStackSize();

                                if(rejects < toInsert.stackSize) {
                                    toInsert = MekanismUtils.size(toInsert, rejects);
                                }
                            }
                        }
                    }
                }
            }
        }

        return toInsert;
    }

    /**
     * Mutable simulation snapshot of an inventory: per-slot stack copies plus, for
     * bins, the simulated stored-item count.
     */
    public static class InventoryCopy {

        /** Deep copies of the inventory's slots (indexed by real slot ID). */
        public ItemStack[] inventory;

        /** Simulated item count for TileEntityBin destinations (0 otherwise). */
        public int binAmount;

        public InventoryCopy(ItemStack[] inv) {
            inventory = inv;
        }

        public InventoryCopy(ItemStack[] inv, int amount) {
            this(inv);
            binAmount = amount;
        }
    }
}
/*******************************************************************************
 * * Copyright 2012 Impetus Infotech.
 * *
 * * Licensed under the Apache License, Version 2.0 (the "License");
 * * you may not use this file except in compliance with the License.
 * * You may obtain a copy of the License at
 * *
 * * http://www.apache.org/licenses/LICENSE-2.0
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS,
 * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * * See the License for the specific language governing permissions and
 * * limitations under the License.
 ******************************************************************************/
package com.impetus.kundera.persistence;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Deque;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.locks.ReentrantReadWriteLock;

import javax.persistence.FlushModeType;
import javax.persistence.Query;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.impetus.kundera.KunderaException;
import com.impetus.kundera.PersistenceProperties;
import com.impetus.kundera.client.Client;
import com.impetus.kundera.client.ClientPropertiesSetter;
import com.impetus.kundera.client.ClientResolverException;
import com.impetus.kundera.graph.GraphGenerator;
import com.impetus.kundera.graph.Node;
import com.impetus.kundera.graph.ObjectGraph;
import com.impetus.kundera.graph.ObjectGraphUtils;
import com.impetus.kundera.lifecycle.states.ManagedState;
import com.impetus.kundera.lifecycle.states.RemovedState;
import com.impetus.kundera.metadata.KunderaMetadataManager;
import com.impetus.kundera.metadata.MetadataUtils;
import com.impetus.kundera.metadata.model.EntityMetadata;
import com.impetus.kundera.metadata.model.PersistenceUnitMetadata;
import com.impetus.kundera.metadata.model.attributes.AbstractAttribute;
import com.impetus.kundera.persistence.EntityManagerFactoryImpl.KunderaMetadata;
import com.impetus.kundera.persistence.api.Batcher;
import com.impetus.kundera.persistence.context.EventLog.EventType;
import com.impetus.kundera.persistence.context.FlushManager;
import com.impetus.kundera.persistence.context.MainCache;
import com.impetus.kundera.persistence.context.PersistenceCache;
import com.impetus.kundera.persistence.context.jointable.JoinTableData;
import com.impetus.kundera.persistence.context.jointable.JoinTableData.OPERATION;
import com.impetus.kundera.persistence.event.EntityEventDispatcher;
import com.impetus.kundera.proxy.LazyInitializerFactory;
import com.impetus.kundera.query.QueryResolver;
import com.impetus.kundera.utils.ObjectUtils;

/**
 * The Class PersistenceDelegator.
 *
 * Central workhorse behind Kundera's EntityManager: performs CRUD against the
 * persistence cache, builds flush stacks, coordinates per-persistence-unit
 * clients, and drives transaction commit/rollback. Write operations are guarded
 * by a ReentrantReadWriteLock's write lock; reads from the persistence context
 * take the read lock.
 */
public final class PersistenceDelegator
{
    /** The Constant log. */
    private static final Logger log = LoggerFactory.getLogger(PersistenceDelegator.class);

    /** The closed. True once close() has been called on this delegator. */
    private boolean closed;

    /** The client map. One Client per persistence unit, loaded lazily via loadClient(). */
    private final Map<String, Client> clientMap = new HashMap<String, Client>();

    /** The event dispatcher. Fires JPA lifecycle events. */
    private final EntityEventDispatcher eventDispatcher = new EntityEventDispatcher();

    /** JPA flush mode; AUTO by default (see applyFlush()). */
    private FlushModeType flushMode = FlushModeType.AUTO;

    /** Guards persistence-context reads (read lock) and writes/flushes (write lock). */
    private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();

    // Whether a transaction is in progress
    private boolean isTransactionInProgress;

    private final PersistenceCache persistenceCache;

    private final FlushManager flushManager = new FlushManager();

    /** Set during commit()/doFlush() to force join-table flushing even inside a transaction. */
    private boolean enableFlush;

    /** Transaction coordinator; rebuilt on each getCoordinator() call. */
    private Coordinator coordinator;

    private final KunderaMetadata kunderaMetadata;

    /**
     * Instantiates a new persistence delegator.
     *
     * @param kunderaMetadata
     *            application-wide Kundera metadata
     * @param pc
     *            the persistence cache backing this delegator
     */
    PersistenceDelegator(final KunderaMetadata kunderaMetadata, final PersistenceCache pc)
    {
        this.persistenceCache = pc;
        this.kunderaMetadata = kunderaMetadata;
    }

    /***********************************************************************/
    /***************** CRUD Methods ****************************************/
    /***********************************************************************/

    /**
     * Writes an entity into Persistence cache. (Actual database write is done
     * while flushing)
     */
    public void persist(Object e)
    {
        if (e == null)
        {
            throw new IllegalArgumentException(
                    "Entity object is invalid, operation failed. Please check previous log message for details");
        }

        // Create an object graph of the entity object.
        ObjectGraph graph = new GraphGenerator().generateGraph(e, this);

        // Call persist on each node in object graph.
        Node node = graph.getHeadNode();

        try
        {
            // Get write lock before writing object required for transaction.
            lock.writeLock().lock();

            node.setPersistenceDelegator(this);
            node.persist();

            // build flush stack.
            flushManager.buildFlushStack(node, com.impetus.kundera.persistence.context.EventLog.EventType.INSERT);

            // Flushing data.
            flush();

            // Add node to persistence context after successful flush.
            getPersistenceCache().getMainCache().addHeadNode(node);
        }
        finally
        {
            lock.writeLock().unlock();
        }

        // Unlocking object.
        graph.clear();
        graph = null;

        if (log.isDebugEnabled())
        {
            log.debug("Data persisted successfully for entity {}.", e.getClass());
        }
    }

    /**
     * Find object based on primary key either form persistence cache or from
     * database
     *
     * @param entityClass
     * @param primaryKey
     * @return
     */
    public <E> E findById(final Class<E> entityClass, final Object primaryKey)
    {
        E e = find(entityClass, primaryKey);

        if (e == null)
        {
            return null;
        }

        // NOTE(review): comment below says "copy", but the value is returned
        // as-is; find() already returns a deep copy of the cached node data.
        // Return a copy of this entity
        return (E) (e);
    }

    /**
     * Finds an entity from persistence cache, if not there, fetches from
     * database. Nodes are added into persistence cache (if not already there)
     * as and when they are found from DB. While adding nodes to persistence
     * cache, a deep copy is added, so that found object doesn't refer to
     * managed entity in persistence cache.
     *
     * @param entityClass
     *            Entity Class
     * @param primaryKey
     *            Primary Key
     * @return Entity Object for the given primary key
     *
     */
    <E> E find(final Class<E> entityClass, final Object primaryKey)
    {
        if (primaryKey == null)
        {
            throw new IllegalArgumentException("PrimaryKey value must not be null for object you want to find.");
        }

        // Locking as it might read from persistence context.
        EntityMetadata entityMetadata = getMetadata(entityClass);

        String nodeId = ObjectGraphUtils.getNodeId(primaryKey, entityClass);

        // TODO all the scrap should go from here.
        MainCache mainCache = (MainCache) getPersistenceCache().getMainCache();

        Node node = mainCache.getNodeFromCache(nodeId, this);

        // if node is not in persistence cache or is dirty, fetch from database
        if (node == null || node.isDirty())
        {
            node = new Node(nodeId, entityClass, new ManagedState(), getPersistenceCache(), primaryKey, this);
            node.setClient(getClient(entityMetadata));

            // TODO ManagedState.java require serious attention.
            node.setPersistenceDelegator(this);

            try
            {
                lock.readLock().lock();
                node.find();
            }
            finally
            {
                lock.readLock().unlock();
            }
        }
        else
        {
            node.setPersistenceDelegator(this);
        }

        Object nodeData = node.getData();

        if (nodeData == null)
        {
            return null;
        }
        else
        {
            // Deep-copy so the caller never holds a reference to the managed instance.
            E e = (E) ObjectUtils.deepCopy(nodeData, getKunderaMetadata());
            onSetProxyOwners(entityMetadata, e);
            return e;
        }
    }

    /**
     * Retrieves a {@link List} of Entities for given Primary Keys
     *
     * @param entityClass
     *            Entity Class
     * @param primaryKeys
     *            Array of Primary Keys
     * @see {@link PersistenceDelegator#find(Class, Object)}
     * @return List of found entities
     */
    // TODO Is it possible to pass all primary keys directly to database client.
    public <E> List<E> find(Class<E> entityClass, Object... primaryKeys)
    {
        List<E> entities = new ArrayList<E>();

        if (primaryKeys == null)
        {
            return entities;
        }

        // De-duplicate keys before looking each one up individually.
        Set<Object> pKeys = new HashSet<Object>(Arrays.asList(primaryKeys));
        for (Object primaryKey : pKeys)
        {
            E e = find(entityClass, primaryKey);
            if (e != null)
            {
                entities.add(e);
            }
        }
        return entities;
    }

    /**
     * Retrieves {@link List} of entities for a given {@link Map} of embedded
     * column values. Purpose of this method is to provide functionality of
     * search based on columns inside embedded objects.
     *
     * @param entityClass
     *            Entity Class
     * @param embeddedColumnMap
     *            Embedded column map values
     * @return List of found entities.
     */
    public <E> List<E> find(Class<E> entityClass, Map<String, String> embeddedColumnMap)
    {
        EntityMetadata entityMetadata = getMetadata(entityClass);

        // TODO Why returning entities are not added into cache we should not
        // iterate here but client should i think.
        List<E> entities = new ArrayList<E>();
        entities = getClient(entityMetadata).find(entityClass, embeddedColumnMap);
        return entities;
    }

    /**
     * Removes an entity object from persistence cache.
     *
     */
    public void remove(Object e)
    {
        // Invoke Pre Remove Events
        // TODO Check for validity also as per JPA
        if (e == null)
        {
            throw new IllegalArgumentException("Entity to be removed must not be null.");
        }

        EntityMetadata metadata = getMetadata(e.getClass());

        // Create an object graph of the entity object
        ObjectGraph graph = new GraphGenerator().generateGraph(e, this, new ManagedState());
        Node node = graph.getHeadNode();

        try
        {
            lock.writeLock().lock();
            // TODO : push into action queue, get original end-point from
            // persistenceContext first!
            // Action/ExecutionQueue/ActivityQueue :-> id, name, EndPoint,
            // changed
            // state
            // Change state of node, after successful flush processing.
            node.setPersistenceDelegator(this);
            node.remove();

            // build flush stack.
            flushManager.buildFlushStack(node, EventType.DELETE);

            // Flush node.
            flush();
        }
        finally
        {
            lock.writeLock().unlock();
        }

        // clear out graph
        graph.clear();
        graph = null;

        if (log.isDebugEnabled())
        {
            log.debug("Data removed successfully for entity : " + e.getClass());
        }
    }

    /**
     * Flushes Dirty objects in {@link PersistenceCache} to databases.
     *
     * Pops nodes off the flush stack top-to-bottom; each node in Managed or
     * Removed state is either batched (Batcher clients with batch size &gt; 0),
     * synchronized onto the transaction resource (when a default-transaction
     * unit is inside a transaction), or flushed directly. Join-table data is
     * flushed afterwards unless batching deferred it.
     */
    private void flush()
    {
        // Get flush stack from Flush Manager
        Deque<Node> fs = flushManager.getFlushStack();

        // Flush each node in flush stack from top to bottom unit it's empty
        if (log.isDebugEnabled())
        {
            log.debug("Flushing following flush stack to database(s) (showing stack objects from top to bottom):\n"
                    + fs);
        }

        if (fs != null)
        {
            boolean isBatch = false;
            while (!fs.isEmpty())
            {
                Node node = fs.pop();

                // Only nodes in Managed and Removed state are flushed, rest
                // are ignored
                if (node.isInState(ManagedState.class) || node.isInState(RemovedState.class))
                {
                    EntityMetadata metadata = getMetadata(node.getDataClass());
                    node.setClient(getClient(metadata));

                    // if batch size is defined.
                    if ((node.getClient() instanceof Batcher) && ((Batcher) (node.getClient())).getBatchSize() > 0)
                    {
                        isBatch = true;
                        ((Batcher) (node.getClient())).addBatch(node);
                    }
                    else if (isTransactionInProgress
                            && MetadataUtils.defaultTransactionSupported(metadata.getPersistenceUnit(),
                                    kunderaMetadata))
                    {
                        onSynchronization(node, metadata);
                    }
                    else
                    {
                        node.flush();
                    }
                }
            }

            if (!isBatch)
            {
                // TODO : This needs to be look for different
                // permutation/combination
                // Flush Join Table data into database
                flushJoinTableData();
            }
        }
    }

    /**
     * Merges the state of the given entity into the persistence context and
     * flushes, returning the managed node's data.
     */
    public <E> E merge(E e)
    {
        if (log.isDebugEnabled())
            log.debug("Merging Entity : " + e);

        if (e == null)
        {
            throw new IllegalArgumentException("Entity to be merged must not be null.");
        }

        EntityMetadata m = getMetadata(e.getClass());

        // Create an object graph of the entity object to be merged
        ObjectGraph graph = new GraphGenerator().generateGraph(e, this);

        // Call merge on each node in object graph
        Node node = graph.getHeadNode();

        try
        {
            lock.writeLock().lock();

            // Change node's state after successful flush.
            node.setPersistenceDelegator(this);
            node.merge();

            // build flush stack.
            flushManager.buildFlushStack(node, EventType.UPDATE);

            flush();
        }
        finally
        {
            lock.writeLock().unlock();
        }

        graph.clear();
        graph = null;

        return (E) node.getData();
    }

    /**
     * Remove the given entity from the persistence context, causing a managed
     * entity to become detached.
     */
    public void detach(Object entity)
    {
        Node node = getPersistenceCache().getMainCache().getNodeFromCache(entity, getMetadata(entity.getClass()),
                this);

        if (node != null)
        {
            node.detach();
        }
    }

    /**
     * Gets the client.
     *
     * @param m
     *            the m
     * @return the client
     */
    public Client getClient(EntityMetadata m)
    {
        // // Persistence Unit used to retrieve client
        if (m == null)
        {
            throw new KunderaException("Entitymatadata should not be null");
        }
        String persistenceUnit = m.getPersistenceUnit();

        return getClient(persistenceUnit);
    }

    /**
     * Returns the client registered for the given persistence unit.
     *
     * @throws ClientResolverException
     *             when no client has been loaded for the unit
     */
    public Client getClient(final String persistenceUnit)
    {
        Client client = clientMap.get(persistenceUnit);

        if (client == null)
        {
            throw new ClientResolverException("No client configured for persistenceUnit " + persistenceUnit);
        }

        return client;
    }

    /**
     * Gets the event dispatcher.
     *
     * @return the event dispatcher
     */
    public EntityEventDispatcher getEventDispatcher()
    {
        return eventDispatcher;
    }

    /**
     * Creates the query.
     *
     * @param jpaQuery
     *            the jpa query
     * @return the query
     */
    Query createQuery(String jpaQuery)
    {
        return getQueryInstance(jpaQuery, false, null);
    }

    /**
     * Creates the query against a specific persistence unit. Falls back to a
     * scalar query when no entity metadata can be resolved for the unit.
     *
     * @param jpaQuery
     *            the jpa query
     * @return the query
     */
    Query createQuery(String jpaQuery, final String persistenceUnit)
    {
        Client client = getClient(persistenceUnit);
        EntityMetadata metadata = null;
        try
        {
            // NOTE(review): grabs an arbitrary (first) entity metadata of the
            // unit — presumably only used to pick the query implementor.
            metadata = KunderaMetadataManager.getMetamodel(kunderaMetadata, client.getPersistenceUnit())
                    .getEntityMetadataMap().values().iterator().next();
        }
        catch (Exception e)
        {
            log.info("Entity metadata is null. Proceeding as Scalar Query.");
        }
        Query query = new QueryResolver().getQueryImplementation(jpaQuery, getClient(persistenceUnit)
                .getQueryImplementor(), this, metadata, persistenceUnit);

        return query;
    }

    /*
     * Creates a native query mapped to the given result class.
     */
    Query createNativeQuery(String jpaQuery, Class resultClass)
    {
        return getQueryInstance(jpaQuery, true, resultClass);
    }

    // Shared factory for JPQL/native queries.
    private Query getQueryInstance(String jpaQuery, boolean isNative, Class mappedClass)
    {
        Query query = new QueryResolver()
                .getQueryImplementation(jpaQuery, this, mappedClass, isNative, kunderaMetadata);
        return query;
    }

    /**
     * Checks if is open.
     *
     * @return true, if is open
     */
    public boolean isOpen()
    {
        return !closed;
    }

    /**
     * Close. Flushes pending work, closes and clears all clients, clears
     * proxies, and marks this delegator closed.
     */
    void close()
    {
        doFlush();

        // Close all clients created in this session
        if (!clientMap.isEmpty())
        {
            for (Client client : clientMap.values())
            {
                client.close();
            }
            clientMap.clear();
        }
        onClearProxy();
        // TODO: Move all nodes tied to this EM into detached state, need to
        // discuss with Amresh.
        closed = true;
    }

    // Clears all lazy-loading proxies registered with the core metadata, if any.
    private void onClearProxy()
    {
        if (kunderaMetadata.getCoreMetadata() != null)
        {
            LazyInitializerFactory lazyInitializerrFactory = kunderaMetadata.getCoreMetadata()
                    .getLazyInitializerFactory();
            if (lazyInitializerrFactory != null)
            {
                lazyInitializerrFactory.clearProxies();
            }
        }
    }

    // Wires proxy ownership onto a freshly copied entity so lazy loading works.
    private void onSetProxyOwners(final EntityMetadata m, Object e)
    {
        if (kunderaMetadata.getCoreMetadata() != null)
        {
            LazyInitializerFactory lazyInitializerrFactory = kunderaMetadata.getCoreMetadata()
                    .getLazyInitializerFactory();
            if (lazyInitializerrFactory != null)
            {
                lazyInitializerrFactory.setProxyOwners(m, e);
            }
        }
    }

    // Detaches everything: clears the flush stack, persistence cache and proxies.
    void clear()
    {
        // Move all nodes tied to this EM into detached state
        flushManager.clearFlushStack();
        getPersistenceCache().clean();
        onClearProxy();
    }

    /**
     * Check if the instance is a managed entity instance belonging to the
     * current persistence context.
     */
    boolean contains(Object entity)
    {
        Node node = getPersistenceCache().getMainCache().getNodeFromCache(entity, getMetadata(entity.getClass()),
                this);
        return node != null && node.isInState(ManagedState.class);
    }

    /**
     * Refresh the state of the instance from the database, overwriting changes
     * made to the entity, if any.
     */
    public void refresh(Object entity)
    {
        if (contains(entity))
        {
            MainCache mainCache = (MainCache) getPersistenceCache().getMainCache();

            Node node = mainCache.getNodeFromCache(entity, getMetadata(entity.getClass()), this);

            // Locking as it might read from persistence context.
            try
            {
                lock.readLock().lock();
                node.setPersistenceDelegator(this);
                node.refresh();
            }
            finally
            {
                lock.readLock().unlock();
            }
        }
        else
        {
            throw new IllegalArgumentException("This is not a valid or managed entity, can't be refreshed");
        }
    }

    /**
     * Gets the metadata.
     *
     * @param clazz
     *            the clazz
     * @return the metadata
     * @throws KunderaException
     *             when no metadata is registered for the class
     */
    private EntityMetadata getMetadata(Class<?> clazz)
    {
        EntityMetadata metadata = KunderaMetadataManager.getEntityMetadata(kunderaMetadata, clazz);

        if (metadata == null)
        {
            log.error("Entity metadata not found for {}, possible reasons may be: "
                    + "1) not annotated with @Entity. 2) is annotated with @MappedSuperclass."
                    + "3) does not properly with mapped persistence unit for persistence unit and keyspace. Please verify with @Table annotation or persistence.xml "
                    + clazz);
            throw new KunderaException("Entity metadata not found for " + clazz.getName());
        }
        return metadata;
    }

    /**
     * @param flushMode
     *            the flushMode to set
     */
    void setFlushMode(FlushModeType flushMode)
    {
        // TODO keeping it open for future releases current not using any where.
        this.flushMode = flushMode;
    }

    /**
     * @return the isTransactionInProgress
     */
    public boolean isTransactionInProgress()
    {
        return isTransactionInProgress;
    }

    /**
     * @return the persistenceCache
     */
    public PersistenceCache getPersistenceCache()
    {
        return persistenceCache;
    }

    /******************************* Transaction related methods ***********************************************/

    // Marks the start of a transaction; flushes are deferred until commit.
    void begin()
    {
        isTransactionInProgress = true;
    }

    // Executes pending batches, commits the flush manager, and resets flags.
    void commit()
    {
        enableFlush = true;
        execute();
        flushManager.commit();
        flushManager.clearFlushStack();
        isTransactionInProgress = false;
        enableFlush = false;
    }

    /**
     * On explicit call from em.flush().
     */
    void doFlush()
    {
        enableFlush = true;
        flush();
        execute();
        enableFlush = false;
        flushManager.commit();
        flushManager.clearFlushStack();
    }

    // Rolls back via the flush manager and cleans the persistence context.
    void rollback()
    {
        flushManager.rollback(this);
        flushManager.clearFlushStack();
        getPersistenceCache().clean();
        isTransactionInProgress = false;
    }

    /**
     * Populates client specific properties.
     *
     * @param properties
     *            map of properties.
     */
    void populateClientProperties(Map properties)
    {
        if (properties != null && !properties.isEmpty())
        {
            Map<String, Client> clientMap = getDelegate();
            if (!clientMap.isEmpty())
            {
                // TODO If we have two pu for same client then? Need to discuss
                // with Amresh.
                for (Client client : clientMap.values())
                {
                    if (client instanceof ClientPropertiesSetter)
                    {
                        ClientPropertiesSetter cps = (ClientPropertiesSetter) client;
                        cps.populateClientProperties(client, properties);
                    }
                }
            }
        }
        else
        {
            if (log.isDebugEnabled())
            {
                log.debug("Can't set Client properties as None/ Null was supplied");
            }
        }
    }

    /**
     * Pre load client specific to persistence unit.
     *
     * @param persistenceUnit
     *            persistence unit.
     * @param client
     */
    void loadClient(String persistenceUnit, Client client)
    {
        if (!clientMap.containsKey(persistenceUnit) && client != null)
        {
            clientMap.put(persistenceUnit, client);
        }
    }

    /**
     * Returns map of client as delegate to entity manager.
     *
     * @return clientMap client map
     */
    Map<String, Client> getDelegate()
    {
        return clientMap;
    }

    /**
     * Executes batch. For each Batcher client with pending work, runs the batch
     * and flushes join-table data when anything was executed.
     */
    private void execute()
    {
        for (Client client : clientMap.values())
        {
            if (client != null && client instanceof Batcher)
            {
                // if no batch operation performed{may be running in
                // transaction?}
                if (((Batcher) client).getBatchSize() == 0 || ((Batcher) client).executeBatch() > 0)
                {
                    flushJoinTableData();
                }
            }
        }
    }

    /**
     * On flushing join table data. Each unprocessed JoinTableData entry is
     * either persisted or deleted per-column, then marked processed so it is
     * not flushed twice.
     */
    private void flushJoinTableData()
    {
        if (applyFlush())
        {
            for (JoinTableData jtData : flushManager.getJoinTableData())
            {
                if (!jtData.isProcessed())
                {
                    EntityMetadata m = KunderaMetadataManager.getEntityMetadata(kunderaMetadata,
                            jtData.getEntityClass());
                    Client client = getClient(m);
                    if (OPERATION.INSERT.equals(jtData.getOperation()))
                    {
                        client.persistJoinTable(jtData);
                        jtData.setProcessed(true);
                    }
                    else if (OPERATION.DELETE.equals(jtData.getOperation()))
                    {
                        for (Object pk : jtData.getJoinTableRecords().keySet())
                        {
                            client.deleteByColumn(m.getSchema(), jtData.getJoinTableName(),
                                    jtData.getJoinColumnName(), pk);
                        }
                        jtData.setProcessed(true);
                    }
                }
            }
        }
    }

    /**
     * Returns true, if flush mode is AUTO and not running within transaction ||
     * running within transaction and commit is invoked.
     *
     * @return boolean value.
     */
    private boolean applyFlush()
    {
        return (!isTransactionInProgress && flushMode.equals(FlushModeType.AUTO)) || enableFlush;
    }

    /**
     * Returns transaction coordinator. Builds a fresh Coordinator wiring each
     * persistence unit either to a user-supplied TransactionResource (the
     * {kundera.transaction.resource.class} property, which then requires the
     * client to implement TransactionBinder) or to a DefaultTransactionResource.
     *
     * @return the coordinator
     */
    Coordinator getCoordinator()
    {
        coordinator = new Coordinator();
        try
        {
            for (String pu : clientMap.keySet())
            {
                PersistenceUnitMetadata puMetadata = KunderaMetadataManager.getPersistenceUnitMetadata(
                        kunderaMetadata, pu);

                String txResource = puMetadata.getProperty(PersistenceProperties.KUNDERA_TRANSACTION_RESOURCE);
                if (txResource != null)
                {
                    TransactionResource resource = (TransactionResource) Class.forName(txResource).newInstance();
                    coordinator.addResource(resource, pu);
                    Client client = clientMap.get(pu);
                    if (!(client instanceof TransactionBinder))
                    {
                        throw new KunderaTransactionException(
                                "Client : "
                                        + client.getClass()
                                        + " must implement TransactionBinder interface, if {kundera.transaction.resource.class} property provided!");
                    }
                    else
                    {
                        ((TransactionBinder) client).bind(resource);
                    }
                }
                else
                {
                    coordinator.addResource(new DefaultTransactionResource(clientMap.get(pu)), pu);
                }
            }
        }
        catch (Exception e)
        {
            log.error("Error while initializing Transaction Resource:", e);
            throw new KunderaTransactionException(e);
        }
        return coordinator;
    }

    /**
     * If transaction is in progress and user explicitly invokes em.flush()!
     *
     * @param node
     *            data node
     * @param metadata
     *            entity metadata.
     */
    private void onSynchronization(Node node, EntityMetadata metadata)
    {
        DefaultTransactionResource resource = (DefaultTransactionResource) coordinator.getResource(metadata
                .getPersistenceUnit());
        if (enableFlush)
        {
            resource.onFlush();
        }
        else
        {
            resource.syncNode(node);
        }
    }

    /** @return the application-wide Kundera metadata */
    public KunderaMetadata getKunderaMetadata()
    {
        return this.kunderaMetadata;
    }
}
/*
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 * SerializedClassifier.java
 * Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
 */

package weka.classifiers.misc;

import weka.classifiers.Classifier;
import weka.core.Capabilities;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Option;
import weka.core.RevisionUtils;
import weka.core.SerializationHelper;
import weka.core.Utils;
import weka.core.Capabilities.Capability;

import java.io.File;
import java.util.Enumeration;
import java.util.Vector;

/**
 <!-- globalinfo-start -->
 * A wrapper around a serialized classifier model. This classifier loads a serialized model and uses it to make predictions.<br/>
 * <br/>
 * Warning: since the serialized model doesn't get changed, cross-validation cannot be used with this classifier.
 * <p/>
 <!-- globalinfo-end -->
 *
 <!-- options-start -->
 * Valid options are: <p/>
 *
 * <pre> -D
 *  If set, classifier is run in debug mode and
 *  may output additional info to the console</pre>
 *
 * <pre> -model &lt;filename&gt;
 *  The file containing the serialized model.
 *  (required)</pre>
 *
 <!-- options-end -->
 *
 * @author  fracpete (fracpete at waikato dot ac dot nz)
 * @version $Revision: 7560 $
 */
public class SerializedClassifier
  extends Classifier {

  /** for serialization */
  private static final long serialVersionUID = 4599593909947628642L;

  /** the serialized classifier model used for making predictions
   *  (transient: re-loaded from m_ModelFile on demand rather than serialized
   *  along with this wrapper) */
  protected transient Classifier m_Model = null;

  /** the file where the serialized model is stored; defaults to the current
   *  working directory, which acts as the "not yet set" sentinel */
  protected File m_ModelFile = new File(System.getProperty("user.dir"));

  /**
   * Returns a string describing classifier
   *
   * @return a description suitable for displaying in the
   *         explorer/experimenter gui
   */
  public String globalInfo() {
    return
        "A wrapper around a serialized classifier model. This classifier loads "
      + "a serialized models and uses it to make predictions.\n\n"
      + "Warning: since the serialized model doesn't get changed, cross-validation "
      + "cannot bet used with this classifier.";
  }

  /**
   * Gets an enumeration describing the available options.
   *
   * @return an enumeration of all the available options.
   */
  public Enumeration listOptions(){
    Vector      result;
    Enumeration enm;

    result = new Vector();

    // copy the superclass options (e.g. -D) first
    enm = super.listOptions();
    while (enm.hasMoreElements())
      result.addElement(enm.nextElement());

    result.addElement(new Option(
        "\tThe file containing the serialized model.\n"
        + "\t(required)",
        "model", 1, "-model <filename>"));

    return result.elements();
  }

  /**
   * returns the options of the current setup
   *
   * @return the current options
   */
  public String[] getOptions(){
    int       i;
    Vector    result;
    String[]  options;

    result = new Vector();

    options = super.getOptions();
    for (i = 0; i < options.length; i++)
      result.add(options[i]);

    result.add("-model");
    result.add("" + getModelFile());

    return (String[]) result.toArray(new String[result.size()]);
  }

  /**
   * Parses the options for this object. <p/>
   *
   <!-- options-start -->
   * Valid options are: <p/>
   *
   * <pre> -D
   *  If set, classifier is run in debug mode and
   *  may output additional info to the console</pre>
   *
   * <pre> -model &lt;filename&gt;
   *  The file containing the serialized model.
   *  (required)</pre>
   *
   <!-- options-end -->
   *
   * @param options	the options to use
   * @throws Exception	if setting of options fails
   */
  public void setOptions(String[] options) throws Exception {
    String      tmpStr;

    super.setOptions(options);

    // fall back to the "unset" default (user.dir) when -model is absent
    tmpStr = Utils.getOption("model", options);
    if (tmpStr.length() != 0)
      setModelFile(new File(tmpStr));
    else
      setModelFile(new File(System.getProperty("user.dir")));
  }

  /**
   * Returns the tip text for this property
   *
   * @return tip text for this property suitable for
   *         displaying in the explorer/experimenter gui
   */
  public String modelFileTipText() {
    return "The serialized classifier model to use for predictions.";
  }

  /**
   * Gets the file containing the serialized model.
   *
   * @return the file.
   */
  public File getModelFile() {
    return m_ModelFile;
  }

  /**
   * Sets the file containing the serialized model. If the file exists, the
   * model is eagerly deserialized so errors surface immediately.
   *
   * @param value the file.
   * @throws IllegalArgumentException if the model cannot be loaded from the file
   */
  public void setModelFile(File value) {
    m_ModelFile = value;

    if (value.exists() && value.isFile()) {
      try {
        initModel();
      }
      catch (Exception e) {
        throw new IllegalArgumentException("Cannot load model from file '" + value + "': " + e);
      }
    }
  }

  /**
   * Sets the fully built model to use, if one doesn't want to load a model
   * from a file or already deserialized a model from somewhere else.
   *
   * @param value the built model
   * @see #getCurrentModel()
   */
  public void setModel(Classifier value) {
    m_Model = value;
  }

  /**
   * Gets the currently loaded model (can be null). Call buildClassifier method
   * to load model from file.
   *
   * @return the current model
   * @see #setModel(Classifier)
   */
  public Classifier getCurrentModel() {
    return m_Model;
  }

  /**
   * loads the serialized model if necessary, throws an Exception if the
   * deserialization fails. Always propagates the current debug flag.
* * @throws Exception if deserialization fails */ protected void initModel() throws Exception { if (m_Model == null) m_Model = (Classifier) SerializationHelper.read(m_ModelFile.getAbsolutePath()); m_Model.setDebug(getDebug()); } /** * Returns default capabilities of the base classifier. * * @return the capabilities of the base classifier */ public Capabilities getCapabilities() { Capabilities result; // init model if necessary if (m_ModelFile != null && m_ModelFile.exists() && m_ModelFile.isFile()) { try { initModel(); } catch (Exception e) { System.err.println(e); } } if (m_Model != null) { result = m_Model.getCapabilities(); } else { result = new Capabilities(this); result.disableAll(); } // set dependencies for (Capability cap: Capability.values()) result.enableDependency(cap); result.setOwner(this); return result; } /** * Calculates the class membership probabilities for the given test * instance. * * @param instance the instance to be classified * @return preedicted class probability distribution * @throws Exception if distribution can't be computed successfully */ public double[] distributionForInstance(Instance instance) throws Exception { double[] result; // init model if necessary initModel(); result = m_Model.distributionForInstance(instance); return result; } /** * loads only the serialized classifier * * @param data the training instances * @throws Exception if something goes wrong */ public void buildClassifier(Instances data) throws Exception { // init model if necessary initModel(); // can classifier handle the data? 
getCapabilities().testWithFail(data); } /** * Returns a string representation of the classifier * * @return the string representation of the classifier */ public String toString() { StringBuffer result; if (m_Model == null) { result = new StringBuffer("No model loaded yet."); } else { result = new StringBuffer(); result.append("SerializedClassifier\n"); result.append("====================\n\n"); result.append("File: " + getModelFile() + "\n\n"); result.append(m_Model.toString()); } return result.toString(); } /** * Returns the revision string. * * @return the revision */ public String getRevision() { return RevisionUtils.extract("$Revision: 7560 $"); } /** * Runs the classifier with the given options * * @param args the commandline options */ public static void main(String[] args) { runClassifier(new SerializedClassifier(), args); } }
/* * Copyright 2012-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.testrunner; import com.facebook.buck.test.result.type.ResultType; import com.facebook.buck.test.selectors.TestDescription; import com.facebook.buck.test.selectors.TestSelector; import java.io.ByteArrayOutputStream; import java.io.OutputStream; import java.io.PrintStream; import java.lang.reflect.Constructor; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.List; import java.util.logging.Formatter; import java.util.logging.Handler; import java.util.logging.Level; import java.util.logging.LogManager; import java.util.logging.Logger; // NOPMD import java.util.logging.StreamHandler; import org.junit.Ignore; import org.junit.Test; import org.junit.internal.builders.AllDefaultPossibilitiesBuilder; import org.junit.internal.builders.AnnotatedBuilder; import org.junit.internal.builders.JUnit4Builder; import org.junit.runner.Computer; import org.junit.runner.Description; import org.junit.runner.JUnitCore; import org.junit.runner.Request; import org.junit.runner.Result; import org.junit.runner.RunWith; import org.junit.runner.Runner; import org.junit.runner.manipulation.Filter; import org.junit.runner.notification.Failure; import org.junit.runner.notification.RunListener; import org.junit.runners.model.RunnerBuilder; /** * Class that runs a set of JUnit tests and writes the results to a directory. 
* * <p>IMPORTANT! This class limits itself to types that are available in both the JDK and Android * Java API. The objective is to limit the set of files added to the ClassLoader that runs the test, * as not to interfere with the results of the test. */ public final class JUnitRunner extends BaseRunner { static final String JUL_DEBUG_LOGS_HEADER = "====DEBUG LOGS====\n\n"; static final String JUL_ERROR_LOGS_HEADER = "====ERROR LOGS====\n\n"; private static final String STD_OUT_LOG_LEVEL_PROPERTY = "com.facebook.buck.stdOutLogLevel"; private static final String STD_ERR_LOG_LEVEL_PROPERTY = "com.facebook.buck.stdErrLogLevel"; public JUnitRunner() {} @Override public void run() throws Throwable { Level stdOutLogLevel = Level.INFO; Level stdErrLogLevel = Level.WARNING; String unparsedStdOutLogLevel = System.getProperty(STD_OUT_LOG_LEVEL_PROPERTY); String unparsedStdErrLogLevel = System.getProperty(STD_ERR_LOG_LEVEL_PROPERTY); if (unparsedStdOutLogLevel != null) { stdOutLogLevel = Level.parse(unparsedStdOutLogLevel); } if (unparsedStdErrLogLevel != null) { stdErrLogLevel = Level.parse(unparsedStdErrLogLevel); } for (String className : testClassNames) { final Class<?> testClass = Class.forName(className); List<TestResult> results = new ArrayList<>(); RecordingFilter filter = new RecordingFilter(); if (mightBeATestClass(testClass)) { JUnitCore jUnitCore = new JUnitCore(); Runner suite = new Computer().getSuite(createRunnerBuilder(), new Class<?>[] {testClass}); Request request = Request.runner(suite); request = request.filterWith(filter); jUnitCore.addListener(new TestListener(results, stdOutLogLevel, stdErrLogLevel)); jUnitCore.run(request); } // Combine the results with the tests we filtered out List<TestResult> actualResults = combineResults(results, filter.filteredOut); writeResult(className, actualResults); } } /** Guessing whether or not a class is a test class is an imperfect art form. 
*/ private boolean mightBeATestClass(Class<?> klass) { if (klass.getAnnotation(RunWith.class) != null) { return true; // If the class is explicitly marked with @RunWith, it's a test class. } // Since no RunWith annotation, using standard runner, which requires // test classes to be non-abstract/non-interface int klassModifiers = klass.getModifiers(); if (Modifier.isInterface(klassModifiers) || Modifier.isAbstract(klassModifiers)) { return false; } // Since no RunWith annotation, using standard runner, which requires // test classes to have exactly one public constructor (that has no args). // Classes may have (non-public) constructors (with or without args). boolean foundPublicNoArgConstructor = false; for (Constructor<?> c : klass.getConstructors()) { if (Modifier.isPublic(c.getModifiers())) { if (c.getParameterCount() != 0) { return false; } foundPublicNoArgConstructor = true; } } if (!foundPublicNoArgConstructor) { return false; } // If the class has a JUnit4 @Test-annotated method, it's a test class. boolean hasAtLeastOneTest = false; for (Method m : klass.getMethods()) { if (Modifier.isPublic(m.getModifiers()) && m.getParameters().length == 0 && m.getAnnotation(Test.class) != null) { hasAtLeastOneTest = true; break; } } return hasAtLeastOneTest; } /** * This method filters a list of test results prior to writing results to a file. null is returned * to indicate "don't write anything", which is different to writing a file containing 0 results. * * <p>JUnit handles classes-without-tests in different ways. If you are not using the * org.junit.runner.Request.filterWith facility then JUnit ignores classes-without-tests. However, * if you are using a filter then a class-without-tests will cause a NoTestsRemainException to be * thrown, which is propagated back as an error. 
*/ /* @Nullable */ private List<TestResult> combineResults( List<TestResult> results, List<TestResult> filteredResults) { List<TestResult> combined = new ArrayList<>(filteredResults); if (!isSingleResultCausedByNoTestsRemainException(results)) { combined.addAll(results); } return combined; } /** * JUnit doesn't normally consider encountering a testless class an error. However, when using * org.junit.runner.manipulation.Filter, testless classes *are* considered an error, throwing * org.junit.runner.manipulation.NoTestsRemainException. * * <p>If we are using test-selectors then it's possible we will run a test class but never run any * of its test methods, because they'd all get filtered out. When this happens, the results will * contain a single failure containing the error from the NoTestsRemainException. * * <p>However, there is another reason why the test class may have a single failure -- if the * class fails to instantiate, then it doesn't get far enough to detect whether or not there were * any tests. In that case, JUnit4 returns a single failure result with the testMethodName set to * "initializationError". * * <p>(NB: we can't decide at the class level whether we need to run a test class or not; we can * only run the test class and all its test methods and handle the erroneous exception JUnit * throws if no test-methods were actually run.) */ private boolean isSingleResultCausedByNoTestsRemainException(List<TestResult> results) { if (results.size() != 1) { return false; } TestResult singleResult = results.get(0); return !singleResult.isSuccess() && "initializationError".equals(singleResult.testMethodName) && "org.junit.runner.manipulation.Filter".equals(singleResult.testClassName); } /** * Creates an {@link AllDefaultPossibilitiesBuilder} that returns our custom {@link * BuckBlockJUnit4ClassRunner} when a {@link JUnit4Builder} is requested. 
This ensures that JUnit * 4 tests are executed using our runner whereas other types of tests are run with whatever JUnit * thinks is best. */ private RunnerBuilder createRunnerBuilder() { final JUnit4Builder jUnit4RunnerBuilder = new JUnit4Builder() { @Override public Runner runnerForClass(Class<?> testClass) throws Throwable { return new BuckBlockJUnit4ClassRunner(testClass, defaultTestTimeoutMillis); } }; return new AllDefaultPossibilitiesBuilder(/* canUseSuiteMethod */ true) { @Override protected JUnit4Builder junit4Builder() { return jUnit4RunnerBuilder; } @Override protected AnnotatedBuilder annotatedBuilder() { // If there is no default timeout specified in .buckconfig, then use // the original behavior of AllDefaultPossibilitiesBuilder. // // Additionally, if we are using test selectors or doing a dry-run then // we should use the original behavior to use our // BuckBlockJUnit4ClassRunner, which provides the Descriptions needed // to do test selecting properly. if (defaultTestTimeoutMillis <= 0 || isDryRun || !testSelectorList.isEmpty()) { return super.annotatedBuilder(); } return new AnnotatedBuilder(this) { @Override public Runner buildRunner(Class<? extends Runner> runnerClass, Class<?> testClass) throws Exception { Runner originalRunner = super.buildRunner(runnerClass, testClass); return new DelegateRunnerWithTimeout(originalRunner, defaultTestTimeoutMillis); } }; } }; } /** * Creates RunListener that will prepare individual result for each test and store it to results * list afterwards. 
*/ private class TestListener extends RunListener { private final List<TestResult> results; private final Level stdErrLogLevel; private final Level stdOutLogLevel; /* @Nullable */ private PrintStream originalOut, originalErr, stdOutStream, stdErrStream; /* @Nullable */ private ByteArrayOutputStream rawStdOutBytes, rawStdErrBytes; /* @Nullable */ private ByteArrayOutputStream julLogBytes, julErrLogBytes; /* @Nullable */ private Handler julLogHandler; /* @Nullable */ private Handler julErrLogHandler; /* @Nullable */ private Result result; /* @Nullable */ private RunListener resultListener; /* @Nullable */ private Failure assumptionFailure; // To help give a reasonable (though imprecise) guess at the runtime for unpaired failures private long startTime = System.currentTimeMillis(); TestListener(List<TestResult> results, Level stdOutLogLevel, Level stdErrLogLevel) { this.results = results; this.stdOutLogLevel = stdOutLogLevel; this.stdErrLogLevel = stdErrLogLevel; } @Override public void testStarted(Description description) throws Exception { // Create an intermediate stdout/stderr to capture any debugging statements (usually in the // form of System.out.println) the developer is using to debug the test. originalOut = System.out; originalErr = System.err; rawStdOutBytes = new ByteArrayOutputStream(); rawStdErrBytes = new ByteArrayOutputStream(); julLogBytes = new ByteArrayOutputStream(); julErrLogBytes = new ByteArrayOutputStream(); stdOutStream = new PrintStream(rawStdOutBytes, true /* autoFlush */, ENCODING); stdErrStream = new PrintStream(rawStdErrBytes, true /* autoFlush */, ENCODING); System.setOut(stdOutStream); System.setErr(stdErrStream); // Listen to any java.util.logging messages reported by the test and write them to // julLogBytes / julErrLogBytes. 
Logger rootLogger = LogManager.getLogManager().getLogger(""); if (rootLogger != null) { rootLogger.setLevel(Level.FINE); } JulLogFormatter formatter = new JulLogFormatter(); julLogHandler = addStreamHandler(rootLogger, julLogBytes, formatter, stdOutLogLevel); julErrLogHandler = addStreamHandler(rootLogger, julErrLogBytes, formatter, stdErrLogLevel); // Prepare single-test result. result = new Result(); resultListener = result.createListener(); resultListener.testRunStarted(description); resultListener.testStarted(description); } @Override public void testFinished(Description description) throws Exception { // Shutdown single-test result. resultListener.testFinished(description); resultListener.testRunFinished(result); resultListener = null; // Restore the original stdout/stderr. System.setOut(originalOut); System.setErr(originalErr); // Flush any debug logs and remove the handlers. Logger rootLogger = LogManager.getLogManager().getLogger(""); flushAndRemoveLogHandler(rootLogger, julLogHandler); julLogHandler = null; flushAndRemoveLogHandler(rootLogger, julErrLogHandler); julErrLogHandler = null; // Get the stdout/stderr written during the test as strings. stdOutStream.flush(); stdErrStream.flush(); int numFailures = result.getFailureCount(); String className = description.getClassName(); String methodName = description.getMethodName(); Failure failure; ResultType type; if (assumptionFailure != null) { failure = assumptionFailure; type = ResultType.ASSUMPTION_VIOLATION; // Clear the assumption-failure field before the next test result appears. 
assumptionFailure = null; } else if (isDryRun) { if ("org.junit.runner.manipulation.Filter".equals(className) && "initializationError".equals(methodName)) { return; // don't record errors from failed class initialization during dry run } failure = null; type = ResultType.DRY_RUN; } else if (numFailures == 0) { failure = null; type = ResultType.SUCCESS; } else { failure = result.getFailures().get(0); type = ResultType.FAILURE; } StringBuilder stdOut = new StringBuilder(); stdOut.append(rawStdOutBytes.toString(ENCODING)); if (type == ResultType.FAILURE && julLogBytes.size() > 0) { stdOut.append('\n'); stdOut.append(JUL_DEBUG_LOGS_HEADER); stdOut.append(julLogBytes.toString(ENCODING)); } StringBuilder stdErr = new StringBuilder(); stdErr.append(rawStdErrBytes.toString(ENCODING)); if (type == ResultType.FAILURE && julErrLogBytes.size() > 0) { stdErr.append('\n'); stdErr.append(JUL_ERROR_LOGS_HEADER); stdErr.append(julErrLogBytes.toString(ENCODING)); } results.add( new TestResult( className, methodName, result.getRunTime(), type, failure == null ? null : failure.getException(), stdOut.length() == 0 ? null : stdOut.toString(), stdErr.length() == 0 ? null : stdErr.toString())); } /** * The regular listener we created from the singular result, in this class, will not by default * treat assumption failures as regular failures, and will not store them. As a consequence, we * store them ourselves! * * <p>We store the assumption-failure in a temporary field, which we'll make sure we clear each * time we write results. */ @Override public void testAssumptionFailure(Failure failure) { assumptionFailure = failure; if (resultListener == null) { recordUnpairedResult(failure, ResultType.ASSUMPTION_VIOLATION); } else { // Left in only to help catch future bugs -- right now this does nothing. 
resultListener.testAssumptionFailure(failure); } } @Override public void testFailure(Failure failure) throws Exception { if (resultListener == null) { recordUnpairedResult(failure, ResultType.FAILURE); } else { resultListener.testFailure(failure); } } @Override public void testIgnored(Description description) throws Exception { if (resultListener != null) { resultListener.testIgnored(description); } } /** * It's possible to encounter a Failure/Skip before we've started any tests (and therefore * before testStarted() has been called). The known example is a @BeforeClass that throws an * exception, but there may be others. * * <p>Recording these unexpected failures helps us propagate failures back up to the "buck test" * process. */ private void recordUnpairedResult(Failure failure, ResultType resultType) { long runtime = System.currentTimeMillis() - startTime; Description description = failure.getDescription(); results.add( new TestResult( description.getClassName(), description.getMethodName(), runtime, resultType, failure.getException(), null, null)); } private Handler addStreamHandler( Logger rootLogger, OutputStream stream, Formatter formatter, Level level) { Handler result; if (rootLogger != null) { result = new StreamHandler(stream, formatter); result.setLevel(level); rootLogger.addHandler(result); } else { result = null; } return result; } private void flushAndRemoveLogHandler(Logger rootLogger, Handler handler) { if (handler != null) { handler.flush(); } if (rootLogger != null && handler != null) { rootLogger.removeHandler(handler); } } } /** A JUnit Filter that records the tests it filters out. 
*/ private class RecordingFilter extends Filter { static final String FILTER_DESCRIPTION = "TestSelectorList-filter"; List<TestResult> filteredOut = new ArrayList<>(); @Override public boolean shouldRun(Description description) { String methodName = description.getMethodName(); if (methodName == null) { // JUnit will give us an org.junit.runner.Description like this for the test class // itself. It's easier for our filtering to make decisions just at the method level, // however, so just always return true here. return true; } String className = description.getClassName(); TestDescription testDescription = new TestDescription(className, methodName); TestSelector matchingSelector = testSelectorList.findSelector(testDescription); if (!matchingSelector.isInclusive()) { if (shouldExplainTestSelectors) { String reason = "Excluded by filter: " + matchingSelector.getExplanation(); filteredOut.add(TestResult.forExcluded(className, methodName, reason)); } return false; } if (description.getAnnotation(Ignore.class) != null) { filteredOut.add(TestResult.forDisabled(className, methodName)); return false; } if (isDryRun) { filteredOut.add(TestResult.forDryRun(className, methodName)); return false; } return true; } @Override public String describe() { return FILTER_DESCRIPTION; } } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.server; import com.facebook.presto.dispatcher.DispatchManager; import com.facebook.presto.execution.QueryState; import com.facebook.presto.execution.resourceGroups.InternalResourceGroupManager; import com.facebook.presto.execution.scheduler.NodeSchedulerConfig; import com.facebook.presto.memory.ClusterMemoryManager; import com.facebook.presto.metadata.InternalNode; import com.facebook.presto.metadata.InternalNodeManager; import com.facebook.presto.resourcemanager.ResourceManagerProxy; import com.facebook.presto.spi.NodeState; import com.facebook.presto.ttl.clusterttlprovidermanagers.ClusterTtlProviderManager; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import javax.annotation.security.RolesAllowed; import javax.inject.Inject; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.GET; import javax.ws.rs.HeaderParam; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.container.AsyncResponse; import javax.ws.rs.container.Suspended; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.UriInfo; import java.net.URI; import java.util.Iterator; import java.util.Optional; import static com.facebook.presto.server.security.RoleType.ADMIN; import static com.facebook.presto.server.security.RoleType.USER; import static 
com.google.common.base.Preconditions.checkState;
import static com.google.common.net.HttpHeaders.X_FORWARDED_PROTO;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.TimeUnit.SECONDS;
import static javax.ws.rs.core.Response.Status.SERVICE_UNAVAILABLE;

/**
 * REST resource exposing cluster-wide statistics (query counts, worker counts,
 * memory/CPU totals). When the resource manager is enabled, requests are proxied
 * to a resource manager node instead of being computed locally.
 */
@Path("/v1/cluster")
@RolesAllowed({ADMIN, USER})
public class ClusterStatsResource
{
    private final InternalNodeManager nodeManager;
    private final DispatchManager dispatchManager;
    private final boolean isIncludeCoordinator;
    private final boolean resourceManagerEnabled;
    private final ClusterMemoryManager clusterMemoryManager;
    private final Optional<ResourceManagerProxy> proxyHelper;
    private final InternalResourceGroupManager internalResourceGroupManager;
    private final ClusterTtlProviderManager clusterTtlProviderManager;

    @Inject
    public ClusterStatsResource(
            NodeSchedulerConfig nodeSchedulerConfig,
            ServerConfig serverConfig,
            InternalNodeManager nodeManager,
            DispatchManager dispatchManager,
            ClusterMemoryManager clusterMemoryManager,
            Optional<ResourceManagerProxy> proxyHelper,
            InternalResourceGroupManager internalResourceGroupManager,
            ClusterTtlProviderManager clusterTtlProviderManager)
    {
        this.isIncludeCoordinator = requireNonNull(nodeSchedulerConfig, "nodeSchedulerConfig is null").isIncludeCoordinator();
        this.resourceManagerEnabled = requireNonNull(serverConfig, "serverConfig is null").isResourceManagerEnabled();
        this.nodeManager = requireNonNull(nodeManager, "nodeManager is null");
        this.dispatchManager = requireNonNull(dispatchManager, "dispatchManager is null");
        this.clusterMemoryManager = requireNonNull(clusterMemoryManager, "clusterMemoryManager is null");
        // Bug fix: the message previously said "internalNodeManager is null",
        // misidentifying the failing parameter.
        this.proxyHelper = requireNonNull(proxyHelper, "proxyHelper is null");
        this.internalResourceGroupManager = requireNonNull(internalResourceGroupManager, "internalResourceGroupManager is null");
        // Bug fix: message now matches the actual parameter name.
        this.clusterTtlProviderManager = requireNonNull(clusterTtlProviderManager, "clusterTtlProviderManager is null");
    }

    /**
     * Returns aggregate cluster statistics. If the resource manager is enabled,
     * the request is proxied; otherwise the stats are computed from the local
     * dispatch manager's view of queries and nodes.
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    public void getClusterStats(
            @HeaderParam(X_FORWARDED_PROTO) String xForwardedProto,
            @Context UriInfo uriInfo,
            @Context HttpServletRequest servletRequest,
            @Suspended AsyncResponse asyncResponse)
    {
        if (resourceManagerEnabled) {
            proxyClusterStats(servletRequest, asyncResponse, xForwardedProto, uriInfo);
            return;
        }
        long runningQueries = 0;
        long blockedQueries = 0;
        long queuedQueries = 0;
        long activeNodes = nodeManager.getNodes(NodeState.ACTIVE).size();
        if (!isIncludeCoordinator) {
            // The coordinator itself is in the ACTIVE set but isn't a worker.
            activeNodes -= 1;
        }
        long runningDrivers = 0;
        long runningTasks = 0;
        double memoryReservation = 0;

        // Start from historical totals for completed queries, then add the
        // contribution of queries that are still in flight.
        long totalInputRows = dispatchManager.getStats().getConsumedInputRows().getTotalCount();
        long totalInputBytes = dispatchManager.getStats().getConsumedInputBytes().getTotalCount();
        long totalCpuTimeSecs = dispatchManager.getStats().getConsumedCpuTimeSecs().getTotalCount();

        for (BasicQueryInfo query : dispatchManager.getQueries()) {
            if (query.getState() == QueryState.QUEUED) {
                queuedQueries++;
            }
            else if (query.getState() == QueryState.RUNNING) {
                if (query.getQueryStats().isFullyBlocked()) {
                    blockedQueries++;
                }
                else {
                    runningQueries++;
                }
            }

            if (!query.getState().isDone()) {
                totalInputBytes += query.getQueryStats().getRawInputDataSize().toBytes();
                totalInputRows += query.getQueryStats().getRawInputPositions();
                totalCpuTimeSecs += query.getQueryStats().getTotalCpuTime().getValue(SECONDS);

                memoryReservation += query.getQueryStats().getUserMemoryReservation().toBytes();
                runningDrivers += query.getQueryStats().getRunningDrivers();
                runningTasks += query.getQueryStats().getRunningTasks();
            }
        }

        asyncResponse.resume(Response.ok(new ClusterStats(
                runningQueries,
                blockedQueries,
                queuedQueries,
                activeNodes,
                runningDrivers,
                runningTasks,
                memoryReservation,
                totalInputRows,
                totalInputBytes,
                totalCpuTimeSecs,
                internalResourceGroupManager.getQueriesQueuedOnInternal())).build());
    }

    @GET
    @Path("memory")
    public Response getClusterMemoryPoolInfo(@HeaderParam(X_FORWARDED_PROTO) String xForwardedProto, @Context UriInfo uriInfo)
    {
        return Response.ok()
                .entity(clusterMemoryManager.getMemoryPoolInfo())
                .build();
    }

    @GET
    @Path("workerMemory")
    public Response getWorkerMemoryInfo(@HeaderParam(X_FORWARDED_PROTO) String xForwardedProto, @Context UriInfo uriInfo)
    {
        return Response.ok()
                .entity(clusterMemoryManager.getWorkerMemoryInfo())
                .build();
    }

    @GET
    @Path("ttl")
    public Response getClusterTtl()
    {
        return Response.ok().entity(clusterTtlProviderManager.getClusterTtl()).build();
    }

    /**
     * Forwards the cluster-stats request to the first available resource manager
     * node; responds 503 when none is available. Any exception is surfaced via
     * the async response.
     */
    private void proxyClusterStats(HttpServletRequest servletRequest, AsyncResponse asyncResponse, String xForwardedProto, UriInfo uriInfo)
    {
        try {
            checkState(proxyHelper.isPresent());
            Iterator<InternalNode> resourceManagers = nodeManager.getResourceManagers().iterator();
            if (!resourceManagers.hasNext()) {
                asyncResponse.resume(Response.status(SERVICE_UNAVAILABLE).build());
                return;
            }
            InternalNode resourceManagerNode = resourceManagers.next();

            URI uri = uriInfo.getRequestUriBuilder()
                    .scheme(resourceManagerNode.getInternalUri().getScheme())
                    .host(resourceManagerNode.getHostAndPort().toInetAddress().getHostName())
                    .port(resourceManagerNode.getInternalUri().getPort())
                    .build();
            proxyHelper.get().performRequest(servletRequest, asyncResponse, uri);
        }
        catch (Exception e) {
            asyncResponse.resume(e);
        }
    }

    /** Immutable JSON-serializable snapshot of cluster-wide statistics. */
    public static class ClusterStats
    {
        private final long runningQueries;
        private final long blockedQueries;
        private final long queuedQueries;
        private final long activeWorkers;
        private final long runningDrivers;
        private final long runningTasks;
        private final double reservedMemory;
        private final long totalInputRows;
        private final long totalInputBytes;
        private final long totalCpuTimeSecs;
        private final long adjustedQueueSize;

        @JsonCreator
        public ClusterStats(
                @JsonProperty("runningQueries") long runningQueries,
                @JsonProperty("blockedQueries") long blockedQueries,
                @JsonProperty("queuedQueries") long queuedQueries,
                @JsonProperty("activeWorkers") long activeWorkers,
                @JsonProperty("runningDrivers") long runningDrivers,
                @JsonProperty("runningTasks") long runningTasks,
                @JsonProperty("reservedMemory") double reservedMemory,
                @JsonProperty("totalInputRows") long totalInputRows,
                @JsonProperty("totalInputBytes") long totalInputBytes,
                @JsonProperty("totalCpuTimeSecs") long totalCpuTimeSecs,
                @JsonProperty("adjustedQueueSize") long adjustedQueueSize)
        {
            this.runningQueries = runningQueries;
            this.blockedQueries = blockedQueries;
            this.queuedQueries = queuedQueries;
            this.activeWorkers = activeWorkers;
            this.runningDrivers = runningDrivers;
            this.runningTasks = runningTasks;
            this.reservedMemory = reservedMemory;
            this.totalInputRows = totalInputRows;
            this.totalInputBytes = totalInputBytes;
            this.totalCpuTimeSecs = totalCpuTimeSecs;
            this.adjustedQueueSize = adjustedQueueSize;
        }

        @JsonProperty
        public long getRunningQueries()
        {
            return runningQueries;
        }

        @JsonProperty
        public long getBlockedQueries()
        {
            return blockedQueries;
        }

        @JsonProperty
        public long getQueuedQueries()
        {
            return queuedQueries;
        }

        @JsonProperty
        public long getActiveWorkers()
        {
            return activeWorkers;
        }

        @JsonProperty
        public long getRunningDrivers()
        {
            return runningDrivers;
        }

        @JsonProperty
        public long getRunningTasks()
        {
            return runningTasks;
        }

        @JsonProperty
        public double getReservedMemory()
        {
            return reservedMemory;
        }

        @JsonProperty
        public long getTotalInputRows()
        {
            return totalInputRows;
        }

        @JsonProperty
        public long getTotalInputBytes()
        {
            return totalInputBytes;
        }

        @JsonProperty
        public long getTotalCpuTimeSecs()
        {
            return totalCpuTimeSecs;
        }

        @JsonProperty
        public long getAdjustedQueueSize()
        {
            return adjustedQueueSize;
        }
    }
}